_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
472852d62c7c4747dd62d82feed9c122727dae2f78bc3245d6b8a9020fbb9d49 | abella-prover/abella | typing.ml | (****************************************************************************)
(* Copyright (C) 2007-2009 Gacek                                            *)
(* Copyright (C) 2013-2022 Inria (Institut National de Recherche            *)
(* en Informatique et en Automatique) *)
(* *)
This file is part of Abella .
(* *)
(* Abella is free software: you can redistribute it and/or modify           *)
(* it under the terms of the GNU General Public License as published by     *)
(* the Free Software Foundation, either version 3 of the License, or        *)
(* (at your option) any later version. *)
(* *)
Abella is distributed in the hope that it will be useful ,
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU General Public License for more details. *)
(* *)
(* You should have received a copy of the GNU General Public License        *)
(* along with Abella.  If not, see <http://www.gnu.org/licenses/>.          *)
(****************************************************************************)
open Term
open Metaterm
open Extensions
open Unifyty
(** Untyped terms *)
type pos = Lexing.position * Lexing.position
type uterm =
| UCon of pos * string * ty
| ULam of pos * string * ty * uterm
| UApp of pos * uterm * uterm
let ghost : pos = (Lexing.dummy_pos, Lexing.dummy_pos)
(** [forget_term ?cx t] converts a real [Term.term] back into an untyped
    [uterm], attaching ghost positions throughout.  [cx] names the lambda
    binders in scope and is used to resolve de Bruijn indices.
    NOTE(review): in the [Lam] branch the pattern variable [cx] shadows the
    [?cx] parameter, so the outer context appears to be dropped when
    descending under a binder — confirm against the invariants of
    [Term.Lam] before relying on this for open terms. *)
let rec forget_term ?(cx=[]) t =
  match observe (hnorm t) with
  | Var v -> UCon (ghost, v.name, v.ty)
  | Lam ([], t) -> forget_term ~cx t
  | Lam ((x, xty) :: cx, t) ->
      ULam (ghost, x, xty, forget_term ~cx:((x, xty) :: cx) t)
  | App (f, ts) ->
      (* [f t1 t2 ...] becomes left-nested binary [UApp] nodes. *)
      List.fold_left begin fun f t ->
        UApp (ghost, f, forget_term ~cx t)
      end (forget_term ~cx f) ts
  | DB n -> begin
      try
        (* de Bruijn indices are 1-based, hence [n - 1]. *)
        let (x, xty) = List.nth cx (n - 1) in
        UCon (ghost, x, xty)
      with Failure _ -> bugf "forget_term called with too small a context"
    end
  | _ ->
      bugf "forget_term called on: %s" (term_to_string t)
(** Source position attached to the top node of an untyped term. *)
let get_pos = function
  | UCon (p, _, _) | ULam (p, _, _, _) | UApp (p, _, _) -> p
(** Rebuild the top node of a term with position [p]; subterms keep
    their own positions. *)
let change_pos p = function
  | UCon (_, id, ty) -> UCon (p, id, ty)
  | ULam (_, id, ty, body) -> ULam (p, id, ty, body)
  | UApp (_, f, x) -> UApp (p, f, x)
let predefined id pos =
UCon(pos, id, Term.fresh_tyvar ())
(** [binop id t1 t2] builds the infix application [(id t1) t2], with a
    position spanning from the start of [t1] to the end of [t2]. *)
let binop id t1 t2 =
  let start_pos, _ = get_pos t1 in
  let _, end_pos = get_pos t2 in
  let pos = (start_pos, end_pos) in
  UApp (pos, UApp (pos, predefined id pos, t1), t2)
(** Name of the constant at the head of an application spine.  A lambda
    at the head is impossible for the callers of this function. *)
let uterm_head_name t =
  let rec head = function
    | UCon (_, id, _) -> id
    | UApp (_, f, _) -> head f
    | ULam _ -> assert false
  in
  head t
*
type umetaterm =
| UTrue
| UFalse
| UEq of uterm * uterm
| UAsyncObj of uterm * uterm * restriction
| USyncObj of uterm * uterm * uterm * restriction
| UArrow of umetaterm * umetaterm
| UBinding of binder * (id * ty) list * umetaterm
| UOr of umetaterm * umetaterm
| UAnd of umetaterm * umetaterm
| UPred of uterm * restriction
let apply_sub_tyctx s tyctx =
List.map (fun (id, ty) -> (id, apply_sub_ty s ty)) tyctx
let ids_to_fresh_tyctx ids =
List.map (fun id -> (id, fresh_tyvar ())) ids
let tyctx_to_ctx tyctx =
List.map (fun (id, ty) -> (id, const id ty)) tyctx
let tyctx_to_nominal_ctx tyctx =
List.map (fun (id, ty) -> (id, nominal_var id ty)) tyctx
(** Tables / Signatures *)
type ktable = (string * knd) list
type pty = Poly of string list * ty
type ctable = (string * pty) list
type sign = ktable * ctable
(** Kinds *)
(** [add_types (ktable, ctable) ids knd] declares every type constructor
    in [ids] at kind [knd] and returns the extended signature.
    Fails if an [id] is already declared at a different kind, or begins
    with a capital letter (capitalized names are reserved for variables). *)
let add_types (ktable, ctable) ids knd =
  List.iter begin fun id ->
    begin
      try
        (* Re-declaration is tolerated only at the identical kind. *)
        let knd' = List.assoc id ktable in
        if knd <> knd' then
          failwithf "Type constructor %s has inconsistent kind declarations" id
      with
      | Not_found -> ()
    end ;
    if is_capital_name id then
      failwithf "Types may not begin with a capital letter: %s" id;
  end ids ;
  (* Prepend the new bindings; consistency with old ones was checked above. *)
  ((List.map (fun id -> (id, knd)) ids) @ ktable, ctable)
let lookup_type (ktable, _) id =
List.assoc id ktable
(** Constants *)
(** [kind_check sign ty] verifies that every type constructor occurring in
    [ty] is declared in [sign] and is applied to exactly the number of
    arguments its kind demands.
    @raise Failure on an unknown constructor or an arity mismatch. *)
let kind_check sign ty =
  let rec aux = function
    | Ty(tys, aty) ->
        List.iter aux tys;
        (* [Ty] is the only constructor of [ty], so this un-parenthesized
           inner match simply continues the single branch above. *)
        match aty with
        | Tygenvar _
        | Typtr {contents = TV _} -> ()  (* type variables carry no arity *)
        | Tycons(cty,args) ->
            let knd =
              try lookup_type sign cty
              with
              | Not_found -> failwithf "Unknown type constructor: %s" cty
            in
            let arity = karity knd in
            let nargs = List.length args in
            if not (nargs = arity) then
              failwithf "%s expects %i arguments but has %i" cty arity nargs ;
            List.iter aux args
        | Typtr {contents = TT _} ->
            (* instantiated pointers are removed by [observe_ty] *)
            assert false
  in aux (observe_ty ty)
let kind_check_poly (ktable,ctable) (Poly(_ids, ty)) =
kind_check (ktable,ctable) ty
(** Equality of polymorphic type schemes up to renaming of their bound
    parameters: both schemes are instantiated with a single shared list of
    fresh type variables and the instances compared with [eq_ty]. *)
let eq_pty (Poly (ids1, ty1)) (Poly (ids2, ty2)) =
  List.length ids1 = List.length ids2
  && begin
    let shared = List.map (fun _ -> Term.fresh_tyvar ()) ids1 in
    let sub_for ids = List.map2 (fun id v -> (id, v)) ids shared in
    let inst1 = apply_sub_ty (sub_for ids1) ty1 in
    let inst2 = apply_sub_ty (sub_for ids2) ty2 in
    eq_ty inst1 inst2
  end
let check_const (ktable, ctable) (id, pty) =
begin try
let pty' = List.assoc id ctable in
if not (eq_pty pty pty') then
failwithf "Constant %s has inconsistent type declarations" id
with
| Not_found -> ()
end ;
if is_capital_name id then
failwithf "Constants may not begin with a capital letter: %s" id ;
kind_check_poly (ktable, ctable) pty
let add_poly_consts (ktable, ctable) idptys =
List.iter (check_const (ktable, ctable)) idptys ;
(ktable, idptys @ ctable)
(** Collect the capitalized generic type parameters occurring in [ty]. *)
let get_typaram ty =
  let acc = ref [] in
  iter_ty
    (function
      | Tygenvar v when is_capital_name v -> acc := v :: !acc
      | _ -> ())
    ty ;
  !acc
let get_typarams tys = List.flatten_map get_typaram tys
let add_consts sign idtys =
let typarams = idtys |> List.map snd |> List.map get_typaram in
let idptys = List.map2
(fun (id, ty) pas -> (id, Poly(pas, ty))) idtys typarams in
add_poly_consts sign idptys
(** Instantiate a type scheme, replacing each bound parameter with a
    fresh type variable. *)
let freshen_ty (Poly (ids, ty)) =
  apply_sub_ty (ids_to_fresh_tyctx ids) ty
(** Look up constant [id] in the signature and return a fresh instance of
    its (possibly polymorphic) type.
    @raise Failure if [id] is not declared. *)
let lookup_const (_, ctable) id =
  match List.assoc id ctable with
  | pty -> freshen_ty pty
  | exception Not_found -> failwithf "Unknown constant: %s" id
* types
let rec desugar_aty aty =
match aty with
| Tycons (v,tys) ->
if v = "olist" && tys = [] then
Tycons ("list",[oty])
else
let tys = List.map desugar_ty tys in
Tycons (v,tys)
| Typtr {contents=TT _t} ->
assert false
| _ -> aty
and desugar_ty ty =
match (observe_ty ty) with
| Ty (tys, aty) ->
let tys = List.map desugar_ty tys in
let aty = desugar_aty aty in
Ty (tys,aty)
(** Pervasive signature *)
let k_member = "member"
let k_cons = "::"
let k_nil = "nil"
let pervasive_sign =
let aty = tybase (Tygenvar "A") in
let alistty = tybase (atyapp (atybase "list") aty) in
([("o", Knd 0); ("list", Knd 1); ("prop", Knd 0)],
[("pi", Poly(["A"], tyarrow [tyarrow [aty] oty] oty)) ;
("=>", Poly([], tyarrow [oty; oty] oty)) ;
("&", Poly([], tyarrow [oty; oty] oty)) ;
(k_cons, Poly(["A"], tyarrow [aty; alistty] alistty)) ;
(k_nil, Poly(["A"], alistty)) ])
let sign_to_tys sign =
List.map
(function (_, Poly(_ids, ty)) -> ty)
(snd sign)
let pervasive_sr =
List.fold_left Subordination.update Subordination.empty
(sign_to_tys pervasive_sign)
(** Typing for terms *)
type expected = ty
* type actual = ty
* ( \ * A constraint contains the position of the ' actual ' type * \ )
* type constraint_type = CFun | CArg
* type constraint_info = pos * constraint_type
* type constraints = ( expected * actual * ) list
* exception TypeInferenceFailure of constraint_info * expected * actual
* type actual = ty
* (\* A constraint contains the position of the 'actual' type *\)
* type constraint_type = CFun | CArg
* type constraint_info = pos * constraint_type
* type constraints = (expected * actual * constraint_info) list
* exception TypeInferenceFailure of constraint_info * expected * actual *)
(** [infer_type_and_constraints ~sign tyctx t] computes a candidate type
    for [t] together with the list of positioned equality constraints that
    unification must later solve for the typing to hold.  [tyctx] gives
    the types of bound or known identifiers; any other constant is looked
    up in [sign]. *)
let infer_type_and_constraints ~sign tyctx t =
  (* Constraints are accumulated imperatively, most recent first. *)
  let eqns = ref [] in
  let add_constraint expected actual pos =
    eqns := (expected, actual, pos) :: !eqns
  in
  let rec aux tyctx t =
    match t with
    | UCon(p, id, ty) ->
        let ty' =
          begin try
            List.assoc id tyctx
          with
          | Not_found -> lookup_const sign id
          end
        in
        (* The annotation on the UCon must agree with the contextual or
           declared type. *)
        add_constraint ty ty' (p, CArg) ;
        ty
    | ULam(_, id, ty, t) ->
        tyarrow [ty] (aux ((id, ty) :: tyctx) t)
    | UApp(_, t1, t2) ->
        let ty1 = aux tyctx t1 in
        let ty2 = aux tyctx t2 in
        let (aty, rty) =
          match ty1 with
          | Ty([], _) ->
              (* Function type not yet known: invent argument and result
                 variables and constrain [ty1] to be an arrow between
                 them. *)
              let aty = fresh_tyvar () in
              let rty = fresh_tyvar () in
              add_constraint (tyarrow [aty] rty) ty1 (get_pos t1, CFun) ;
              (aty, rty)
          | Ty(aty::atys, bty) ->
              (aty, Ty(atys, bty))
        in
        add_constraint aty ty2 (get_pos t2, CArg) ;
        rty
  in
  let ty = aux tyctx t in
  (* Restore source order of the accumulated constraints. *)
  (ty, List.rev !eqns)
(** Render a constraint set for debugging, one "lhs = rhs" per line. *)
let constraints_to_string eqns =
  eqns
  |> List.map (fun (ty1, ty2, _info) ->
         ty_to_string ty1 ^ " = " ^ ty_to_string ty2)
  |> String.concat "\n"
let occurs v ty =
* let rec aux = function
* | Ty(tys , bty ) when bty = v - > true
* | Ty(tys , _ ) - > List.exists aux tys
* in
* aux ty
* let rec aux = function
* | Ty(tys, bty) when bty = v -> true
* | Ty(tys, _) -> List.exists aux tys
* in
* aux ty *)
(** Does [ty] still mention an uninstantiated inference type variable?
    Generic variables ([Tygenvar]) do not count. *)
let contains_tyvar ty =
  let rec in_ty (Ty (args, aty)) =
    in_aty aty || List.exists in_ty args
  and in_aty = function
    | Tygenvar _ -> false
    | Typtr { contents = TV _ } -> true
    | Tycons (_, args) -> List.exists in_ty args
    | Typtr { contents = TT _ } ->
        (* instantiated pointers are removed by [observe_ty] *)
        assert false
  in
  in_ty (observe_ty ty)
let tid_ensure_fully_inferred ~sign (_id, ty) =
if contains_tyvar ty then
failwith "Types of variables are not fully determined" ;
kind_check sign ty
(** Check that no variable of term [t] is left with an undetermined type,
    kind-checking each variable's type against [sign]. *)
let term_ensure_fully_inferred ~sign t =
  let rec check t =
    match observe (hnorm t) with
    | Var v -> tid_ensure_fully_inferred ~sign (v.name, v.ty)
    | DB _ -> ()
    | App (h, args) -> List.iter check (h :: args)
    | Lam (_, body) -> check body
    | _ -> assert false
  in
  check t
let metaterm_ensure_fully_inferred ~sign t =
let rec aux t =
match t with
| True | False -> ()
| And(a, b) | Or(a, b) | Arrow(a, b) -> aux a; aux b
| Binding(_, tids, body) ->
List.iter (tid_ensure_fully_inferred ~sign) tids ;
aux body
| Eq(a, b) ->
term_ensure_fully_inferred ~sign a ;
term_ensure_fully_inferred ~sign b
| Obj(obj, _) ->
Context.iter (term_ensure_fully_inferred ~sign) obj.context ;
begin match obj.mode with
| Async -> ()
| Sync focus ->
term_ensure_fully_inferred ~sign focus
end ;
term_ensure_fully_inferred ~sign obj.right
| Pred(p, _) ->
term_ensure_fully_inferred ~sign p
in
aux t
let apply_bind_constraints v ty eqns =
* ( fun ( x , y ) - > ( apply_bind_ty v ty x , apply_bind_ty v ty y ) ) eqns
*
* let apply_bind_sub v ty sub =
* ( fun ( x , y ) - > ( x , apply_bind_ty v ty y ) ) sub
*
* let unify_constraints eqns =
* let v vty s =
* ( v , vty ) : : ( apply_bind_sub v vty s )
* in
*
* ( \ * Unify a single constraint and call fail on failure * \ )
* let rec aux s ( , ty2 ) fail =
* let = apply_sub_ty s in
* let = apply_sub_ty s ty2 in
* match , ty2 with
* | _ , _ when = ty2 - > s
* ( [ ] , bty1 ) , _ when is_tyvar bty1 - >
* if occurs bty1 ty2 then
* fail s
* else
* s
* | _ , ( [ ] , bty2 ) when is_tyvar bty2 - >
* if occurs bty2 then
* fail s
* else
* add_sub bty2 s
* | Ty(ty1::tys1 , bty1 ) , , bty2 ) - >
* let s = aux s ( , ty2 ) fail in
* aux s ( Ty(tys1 , bty1 ) , , bty2 ) ) fail
* | , ty2 - > fail s
* in
*
* let unify_single_constraint s ( , ty2 , p ) =
* aux s ( , ty2 )
* ( fun s - > raise ( TypeInferenceFailure(p , apply_sub_ty s ,
* apply_sub_ty s ty2 ) ) )
* in
*
* List.fold_left unify_single_constraint [ ] eqns
* List.map (fun (x,y) -> (apply_bind_ty v ty x, apply_bind_ty v ty y)) eqns
*
* let apply_bind_sub v ty sub =
* List.map (fun (x,y) -> (x, apply_bind_ty v ty y)) sub
*
* let unify_constraints eqns =
* let add_sub v vty s =
* (v, vty) :: (apply_bind_sub v vty s)
* in
*
* (\* Unify a single constraint and call fail on failure *\)
* let rec aux s (ty1, ty2) fail =
* let ty1 = apply_sub_ty s ty1 in
* let ty2 = apply_sub_ty s ty2 in
* match ty1, ty2 with
* | _, _ when ty1 = ty2 -> s
* | Ty([], bty1), _ when is_tyvar bty1 ->
* if occurs bty1 ty2 then
* fail s
* else
* add_sub bty1 ty2 s
* | _, Ty([], bty2) when is_tyvar bty2 ->
* if occurs bty2 ty1 then
* fail s
* else
* add_sub bty2 ty1 s
* | Ty(ty1::tys1, bty1), Ty(ty2::tys2, bty2) ->
* let s = aux s (ty1, ty2) fail in
* aux s (Ty(tys1, bty1), Ty(tys2, bty2)) fail
* | ty1, ty2 -> fail s
* in
*
* let unify_single_constraint s (ty1, ty2, p) =
* aux s (ty1, ty2)
* (fun s -> raise (TypeInferenceFailure(p, apply_sub_ty s ty1,
* apply_sub_ty s ty2)))
* in
*
* List.fold_left unify_single_constraint [] eqns *)
(** All identifiers satisfying [test] that occur free in the terms [ts],
    without duplicates; names bound by a lambda are removed from the
    result of its body. *)
let uterms_extract_if test ts =
  let rec free = function
    | UCon (_, id, _) -> if test id then [id] else []
    | ULam (_, id, _, body) -> List.remove id (free body)
    | UApp (_, t1, t2) -> free t1 @ free t2
  in
  List.unique (List.flatten_map free ts)
let uterm_nominals_to_tyctx t =
ids_to_fresh_tyctx (uterms_extract_if is_nominal_name [t])
(** Translate an untyped term to a real [Term.term]: constants become
    [const], lambdas become [abstract], applications become [app]. *)
let uterm_to_term t =
  let rec go = function
    | UCon (_, id, ty) -> const id ty
    | ULam (_, id, ty, body) -> abstract id ty (go body)
    | UApp (_, f, x) -> app (go f) [go x]
  in
  go t
let uterm_to_string t =
term_to_string (uterm_to_term t)
let term_ensure_subordination sr t =
let rec aux tyctx t =
match observe (hnorm t) with
| Var v -> Subordination.ensure sr v.ty
| DB _i -> ()
| App(h, ts) -> aux tyctx h ; List.iter (aux tyctx) ts
| Lam(idtys, b) ->
Subordination.ensure sr (tc tyctx t) ;
aux (List.rev_app idtys tyctx) b
| _ -> assert false
in
aux [] t
let check_spec_logic_type ty =
iter_ty
(fun bty ->
if bty = propaty then
failwith "Cannot mention type 'prop' in the specification logic" ;
if bty = olistaty then
failwith "Cannot mention type 'list o' in the specification logic")
ty
let check_spec_logic_quantification_type ty =
check_spec_logic_type ty ;
iter_ty
(fun bty ->
if bty = oaty then
failwith "Cannot quantify over type o in the specification logic")
ty
let check_pi_quantification ts =
ignore
(map_vars
(fun v ->
if v.name = "pi" then
match observe_ty v.ty with
| Ty([Ty([tau], _)], _) ->
check_spec_logic_quantification_type tau
| _ -> assert false)
ts)
let get_tyvar_names ty =
* let rec aux = function
* ( tys , aty ) - >
* let ns = List.flatten_map aux tys in
* let ans =
* match aty with
* | Typtr { contents = TV v } - > [ v ]
* | Typtr { contents = TT _ } - > assert false
* | Tygenvar _ - > [ ]
* | Tycons ( c , args ) - >
* List.flatten_map aux tys
* in
* ns @ ans
* in List.unique ( aux ( observe_ty ty ) )
* let rec aux = function
* | Ty (tys, aty) ->
* let ns = List.flatten_map aux tys in
* let ans =
* match aty with
* | Typtr {contents=TV v} -> [v]
* | Typtr {contents=TT _} -> assert false
* | Tygenvar _ -> []
* | Tycons (c,args) ->
* List.flatten_map aux tys
* in
* ns @ ans
* in List.unique (aux (observe_ty ty)) *)
(** [type_uterm ?partial_infer ?expected_ty ~sr ~sign ~ctx t] infers types
    for the untyped term [t] and converts it to a [Term.term].
    - [ctx] maps already-typed variable names to terms; nominal constants
      occurring in [t] receive fresh type variables.
    - [expected_ty], when present, is added as a constraint on the type of
      the whole term.
    - unless [partial_infer] is given, every type must end up fully
      determined.
    The result is also checked against the subordination relation [sr]. *)
let type_uterm ?partial_infer ?expected_ty ~sr ~sign ~ctx t =
  let nominal_tyctx = uterm_nominals_to_tyctx t in
  let tyctx =
    (List.map (fun (id, t) -> (id, tc [] t)) ctx)
    @ nominal_tyctx
  in
  let (ty, eqns) = infer_type_and_constraints ~sign tyctx t in
  let eqns =
    match expected_ty with
    | None -> eqns
    | Some exp_ty -> (exp_ty, ty, (get_pos t, CArg)) :: eqns
  in
  (* Unification resolves the type variables by side effect. *)
  unify_constraints eqns;
  let ctx = ctx @ (tyctx_to_nominal_ctx nominal_tyctx) in
  let result = replace_term_vars ctx (uterm_to_term t) in
  (match partial_infer with
   | None -> term_ensure_fully_inferred ~sign result
   | Some _ -> ()) ;
  term_ensure_subordination sr result ;
  result
(** Is the head of the application spine a capitalized (variable) name?
    Lambdas never count as capital-headed. *)
let rec has_capital_head = function
  | UCon (_, name, _) -> is_capital_name name
  | UApp (_, head, _) -> has_capital_head head
  | ULam _ -> false
(** Replace each occurrence of the anonymous name "_" in the clause
    [head :: body] with a fresh capitalized name not otherwise used in the
    clause.  Returns the rewritten [(head, body)]. *)
let replace_underscores head body =
  let names = uterms_extract_if is_capital_name (head::body) in
  (* [used] collects names the fresh-name generator must avoid. *)
  let used = ref (List.map (fun x -> (x, ())) names) in
  let rec aux t =
    match t with
    | UCon(p, id, ty) when id = "_" ->
        let id' = fresh_name "X" !used in
        used := (id', ()) :: !used ;
        UCon(p, id', ty)
    | UCon _ -> t
    | ULam(p, id, ty, t) ->
        (* Lambda-bound names must also be avoided when inventing fresh
           ones. *)
        used := (id, ()) :: !used ;
        ULam(p, id, ty, aux t)
    | UApp(p, t1, t2) ->
        let t1' = aux t1 in
        let t2' = aux t2 in
        UApp(p, t1', t2')
  in
  match List.map aux (head::body) with
  | h::b -> (h, b)
  | [] -> assert false
let clause_map : (string list * term) Itab.t ref = ref Itab.empty
let seen_name cname = Itab.mem cname !clause_map
let register_clause name clause =
(* Printf.printf "Note: registered %S : %s\n%!" name *)
( ) ;
clause_map := Itab.add name clause !clause_map
(** Find a registered clause by name, if any. *)
let lookup_clause cname =
  match Itab.find cname !clause_map with
  | clause -> Some clause
  | exception Not_found -> None
let generalize_tyvars t =
let tyvars = term_collect_tyvar_names t in
let tysub = List.map (fun id -> (id, tybase (Tygenvar id))) tyvars in
let t' = term_map_on_tys (apply_sub_ty_tyvar tysub) t in
(tyvars, t')
let print_clause cl =
let (vars, clause) = cl in
let vstr = String.concat "," vars in
let cstr = term_to_string clause in
Printf.eprintf "Typed clause: [%s] %s\n" vstr cstr
(** [type_uclause ~sr ~sign (cname, head, body)] type-checks one
    specification clause.  Underscores are replaced by fresh names, the
    clause is turned into the term [pi X1..Xn, body_n => .. => body_1 =>
    head], typed with {!type_uterm}, and its remaining type variables are
    generalized.  If [cname] is given, the clause is registered under that
    name (which must be fresh).
    Fix: error message for a duplicate clause name read "already seeen". *)
let type_uclause ~sr ~sign (cname, head, body) =
  if has_capital_head head then
    failwith "Clause has flexible (i.e., non-atomic) head" ;
  let head, body = replace_underscores head body in
  (* Capitalized names are the clause's universally quantified variables. *)
  let cids = uterms_extract_if is_capital_name (head::body) in
  let get_imp_form head body =
    (* body_n => ... => body_1 => head *)
    (let impfy imp f = (binop "=>" f imp) in
     List.fold_left impfy head (List.rev body))
  in
  let imp_form = get_imp_form head body in
  let get_pi_form ids body =
    (* Wrap the clause in one "pi" per clause variable. *)
    (let pify id pi =
       let pos = get_pos pi in
       let abs = ULam (pos, id, Term.fresh_tyvar (), pi) in
       UApp (pos, predefined "pi" pos, abs)
     in
     List.fold_right pify ids body)
  in
  let pi_form = get_pi_form cids imp_form in
  let result = type_uterm ~partial_infer:true ~sr ~sign ~ctx:[] pi_form in
  let result = generalize_tyvars result in
  (* print_clause result; *)
  let _ = check_pi_quantification [snd result] in
  begin match cname with
  | None -> ()
  | Some cname ->
      if seen_name cname then
        failwithf "Clause named %S already seen" cname ;
      register_clause cname result ;
  end ;
  result
let = in
let eqns =
List.fold_left ( fun acc p - >
let ( pty , peqns ) = infer_type_and_constraints ~sign in
acc @ peqns @ [ ( oty , pty , ( get_pos p , CArg ) ) ] )
[ ] ( head::body )
in
let sub = unify_constraints eqns in
let ctx = ( apply_sub_tyctx sub ) in
let convert p = replace_term_vars ctx ( uterm_to_term sub p ) in
let ( rhead , rbody ) = ( convert head , List.map convert body ) in
List.iter term_ensure_fully_inferred ( rhead::rbody ) ;
List.iter ( term_ensure_subordination sr ) ( rhead::rbody ) ;
check_pi_quantification ( rhead::rbody ) ;
( rhead , rbody )
let tyctx = ids_to_fresh_tyctx cids in
let eqns =
List.fold_left (fun acc p ->
let (pty, peqns) = infer_type_and_constraints ~sign tyctx p in
acc @ peqns @ [(oty, pty, (get_pos p, CArg))])
[] (head::body)
in
let sub = unify_constraints eqns in
let ctx = tyctx_to_ctx (apply_sub_tyctx sub tyctx) in
let convert p = replace_term_vars ctx (uterm_to_term sub p) in
let (rhead, rbody) = (convert head, List.map convert body) in
List.iter term_ensure_fully_inferred (rhead::rbody) ;
List.iter (term_ensure_subordination sr) (rhead::rbody) ;
check_pi_quantification (rhead::rbody) ;
(rhead, rbody)
*)
(** Typing for metaterms *)
(** [infer_constraints ~sign ~tyctx t] collects the typing constraints of
    untyped metaterm [t]: every equated pair must share a type, object
    sequent contexts must have type [olist] and their goals type [o], and
    predicates must have type [prop]. *)
let infer_constraints ~sign ~tyctx t =
  let rec aux tyctx t =
    match t with
    | UTrue | UFalse -> []
    | UEq(a, b) ->
        let (aty, aeqns) = infer_type_and_constraints ~sign tyctx a in
        let (bty, beqns) = infer_type_and_constraints ~sign tyctx b in
        aeqns @ beqns @ [(aty, bty, (get_pos b, CArg))]
    | UAsyncObj(l, g, _) ->
        let (lty, leqns) = infer_type_and_constraints ~sign tyctx l in
        let (gty, geqns) = infer_type_and_constraints ~sign tyctx g in
        leqns @ geqns @ [(olistty, lty, (get_pos l, CArg));
                         (oty, gty, (get_pos g, CArg))]
    | USyncObj(l, f, g, _) ->
        (* Like UAsyncObj, plus the focused formula [f] at type [o]. *)
        let (lty, leqns) = infer_type_and_constraints ~sign tyctx l in
        let (fty, feqns) = infer_type_and_constraints ~sign tyctx f in
        let (gty, geqns) = infer_type_and_constraints ~sign tyctx g in
        leqns @ feqns @ geqns @
        [(olistty, lty, (get_pos l, CArg));
         (oty, fty, (get_pos f, CArg));
         (oty, gty, (get_pos g, CArg))]
    | UArrow(a, b) | UOr(a, b) | UAnd(a, b) ->
        (aux tyctx a) @ (aux tyctx b)
    | UBinding(_, tids, body) ->
        (* Binders extend the typing context for their body. *)
        aux (List.rev_app tids tyctx) body
    | UPred(p, _) ->
        let (pty, peqns) = infer_type_and_constraints ~sign tyctx p in
        peqns @ [(propty, pty, (get_pos p, CArg))]
  in
  aux tyctx t
(** All identifiers satisfying [test] that occur in the terms of metaterm
    [t], deduplicated; names bound by a quantifier are removed from the
    result of its body. *)
let umetaterm_extract_if test t =
  let rec collect = function
    | UTrue | UFalse -> []
    | UEq (a, b) -> uterms_extract_if test [a; b]
    | UPred (p, _) -> uterms_extract_if test [p]
    | UAsyncObj (l, g, _) -> uterms_extract_if test [l; g]
    | USyncObj (l, f, g, _) -> uterms_extract_if test [l; f; g]
    | UArrow (a, b) | UOr (a, b) | UAnd (a, b) -> collect a @ collect b
    | UBinding (_, tids, body) ->
        List.remove_all (fun id -> List.mem_assoc id tids) (collect body)
  in
  List.unique (collect t)
let umetaterm_nominals_to_tyctx t =
ids_to_fresh_tyctx (umetaterm_extract_if is_nominal_name t)
(** Structural translation of an untyped metaterm into a [metaterm].
    Object sequents normalize their context term into a [Context].  The
    [?sign] argument is currently ignored (see the commented-out
    kind-check in the [UBinding] case). *)
let umetaterm_to_metaterm ?sign:_ t =
  let rec aux t =
    match t with
    | UTrue -> True
    | UFalse -> False
    | UEq(a, b) -> Eq(uterm_to_term a, uterm_to_term b)
    | UAsyncObj(l, g, r) ->
        let context = Context.normalize [uterm_to_term l] in
        let right = uterm_to_term g in
        Obj({context ; right ; mode = Async}, r)
    | USyncObj(l, f, g, r) ->
        let context = Context.normalize [uterm_to_term l] in
        let right = uterm_to_term g in
        (* [f] is the formula under focus. *)
        let mode = Sync (uterm_to_term f) in
        Obj({context ; right ; mode}, r)
    | UArrow(a, b) -> Arrow(aux a, aux b)
    | UBinding(binder, tids, body) ->
        (* let () = match sign with *)
        (* | Some sign -> List.iter (fun (_, ty) -> kind_check_poly sign [] ty) tids *)
        (* | None -> () *)
        (* in *)
        Binding(binder, tids, aux body)
    | UOr(a, b) -> Or(aux a, aux b)
    | UAnd(a, b) -> And(aux a, aux b)
    | UPred(p, r) -> Pred(uterm_to_term p, r)
  in
  aux t
let umetaterm_to_string ?sign t =
metaterm_to_string (umetaterm_to_metaterm ?sign t)
let umetaterm_to_formatted_string ?sign t =
metaterm_to_formatted_string (umetaterm_to_metaterm ?sign t)
let check_meta_logic_quantification_type ty =
iter_ty
(fun bty ->
if bty = propaty then
failwith "Cannot quantify over type prop")
ty
(** Reject quantification over type [prop] anywhere inside metaterm [t]. *)
let check_meta_quantification t =
  let rec walk = function
    | True | False | Eq _ | Obj _ | Pred _ -> ()
    | And (a, b) | Or (a, b) | Arrow (a, b) -> walk a ; walk b
    | Binding (_, tids, body) ->
        List.iter
          (fun (_, ty) -> check_meta_logic_quantification_type ty)
          tids ;
        walk body
  in
  walk t
(** Drop the focus of an object sequent: a synchronous sequent becomes
    asynchronous with the focused formula returned to the context; an
    already-asynchronous sequent is returned unchanged. *)
let make_async obj =
  match obj.mode with
  | Sync focus -> { obj with mode = Async ; context = focus :: obj.context }
  | Async -> obj
let metaterm_ensure_subordination sr t =
let rec aux t =
match t with
| True | False -> ()
| Eq(a, b) ->
term_ensure_subordination sr a ;
term_ensure_subordination sr b
| Obj(obj, _) ->
aux (async_to_member (make_async obj))
| Arrow(a, b) | Or(a, b) | And(a, b) ->
aux a ;
aux b
| Binding(_, tids, body) ->
List.iter (Subordination.ensure sr) (List.map snd tids) ;
aux body
| Pred(p, _) ->
term_ensure_subordination sr p
in
aux t
(** [type_umetaterm ~sr ~sign ?ctx t] infers types for the untyped
    metaterm [t] and converts it to a [metaterm].  [ctx] supplies
    already-typed variables; nominal constants get fresh type variables.
    The result must be fully inferred, subordination-correct under [sr],
    and must not quantify over [prop]. *)
let type_umetaterm ~sr ~sign ?(ctx=[]) t =
  let nominal_tyctx = umetaterm_nominals_to_tyctx t in
  let tyctx =
    (List.map (fun (id, t) -> (id, tc [] t)) ctx)
    @ nominal_tyctx in
  let eqns = infer_constraints ~sign ~tyctx t in
  (* Unification resolves the type variables by side effect. *)
  unify_constraints eqns;
  let ctx = ctx @ (tyctx_to_nominal_ctx nominal_tyctx) in
  let result = replace_metaterm_vars ctx (umetaterm_to_metaterm ~sign t) in
  metaterm_ensure_fully_inferred ~sign result ;
  metaterm_ensure_subordination sr result ;
  check_meta_quantification result ;
  result
(** [type_udef ~sr ~sign (head, body)] types one clause of a definition.
    The capitalized names of the head are the clause variables; they share
    fresh type variables between head and body so both sides are solved as
    a single unification problem. *)
let type_udef ~sr ~sign (head, body) =
  let cids = umetaterm_extract_if is_capital_name head in
  let tyctx = ids_to_fresh_tyctx cids in
  let eqns1 = infer_constraints ~sign ~tyctx head in
  let eqns2 = infer_constraints ~sign ~tyctx body in
  unify_constraints (eqns1 @ eqns2);
  let ctx = tyctx_to_ctx tyctx in
  let (rhead, rbody) =
    (replace_metaterm_vars ctx (umetaterm_to_metaterm ~sign head),
     replace_metaterm_vars ctx (umetaterm_to_metaterm ~sign body))
  in
  metaterm_ensure_fully_inferred ~sign rhead ;
  metaterm_ensure_fully_inferred ~sign rbody ;
  metaterm_ensure_subordination sr rhead ;
  metaterm_ensure_subordination sr rbody ;
  (* NOTE(review): only the body is passed to check_meta_quantification —
     presumably heads cannot contain bindings; confirm. *)
  check_meta_quantification rbody ;
  (rhead, rbody)
(** Type-check a list of definitional clauses; see {!type_udef}. *)
let type_udefs ~sr ~sign udefs =
  List.map (fun udef -> type_udef ~sr ~sign udef) udefs
(** Utilities *)
(** Whether the head of the application spine is a capitalized name;
    lambdas never count as capital-headed. *)
let rec has_capital_head = function
  | UCon (_, id, _) -> is_capital_name id
  | UApp (_, f, _) -> has_capital_head f
  | ULam _ -> false
(** globals *)
let sign : sign ref = State.rref pervasive_sign
let sr = State.rref pervasive_sr
| null | https://raw.githubusercontent.com/abella-prover/abella/7795951e17dc53d02b041f5b2ddb68161dd8ec2b/src/typing.ml | ocaml | **************************************************************************
en Informatique et en Automatique)
(at your option) any later version.
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
**************************************************************************
* Untyped terms
* Tables / Signatures
* Kinds
* Constants
* Pervasive signature
* Typing for terms
Printf.printf "Note: registered %S : %s\n%!" name
print_clause result;
* Typing for metaterms
let () = match sign with
| Some sign -> List.iter (fun (_, ty) -> kind_check_poly sign [] ty) tids
| None -> ()
in
* Utilities
* globals | Copyright ( C ) 2007 - 2009 Gacek
Copyright ( C ) 2013 - 2022 Inria ( Institut National de Recherche
This file is part of Abella .
Abella is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
Abella is distributed in the hope that it will be useful ,
You should have received a copy of the GNU General Public License
along with Abella . If not , see < / > .
open Term
open Metaterm
open Extensions
open Unifyty
type pos = Lexing.position * Lexing.position
type uterm =
| UCon of pos * string * ty
| ULam of pos * string * ty * uterm
| UApp of pos * uterm * uterm
let ghost : pos = (Lexing.dummy_pos, Lexing.dummy_pos)
let rec forget_term ?(cx=[]) t =
match observe (hnorm t) with
| Var v -> UCon (ghost, v.name, v.ty)
| Lam ([], t) -> forget_term ~cx t
| Lam ((x, xty) :: cx, t) ->
ULam (ghost, x, xty, forget_term ~cx:((x, xty) :: cx) t)
| App (f, ts) ->
List.fold_left begin fun f t ->
UApp (ghost, f, forget_term ~cx t)
end (forget_term ~cx f) ts
| DB n -> begin
try
let (x, xty) = List.nth cx (n - 1) in
UCon (ghost, x, xty)
with Failure _ -> bugf "forget_term called with too small a context"
end
| _ ->
bugf "forget_term called on: %s" (term_to_string t)
let get_pos t =
match t with
| UCon(p, _, _) -> p
| ULam(p, _, _, _) -> p
| UApp(p, _, _) -> p
let change_pos p t =
match t with
| UCon(_, id, ty) -> UCon(p, id, ty)
| ULam(_, id, ty, body) -> ULam(p, id, ty, body)
| UApp(_, t1, t2) -> UApp(p, t1, t2)
let predefined id pos =
UCon(pos, id, Term.fresh_tyvar ())
let binop id t1 t2 =
let pos = (fst (get_pos t1), snd (get_pos t2)) in
UApp(pos, UApp(pos, predefined id pos, t1), t2)
let uterm_head_name t =
let rec aux = function
| UCon(_, id, _) -> id
| UApp(_, h, _) -> aux h
| ULam _ -> assert false
in
aux t
*
type umetaterm =
| UTrue
| UFalse
| UEq of uterm * uterm
| UAsyncObj of uterm * uterm * restriction
| USyncObj of uterm * uterm * uterm * restriction
| UArrow of umetaterm * umetaterm
| UBinding of binder * (id * ty) list * umetaterm
| UOr of umetaterm * umetaterm
| UAnd of umetaterm * umetaterm
| UPred of uterm * restriction
let apply_sub_tyctx s tyctx =
List.map (fun (id, ty) -> (id, apply_sub_ty s ty)) tyctx
let ids_to_fresh_tyctx ids =
List.map (fun id -> (id, fresh_tyvar ())) ids
let tyctx_to_ctx tyctx =
List.map (fun (id, ty) -> (id, const id ty)) tyctx
let tyctx_to_nominal_ctx tyctx =
List.map (fun (id, ty) -> (id, nominal_var id ty)) tyctx
type ktable = (string * knd) list
type pty = Poly of string list * ty
type ctable = (string * pty) list
type sign = ktable * ctable
let add_types (ktable, ctable) ids knd =
List.iter begin fun id ->
begin
try
let knd' = List.assoc id ktable in
if knd <> knd' then
failwithf "Type constructor %s has inconsistent kind declarations" id
with
| Not_found -> ()
end ;
if is_capital_name id then
failwithf "Types may not begin with a capital letter: %s" id;
end ids ;
((List.map (fun id -> (id, knd)) ids) @ ktable, ctable)
let lookup_type (ktable, _) id =
List.assoc id ktable
let kind_check sign ty =
let rec aux = function
| Ty(tys, aty) ->
List.iter aux tys;
match aty with
| Tygenvar _
| Typtr {contents = TV _} -> ()
| Tycons(cty,args) ->
let knd =
try lookup_type sign cty
with
| Not_found -> failwithf "Unknown type constructor: %s" cty
in
let arity = karity knd in
let nargs = List.length args in
if not (nargs = arity) then
failwithf "%s expects %i arguments but has %i" cty arity nargs ;
List.iter aux args
| Typtr {contents = TT _} -> assert false
in aux (observe_ty ty)
let kind_check_poly (ktable,ctable) (Poly(_ids, ty)) =
kind_check (ktable,ctable) ty
let eq_pty pty1 pty2 =
match pty1,pty2 with
| Poly(ids1, ty1), Poly(ids2,ty2) ->
List.length ids1 = List.length ids2 &&
begin
let tyvars = List.map (fun _ -> Term.fresh_tyvar ()) ids1 in
let sub1 = List.map2 (fun id ty -> (id,ty)) ids1 tyvars in
let sub2 = List.map2 (fun id ty -> (id,ty)) ids2 tyvars in
let ty1' = apply_sub_ty sub1 ty1 in
let ty2' = apply_sub_ty sub2 ty2 in
eq_ty ty1' ty2'
end
let check_const (ktable, ctable) (id, pty) =
begin try
let pty' = List.assoc id ctable in
if not (eq_pty pty pty') then
failwithf "Constant %s has inconsistent type declarations" id
with
| Not_found -> ()
end ;
if is_capital_name id then
failwithf "Constants may not begin with a capital letter: %s" id ;
kind_check_poly (ktable, ctable) pty
let add_poly_consts (ktable, ctable) idptys =
List.iter (check_const (ktable, ctable)) idptys ;
(ktable, idptys @ ctable)
let get_typaram ty =
let params = ref [] in
iter_ty begin fun aty ->
match aty with
| Tygenvar v ->
if is_capital_name v then
params := v::(!params)
| _ -> ()
end ty;
!params
let get_typarams tys = List.flatten_map get_typaram tys
let add_consts sign idtys =
let typarams = idtys |> List.map snd |> List.map get_typaram in
let idptys = List.map2
(fun (id, ty) pas -> (id, Poly(pas, ty))) idtys typarams in
add_poly_consts sign idptys
let freshen_ty (Poly(ids, ty)) =
let sub = ids_to_fresh_tyctx ids in
apply_sub_ty sub ty
let lookup_const (_, ctable) id =
try
freshen_ty (List.assoc id ctable)
with
| Not_found -> failwithf "Unknown constant: %s" id
* types
let rec desugar_aty aty =
match aty with
| Tycons (v,tys) ->
if v = "olist" && tys = [] then
Tycons ("list",[oty])
else
let tys = List.map desugar_ty tys in
Tycons (v,tys)
| Typtr {contents=TT _t} ->
assert false
| _ -> aty
and desugar_ty ty =
match (observe_ty ty) with
| Ty (tys, aty) ->
let tys = List.map desugar_ty tys in
let aty = desugar_aty aty in
Ty (tys,aty)
let k_member = "member"
let k_cons = "::"
let k_nil = "nil"
let pervasive_sign =
let aty = tybase (Tygenvar "A") in
let alistty = tybase (atyapp (atybase "list") aty) in
([("o", Knd 0); ("list", Knd 1); ("prop", Knd 0)],
[("pi", Poly(["A"], tyarrow [tyarrow [aty] oty] oty)) ;
("=>", Poly([], tyarrow [oty; oty] oty)) ;
("&", Poly([], tyarrow [oty; oty] oty)) ;
(k_cons, Poly(["A"], tyarrow [aty; alistty] alistty)) ;
(k_nil, Poly(["A"], alistty)) ])
let sign_to_tys sign =
List.map
(function (_, Poly(_ids, ty)) -> ty)
(snd sign)
let pervasive_sr =
List.fold_left Subordination.update Subordination.empty
(sign_to_tys pervasive_sign)
type expected = ty
* type actual = ty
* ( \ * A constraint contains the position of the ' actual ' type * \ )
* type constraint_type = CFun | CArg
* type constraint_info = pos * constraint_type
* type constraints = ( expected * actual * ) list
* exception TypeInferenceFailure of constraint_info * expected * actual
* type actual = ty
* (\* A constraint contains the position of the 'actual' type *\)
* type constraint_type = CFun | CArg
* type constraint_info = pos * constraint_type
* type constraints = (expected * actual * constraint_info) list
* exception TypeInferenceFailure of constraint_info * expected * actual *)
(* [infer_type_and_constraints ~sign tyctx t] computes a (possibly still
   open) type for the untyped term [t] together with the list of equality
   constraints that must be solved for that type to be valid.  [tyctx] maps
   known variable names to types; names not in [tyctx] are looked up as
   constants in [sign].  Constraints are returned in generation order, each
   tagged with a source position and whether it arose from a function
   ([CFun]) or an argument ([CArg]) position. *)
let infer_type_and_constraints ~sign tyctx t =
  let eqns = ref [] in
  let add_constraint expected actual pos =
    eqns := (expected, actual, pos) :: !eqns
  in
  let rec aux tyctx t =
    match t with
    | UCon(p, id, ty) ->
        (* the annotation carried by this occurrence must agree with the
           bound/declared type of [id] *)
        let ty' =
          begin try
            List.assoc id tyctx
          with
          | Not_found -> lookup_const sign id
          end
        in
        add_constraint ty ty' (p, CArg) ;
        ty
    | ULam(_, id, ty, t) ->
        tyarrow [ty] (aux ((id, ty) :: tyctx) t)
    | UApp(_, t1, t2) ->
        let ty1 = aux tyctx t1 in
        let ty2 = aux tyctx t2 in
        let (aty, rty) =
          match ty1 with
          | Ty([], _) ->
              (* head type is not (yet) an arrow: invent fresh argument and
                 result types and constrain the head to be a function *)
              let aty = fresh_tyvar () in
              let rty = fresh_tyvar () in
              add_constraint (tyarrow [aty] rty) ty1 (get_pos t1, CFun) ;
              (aty, rty)
          | Ty(aty::atys, bty) ->
              (aty, Ty(atys, bty))
        in
        add_constraint aty ty2 (get_pos t2, CArg) ;
        rty
  in
  let ty = aux tyctx t in
  (ty, List.rev !eqns)
(* Render a list of typing constraints, one "lhs = rhs" equation per line.
   The position information attached to each constraint is not shown. *)
let constraints_to_string eqns =
  eqns
  |> List.map (fun (lhs, rhs, _pos) ->
         ty_to_string lhs ^ " = " ^ ty_to_string rhs)
  |> String.concat "\n"
(* let occurs v ty =
     let rec aux = function
       | Ty(tys, bty) when bty = v -> true
       | Ty(tys, _) -> List.exists aux tys
     in
     aux ty *)
(* [contains_tyvar ty] checks whether [ty] still mentions an uninstantiated
   unification type variable ([TV]).  Generic variables ([Tygenvar]) do not
   count.  Instantiated pointers ([TT]) are impossible here because the
   type is passed through [observe_ty] first. *)
let contains_tyvar ty =
  let rec aux = function
    | Ty (tys,aty) ->
        let cv =
          match aty with
          | Tygenvar _ -> false
          | Typtr {contents=TV _} -> true
          | Tycons (_c,args) ->
              List.exists aux args
          | Typtr {contents=TT _} -> assert false
        in
        cv || List.exists aux tys
  in aux (observe_ty ty)
(* [tid_ensure_fully_inferred ~sign (id, ty)] fails if the type assigned to
   the variable still contains unification variables, and otherwise
   kind-checks it against [sign]. *)
let tid_ensure_fully_inferred ~sign (_id, ty) =
  if contains_tyvar ty then
    failwith "Types of variables are not fully determined" ;
  kind_check sign ty

(* [term_ensure_fully_inferred ~sign t] checks that every variable occurring
   in the (typed) term [t] has a fully determined type. *)
let term_ensure_fully_inferred ~sign t =
  let rec aux t =
    match observe (hnorm t) with
    | Var v -> tid_ensure_fully_inferred ~sign (v.name, v.ty)
    | DB _i -> ()
    | App(h, args) -> aux h ; List.iter aux args
    | Lam(_tys, body) -> aux body
    | _ -> assert false
  in
  aux t

(* [metaterm_ensure_fully_inferred ~sign t] extends the check above to
   metaterms: all binder types and all embedded terms (equation sides,
   object contexts/foci/goals, predicate arguments) must be fully typed. *)
let metaterm_ensure_fully_inferred ~sign t =
  let rec aux t =
    match t with
    | True | False -> ()
    | And(a, b) | Or(a, b) | Arrow(a, b) -> aux a; aux b
    | Binding(_, tids, body) ->
        List.iter (tid_ensure_fully_inferred ~sign) tids ;
        aux body
    | Eq(a, b) ->
        term_ensure_fully_inferred ~sign a ;
        term_ensure_fully_inferred ~sign b
    | Obj(obj, _) ->
        Context.iter (term_ensure_fully_inferred ~sign) obj.context ;
        begin match obj.mode with
        | Async -> ()
        | Sync focus ->
            term_ensure_fully_inferred ~sign focus
        end ;
        term_ensure_fully_inferred ~sign obj.right
    | Pred(p, _) ->
        term_ensure_fully_inferred ~sign p
  in
  aux t
(* let apply_bind_constraints v ty eqns =
     List.map (fun (x,y) -> (apply_bind_ty v ty x, apply_bind_ty v ty y)) eqns

   let apply_bind_sub v ty sub =
     List.map (fun (x,y) -> (x, apply_bind_ty v ty y)) sub

   let unify_constraints eqns =
     let add_sub v vty s =
       (v, vty) :: (apply_bind_sub v vty s)
     in

     (* Unify a single constraint and call fail on failure *)
     let rec aux s (ty1, ty2) fail =
       let ty1 = apply_sub_ty s ty1 in
       let ty2 = apply_sub_ty s ty2 in
       match ty1, ty2 with
       | _, _ when ty1 = ty2 -> s
       | Ty([], bty1), _ when is_tyvar bty1 ->
           if occurs bty1 ty2 then
             fail s
           else
             add_sub bty1 ty2 s
       | _, Ty([], bty2) when is_tyvar bty2 ->
           if occurs bty2 ty1 then
             fail s
           else
             add_sub bty2 ty1 s
       | Ty(ty1::tys1, bty1), Ty(ty2::tys2, bty2) ->
           let s = aux s (ty1, ty2) fail in
           aux s (Ty(tys1, bty1), Ty(tys2, bty2)) fail
       | ty1, ty2 -> fail s
     in

     let unify_single_constraint s (ty1, ty2, p) =
       aux s (ty1, ty2)
         (fun s -> raise (TypeInferenceFailure(p, apply_sub_ty s ty1,
                                               apply_sub_ty s ty2)))
     in

     List.fold_left unify_single_constraint [] eqns *)
(* [uterms_extract_if test ts] collects, without duplicates, the free
   identifiers of the untyped terms [ts] that satisfy [test]; identifiers
   bound by a [ULam] are removed from its body's result. *)
let uterms_extract_if test ts =
  let rec aux t =
    match t with
    | UCon(_, id, _) -> if test id then [id] else []
    | ULam(_, id, _, t) -> List.remove id (aux t)
    | UApp(_, t1, t2) -> (aux t1) @ (aux t2)
  in
  List.unique (List.flatten_map aux ts)

(* Fresh type context for the nominal constants occurring in [t]. *)
let uterm_nominals_to_tyctx t =
  ids_to_fresh_tyctx (uterms_extract_if is_nominal_name [t])

(* [uterm_to_term t] structurally converts an untyped term to an internal
   term; every identifier occurrence becomes a constant (variables are
   introduced later via [replace_term_vars]). *)
let uterm_to_term t =
  let rec aux t =
    match t with
    | UCon(_, id, ty) -> const id ty
    | ULam(_, id, ty, t) -> abstract id ty (aux t)
    | UApp(_, t1, t2) -> app (aux t1) [aux t2]
  in
  aux t

(* Printable form of an untyped term, via its term translation. *)
let uterm_to_string t =
  term_to_string (uterm_to_term t)
(* [term_ensure_subordination sr t] checks that every type occurring in [t]
   (variable types, and the computed types of abstractions) respects the
   subordination relation [sr]. *)
let term_ensure_subordination sr t =
  let rec aux tyctx t =
    match observe (hnorm t) with
    | Var v -> Subordination.ensure sr v.ty
    | DB _i -> ()
    | App(h, ts) -> aux tyctx h ; List.iter (aux tyctx) ts
    | Lam(idtys, b) ->
        Subordination.ensure sr (tc tyctx t) ;
        aux (List.rev_app idtys tyctx) b
    | _ -> assert false
  in
  aux [] t
(* Reject types that may not appear in the specification logic: the
   reasoning-logic type [prop] and the abbreviation [list o]. *)
let check_spec_logic_type ty =
  iter_ty
    (fun bty ->
       if bty = propaty then
         failwith "Cannot mention type 'prop' in the specification logic" ;
       if bty = olistaty then
         failwith "Cannot mention type 'list o' in the specification logic")
    ty

(* Quantification in the specification logic is further restricted: the
   formula type [o] itself may not be quantified over. *)
let check_spec_logic_quantification_type ty =
  check_spec_logic_type ty ;
  iter_ty
    (fun bty ->
       if bty = oaty then
         failwith "Cannot quantify over type o in the specification logic")
    ty

(* [check_pi_quantification ts] checks every occurrence of the constant
   [pi] in [ts]: its use type has the shape [((tau -> o) -> o)], and the
   quantified type [tau] must be legal in the specification logic. *)
let check_pi_quantification ts =
  ignore
    (map_vars
       (fun v ->
          if v.name = "pi" then
            match observe_ty v.ty with
            | Ty([Ty([tau], _)], _) ->
                check_spec_logic_quantification_type tau
            | _ -> assert false)
       ts)
(* let get_tyvar_names ty =
     let rec aux = function
       | Ty (tys, aty) ->
           let ns = List.flatten_map aux tys in
           let ans =
             match aty with
             | Typtr {contents=TV v} -> [v]
             | Typtr {contents=TT _} -> assert false
             | Tygenvar _ -> []
             | Tycons (c,args) ->
                 List.flatten_map aux tys
           in
           ns @ ans
     in List.unique (aux (observe_ty ty)) *)
(* [type_uterm ?partial_infer ?expected_ty ~sr ~sign ~ctx t] type-checks the
   untyped term [t] and converts it to an internal term.  [ctx] supplies
   already-typed variables; nominal constants occurring in [t] receive
   fresh types.  If [expected_ty] is given, the inferred type is also
   constrained to it.  Unless [partial_infer] is set, all types must be
   fully determined afterwards; subordination is always checked. *)
let type_uterm ?partial_infer ?expected_ty ~sr ~sign ~ctx t =
  let nominal_tyctx = uterm_nominals_to_tyctx t in
  let tyctx =
    (List.map (fun (id, t) -> (id, tc [] t)) ctx)
    @ nominal_tyctx
  in
  let (ty, eqns) = infer_type_and_constraints ~sign tyctx t in
  let eqns =
    match expected_ty with
    | None -> eqns
    | Some exp_ty -> (exp_ty, ty, (get_pos t, CArg)) :: eqns
  in
  unify_constraints eqns;
  let ctx = ctx @ (tyctx_to_nominal_ctx nominal_tyctx) in
  let result = replace_term_vars ctx (uterm_to_term t) in
  (match partial_infer with
   | None -> term_ensure_fully_inferred ~sign result
   | Some _ -> ()) ;
  term_ensure_subordination sr result ;
  result
(* A term has a "capital head" when the head of its application spine is a
   capitalized identifier, i.e. a metavariable. *)
let rec has_capital_head t =
  match t with
  | UCon(_, v, _) -> is_capital_name v
  | UApp(_, h, _) -> has_capital_head h
  | _ -> false

(* [replace_underscores head body] replaces every wildcard ["_"] in a clause
   by a fresh capitalized name, avoiding both the existing capitalized
   names and every binder encountered so far.  Returns the rewritten head
   and body. *)
let replace_underscores head body =
  let names = uterms_extract_if is_capital_name (head::body) in
  let used = ref (List.map (fun x -> (x, ())) names) in
  let rec aux t =
    match t with
    | UCon(p, id, ty) when id = "_" ->
        let id' = fresh_name "X" !used in
        used := (id', ()) :: !used ;
        UCon(p, id', ty)
    | UCon _ -> t
    | ULam(p, id, ty, t) ->
        (* binder names are reserved too, so fresh names cannot capture *)
        used := (id, ()) :: !used ;
        ULam(p, id, ty, aux t)
    | UApp(p, t1, t2) ->
        let t1' = aux t1 in
        let t2' = aux t2 in
        UApp(p, t1', t2')
  in
  match List.map aux (head::body) with
  | h::b -> (h, b)
  | [] -> assert false
(* Global registry of named program clauses: clause name |-> (generalized
   type variables, clause term). *)
let clause_map : (string list * term) Itab.t ref = ref Itab.empty

(* Has a clause with this name been registered already? *)
let seen_name cname = Itab.mem cname !clause_map

(* Record a clause under [name]; callers are expected to have checked
   [seen_name] beforehand (see [type_uclause]). *)
let register_clause name clause =
  ( ) ;
  clause_map := Itab.add name clause !clause_map

(* Look up a registered clause by name, if any. *)
let lookup_clause cname =
  if seen_name cname
  then Some (Itab.find cname !clause_map)
  else None
(* [generalize_tyvars t] replaces every remaining unification type variable
   in [t] by a generic type variable of the same name, returning the list
   of generalized names together with the rewritten term. *)
let generalize_tyvars t =
  let tyvars = term_collect_tyvar_names t in
  let tysub = List.map (fun id -> (id, tybase (Tygenvar id))) tyvars in
  let t' = term_map_on_tys (apply_sub_ty_tyvar tysub) t in
  (tyvars, t')
(* Debug helper: print a generalized clause (its type variables and its
   term) to stderr. *)
let print_clause cl =
  let (vars, clause) = cl in
  Printf.eprintf "Typed clause: [%s] %s\n"
    (String.concat "," vars) (term_to_string clause)
(* [type_uclause ~sr ~sign (cname, head, body)] elaborates a program clause:
   wildcards are replaced by fresh names, the clause is folded into the
   single formula [pi X1\ .. Xn\ body_n => .. => body_1 => head], it is
   type-checked with [partial_infer] (clauses may remain polymorphic), and
   the remaining type variables are generalized.  A named clause is
   recorded in the registry; reusing a name is an error.
   Fix: corrected the "already seeen" typo in the duplicate-name error. *)
let type_uclause ~sr ~sign (cname, head, body) =
  if has_capital_head head then
    failwith "Clause has flexible (i.e., non-atomic) head" ;
  let head, body = replace_underscores head body in
  let cids = uterms_extract_if is_capital_name (head::body) in
  let get_imp_form head body =
    (* body goals become premises, innermost premise first *)
    (let impfy imp f = (binop "=>" f imp) in
     List.fold_left impfy head (List.rev body))
  in
  let imp_form = get_imp_form head body in
  let get_pi_form ids body =
    (* wrap the formula in one [pi] binder per capitalized variable *)
    (let pify id pi =
       let pos = get_pos pi in
       let abs = ULam (pos, id, Term.fresh_tyvar (), pi) in
       UApp (pos, predefined "pi" pos, abs)
     in
     List.fold_right pify ids body)
  in
  let pi_form = get_pi_form cids imp_form in
  let result = type_uterm ~partial_infer:true ~sr ~sign ~ctx:[] pi_form in
  let result = generalize_tyvars result in
  let _ = check_pi_quantification [snd result] in
  begin match cname with
  | None -> ()
  | Some cname ->
      if seen_name cname then
        failwithf "Clause named %S already seen" cname ;
      register_clause cname result ;
  end ;
  result
(* Earlier, pre-polymorphism elaboration of clauses, kept for reference:

   let tyctx = ids_to_fresh_tyctx cids in
   let eqns =
     List.fold_left (fun acc p ->
       let (pty, peqns) = infer_type_and_constraints ~sign tyctx p in
       acc @ peqns @ [(oty, pty, (get_pos p, CArg))])
       [] (head::body)
   in
   let sub = unify_constraints eqns in
   let ctx = tyctx_to_ctx (apply_sub_tyctx sub tyctx) in
   let convert p = replace_term_vars ctx (uterm_to_term sub p) in
   let (rhead, rbody) = (convert head, List.map convert body) in
   List.iter term_ensure_fully_inferred (rhead::rbody) ;
   List.iter (term_ensure_subordination sr) (rhead::rbody) ;
   check_pi_quantification (rhead::rbody) ;
   (rhead, rbody)
*)
(* [infer_constraints ~sign ~tyctx t] collects the typing constraints of an
   untyped metaterm: embedded terms are typed via
   [infer_type_and_constraints] and constrained to the appropriate formula
   type ([o] for object goals/foci, [olist] for object contexts, [prop]
   for predicates); the two sides of an equation must share a type. *)
let infer_constraints ~sign ~tyctx t =
  let rec aux tyctx t =
    match t with
    | UTrue | UFalse -> []
    | UEq(a, b) ->
        let (aty, aeqns) = infer_type_and_constraints ~sign tyctx a in
        let (bty, beqns) = infer_type_and_constraints ~sign tyctx b in
        aeqns @ beqns @ [(aty, bty, (get_pos b, CArg))]
    | UAsyncObj(l, g, _) ->
        let (lty, leqns) = infer_type_and_constraints ~sign tyctx l in
        let (gty, geqns) = infer_type_and_constraints ~sign tyctx g in
        leqns @ geqns @ [(olistty, lty, (get_pos l, CArg));
                         (oty, gty, (get_pos g, CArg))]
    | USyncObj(l, f, g, _) ->
        let (lty, leqns) = infer_type_and_constraints ~sign tyctx l in
        let (fty, feqns) = infer_type_and_constraints ~sign tyctx f in
        let (gty, geqns) = infer_type_and_constraints ~sign tyctx g in
        leqns @ feqns @ geqns @
        [(olistty, lty, (get_pos l, CArg));
         (oty, fty, (get_pos f, CArg));
         (oty, gty, (get_pos g, CArg))]
    | UArrow(a, b) | UOr(a, b) | UAnd(a, b) ->
        (aux tyctx a) @ (aux tyctx b)
    | UBinding(_, tids, body) ->
        aux (List.rev_app tids tyctx) body
    | UPred(p, _) ->
        let (pty, peqns) = infer_type_and_constraints ~sign tyctx p in
        peqns @ [(propty, pty, (get_pos p, CArg))]
  in
  aux tyctx t
(* [umetaterm_extract_if test t] collects, without duplicates, identifiers
   satisfying [test] that occur free in the untyped metaterm [t];
   identifiers bound by a [UBinding] are excluded within its body. *)
let umetaterm_extract_if test t =
  let rec aux t =
    match t with
    | UTrue | UFalse -> []
    | UEq(a, b) ->
        uterms_extract_if test [a; b]
    | UPred(p, _) ->
        uterms_extract_if test [p]
    | UAsyncObj(l, g, _) ->
        uterms_extract_if test [l; g]
    | USyncObj(l, f, g, _) ->
        uterms_extract_if test [l;f;g]
    | UArrow(a, b) | UOr(a, b) | UAnd(a, b) ->
        (aux a) @ (aux b)
    | UBinding(_, tids, body) ->
        List.remove_all (fun id -> List.mem_assoc id tids) (aux body)
  in
  List.unique (aux t)

(* Fresh type context for the nominal constants of a metaterm. *)
let umetaterm_nominals_to_tyctx t =
  ids_to_fresh_tyctx (umetaterm_extract_if is_nominal_name t)
(* [umetaterm_to_metaterm t] structurally converts an untyped metaterm to
   an internal metaterm; embedded terms are converted by [uterm_to_term]
   and object left-hand sides are normalized into contexts.  The [?sign]
   argument is accepted for interface compatibility and ignored. *)
let umetaterm_to_metaterm ?sign:_ t =
  let rec aux t =
    match t with
    | UTrue -> True
    | UFalse -> False
    | UEq(a, b) -> Eq(uterm_to_term a, uterm_to_term b)
    | UAsyncObj(l, g, r) ->
        let context = Context.normalize [uterm_to_term l] in
        let right = uterm_to_term g in
        Obj({context ; right ; mode = Async}, r)
    | USyncObj(l, f, g, r) ->
        let context = Context.normalize [uterm_to_term l] in
        let right = uterm_to_term g in
        let mode = Sync (uterm_to_term f) in
        Obj({context ; right ; mode}, r)
    | UArrow(a, b) -> Arrow(aux a, aux b)
    | UBinding(binder, tids, body) ->
        Binding(binder, tids, aux body)
    | UOr(a, b) -> Or(aux a, aux b)
    | UAnd(a, b) -> And(aux a, aux b)
    | UPred(p, r) -> Pred(uterm_to_term p, r)
  in
  aux t

(* Printable forms of an untyped metaterm, via its metaterm translation. *)
let umetaterm_to_string ?sign t =
  metaterm_to_string (umetaterm_to_metaterm ?sign t)

let umetaterm_to_formatted_string ?sign t =
  metaterm_to_formatted_string (umetaterm_to_metaterm ?sign t)
(* In the reasoning logic, quantification over any type except [prop] is
   allowed. *)
let check_meta_logic_quantification_type ty =
  iter_ty
    (fun bty ->
       if bty = propaty then
         failwith "Cannot quantify over type prop")
    ty

(* Check every binder of a metaterm against the restriction above. *)
let check_meta_quantification t =
  let rec aux t =
    match t with
    | True | False | Eq _ | Obj _ | Pred _ -> ()
    | And(a, b) | Or(a, b) | Arrow(a, b) -> aux a; aux b
    | Binding(_, tids, body) ->
        List.iter
          check_meta_logic_quantification_type
          (List.map snd tids) ;
        aux body
  in
  aux t

(* [make_async obj] turns a synchronous (focused) object sequent into an
   asynchronous one by moving the focused formula back into the context;
   asynchronous sequents are returned unchanged. *)
let make_async obj =
  match obj.mode with
  | Async -> obj
  | Sync focus ->
      { obj with
        mode = Async ;
        context = focus :: obj.context }
(* [metaterm_ensure_subordination sr t] checks subordination for all terms
   and binder types of [t].  Object sequents are first de-focused
   ([make_async]) and read via their [member] translation. *)
let metaterm_ensure_subordination sr t =
  let rec aux t =
    match t with
    | True | False -> ()
    | Eq(a, b) ->
        term_ensure_subordination sr a ;
        term_ensure_subordination sr b
    | Obj(obj, _) ->
        aux (async_to_member (make_async obj))
    | Arrow(a, b) | Or(a, b) | And(a, b) ->
        aux a ;
        aux b
    | Binding(_, tids, body) ->
        List.iter (Subordination.ensure sr) (List.map snd tids) ;
        aux body
    | Pred(p, _) ->
        term_ensure_subordination sr p
  in
  aux t
(* [type_umetaterm ~sr ~sign ?ctx t] fully elaborates the untyped metaterm
   [t]: typing constraints are collected and solved, variables from [ctx]
   and fresh nominal constants are substituted in, and the result must be
   fully typed, respect subordination, and quantify only over legal
   types. *)
let type_umetaterm ~sr ~sign ?(ctx=[]) t =
  let nominal_tyctx = umetaterm_nominals_to_tyctx t in
  let tyctx =
    (List.map (fun (id, t) -> (id, tc [] t)) ctx)
    @ nominal_tyctx in
  let eqns = infer_constraints ~sign ~tyctx t in
  unify_constraints eqns;
  let ctx = ctx @ (tyctx_to_nominal_ctx nominal_tyctx) in
  let result = replace_metaterm_vars ctx (umetaterm_to_metaterm ~sign t) in
  metaterm_ensure_fully_inferred ~sign result ;
  metaterm_ensure_subordination sr result ;
  check_meta_quantification result ;
  result
(* [type_udef ~sr ~sign (head, body)] elaborates one clause of an
   (co)inductive definition.  The capitalized names of the head are the
   clause's variables; head and body are typed jointly against one shared
   type context. *)
let type_udef ~sr ~sign (head, body) =
  let cids = umetaterm_extract_if is_capital_name head in
  let tyctx = ids_to_fresh_tyctx cids in
  let eqns1 = infer_constraints ~sign ~tyctx head in
  let eqns2 = infer_constraints ~sign ~tyctx body in
  unify_constraints (eqns1 @ eqns2);
  let ctx = tyctx_to_ctx tyctx in
  let (rhead, rbody) =
    (replace_metaterm_vars ctx (umetaterm_to_metaterm ~sign head),
     replace_metaterm_vars ctx (umetaterm_to_metaterm ~sign body))
  in
  metaterm_ensure_fully_inferred ~sign rhead ;
  metaterm_ensure_fully_inferred ~sign rbody ;
  metaterm_ensure_subordination sr rhead ;
  metaterm_ensure_subordination sr rbody ;
  check_meta_quantification rbody ;
  (rhead, rbody)

(* Elaborate all clauses of a (mutual) definition block. *)
let type_udefs ~sr ~sign udefs =
  List.map (type_udef ~sr ~sign) udefs

(* Variant of the earlier [has_capital_head] that treats abstractions
   explicitly; it shadows the definition above with the same behavior. *)
let rec has_capital_head t =
  match t with
  | UCon(_, id, _) -> is_capital_name id
  | ULam _ -> false
  | UApp(_, t, _) -> has_capital_head t

(* State-managed current signature and subordination relation,
   initialized to the pervasives. *)
let sign : sign ref = State.rref pervasive_sign
let sr = State.rref pervasive_sr
|
446b36217cee0e012e46f0162c7ba3323d8acbb5d1b5c75a2d6513eff73862d2 | data61/Mirza | GS1Orphans.hs | # LANGUAGE GeneralizedNewtypeDeriving #
{-# LANGUAGE StandaloneDeriving #-}
{-# OPTIONS_GHC -Wno-orphans #-}
module Mirza.Common.GS1Orphans where
import Data.GS1.EventId
import Servant (ToHttpApiData)
deriving instance ToHttpApiData EventId
| null | https://raw.githubusercontent.com/data61/Mirza/24e5ccddfc307cceebcc5ce26d35e91020b8ee10/projects/mirza-common-haskell/src/Mirza/Common/GS1Orphans.hs | haskell | # LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE StandaloneDeriving #
# OPTIONS_GHC -Wno - orphans #
module Mirza.Common.GS1Orphans where
import Data.GS1.EventId
import Servant (ToHttpApiData)
deriving instance ToHttpApiData EventId
| |
e1f7198d2f4544c408178e002941c75d32d766160ce46924a0fc55ffa7a7c37d | mfelleisen/7GUI | 7state.rkt | #lang racket
(provide
;; SYNTAX
#; (define-state state:id state0:expr propagate:expr)
;; -- defines (define state) ...
;; -- re-defines set! for state ... so that any changes to state ... invoke propagate ...
define-state
#; (define-state* (state:id state0:expr propagate:expr) ...)
;; (define-state state propagate) ...
define-state*
;; SYNTAX
#; (set! x (values e0 e ...))
;; evaluate e0 and e to a list of values, use e0's value as the new value
;; for x and propagate all of these values to the propagation function.
#; (set! x (stop e))
;; do not propagate this change to state variable x
stop)
;; ---------------------------------------------------------------------------------------------------
(require (for-syntax syntax/parse))
;; ---------------------------------------------------------------------------------------------------
;; (define-state* (state state0 f) ...) introduces several state variables
;; at once; each triple expands to a separate define-state.
(define-syntax (define-state* stx)
  (syntax-parse stx
    [(_ (state:id state0:expr f:expr) ...) #'(begin (define-state state state0 f) ...)]))

;; (define-state state state0 f) defines a hidden field initialized to
;; state0 and binds `state` to a set!-transformer: reads yield the field,
;; assignments also invoke the propagation function f.
(define-syntax (define-state stx)
  (syntax-parse stx
    [(_ state:id state0:expr f:expr)
     #'(begin
         (define g f)
         (define state-field state0)
         (define-getter/setter (state state-field g)))]))

;; Binds each `state` identifier to the getter/setter transformer over its
;; backing field and propagation function.
(define-syntax (define-getter/setter stx)
  (syntax-parse stx
    [(_ (state state-field f) ...)
     #'(begin (define-syntax state (generate-set-state #'state-field #'f)) ...)]))

;; Compile-time helper: builds the set!-transformer for one state variable.
;; plain reference            -> the backing field
;; (set! x (stop e))          -> assign without propagating
;; (set! x (values e0 e ...)) -> assign e0's value, pass all values to f
;; (set! x e)                 -> assign and propagate the new value
(define-for-syntax (generate-set-state state-field f)
  (with-syntax ([state-field state-field][f f])
    (make-set!-transformer
     (lambda (stx)
       (syntax-parse stx
         #:literals (stop values)
         [x:id #'state-field]
         [(set! x (stop e)) #'(set! state-field e)]
         [(set! x (values e0 e ...))
          #'(call-with-values
             (λ () (apply values (list e0 e ...)))
             (λ (y . r) (set! state-field y) (apply f state-field r)))]
         [(set! x e) #'(begin (set! state-field e) (f state-field))])))))

;; `stop` is only a marker recognized inside set! above; any other use is a
;; syntax error.
(define-syntax (stop stx) (raise-syntax-error #f "used out of context"))
| null | https://raw.githubusercontent.com/mfelleisen/7GUI/e3631e78ab12306ad81b560443913afa4b156dec/Macros/7state.rkt | racket | (define-state state:id state0:expr propagate:expr)
-- re-defines set! for state ... so that any changes to state ... invoke propagate ...
(define-state* (state:id state0:expr propagate:expr) ...)
(set! x (values e0 e ...))
evaluate e0 and e to a list of values, use e0's value as the new value
for x and propagate all of these values to the propagation function.
(set! x (stop e))
do not propagate this change to state variable x
---------------------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------------------- | #lang racket
(provide
SYNTAX
-- defines ( define state ) ...
define-state
( define - state state propagate ) ...
define-state*
SYNTAX
stop)
(require (for-syntax syntax/parse))
(define-syntax (define-state* stx)
(syntax-parse stx
[(_ (state:id state0:expr f:expr) ...) #'(begin (define-state state state0 f) ...)]))
(define-syntax (define-state stx)
(syntax-parse stx
[(_ state:id state0:expr f:expr)
#'(begin
(define g f)
(define state-field state0)
(define-getter/setter (state state-field g)))]))
(define-syntax (define-getter/setter stx)
(syntax-parse stx
[(_ (state state-field f) ...)
#'(begin (define-syntax state (generate-set-state #'state-field #'f)) ...)]))
(define-for-syntax (generate-set-state state-field f)
(with-syntax ([state-field state-field][f f])
(make-set!-transformer
(lambda (stx)
(syntax-parse stx
#:literals (stop values)
[x:id #'state-field]
[(set! x (stop e)) #'(set! state-field e)]
[(set! x (values e0 e ...))
#'(call-with-values
(λ () (apply values (list e0 e ...)))
(λ (y . r) (set! state-field y) (apply f state-field r)))]
[(set! x e) #'(begin (set! state-field e) (f state-field))])))))
(define-syntax (stop stx) (raise-syntax-error #f "used out of context"))
|
883a3a90f371e3bd2172d032d1ff89c613e6fc02c7f4c9f1fa2ac310c2de8bcb | 2600hz/kazoo | kzd_storage.erl | %%%-----------------------------------------------------------------------------
%%% @copyright (C) 2010-2020, 2600Hz
%%% @doc
%%% This Source Code Form is subject to the terms of the Mozilla Public
%%% License, v. 2.0. If a copy of the MPL was not distributed with this
%%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%%
%%% @end
%%%-----------------------------------------------------------------------------
-module(kzd_storage).

-export([new/0]).
-export([attachments/1, attachments/2, set_attachments/2]).
-export([connections/1, connections/2, set_connections/2]).
-export([id/1, id/2, set_id/2]).
-export([plan/1, plan/2, set_plan/2]).
-export([index/2, index/3, set_index/3]).

-include("kz_documents.hrl").

%% A storage document is a plain kz_json object validated by the
%% "storage" schema.
-type doc() :: kz_json:object().
-export_type([doc/0]).

-define(SCHEMA, <<"storage">>).
%% @doc Create a new storage document populated with the schema defaults.
-spec new() -> doc().
new() ->
    kz_json_schema:default_object(?SCHEMA).

%% @doc Fetch the "attachments" object, or 'undefined' when absent.
-spec attachments(doc()) -> kz_term:api_object().
attachments(Doc) ->
    attachments(Doc, 'undefined').

-spec attachments(doc(), Default) -> kz_json:object() | Default.
attachments(Doc, Default) ->
    kz_json:get_json_value([<<"attachments">>], Doc, Default).

%% @doc Replace the "attachments" object.
-spec set_attachments(doc(), kz_json:object()) -> doc().
set_attachments(Doc, Attachments) ->
    kz_json:set_value([<<"attachments">>], Attachments, Doc).

%% @doc Fetch the "connections" object, or 'undefined' when absent.
-spec connections(doc()) -> kz_term:api_object().
connections(Doc) ->
    connections(Doc, 'undefined').

-spec connections(doc(), Default) -> kz_json:object() | Default.
connections(Doc, Default) ->
    kz_json:get_json_value([<<"connections">>], Doc, Default).

%% @doc Replace the "connections" object.
-spec set_connections(doc(), kz_json:object()) -> doc().
set_connections(Doc, Connections) ->
    kz_json:set_value([<<"connections">>], Connections, Doc).
%% @doc Fetch the document "id" as a binary, or 'undefined' when absent.
-spec id(doc()) -> kz_term:api_binary().
id(Doc) ->
    id(Doc, 'undefined').

-spec id(doc(), Default) -> binary() | Default.
id(Doc, Default) ->
    kz_json:get_binary_value([<<"id">>], Doc, Default).

%% @doc Set the document "id".
-spec set_id(doc(), binary()) -> doc().
set_id(Doc, Id) ->
    kz_json:set_value([<<"id">>], Id, Doc).

%% @doc Fetch the storage "plan" object, or 'undefined' when absent.
-spec plan(doc()) -> kz_term:api_object().
plan(Doc) ->
    plan(Doc, 'undefined').

-spec plan(doc(), Default) -> kz_json:object() | Default.
plan(Doc, Default) ->
    kz_json:get_json_value([<<"plan">>], Doc, Default).

%% @doc Replace the storage "plan" object.
-spec set_plan(doc(), kz_json:object()) -> doc().
set_plan(Doc, Plan) ->
    kz_json:set_value([<<"plan">>], Plan, Doc).

%% @doc Generic accessor for an arbitrary top-level key.
-spec index(doc(), kz_json:key()) -> any().
index(Doc, Index) ->
    index(Doc, Index, 'undefined').

-spec index(doc(), kz_json:key(), Default) -> any() | Default.
index(Doc, Index, Default) ->
    kz_json:get_value([Index], Doc, Default).

%% @doc Set an arbitrary top-level key.
-spec set_index(doc(), kz_json:key(), any()) -> doc().
set_index(Doc, Index, Value) ->
    kz_json:set_value([Index], Value, Doc).
| null | https://raw.githubusercontent.com/2600hz/kazoo/24519b9af9792caa67f7c09bbb9d27e2418f7ad6/core/kazoo_documents/src/kzd_storage.erl | erlang | -----------------------------------------------------------------------------
@doc
@end
----------------------------------------------------------------------------- | ( C ) 2010 - 2020 , 2600Hz
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
-module(kzd_storage).
-export([new/0]).
-export([attachments/1, attachments/2, set_attachments/2]).
-export([connections/1, connections/2, set_connections/2]).
-export([id/1, id/2, set_id/2]).
-export([plan/1, plan/2, set_plan/2]).
-export([index/2, index/3, set_index/3]).
-include("kz_documents.hrl").
-type doc() :: kz_json:object().
-export_type([doc/0]).
-define(SCHEMA, <<"storage">>).
-spec new() -> doc().
new() ->
kz_json_schema:default_object(?SCHEMA).
-spec attachments(doc()) -> kz_term:api_object().
attachments(Doc) ->
attachments(Doc, 'undefined').
-spec attachments(doc(), Default) -> kz_json:object() | Default.
attachments(Doc, Default) ->
kz_json:get_json_value([<<"attachments">>], Doc, Default).
-spec set_attachments(doc(), kz_json:object()) -> doc().
set_attachments(Doc, Attachments) ->
kz_json:set_value([<<"attachments">>], Attachments, Doc).
-spec connections(doc()) -> kz_term:api_object().
connections(Doc) ->
connections(Doc, 'undefined').
-spec connections(doc(), Default) -> kz_json:object() | Default.
connections(Doc, Default) ->
kz_json:get_json_value([<<"connections">>], Doc, Default).
-spec set_connections(doc(), kz_json:object()) -> doc().
set_connections(Doc, Connections) ->
kz_json:set_value([<<"connections">>], Connections, Doc).
-spec id(doc()) -> kz_term:api_binary().
id(Doc) ->
id(Doc, 'undefined').
-spec id(doc(), Default) -> binary() | Default.
id(Doc, Default) ->
kz_json:get_binary_value([<<"id">>], Doc, Default).
-spec set_id(doc(), binary()) -> doc().
set_id(Doc, Id) ->
kz_json:set_value([<<"id">>], Id, Doc).
-spec plan(doc()) -> kz_term:api_object().
plan(Doc) ->
plan(Doc, 'undefined').
-spec plan(doc(), Default) -> kz_json:object() | Default.
plan(Doc, Default) ->
kz_json:get_json_value([<<"plan">>], Doc, Default).
-spec set_plan(doc(), kz_json:object()) -> doc().
set_plan(Doc, Plan) ->
kz_json:set_value([<<"plan">>], Plan, Doc).
-spec index(doc(), kz_json:key()) -> any().
index(Doc, Index) ->
index(Doc, Index, 'undefined').
-spec index(doc(), kz_json:key(), Default) -> any() | Default.
index(Doc, Index, Default) ->
kz_json:get_value([Index], Doc, Default).
-spec set_index(doc(), kz_json:key(), any()) -> doc().
set_index(Doc, Index, Value) ->
kz_json:set_value([Index], Value, Doc).
|
b989423af1ea332ef557750cd8bcb22ca74d94e780b6aca0a0b619a97eb8e621 | ghcjs/jsaddle-dom | CSSStyleRule.hs | # LANGUAGE PatternSynonyms #
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
# OPTIONS_GHC -fno - warn - unused - imports #
module JSDOM.Generated.CSSStyleRule
(setSelectorText, getSelectorText, getSelectorTextUnsafe,
getSelectorTextUnchecked, getStyle, CSSStyleRule(..),
gTypeCSSStyleRule)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | Set @selectorText@; 'Nothing' writes a null value.
-- <https://developer.mozilla.org/en-US/docs/Web/API/CSSStyleRule.selectorText Mozilla CSSStyleRule.selectorText documentation>
setSelectorText ::
                (MonadDOM m, ToJSString val) => CSSStyleRule -> Maybe val -> m ()
setSelectorText self val
  = liftDOM (self ^. jss "selectorText" (toJSVal val))

-- | Read @selectorText@; 'Nothing' when the property is null.
-- <https://developer.mozilla.org/en-US/docs/Web/API/CSSStyleRule.selectorText Mozilla CSSStyleRule.selectorText documentation>
getSelectorText ::
                (MonadDOM m, FromJSString result) =>
                  CSSStyleRule -> m (Maybe result)
getSelectorText self
  = liftDOM ((self ^. js "selectorText") >>= fromMaybeJSString)

-- | Partial variant of 'getSelectorText': calls 'Prelude.error' when the
-- property is null.
-- <https://developer.mozilla.org/en-US/docs/Web/API/CSSStyleRule.selectorText Mozilla CSSStyleRule.selectorText documentation>
getSelectorTextUnsafe ::
                      (MonadDOM m, HasCallStack, FromJSString result) =>
                        CSSStyleRule -> m result
getSelectorTextUnsafe self
  = liftDOM
      (((self ^. js "selectorText") >>= fromMaybeJSString) >>=
         maybe (Prelude.error "Nothing to return") return)

-- | Unchecked variant of 'getSelectorText': converts the raw JS value
-- without a null check.
-- <https://developer.mozilla.org/en-US/docs/Web/API/CSSStyleRule.selectorText Mozilla CSSStyleRule.selectorText documentation>
getSelectorTextUnchecked ::
                         (MonadDOM m, FromJSString result) => CSSStyleRule -> m result
getSelectorTextUnchecked self
  = liftDOM ((self ^. js "selectorText") >>= fromJSValUnchecked)

-- | Read the rule's style declaration.
-- <https://developer.mozilla.org/en-US/docs/Web/API/CSSStyleRule.style Mozilla CSSStyleRule.style documentation>
getStyle :: (MonadDOM m) => CSSStyleRule -> m CSSStyleDeclaration
getStyle self
  = liftDOM ((self ^. js "style") >>= fromJSValUnchecked)
| null | https://raw.githubusercontent.com/ghcjs/jsaddle-dom/5f5094277d4b11f3dc3e2df6bb437b75712d268f/src/JSDOM/Generated/CSSStyleRule.hs | haskell | For HasCallStack compatibility
# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures # | # LANGUAGE PatternSynonyms #
# OPTIONS_GHC -fno - warn - unused - imports #
module JSDOM.Generated.CSSStyleRule
(setSelectorText, getSelectorText, getSelectorTextUnsafe,
getSelectorTextUnchecked, getStyle, CSSStyleRule(..),
gTypeCSSStyleRule)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
| < -US/docs/Web/API/CSSStyleRule.selectorText Mozilla CSSStyleRule.selectorText documentation >
setSelectorText ::
(MonadDOM m, ToJSString val) => CSSStyleRule -> Maybe val -> m ()
setSelectorText self val
= liftDOM (self ^. jss "selectorText" (toJSVal val))
| < -US/docs/Web/API/CSSStyleRule.selectorText Mozilla CSSStyleRule.selectorText documentation >
getSelectorText ::
(MonadDOM m, FromJSString result) =>
CSSStyleRule -> m (Maybe result)
getSelectorText self
= liftDOM ((self ^. js "selectorText") >>= fromMaybeJSString)
| < -US/docs/Web/API/CSSStyleRule.selectorText Mozilla CSSStyleRule.selectorText documentation >
getSelectorTextUnsafe ::
(MonadDOM m, HasCallStack, FromJSString result) =>
CSSStyleRule -> m result
getSelectorTextUnsafe self
= liftDOM
(((self ^. js "selectorText") >>= fromMaybeJSString) >>=
maybe (Prelude.error "Nothing to return") return)
| < -US/docs/Web/API/CSSStyleRule.selectorText Mozilla CSSStyleRule.selectorText documentation >
getSelectorTextUnchecked ::
(MonadDOM m, FromJSString result) => CSSStyleRule -> m result
getSelectorTextUnchecked self
= liftDOM ((self ^. js "selectorText") >>= fromJSValUnchecked)
| < -US/docs/Web/API/CSSStyleRule.style Mozilla CSSStyleRule.style documentation >
getStyle :: (MonadDOM m) => CSSStyleRule -> m CSSStyleDeclaration
getStyle self
= liftDOM ((self ^. js "style") >>= fromJSValUnchecked)
|
99ff354912864aca1b051858e78963dcb697a0d4985959e6ee2c1990b56c32ac | tud-fop/vanda-haskell | IO.hs | -----------------------------------------------------------------------------
-- |
-- Module      :  Vanda.Util.IO
-- Copyright   :  (c) Technische Universität Dresden 2014-2016
-- License : BSD-style
--
-- Maintainer :
-- Stability : unknown
-- Portability : portable
-----------------------------------------------------------------------------
module Vanda.Util.IO
( handleOnDemand
, createForkerWithWaiter
)where
import Control.Concurrent
import Control.Concurrent.STM -- package stm
(atomically, modifyTVar', newTVarIO, readTVar, retry, writeTVar)
import Control.Exception (finally)
import Control.Monad
import Data.Foldable (for_, traverse_)
import Data.Traversable (for)
import System.Posix.Signals
-- | The call @handleOnDemand mn mt signals worker handler@ runs @worker@ and
-- passes an action to fix/update a (partial) result. If there currently is a
-- fixed result, then @handler@ is called with that result on the following
-- events:
--
-- * termination of worker,
-- * if @mn = 'Just' n@, then for every @n@-th fixed result,
-- * if @mt = 'Just' t@, then every @t@ microseconds,
-- * any 'Signal' from @signals@,
-- * exception in worker (the exception is reraised); this includes 'sigINT',
-- if it is not handled by @worker@ and it is not element of @signals@.
--
-- For each result @handler@ is called at most once.
-- It is guaranteed that there is only one thread in @handler@.
handleOnDemand
  :: Maybe Int               -- ^ handler interval in number of results
  -> Maybe Int               -- ^ handler interval in microseconds
  -> [Signal]                -- ^ signals on which handler is called
  -> ((a -> IO ()) -> IO ()) -- ^ worker
  -> (a -> IO ())            -- ^ handler
  -> IO ()
handleOnDemand mn mt signals worker handler = do
  (fork, wait) <- createForkerWithWaiter
  -- holds the most recently fixed result that has not been handled yet;
  -- tryTakeMVar in handlerVar empties it, so each result is handled at most once
  varResult <- newEmptyMVar
  varCount <- newMVar (1 :: Int) -- this also acts as mutex in update
  -- guarantees at most one thread is inside handler at any time
  mutex <- newMVar ()
  let handlerMutexed = withMVar mutex . const . handler
  -- run handler on the pending result, if there is one
  let handlerVar = tryTakeMVar varResult >>= traverse_ handlerMutexed
  let update result = do
        i <- takeMVar varCount
        -- drop any previous unhandled result; only the newest one counts
        _ <- tryTakeMVar varResult
        -- every mn-th result is handled immediately in its own forked thread,
        -- all other results merely become the pending result
        if maybe False ((0 ==) . (i `mod`)) mn
          then void $ fork $ handlerMutexed result
          else putMVar varResult result
        putMVar varCount $! succ i
  for_ signals $ \ s -> installHandler s (Catch handlerVar) Nothing
  -- optional timer thread: every t microseconds, handle the pending result
  mtidTimer <- for mt $ \ t ->
    forkIO $ forever $ void $ threadDelay t >> fork handlerVar
  -- on worker termination (normal or exceptional): stop the timer, handle the
  -- last pending result, then wait for all forked handler threads to finish
  finally (worker update) $ do
    for_ mtidTimer killThread
    handlerVar
    wait
-- | Returns a function @fork@ and an action @wait@.
--
-- Use @fork@ to spawn new threads and use @wait@ to block until all those
-- spawned threads terminate.
--
-- Spawning new threads using @fork@ will fail as soon as @wait@ returned.
--
-- /Examples:/
--
-- Spawn a thread, do something else, wait for the thread’s termination.
--
-- > do (fork, wait) <- createForkerWithWaiter
-- >    void $ fork $ sub-thread-action
-- >    main-thread-action
-- >    wait
--
-- Using @fork@ after @wait@ will always fail.
--
-- > do (fork, wait) <- createForkerWithWaiter
-- >    wait
-- >    void $ fork $ never-executed-action
--
-- Race condition: @sub-sub-thread-action@ may or may not be run, depending
-- on whether @fork@ or @wait@ is executed first. Note that @wait@ does not
-- wait for the thread created with 'forkIO'.
--
-- > do (fork, wait) <- createForkerWithWaiter
-- >    void $ forkIO $ do
-- >      sub-thread-action
-- >      void $ fork $ sub-sub-thread-action
-- >    main-thread-action
-- >    wait
--
-- Deadlock: A thread waiting for itself to terminate.
--
-- > do (fork, wait) <- createForkerWithWaiter
-- >    void $ fork $ wait
createForkerWithWaiter :: IO (IO () -> IO (Maybe ThreadId), IO ())
createForkerWithWaiter = do
  -- Number of currently running forked threads. The waiter drives this
  -- below zero once no fork is running, which permanently refuses any
  -- further fork.
  liveCount <- newTVarIO (0 :: Int)
  let spawn action = do
        denied <- atomically $ do
          live <- readTVar liveCount
          if live < 0
            then return True
            else do
              writeTVar liveCount (live + 1)
              return False
        if denied
          then return Nothing
          else
            fmap Just
              $ forkFinally action
              $ \ _ -> atomically $ modifyTVar' liveCount (subtract 1)
      awaitAll = atomically $ do
        live <- readTVar liveCount
        when (live > 0) retry
        -- no fork is running: make the count negative to disable spawn
        writeTVar liveCount (live - 1)
  return (spawn, awaitAll)
testHandleOnDemand : : IO ( )
testHandleOnDemand = do
putStrLn " = = = main thread started = = = "
handleOnDemand ( Just 3 ) ( Just 7000000 ) [ sigUSR1 ] ( worker [ 1 .. 10 ] ) handler
putStrLn " = = = main thread ended gracefully = = = "
where
worker : : [ Int ] - > ( Int - > IO ( ) ) - > IO ( )
worker [ ] _ = return ( )
worker ( i : is ) update = do
" Iteration " + + show i + + " started "
threadDelay 1000000
update i
putStrLn $ " Iteration " + + show i + + " done "
worker is update
handler : : Int - > IO ( )
handler i = do
putStrLn $ shift + + " Handling " + + show i + + " started "
threadDelay 3000000
putStrLn $ shift + + " Handling " + + show i + + " done "
shift = replicate 25 ' '
testCreateForkerWithWaiter : : IO ( )
testCreateForkerWithWaiter = do
( fork , wait ) < - createForkerWithWaiter
mtid < - fork $ do
threadDelay 10000
putStrLnWithTId " Forked successfully . "
threadDelay 2000000
putStrLnWithTId " Attempting another fork ... "
void $ fork $ do
threadDelay 10000
putStrLnWithTId " Forked successfully . "
threadDelay 2000000
putStrLnWithTId " Done . "
putStrLnWithTId " Done . "
threadDelay 1000000
-- traverse _ mtid
putStrLnWithTId $ show mtid
putStrLnWithTId " Waiting for threads to finish ... "
wait
putStrLnWithTId " All threads finished . "
fork ( putStrLnWithTId " Unreachable " ) > > = putStrLnWithTId . show
putStrLnWithTId : : String - > IO ( )
putStrLnWithTId cs = do
tid < - myThreadId
putStrLn $ show tid + + " : " + + cs
testHandleOnDemand :: IO ()
testHandleOnDemand = do
putStrLn "=== main thread started ==="
handleOnDemand (Just 3) (Just 7000000) [sigUSR1] (worker [1 .. 10]) handler
putStrLn "=== main thread ended gracefully ==="
where
worker :: [Int] -> (Int -> IO ()) -> IO ()
worker [] _ = return ()
worker (i : is) update = do
putStrLn $ "Iteration " ++ show i ++ " started"
threadDelay 1000000
update i
putStrLn $ "Iteration " ++ show i ++ " done"
worker is update
handler :: Int -> IO ()
handler i = do
putStrLn $ shift ++ "Handling " ++ show i ++ " started"
threadDelay 3000000
putStrLn $ shift ++ "Handling " ++ show i ++ " done"
shift = replicate 25 ' '
testCreateForkerWithWaiter :: IO ()
testCreateForkerWithWaiter = do
(fork, wait) <- createForkerWithWaiter
mtid <- fork $ do
threadDelay 10000
putStrLnWithTId "Forked successfully."
threadDelay 2000000
putStrLnWithTId "Attempting another fork ..."
void $ fork $ do
threadDelay 10000
putStrLnWithTId "Forked successfully."
threadDelay 2000000
putStrLnWithTId "Done."
putStrLnWithTId "Done."
threadDelay 1000000
-- traverse_ killThread mtid
putStrLnWithTId $ show mtid
putStrLnWithTId "Waiting for threads to finish ..."
wait
putStrLnWithTId "All threads finished."
fork (putStrLnWithTId "Unreachable") >>= putStrLnWithTId . show
putStrLnWithTId :: String -> IO ()
putStrLnWithTId cs = do
tid <- myThreadId
putStrLn $ show tid ++ ": " ++ cs
-}
| null | https://raw.githubusercontent.com/tud-fop/vanda-haskell/3214966361b6dbf178155950c94423eee7f9453e/library/Vanda/Util/IO.hs | haskell | ---------------------------------------------------------------------------
|
License : BSD-style
Maintainer :
Stability : unknown
Portability : portable
---------------------------------------------------------------------------
package stm
passes an action to fix/update a (partial) result. If there currently is a
fixed result, then @handler@ is called with that result on the following
events:
* termination of worker,
* if @mn = 'Just' n@, then for every @n@-th fixed result,
* if @mt = 'Just' t@, then every @t@ microseconds,
* any 'Signal' from @signals@,
* exception in worker (the exception is reraised); this includes 'sigINT',
if it is not handled by @worker@ and it is not element of @signals@.
For each result @handler@ is called at most once.
^ handler interval in number of results
^ handler interval in microseconds
^ signals on which handler is called
^ worker
^ handler
this also acts as mutex in update
| Returns a function @fork@ and an action @wait@.
spawned threads terminate.
Spawning new threads using @fork@ will fail as soon as @wait@ returned.
/Examples:/
Spawn a thread, do something else, wait for the thread’s termination.
> do (fork, wait) <- createForkerWithWaiter
> main-thread-action
> wait
> do (fork, wait) <- createForkerWithWaiter
> wait
Race condition: @sub-sub-thread-action@ may or may not be run, depending
wait for the thread created with 'forkIO'.
> do (fork, wait) <- createForkerWithWaiter
> sub-thread-action
> main-thread-action
> wait
Deadlock: A thread waiting for itself to terminate.
> do (fork, wait) <- createForkerWithWaiter
We count the number of running forks in this variable. The waiter will
set this variable to a negative number as soon as there is no running
fork. This prevents any future fork.
traverse _ mtid
traverse_ killThread mtid | Module : . Util . IO
Copyright : ( c ) Technische Universität Dresden 2014 - 2016
module Vanda.Util.IO
( handleOnDemand
, createForkerWithWaiter
)where
import Control.Concurrent
(atomically, modifyTVar', newTVarIO, readTVar, retry, writeTVar)
import Control.Exception (finally)
import Control.Monad
import Data.Foldable (for_, traverse_)
import Data.Traversable (for)
import System.Posix.Signals
| The call @handleOnDemand mn mt signals worker handler@ runs @worker@ and
It is guaranteed that there is only one thread in @handler@.
handleOnDemand
-> IO ()
handleOnDemand mn mt signals worker handler = do
(fork, wait) <- createForkerWithWaiter
varResult <- newEmptyMVar
mutex <- newMVar ()
let handlerMutexed = withMVar mutex . const . handler
let handlerVar = tryTakeMVar varResult >>= traverse_ handlerMutexed
let update result = do
i <- takeMVar varCount
_ <- tryTakeMVar varResult
if maybe False ((0 ==) . (i `mod`)) mn
then void $ fork $ handlerMutexed result
else putMVar varResult result
putMVar varCount $! succ i
for_ signals $ \ s -> installHandler s (Catch handlerVar) Nothing
mtidTimer <- for mt $ \ t ->
forkIO $ forever $ void $ threadDelay t >> fork handlerVar
finally (worker update) $ do
for_ mtidTimer killThread
handlerVar
wait
Use @fork@ to spawn new threads and use @wait@ to block until all those
> void $ fork $ sub - thread - action
Using @fork@ after @wait@ will always fail .
> void $ fork $ never - executed - action
on whether @fork@ or @wait@ is executed first . Note that @wait@ does not
> void $ forkIO $ do
> void $ fork $ sub - sub - thread - action
> void $ fork $ wait
createForkerWithWaiter :: IO (IO () -> IO (Maybe ThreadId), IO ())
createForkerWithWaiter = do
nTVar <- newTVarIO (0 :: Int)
let fork act = do
abort <- atomically $ do
n <- readTVar nTVar
if n < 0 then return True
else writeTVar nTVar (succ n) >> return False
if abort then return Nothing
else fmap Just
$ forkFinally act
$ const $ atomically $ modifyTVar' nTVar pred
let wait = atomically $ do
n <- readTVar nTVar
if n > 0 then retry
else writeTVar nTVar (pred n)
return (fork, wait)
testHandleOnDemand : : IO ( )
testHandleOnDemand = do
putStrLn " = = = main thread started = = = "
handleOnDemand ( Just 3 ) ( Just 7000000 ) [ sigUSR1 ] ( worker [ 1 .. 10 ] ) handler
putStrLn " = = = main thread ended gracefully = = = "
where
worker : : [ Int ] - > ( Int - > IO ( ) ) - > IO ( )
worker [ ] _ = return ( )
worker ( i : is ) update = do
" Iteration " + + show i + + " started "
threadDelay 1000000
update i
putStrLn $ " Iteration " + + show i + + " done "
worker is update
handler : : Int - > IO ( )
handler i = do
putStrLn $ shift + + " Handling " + + show i + + " started "
threadDelay 3000000
putStrLn $ shift + + " Handling " + + show i + + " done "
shift = replicate 25 ' '
testCreateForkerWithWaiter : : IO ( )
testCreateForkerWithWaiter = do
( fork , wait ) < - createForkerWithWaiter
mtid < - fork $ do
threadDelay 10000
putStrLnWithTId " Forked successfully . "
threadDelay 2000000
putStrLnWithTId " Attempting another fork ... "
void $ fork $ do
threadDelay 10000
putStrLnWithTId " Forked successfully . "
threadDelay 2000000
putStrLnWithTId " Done . "
putStrLnWithTId " Done . "
threadDelay 1000000
putStrLnWithTId $ show mtid
putStrLnWithTId " Waiting for threads to finish ... "
wait
putStrLnWithTId " All threads finished . "
fork ( putStrLnWithTId " Unreachable " ) > > = putStrLnWithTId . show
putStrLnWithTId : : String - > IO ( )
putStrLnWithTId cs = do
tid < - myThreadId
putStrLn $ show tid + + " : " + + cs
testHandleOnDemand :: IO ()
testHandleOnDemand = do
putStrLn "=== main thread started ==="
handleOnDemand (Just 3) (Just 7000000) [sigUSR1] (worker [1 .. 10]) handler
putStrLn "=== main thread ended gracefully ==="
where
worker :: [Int] -> (Int -> IO ()) -> IO ()
worker [] _ = return ()
worker (i : is) update = do
putStrLn $ "Iteration " ++ show i ++ " started"
threadDelay 1000000
update i
putStrLn $ "Iteration " ++ show i ++ " done"
worker is update
handler :: Int -> IO ()
handler i = do
putStrLn $ shift ++ "Handling " ++ show i ++ " started"
threadDelay 3000000
putStrLn $ shift ++ "Handling " ++ show i ++ " done"
shift = replicate 25 ' '
testCreateForkerWithWaiter :: IO ()
testCreateForkerWithWaiter = do
(fork, wait) <- createForkerWithWaiter
mtid <- fork $ do
threadDelay 10000
putStrLnWithTId "Forked successfully."
threadDelay 2000000
putStrLnWithTId "Attempting another fork ..."
void $ fork $ do
threadDelay 10000
putStrLnWithTId "Forked successfully."
threadDelay 2000000
putStrLnWithTId "Done."
putStrLnWithTId "Done."
threadDelay 1000000
putStrLnWithTId $ show mtid
putStrLnWithTId "Waiting for threads to finish ..."
wait
putStrLnWithTId "All threads finished."
fork (putStrLnWithTId "Unreachable") >>= putStrLnWithTId . show
putStrLnWithTId :: String -> IO ()
putStrLnWithTId cs = do
tid <- myThreadId
putStrLn $ show tid ++ ": " ++ cs
-}
|
fdd7381252bc61f327ca378de394cc328588aa34a6860583aa930d7b570df684 | alura-cursos/datomic-introducao | core.clj | (ns ecommerce.core
(:use clojure.pprint)
(:require [datomic.api :as d]
[ecommerce.db :as db]
[ecommerce.model :as model]))
;; Open the connection and install the schema (helpers come from ecommerce.db).
(def conn (db/abre-conexao))
(db/cria-schema conn)
;; Transact a single new product entity.
(let [computador (model/novo-produto "Computador Novo", "/computador_novo", 2500.10M)]
  (d/transact conn [computador]))
;; `db` below is the database value at the instant this line executed
;; Snapshot of the database at this point in time.
(def db (d/db conn))
;; Query: every entity that has a :produto/nome attribute.
(d/q '[:find ?entidade
       :where [?entidade :produto/nome]] db)
;; Transact another product; note the `db` value above was captured
;; before this write.
(let [celular (model/novo-produto "Celular Caro", "/celular", 888888.10M)]
  (d/transact conn [celular]))
;; taking a fresh snapshot (SNAPSHOT) of the database
;; Rebind `db` to a fresh snapshot and re-run the same query against it.
(def db (d/db conn))
(d/q '[:find ?entidade
       :where [?entidade :produto/nome]] db)
| null | https://raw.githubusercontent.com/alura-cursos/datomic-introducao/cfb214135fed0670ee90090b63a38d8287673ac6/aula1.4/ecommerce/src/ecommerce/core.clj | clojure | (ns ecommerce.core
(:use clojure.pprint)
(:require [datomic.api :as d]
[ecommerce.db :as db]
[ecommerce.model :as model]))
(def conn (db/abre-conexao))
(db/cria-schema conn)
(let [computador (model/novo-produto "Computador Novo", "/computador_novo", 2500.10M)]
(d/transact conn [computador]))
o banco no instante que executou a linha
(def db (d/db conn))
(d/q '[:find ?entidade
:where [?entidade :produto/nome]] db)
(let [celular (model/novo-produto "Celular Caro", "/celular", 888888.10M)]
(d/transact conn [celular]))
tirando uma nova fotografia ( SNAPSHOT ) do banco
(def db (d/db conn))
(d/q '[:find ?entidade
:where [?entidade :produto/nome]] db)
| |
6fefc789d5b29916dff10f882dd934e96007ef4fcf582597356d34d833091972 | icicle-lang/icicle-ambiata | Eval.hs | | Evaluate Avalanche programs
{-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE NoImplicitPrelude #
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DoAndIfThenElse #-}
module Icicle.Avalanche.Eval (
evalProgram
, evalStmt
, RuntimeError
) where
import Icicle.Avalanche.Statement.Statement
import Icicle.Avalanche.Statement.Simp.Melt
import Icicle.Avalanche.Program
import Icicle.Common.Base
import Icicle.Common.Eval
import Icicle.Common.NanEq
import Icicle.Common.Type
import Icicle.Common.Value
import qualified Icicle.Common.Exp as XV
import Icicle.Data (AsAt(..))
import Icicle.Data.Name
import GHC.Generics (Generic)
import P
import Data.List (zip)
import qualified Data.Map as Map
import Data.Hashable (Hashable)
import Icicle.Internal.Pretty
-- | Store history information about the accumulators
type AccumulatorHeap n
= Map.Map (Name n) BaseValue
-- | What can go wrong evaluating an Avalanche program
-- Each constructor names one way evaluation can fail; the 'Pretty'
-- instance below gives the user-facing wording.
data RuntimeError a n p
 = RuntimeErrorNoAccumulator (Name n)
 | RuntimeErrorAccumulator (XV.RuntimeError a n p)
 | RuntimeErrorLoop (XV.RuntimeError a n p)
   -- ^ an expression evaluated inside 'evalStmt' failed (see 'eval' there)
 | RuntimeErrorLoopAccumulatorBad (Name n)
   -- ^ an accumulator name was missing from the accumulator heap
 | RuntimeErrorIfNotBool BaseValue
   -- ^ an @If@ scrutinee evaluated to a non-boolean
 | RuntimeErrorForeachNotInt BaseValue BaseValue
   -- ^ @ForeachInts@ bounds were not both integers
 | RuntimeErrorForeachTypeMismatch [(Name n, ValType)] ValType BaseValue
   -- ^ melted fact bindings did not line up with the fact value
 | RuntimeErrorOutputTypeMismatch OutputId ValType [BaseValue]
   -- ^ output values could not be unmelted to the declared output type
 | RuntimeErrorNotBaseValue (Value a n p)
   -- ^ expected a base value but found a closure (see 'baseValue')
 | RuntimeErrorAccumulatorLatestNotInt BaseValue
 | RuntimeErrorOutOfScope (Name n)
 deriving (Eq, Show, Generic, NanEq)
-- Human-readable rendering of runtime errors; one clause per constructor.
instance (Pretty n, Pretty p) => Pretty (RuntimeError a n p) where
 pretty (RuntimeErrorNoAccumulator n)
  = "No accumulator:" <+> pretty n
 pretty (RuntimeErrorAccumulator p)
  = pretty p
 pretty (RuntimeErrorLoop p)
  = pretty p
 pretty (RuntimeErrorLoopAccumulatorBad n)
  = "Bad loop accumulator:" <+> pretty n
 pretty (RuntimeErrorIfNotBool p)
  = "Value should be a bool but isn't" <+> (pretty p)
 pretty (RuntimeErrorForeachNotInt p p')
  = "Foreach not ints:" <+> pretty p <+> pretty p'
 pretty (RuntimeErrorForeachTypeMismatch ns ty v)
  = "Foreach type error: bindings = " <+> align (vsep (fmap pretty ns)) <> line <>
    " type = " <+> pretty ty <> line <>
    " value = " <+> pretty v
 pretty (RuntimeErrorOutputTypeMismatch n ty vs)
  = "Output type error: name = " <+> pretty n <> line <>
    " type = " <+> pretty ty <> line <>
    " values = " <+> align (vsep (fmap pretty vs))
 pretty (RuntimeErrorNotBaseValue p)
  = "Value isn't a base value:" <+> (pretty p)
 pretty (RuntimeErrorAccumulatorLatestNotInt p)
  = "Accumulator Latest needs an integer, got" <+> pretty p
 pretty (RuntimeErrorOutOfScope n)
  = "Name went out of scope unexpectedly:" <+> pretty n
-- | Extract base value; return an error if it's a closure
-- | Extract the base value from a 'Value'; a closure is reported as
-- 'RuntimeErrorNotBaseValue'.
baseValue :: Value a n p -> Either (RuntimeError a n p) BaseValue
baseValue value =
  getBaseValue (RuntimeErrorNotBaseValue value) value
-- | Evaluate an entire program
-- with given primitive evaluator and values.
evalProgram
  :: (Hashable n, Eq n, Show n, Show p, Show a)
  => XV.EvalPrim a n p
  -> EvalContext
  -> [AsAt BaseValue]
  -> Program a n p
  -> Either (RuntimeError a n p) [(OutputId, BaseValue)]
-- NOTE: the dump of this definition had dropped the "= do" token, leaving it
-- unparseable; restored here with identical behaviour.
evalProgram evalPrim ctx values p
 = do -- Precomputations are just expressions.
      -- Keep evaluating the same loop for every value
      -- with accumulator and scalar heaps threaded through.
      let stmts = statements p
      -- seed the scalar heap with the snapshot time and the map-size limit
      let xh = Map.fromList
                 [ (bindtime p, VBase $ VTime $ evalSnapshotTime ctx)
                 , (maxMapSize p, VBase $ VInt $ evalMaxMapSize ctx) ]
      -- accumulators start empty; only the outputs (snd) are returned
      let ah = Map.empty
      snd <$> evalStmt evalPrim xh values ah stmts
-- | Evaluate a single statement for a single value
evalStmt
  :: (Hashable n, Eq n, Show n, Show p, Show a)
  => XV.EvalPrim a n p
  -> Heap a n p              -- ^ scalar (expression) heap
  -> [AsAt BaseValue]        -- ^ input facts, consumed by @ForeachFacts@
  -> AccumulatorHeap n       -- ^ current accumulator values
  -> Statement a n p
  -> Either (RuntimeError a n p) (AccumulatorHeap n, [(OutputId, BaseValue)])
evalStmt evalPrim xh values ah stmt
 = case stmt of
    If x stmts elses
     -> do v <- eval x >>= baseValue
           case v of
            -- Run "then" or "else"?
            VBool True
             -> go' stmts
            VBool False
             -> go' elses
            _ -> Left (RuntimeErrorIfNotBool v)

    -- Evaluate and insert the value into the heap.
    Let n x stmts
     -> do v <- eval x
           go (Map.insert n v xh) ah stmts

    While t acc _ to stmts
     -> do tov <- eval to >>= baseValue
           let check WhileEq = (==)
               check WhileNe = (/=)
           -- Re-run the body while the accumulator still satisfies the
           -- loop condition against the (fixed) target value, threading
           -- the accumulator heap and collected outputs through.
           let evalLoop curr@(ah',out) end
                = do accv <- maybeToRight (RuntimeErrorLoopAccumulatorBad acc)
                           $ Map.lookup acc ah'
                     if check t accv end
                     then do next <- appendOutputs out
                                 <$> go xh ah' stmts
                             evalLoop next end
                     else return curr
           evalLoop (ah, mempty) tov

    ForeachInts t n from to stmts
     -> do fromv <- eval from >>= baseValue
           tov <- eval to >>= baseValue
           let evalLoop (ah',out) index
                = appendOutputs out
                <$> go (Map.insert n (VBase $ VInt index) xh) ah' stmts
           case (fromv, tov) of
            (VInt fromi, VInt toi)
             -> -- Open-closed interval [from,to)
                -- ie "foreach i in 0 to 0" does not run
                foldM evalLoop
                  (ah, mempty)
                  (case t of
                    ForeachStepUp   -> [fromi, fromi + 1 .. toi-1]
                    ForeachStepDown -> [fromi, fromi - 1 .. toi+1])
            _
             -> Left $ RuntimeErrorForeachNotInt fromv tov

    -- Allow unmelted foreach
    -- (i.e. where ty == ty' and we only have a singleton list of bindings)
    ForeachFacts (FactBinds ntime [(n, ty)]) ty' stmts
     | ty == ty'
     -> do let evalInput (ah',out) inp = do
                 -- bind the (fact, time) pair and the time itself, then
                 -- run the body once for this input value
                 let v0 = atFact inp
                     v1 = VTime (atTime inp)
                     vv = VPair v0 v1
                     input' = Map.insert n (VBase vv)
                            $ Map.insert ntime (VBase v1) xh
                 appendOutputs out <$> evalStmt evalPrim input' [] ah' stmts
           foldM evalInput (ah,mempty) values

    ForeachFacts (FactBinds ntime ns) ty stmts
     -> do let evalInput (ah',out) inp = do
                 -- melt the (fact, time) pair into one value per binding
                 let v0 = atFact inp
                     v1 = VTime (atTime inp)
                     vv = VPair v0 v1
                     mvs = meltValue vv ty
                     input1 = Map.insert ntime (VBase v1) xh
                 case mvs of
                  Nothing
                   -> Left (RuntimeErrorForeachTypeMismatch ns ty vv)
                  Just vs
                   | length vs /= length ns
                   -> Left (RuntimeErrorForeachTypeMismatch ns ty vv)
                   | otherwise
                   , nvs <- zip (fmap fst ns) vs
                   , input' <- foldr (\(n, v) -> Map.insert n (VBase v)) input1 nvs
                   -> appendOutputs out <$> evalStmt evalPrim input' [] ah' stmts
           foldM evalInput (ah,mempty) values

    Block []
     -> returnHeap ah
    Block [s]
     -> go' s
    Block (s:ss)
     -> do (ah',out) <- go xh ah s
           appendOutputs out <$> go xh ah' (Block ss)

    InitAccumulator (Accumulator n _ x) stmts
     -> do v <- eval x >>= baseValue
           let ah' = Map.insert n v ah
           go xh ah' stmts

    -- Read from an accumulator
    Read n acc _ stmts
     -> do -- Get the current value and apply the function
           v <- case Map.lookup acc ah of
                 Just vacc
                  -> return $ VBase vacc
                 _
                  -> Left (RuntimeErrorLoopAccumulatorBad acc)
           go (Map.insert n v xh) ah stmts

    -- Update accumulator
    Write n x
     -> do v <- eval x >>= baseValue
           returnHeap (Map.insert n v ah)

    Output n t xts
     -> do vs <- traverse ((baseValue =<<) . eval . fst) xts
           case (vs, unmeltValue vs t) of
            --
            -- If this Avalanche program has been through the melting
            -- transform and everything worked properly then `unmeltValue`
            -- will return `Just v`, otherwise it will return `Nothing`.
            --
            -- `Nothing` could mean that we have an invalid Avalanche program
            -- or a bug in `unmeltValue`, but if `vs` only contains a single
            -- value, then it probably means that it was a value that didn't
            -- need unmelting because the program has not been through the
            -- melting transform yet.
            --
            (_, Just v) -> return (ah, [(n, v)])
            (v:[], Nothing) -> return (ah, [(n, v)])
            (_, Nothing) -> Left (RuntimeErrorOutputTypeMismatch n t vs)
 where
  -- Go through all the substatements
  -- (go runs a substatement under a possibly extended scalar heap;
  --  go' keeps both heaps unchanged)
  go xh' = evalStmt evalPrim xh' values
  go' = go xh ah
  appendOutputs out (ah', out')
   = (ah', out <> out')
  returnHeap ah'
   = return (ah', mempty)
  -- Raise Exp error to Avalanche
  eval = first RuntimeErrorLoop
       . XV.eval evalPrim xh
| null | https://raw.githubusercontent.com/icicle-lang/icicle-ambiata/9b9cc45a75f66603007e4db7e5f3ba908cae2df2/icicle-compiler/src/Icicle/Avalanche/Eval.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE PatternGuards #
# LANGUAGE OverloadedStrings #
# LANGUAGE DoAndIfThenElse #
| Store history information about the accumulators
| Extract base value; return an error if it's a closure
| Evaluate an entire program
with given primitive evaluator and values
Keep evaluating the same loop for every value
with accumulator and scalar heaps threaded through
| Evaluate a single statement for a single value
Run "then" or "else"?
Evaluate and insert the value into the heap.
Open-closed interval [from,to)
Allow unmelted foreach
(i.e. where ty == ty' and we only have a singleton list of bindings)
Read from an accumulator
Get the current value and apply the function
Update accumulator
transform and everything worked properly then `unmeltValue`
will return `Just v`, otherwise it will return `Nothing`.
or a bug in `unmeltValue`, but if `vs` only contains a single
value, then it probably means that it was a value that didn't
need unmelting because the program has not been through the
melting transform yet.
Go through all the substatements | | Evaluate Avalanche programs
# LANGUAGE DeriveGeneric #
# LANGUAGE NoImplicitPrelude #
module Icicle.Avalanche.Eval (
evalProgram
, evalStmt
, RuntimeError
) where
import Icicle.Avalanche.Statement.Statement
import Icicle.Avalanche.Statement.Simp.Melt
import Icicle.Avalanche.Program
import Icicle.Common.Base
import Icicle.Common.Eval
import Icicle.Common.NanEq
import Icicle.Common.Type
import Icicle.Common.Value
import qualified Icicle.Common.Exp as XV
import Icicle.Data (AsAt(..))
import Icicle.Data.Name
import GHC.Generics (Generic)
import P
import Data.List (zip)
import qualified Data.Map as Map
import Data.Hashable (Hashable)
import Icicle.Internal.Pretty
type AccumulatorHeap n
= Map.Map (Name n) BaseValue
| What can go wrong evaluating an Avalanche
data RuntimeError a n p
= RuntimeErrorNoAccumulator (Name n)
| RuntimeErrorAccumulator (XV.RuntimeError a n p)
| RuntimeErrorLoop (XV.RuntimeError a n p)
| RuntimeErrorLoopAccumulatorBad (Name n)
| RuntimeErrorIfNotBool BaseValue
| RuntimeErrorForeachNotInt BaseValue BaseValue
| RuntimeErrorForeachTypeMismatch [(Name n, ValType)] ValType BaseValue
| RuntimeErrorOutputTypeMismatch OutputId ValType [BaseValue]
| RuntimeErrorNotBaseValue (Value a n p)
| RuntimeErrorAccumulatorLatestNotInt BaseValue
| RuntimeErrorOutOfScope (Name n)
deriving (Eq, Show, Generic, NanEq)
instance (Pretty n, Pretty p) => Pretty (RuntimeError a n p) where
pretty (RuntimeErrorNoAccumulator n)
= "No accumulator:" <+> pretty n
pretty (RuntimeErrorAccumulator p)
= pretty p
pretty (RuntimeErrorLoop p)
= pretty p
pretty (RuntimeErrorLoopAccumulatorBad n)
= "Bad loop accumulator:" <+> pretty n
pretty (RuntimeErrorIfNotBool p)
= "Value should be a bool but isn't" <+> (pretty p)
pretty (RuntimeErrorForeachNotInt p p')
= "Foreach not ints:" <+> pretty p <+> pretty p'
pretty (RuntimeErrorForeachTypeMismatch ns ty v)
= "Foreach type error: bindings = " <+> align (vsep (fmap pretty ns)) <> line <>
" type = " <+> pretty ty <> line <>
" value = " <+> pretty v
pretty (RuntimeErrorOutputTypeMismatch n ty vs)
= "Output type error: name = " <+> pretty n <> line <>
" type = " <+> pretty ty <> line <>
" values = " <+> align (vsep (fmap pretty vs))
pretty (RuntimeErrorNotBaseValue p)
= "Value isn't a base value:" <+> (pretty p)
pretty (RuntimeErrorAccumulatorLatestNotInt p)
= "Accumulator Latest needs an integer, got" <+> pretty p
pretty (RuntimeErrorOutOfScope n)
= "Name went out of scope unexpectedly:" <+> pretty n
baseValue :: Value a n p -> Either (RuntimeError a n p) BaseValue
baseValue v
= getBaseValue (RuntimeErrorNotBaseValue v) v
evalProgram
:: (Hashable n, Eq n, Show n, Show p, Show a)
=> XV.EvalPrim a n p
-> EvalContext
-> [AsAt BaseValue]
-> Program a n p
-> Either (RuntimeError a n p) [(OutputId, BaseValue)]
evalProgram evalPrim ctx values p
Precomputations are just expressions
let stmts = statements p
let xh = Map.fromList
[ (bindtime p, VBase $ VTime $ evalSnapshotTime ctx)
, (maxMapSize p, VBase $ VInt $ evalMaxMapSize ctx) ]
let ah = Map.empty
snd <$> evalStmt evalPrim xh values ah stmts
evalStmt
:: (Hashable n, Eq n, Show n, Show p, Show a)
=> XV.EvalPrim a n p
-> Heap a n p
-> [AsAt BaseValue]
-> AccumulatorHeap n
-> Statement a n p
-> Either (RuntimeError a n p) (AccumulatorHeap n, [(OutputId, BaseValue)])
evalStmt evalPrim xh values ah stmt
= case stmt of
If x stmts elses
-> do v <- eval x >>= baseValue
case v of
VBool True
-> go' stmts
VBool False
-> go' elses
_-> Left (RuntimeErrorIfNotBool v)
Let n x stmts
-> do v <- eval x
go (Map.insert n v xh) ah stmts
While t acc _ to stmts
-> do tov <- eval to >>= baseValue
let check WhileEq = (==)
check WhileNe = (/=)
let evalLoop curr@(ah',out) end
= do accv <- maybeToRight (RuntimeErrorLoopAccumulatorBad acc)
$ Map.lookup acc ah'
if check t accv end
then do next <- appendOutputs out
<$> go xh ah' stmts
evalLoop next end
else return curr
evalLoop (ah, mempty) tov
ForeachInts t n from to stmts
-> do fromv <- eval from >>= baseValue
tov <- eval to >>= baseValue
let evalLoop (ah',out) index
= appendOutputs out
<$> go (Map.insert n (VBase $ VInt index) xh) ah' stmts
case (fromv, tov) of
(VInt fromi, VInt toi)
ie " foreach i in 0 to 0 " does not run
foldM evalLoop
(ah, mempty)
(case t of
ForeachStepUp -> [fromi, fromi + 1 .. toi-1]
ForeachStepDown -> [fromi, fromi - 1 .. toi+1])
_
-> Left $ RuntimeErrorForeachNotInt fromv tov
ForeachFacts (FactBinds ntime [(n, ty)]) ty' stmts
| ty == ty'
-> do let evalInput (ah',out) inp = do
let v0 = atFact inp
v1 = VTime (atTime inp)
vv = VPair v0 v1
input' = Map.insert n (VBase vv)
$ Map.insert ntime (VBase v1) xh
appendOutputs out <$> evalStmt evalPrim input' [] ah' stmts
foldM evalInput (ah,mempty) values
ForeachFacts (FactBinds ntime ns) ty stmts
-> do let evalInput (ah',out) inp = do
let v0 = atFact inp
v1 = VTime (atTime inp)
vv = VPair v0 v1
mvs = meltValue vv ty
input1 = Map.insert ntime (VBase v1) xh
case mvs of
Nothing
-> Left (RuntimeErrorForeachTypeMismatch ns ty vv)
Just vs
| length vs /= length ns
-> Left (RuntimeErrorForeachTypeMismatch ns ty vv)
| otherwise
, nvs <- zip (fmap fst ns) vs
, input' <- foldr (\(n, v) -> Map.insert n (VBase v)) input1 nvs
-> appendOutputs out <$> evalStmt evalPrim input' [] ah' stmts
foldM evalInput (ah,mempty) values
Block []
-> returnHeap ah
Block [s]
-> go' s
Block (s:ss)
-> do (ah',out) <- go xh ah s
appendOutputs out <$> go xh ah' (Block ss)
InitAccumulator (Accumulator n _ x) stmts
-> do v <- eval x >>= baseValue
let ah' = Map.insert n v ah
go xh ah' stmts
Read n acc _ stmts
v <- case Map.lookup acc ah of
Just vacc
-> return $ VBase vacc
_
-> Left (RuntimeErrorLoopAccumulatorBad acc)
go (Map.insert n v xh) ah stmts
Write n x
-> do v <- eval x >>= baseValue
returnHeap (Map.insert n v ah)
Output n t xts
-> do vs <- traverse ((baseValue =<<) . eval . fst) xts
case (vs, unmeltValue vs t) of
If this Avalanche program has been through the melting
` Nothing ` could mean that we have an invalid Avalanche program
(_, Just v) -> return (ah, [(n, v)])
(v:[], Nothing) -> return (ah, [(n, v)])
(_, Nothing) -> Left (RuntimeErrorOutputTypeMismatch n t vs)
where
go xh' = evalStmt evalPrim xh' values
go' = go xh ah
appendOutputs out (ah', out')
= (ah', out <> out')
returnHeap ah'
= return (ah', mempty)
Raise Exp error to Avalanche
eval = first RuntimeErrorLoop
. XV.eval evalPrim xh
|
3814aad9bbf0479c9e1ac8bb11cd31d473903ef4ae7c91702c700136c02fda85 | joinr/spork | animation.clj | (ns spork.cljgui.animation
(:use [spork.cljgui [gui behavior scenegraph]])
(:import [java.awt Graphics2D]
[java.awt.event ActionListener]
[javax.swing Timer JPanel]))
;what we really should do is extend the drawing to IShapes
;this will let us maintain the state of the shape records, and call their
;draw-shape functions....
;; Two sprites rendered from relative circle shapes.
(def greencircle (make-sprite (->relative-circle :green 100) :bitmask 0 0))
(def bluecircle (make-sprite (->relative-circle :blue 100) :bitmask 0 0))
;; Static composite: the two circles translated in opposite directions.
(def greenandblue
  (compose-scene
    (translate-scene -35 35 greencircle)
    (translate-scene 35 -35 bluecircle)))
(defn compose-b
  "compose-scene lifted over behaviors via lift2."
  [left-b right-b]
  (lift2 compose-scene left-b right-b))
(defn translate-b
  "translate-scene lifted over behaviors via lift3."
  [x-b y-b scene-b]
  (lift3 translate-scene x-b y-b scene-b))
(defn scale-b
  "scale-scene lifted over behaviors via lift3."
  [x-b y-b scene-b]
  (lift3 scale-scene x-b y-b scene-b))
(defn rotate-b
  "rotate-scene lifted over behaviors via lift2."
  [theta-b scene-b]
  (lift2 rotate-scene theta-b scene-b))
(defn fade-b
  "fade-scene lifted over behaviors via lift2."
  [alpha-b scene-b]
  (lift2 fade-scene alpha-b scene-b))
;; NOTE(review): this looks broken as written. `#(map-b * scale-b %)` passes
;; the function `scale-b` where a behavior or scalar is presumably expected,
;; and the outer `scale-b` call supplies only one argument although scale-b
;; is defined above with three params ([xb yb sceneb]) — that call should
;; throw an ArityException at runtime. Confirm the intended scaling before
;; relying on this function.
(defn offset-scale-b [scale xb yb sceneb]
  (let [[scaledx scaledy] (map #(map-b * scale-b %) [xb yb])]
    (scale-b
      (translate-b scaledx scaledy sceneb))))
;; Make a drawing behavior orbit: `pos` is wiggle (sinusoidal per the notes
;; below) multiplied by a constant radius; the second coordinate is the same
;; signal delayed by 0.5, and the whole animation is sped up by `speed`.
(defn orbit [radius speed drawb]
  (let [pos (lift2 * wiggle (forever radius))]
    (->> drawb
         (translate-b pos (wait 0.5 pos))
         (faster speed))))
use FRP to compose a bunch of functional drawings ...
;we take an initial drawing behavior (animation) and
;a number of trails back we want to go. The idea is that
;we compute a new behavior, which is the composition of the
;drawing behavior, with multiple "past" versions of itself.
;This allows us to draw a trail. We use n to denote the number of
;items that are trailing the original. Note, if N is large, or if we
;have an infinite handler, that would me we're just statically grawing
;the shape as a function of time. This could lead to a space leak, so
;we might want to handle that separately....
(defn trail
"Trail returns a behavior that creates a time-shifted duplicate which appears
to follow the input behavior."
[drawb]
(compose-b
drawb
(wait 0.25 drawb)))
(defn fading-trail [fade-rate tdelay drawb]
(compose-b
drawb
(fade-b (forever fade-rate) (wait tdelay drawb))))
;allow for multiple trails....n back, on the caveat that the intervals
;exist over the domain. I.e., as we go back in "time", if t is invalid (i.e.
;only positive values are allowed for t), then we stop trailing.
;note....interval will ultimately be multiplied by pi, so any real "difference"
for wait should be relative to pi . In this case , 2PI is a full cycle .
Something that tricked me was that intuitive intervals ( i.e. 1 , 2 , 3 )
;seemd fine, and should produce cool trails of drawings that are animated
1 , 2 , and 3 time - steps behind the other ... leaving a nice trail .
;However, the primary mover function driving most of the animation right now,
;wiggle, projects time into a sin wave, multiplying the input value by pi.
Thus , an interval of 20 produces the same effect as 2PI , or any other even
multiple , while 1 produces the same effect as PI , as does 19 , or any odd
multiple of PI . Therefore , we need to think relative to pi ( for now ) to get
;meaningful transforms out of this. Really, it's time/cycle. That's what
;interval should be....
;Another way around this is to just divide the cycle into n intervals, and fill
;em. Yea, let's do that....
This is equivalent to dividing the cycle , and taking the first n values .
(defn n-trail
"Returns a behavior that is the composition of n duplicates of the
drawingbehavior, where the nth duplicate is delayed - time shifted - by
(+ (/ 1 (- n 1)) (/ 1 n)"
[n drawb]
(let [interval (/ 1 n)
trails (map (fn [w] (fade-b (forever (- 1 w)) (wait (negate w) drawb)))
(take (dec n) (iterate #(+ interval %) interval)))
composition (reduce compose-b (concat (reverse trails) [drawb]))]
(sample
(fn [bc] (composition bc)))))
(defn n-trail-solid
"Returns a behavior that is the composition of n duplicates of the
drawingbehavior, where the nth duplicate is delayed - time shifted - by
(+ (/ 1 (- n 1)) (/ 1 n)"
[n drawb]
(let [interval (/ 1 n)
trails (map (fn [w] (wait (negate w) drawb))
(take (dec n) (iterate #(+ interval %) interval)))
composition (reduce compose-b (concat (reverse trails) [drawb]))]
(sample
(fn [bc] (composition bc)))))
(defn rand-trail
"Create n duplicates of the drawingbehavior, where the nth duplicate
is delayed - time shifted - by a random value between 0 and 1."
[n drawb]
(let [trails (map (fn [w] (fade-b (forever (- 1 w))
(wait (negate w) drawb)))
(take (dec n) (repeatedly (fn [] (rand)))))
composition (reduce compose-b (concat (reverse trails) [drawb]))]
(sample
(fn [bc] (composition bc)))))
(defn n-decaying [n drawb]
(let [interval (/ 1 n)
trails (map (fn [w]
(scale-b
(forever (- 1 w))
(forever (- 1 w))
(fade-b
(forever (- 1 w)) (wait (negate w) drawb))))
(take (dec n) (iterate #(+ interval %) interval)))
composition (reduce compose-b (concat (reverse trails) [drawb]))]
(sample
(fn [bc] (composition bc)))))
;Animation utilities and demos....
(defn now [] (System/currentTimeMillis))
(defn af [anim]
(let [[w h] [600 600]
midw (/ w 2)
midh (/ h 2)
starttime (ref (now))
^JPanel panel
(paintpanel w h
(fn [^Graphics2D g]
(let [elapsed (/ (- (now) @starttime) 1000.0)
scene (translate-scene midw midh
(->> @anim
(read-value elapsed)))]
(do
(clear-background g (get-gui-color :white) w h)
;(.translate g (int midw) (int midh))
(render scene g)))))
;(.translate g (int (negate midw));have to move back....
( int ( negate ) ) ) ) ) ) )
x (add-watch anim :newanimation
(fn [k r oldstate newstate]
(if-not (or (nil? panel) (not (.isVisible panel)))
(.repaint panel)
(remove-watch r k))))
about 60 fps
(proxy [ActionListener] []
(actionPerformed [e]
(.repaint panel))))]
(do (.start tmr)
(toggle-top (display (empty-frame)
(stack
(label "Functional Reactive Animation!")
panel))))))
(defn animation-app [initframe]
(let [mutableanimation (ref initframe)]
(do (run-app (fn [] (af mutableanimation)))
(fn [newanim]
(dosync (ref-set mutableanimation newanim))))))
( def stillframe ( forever ) )
(def stillframe (forever greenandblue))
(def activeanimator (ref nil))
(defn animator? []
(not= nil @activeanimator))
(defn get-animator []
(if (animator?)
@activeanimator
(dosync (ref-set activeanimator (animation-app stillframe))
@activeanimator)))
(defn animator [scenebehavior]
((get-animator) scenebehavior))
(defn reset []
(animator stillframe))
(defn vertical []
(animator (translate-b (forever 0) wiggle100 stillframe)))
(defn horizontal []
(animator (translate-b wiggle100 (forever 0) stillframe)))
(defn diagonal []
(animator (translate-b wiggle100 wiggle100 stillframe)))
(defn horizontal-trail []
(animator (trail (translate-b wiggle100 (forever 0) stillframe))))
(defn horizontal-two []
(animator (n-trail 2 (translate-b wiggle100 (forever 0) stillframe))))
(defn rotating []
(animator (orbit 100 1 stillframe )))
(defn rotate-trail []
(animator (compose-b
(n-trail 10 (orbit 100 1 stillframe))
stillframe)))
(defn horizontal-n
([n] (animator (->> stillframe
(translate-b wiggle100 (forever 0))
(n-trail n))))
([] (horizontal-n 10)))
(defn random-trail
([n] (animator (->> stillframe
(translate-b wiggle100 (forever 0))
(rand-trail n))))
([] (random-trail 10)))
(defn skating []
(->> stillframe
(orbit 100 1)
(translate-b wiggle100 (forever 0))
(n-trail 20)
(animator)))
(defn figure8 []
(->> stillframe
(orbit 30 0.5)
(translate-b wiggle100 (forever 0))
(n-trail 20)
(animator)))
(defn rotation []
(animator
(->> stillframe
(rotate-b (wiggle-between (* 2 pi))))))
(defn tumble []
(->> stillframe
(rotate-b (wiggle-between (* 2 pi)))
(translate-b wiggle100 (forever 0.0))
(n-trail 20)
(animator)))
(defn waltz [n]
(->> stillframe
(rotate-b (wiggle-between pi))
(orbit 100 0.8)
(n-trail n)
(faster 0.5)
(animator)))
(defn waltz-solid [n]
(->> stillframe
(rotate-b (wiggle-between pi))
(orbit 100 0.8)
(n-trail-solid n)
(faster 0.5)
(animator)))
(defn wigglewaggle []
(->> stillframe
(translate-b (waggle-between 100) (wiggle-between 100))
(animator)))
(defn separate []
(let [green (->> (forever greencircle)
(translate-b (wiggle-between 300) (waggle-between 300))
(n-trail 15))
blue (->> (forever bluecircle)
(translate-b (waggle-between 200) (wiggle-between 200))
(n-trail 15))]
(->> (compose-b green blue)
(animator))))
(defn scaled []
(->> stillframe
(scale-b wiggle wiggle)
(translate-b wiggle100 waggle100)
(animator)))
(defn decaying []
(let
[green (->> (forever greencircle)
(translate-b (wiggle-between 300) (waggle-between 300))
(n-decaying 15))
blue (->> (forever bluecircle)
(translate-b (waggle-between 200) (wiggle-between 200))
(n-decaying 15))]
(->> (compose-b green blue)
(animator))))
(defn round-particles [n size]
(let [particle (make-sprite (->relative-circle :red size) :bitmask 0 0)]
(->>
(forever particle)
(rotate-b (wiggle-between pi))
(orbit 100 0.8)
(n-trail-solid n)
(faster 0.5)
(animator))))
(defn square-particles [n size]
(->>
(forever (->relative-rectangle :red size))
;(rotate-b (wiggle-between pi))
(orbit 100 0.8)
(n-trail-solid n)
(faster 0.5)
(animator)))
| null | https://raw.githubusercontent.com/joinr/spork/bb80eddadf90bf92745bf5315217e25a99fbf9d6/obe/cljgui/animation.clj | clojure | what we really should do is extend the drawing to IShapes
this will let us maintain the state of the shape records, and call their
draw-shape functions....
we take an initial drawing behavior (animation) and
a number of trails back we want to go. The idea is that
we compute a new behavior, which is the composition of the
drawing behavior, with multiple "past" versions of itself.
This allows us to draw a trail. We use n to denote the number of
items that are trailing the original. Note, if N is large, or if we
have an infinite handler, that would me we're just statically grawing
the shape as a function of time. This could lead to a space leak, so
we might want to handle that separately....
allow for multiple trails....n back, on the caveat that the intervals
exist over the domain. I.e., as we go back in "time", if t is invalid (i.e.
only positive values are allowed for t), then we stop trailing.
note....interval will ultimately be multiplied by pi, so any real "difference"
seemd fine, and should produce cool trails of drawings that are animated
However, the primary mover function driving most of the animation right now,
wiggle, projects time into a sin wave, multiplying the input value by pi.
meaningful transforms out of this. Really, it's time/cycle. That's what
interval should be....
Another way around this is to just divide the cycle into n intervals, and fill
em. Yea, let's do that....
Animation utilities and demos....
(.translate g (int midw) (int midh))
(.translate g (int (negate midw));have to move back....
(rotate-b (wiggle-between pi)) | (ns spork.cljgui.animation
(:use [spork.cljgui [gui behavior scenegraph]])
(:import [java.awt Graphics2D]
[java.awt.event ActionListener]
[javax.swing Timer JPanel]))
(def greencircle (make-sprite (->relative-circle :green 100) :bitmask 0 0))
(def bluecircle (make-sprite (->relative-circle :blue 100) :bitmask 0 0))
(def greenandblue
(compose-scene
(translate-scene -35 35 greencircle)
(translate-scene 35 -35 bluecircle)))
(defn compose-b [scene1b scene2b]
(lift2 compose-scene scene1b scene2b))
(defn translate-b [xb yb sceneb]
(lift3 translate-scene xb yb sceneb))
(defn scale-b [xb yb sceneb]
(lift3 scale-scene xb yb sceneb))
(defn rotate-b [thetab sceneb]
(lift2 rotate-scene thetab sceneb))
(defn fade-b [alphab sceneb]
(lift2 fade-scene alphab sceneb))
(defn offset-scale-b [scale xb yb sceneb]
(let [[scaledx scaledy] (map #(map-b * scale-b %) [xb yb])]
(scale-b
(translate-b scaledx scaledy sceneb))))
(defn orbit [radius speed drawb]
(let [pos (lift2 * wiggle (forever radius))]
(->> drawb
(translate-b pos (wait 0.5 pos))
(faster speed))))
use FRP to compose a bunch of functional drawings ...
(defn trail
"Trail returns a behavior that creates a time-shifted duplicate which appears
to follow the input behavior."
[drawb]
(compose-b
drawb
(wait 0.25 drawb)))
(defn fading-trail [fade-rate tdelay drawb]
(compose-b
drawb
(fade-b (forever fade-rate) (wait tdelay drawb))))
for wait should be relative to pi . In this case , 2PI is a full cycle .
Something that tricked me was that intuitive intervals ( i.e. 1 , 2 , 3 )
1 , 2 , and 3 time - steps behind the other ... leaving a nice trail .
Thus , an interval of 20 produces the same effect as 2PI , or any other even
multiple , while 1 produces the same effect as PI , as does 19 , or any odd
multiple of PI . Therefore , we need to think relative to pi ( for now ) to get
This is equivalent to dividing the cycle , and taking the first n values .
(defn n-trail
"Returns a behavior that is the composition of n duplicates of the
drawingbehavior, where the nth duplicate is delayed - time shifted - by
(+ (/ 1 (- n 1)) (/ 1 n)"
[n drawb]
(let [interval (/ 1 n)
trails (map (fn [w] (fade-b (forever (- 1 w)) (wait (negate w) drawb)))
(take (dec n) (iterate #(+ interval %) interval)))
composition (reduce compose-b (concat (reverse trails) [drawb]))]
(sample
(fn [bc] (composition bc)))))
(defn n-trail-solid
"Returns a behavior that is the composition of n duplicates of the
drawingbehavior, where the nth duplicate is delayed - time shifted - by
(+ (/ 1 (- n 1)) (/ 1 n)"
[n drawb]
(let [interval (/ 1 n)
trails (map (fn [w] (wait (negate w) drawb))
(take (dec n) (iterate #(+ interval %) interval)))
composition (reduce compose-b (concat (reverse trails) [drawb]))]
(sample
(fn [bc] (composition bc)))))
(defn rand-trail
"Create n duplicates of the drawingbehavior, where the nth duplicate
is delayed - time shifted - by a random value between 0 and 1."
[n drawb]
(let [trails (map (fn [w] (fade-b (forever (- 1 w))
(wait (negate w) drawb)))
(take (dec n) (repeatedly (fn [] (rand)))))
composition (reduce compose-b (concat (reverse trails) [drawb]))]
(sample
(fn [bc] (composition bc)))))
(defn n-decaying [n drawb]
(let [interval (/ 1 n)
trails (map (fn [w]
(scale-b
(forever (- 1 w))
(forever (- 1 w))
(fade-b
(forever (- 1 w)) (wait (negate w) drawb))))
(take (dec n) (iterate #(+ interval %) interval)))
composition (reduce compose-b (concat (reverse trails) [drawb]))]
(sample
(fn [bc] (composition bc)))))
(defn now [] (System/currentTimeMillis))
(defn af [anim]
(let [[w h] [600 600]
midw (/ w 2)
midh (/ h 2)
starttime (ref (now))
^JPanel panel
(paintpanel w h
(fn [^Graphics2D g]
(let [elapsed (/ (- (now) @starttime) 1000.0)
scene (translate-scene midw midh
(->> @anim
(read-value elapsed)))]
(do
(clear-background g (get-gui-color :white) w h)
(render scene g)))))
( int ( negate ) ) ) ) ) ) )
x (add-watch anim :newanimation
(fn [k r oldstate newstate]
(if-not (or (nil? panel) (not (.isVisible panel)))
(.repaint panel)
(remove-watch r k))))
about 60 fps
(proxy [ActionListener] []
(actionPerformed [e]
(.repaint panel))))]
(do (.start tmr)
(toggle-top (display (empty-frame)
(stack
(label "Functional Reactive Animation!")
panel))))))
(defn animation-app [initframe]
(let [mutableanimation (ref initframe)]
(do (run-app (fn [] (af mutableanimation)))
(fn [newanim]
(dosync (ref-set mutableanimation newanim))))))
( def stillframe ( forever ) )
(def stillframe (forever greenandblue))
(def activeanimator (ref nil))
(defn animator? []
(not= nil @activeanimator))
(defn get-animator []
(if (animator?)
@activeanimator
(dosync (ref-set activeanimator (animation-app stillframe))
@activeanimator)))
(defn animator [scenebehavior]
((get-animator) scenebehavior))
(defn reset []
(animator stillframe))
(defn vertical []
(animator (translate-b (forever 0) wiggle100 stillframe)))
(defn horizontal []
(animator (translate-b wiggle100 (forever 0) stillframe)))
(defn diagonal []
(animator (translate-b wiggle100 wiggle100 stillframe)))
(defn horizontal-trail []
(animator (trail (translate-b wiggle100 (forever 0) stillframe))))
(defn horizontal-two []
(animator (n-trail 2 (translate-b wiggle100 (forever 0) stillframe))))
(defn rotating []
(animator (orbit 100 1 stillframe )))
(defn rotate-trail []
(animator (compose-b
(n-trail 10 (orbit 100 1 stillframe))
stillframe)))
(defn horizontal-n
([n] (animator (->> stillframe
(translate-b wiggle100 (forever 0))
(n-trail n))))
([] (horizontal-n 10)))
(defn random-trail
([n] (animator (->> stillframe
(translate-b wiggle100 (forever 0))
(rand-trail n))))
([] (random-trail 10)))
(defn skating []
(->> stillframe
(orbit 100 1)
(translate-b wiggle100 (forever 0))
(n-trail 20)
(animator)))
(defn figure8 []
(->> stillframe
(orbit 30 0.5)
(translate-b wiggle100 (forever 0))
(n-trail 20)
(animator)))
(defn rotation []
(animator
(->> stillframe
(rotate-b (wiggle-between (* 2 pi))))))
(defn tumble []
(->> stillframe
(rotate-b (wiggle-between (* 2 pi)))
(translate-b wiggle100 (forever 0.0))
(n-trail 20)
(animator)))
(defn waltz [n]
(->> stillframe
(rotate-b (wiggle-between pi))
(orbit 100 0.8)
(n-trail n)
(faster 0.5)
(animator)))
(defn waltz-solid [n]
(->> stillframe
(rotate-b (wiggle-between pi))
(orbit 100 0.8)
(n-trail-solid n)
(faster 0.5)
(animator)))
(defn wigglewaggle []
(->> stillframe
(translate-b (waggle-between 100) (wiggle-between 100))
(animator)))
(defn separate []
(let [green (->> (forever greencircle)
(translate-b (wiggle-between 300) (waggle-between 300))
(n-trail 15))
blue (->> (forever bluecircle)
(translate-b (waggle-between 200) (wiggle-between 200))
(n-trail 15))]
(->> (compose-b green blue)
(animator))))
(defn scaled []
(->> stillframe
(scale-b wiggle wiggle)
(translate-b wiggle100 waggle100)
(animator)))
(defn decaying []
(let
[green (->> (forever greencircle)
(translate-b (wiggle-between 300) (waggle-between 300))
(n-decaying 15))
blue (->> (forever bluecircle)
(translate-b (waggle-between 200) (wiggle-between 200))
(n-decaying 15))]
(->> (compose-b green blue)
(animator))))
(defn round-particles [n size]
(let [particle (make-sprite (->relative-circle :red size) :bitmask 0 0)]
(->>
(forever particle)
(rotate-b (wiggle-between pi))
(orbit 100 0.8)
(n-trail-solid n)
(faster 0.5)
(animator))))
(defn square-particles [n size]
(->>
(forever (->relative-rectangle :red size))
(orbit 100 0.8)
(n-trail-solid n)
(faster 0.5)
(animator)))
|
444f4164f3c630633e009a13dbfa57abaae7b8471ad32b66e3ce1f537e155b6f | r0man/google-maps-clj | projection.clj | (ns google.maps.projection
(:use google.maps.location))
(def tile-size 256)
(defstruct coords :x :y)
(defn tiles
"Returns the number of tiles for the zoom level."
[^Integer zoom]
(int (. Math pow 2 zoom)))
(defn circumference
"Returns the circumference for the zoom level."
[^Integer zoom] (* tile-size (tiles zoom)))
(defn radius
"Returns the radius for the zoom level."
[^Integer zoom]
(/ (circumference zoom) (* 2.0 Math/PI)))
(defn false-easting
"Returns the false easting for the zoom level."
[^Integer zoom] (int (/ (circumference zoom) -2.0)))
(defn false-northing
"Returns the false northing for the zoom level."
[^Integer zoom] (int (/ (circumference zoom) 2.0)))
(defn latitude->y-coord
"Returns the y coordinate of the latitude for the zoom level."
[^Double latitude ^Integer zoom]
(let [sinus (. Math sin (. Math toRadians latitude))
pixel (* (/ (radius zoom) 2.0) (. Math log (/ (+ 1.0 sinus) (- 1.0 sinus))))]
(int (+ 0.5 (* -1.0 (+ pixel (false-easting zoom)))))))
(defn longitude->x-coord
"Returns the x coordinate of the longitude for the zoom level."
[^Double longitude ^Integer zoom]
(int (+ 0.5 (* (radius zoom) (. Math toRadians longitude)) (false-northing zoom))))
(defn location->coords
"Returns the coordinates of the location for the zoom level."
[location ^Integer zoom]
{:x (longitude->x-coord (:longitude location) zoom)
:y (latitude->y-coord (:latitude location) zoom)})
(defn x-coord->longitude [^Integer x-coord ^Integer zoom]
"Returns the longitude of the x coordinate for the zoom level."
(let [degree (. Math toDegrees (/ (+ x-coord (false-northing zoom)) (radius zoom)))
rotation (. Math floor (/ (+ degree 180.0) 360.0))]
(- degree (* rotation 360.0))))
(defn y-coord->latitude [^Integer y-coord ^Integer zoom]
"Returns the latitude of the y coordinate for the zoom level."
(let [value (. Math exp (* -1.0 (/ (+ y-coord (false-easting zoom)) (radius zoom))))]
(* -1.0 (. Math toDegrees (- (/ Math/PI 2.0) (* 2.0 (. Math atan value)))))))
(defn coords->location
"Returns the location of the coordinates for the zoom level."
[coords ^Integer zoom]
(make-location
(y-coord->latitude (:y coords) zoom)
(x-coord->longitude (:x coords) zoom)))
(defn latitude-delta
"Returns the latitude delta between the y coordinates."
[^Integer y1 ^Integer y2 ^Integer zoom]
(- (y-coord->latitude y2 zoom) (y-coord->latitude y1 zoom)))
(defn longitude-delta
"Returns the longitude delta between the x coordinates."
[^Integer x1 ^Integer x2 ^Integer zoom]
(- (x-coord->longitude x2 zoom) (x-coord->longitude x1 zoom)))
(defn x-coord-delta
"Returns the x coordinate delta between the longitudes."
[^Double longitude1 ^Double longitude-2 ^Integer zoom]
(- (longitude->x-coord longitude-2 zoom) (longitude->x-coord longitude1 zoom)))
(defn y-coord-delta
"Returns the y coordinate delta between the latitudes."
[^Double latitude-1 ^Double latitude2 ^Integer zoom]
(- (latitude->y-coord latitude2 zoom) (latitude->y-coord latitude-1 zoom)))
(defn coord-delta [location-1 location-2 ^Integer zoom]
(struct
coords
(x-coord-delta (:longitude location-1) (:longitude location-2) zoom)
(y-coord-delta (:latitude location-1) (:latitude location-2) zoom)))
| null | https://raw.githubusercontent.com/r0man/google-maps-clj/75467cd366fad1250ac78f07e4796c254d09f36a/src/google/maps/projection.clj | clojure | (ns google.maps.projection
(:use google.maps.location))
(def tile-size 256)
(defstruct coords :x :y)
(defn tiles
"Returns the number of tiles for the zoom level."
[^Integer zoom]
(int (. Math pow 2 zoom)))
(defn circumference
"Returns the circumference for the zoom level."
[^Integer zoom] (* tile-size (tiles zoom)))
(defn radius
"Returns the radius for the zoom level."
[^Integer zoom]
(/ (circumference zoom) (* 2.0 Math/PI)))
(defn false-easting
"Returns the false easting for the zoom level."
[^Integer zoom] (int (/ (circumference zoom) -2.0)))
(defn false-northing
"Returns the false northing for the zoom level."
[^Integer zoom] (int (/ (circumference zoom) 2.0)))
(defn latitude->y-coord
"Returns the y coordinate of the latitude for the zoom level."
[^Double latitude ^Integer zoom]
(let [sinus (. Math sin (. Math toRadians latitude))
pixel (* (/ (radius zoom) 2.0) (. Math log (/ (+ 1.0 sinus) (- 1.0 sinus))))]
(int (+ 0.5 (* -1.0 (+ pixel (false-easting zoom)))))))
(defn longitude->x-coord
"Returns the x coordinate of the longitude for the zoom level."
[^Double longitude ^Integer zoom]
(int (+ 0.5 (* (radius zoom) (. Math toRadians longitude)) (false-northing zoom))))
(defn location->coords
"Returns the coordinates of the location for the zoom level."
[location ^Integer zoom]
{:x (longitude->x-coord (:longitude location) zoom)
:y (latitude->y-coord (:latitude location) zoom)})
(defn x-coord->longitude [^Integer x-coord ^Integer zoom]
"Returns the longitude of the x coordinate for the zoom level."
(let [degree (. Math toDegrees (/ (+ x-coord (false-northing zoom)) (radius zoom)))
rotation (. Math floor (/ (+ degree 180.0) 360.0))]
(- degree (* rotation 360.0))))
(defn y-coord->latitude [^Integer y-coord ^Integer zoom]
"Returns the latitude of the y coordinate for the zoom level."
(let [value (. Math exp (* -1.0 (/ (+ y-coord (false-easting zoom)) (radius zoom))))]
(* -1.0 (. Math toDegrees (- (/ Math/PI 2.0) (* 2.0 (. Math atan value)))))))
(defn coords->location
"Returns the location of the coordinates for the zoom level."
[coords ^Integer zoom]
(make-location
(y-coord->latitude (:y coords) zoom)
(x-coord->longitude (:x coords) zoom)))
(defn latitude-delta
"Returns the latitude delta between the y coordinates."
[^Integer y1 ^Integer y2 ^Integer zoom]
(- (y-coord->latitude y2 zoom) (y-coord->latitude y1 zoom)))
(defn longitude-delta
"Returns the longitude delta between the x coordinates."
[^Integer x1 ^Integer x2 ^Integer zoom]
(- (x-coord->longitude x2 zoom) (x-coord->longitude x1 zoom)))
(defn x-coord-delta
"Returns the x coordinate delta between the longitudes."
[^Double longitude1 ^Double longitude-2 ^Integer zoom]
(- (longitude->x-coord longitude-2 zoom) (longitude->x-coord longitude1 zoom)))
(defn y-coord-delta
"Returns the y coordinate delta between the latitudes."
[^Double latitude-1 ^Double latitude2 ^Integer zoom]
(- (latitude->y-coord latitude2 zoom) (latitude->y-coord latitude-1 zoom)))
(defn coord-delta [location-1 location-2 ^Integer zoom]
(struct
coords
(x-coord-delta (:longitude location-1) (:longitude location-2) zoom)
(y-coord-delta (:latitude location-1) (:latitude location-2) zoom)))
| |
05ad30b8aedd67b2d5c600df4704bb18457b042f92dc41f219b8efb8d7870f3f | squint-cljs/squint | resource.clj | (ns squint.resource
(:require [clojure.edn :as edn]
[clojure.java.io :as io]))
(defmacro edn-resource [f]
(list 'quote (edn/read-string (slurp (io/resource f)))))
| null | https://raw.githubusercontent.com/squint-cljs/squint/e0aac964bf00bd60cf5a3a5dbe0b89a588c5e505/resources/squint/resource.clj | clojure | (ns squint.resource
(:require [clojure.edn :as edn]
[clojure.java.io :as io]))
(defmacro edn-resource [f]
(list 'quote (edn/read-string (slurp (io/resource f)))))
| |
d2bb7675614dd02da2422a82b93629ef78373ec586d813c93ac6584cea2b429a | cdornan/keystore | PasswordStoreSchema.hs | {-# LANGUAGE QuasiQuotes #-}
# LANGUAGE RecordWildCards #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE BangPatterns #
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE StandaloneDeriving #
# LANGUAGE ExistentialQuantification #
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE GeneralizedNewtypeDeriving #
{-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE RecordWildCards #
{-# LANGUAGE DeriveGeneric #-}
# LANGUAGE StandaloneDeriving #
{-# LANGUAGE OverloadedStrings #-}
# OPTIONS_GHC -fno - warn - orphans #
module Data.KeyStore.Types.PasswordStoreSchema
( passwordStoreSchema
, passwordStoreChangelog
) where
import Data.API.Parse
import Data.API.Types
import Data.API.Changes
passwordStoreSchema :: API
passwordStoreChangelog :: APIChangelog
(passwordStoreSchema, passwordStoreChangelog) = [apiWithChangelog|
//
// External Representation Only
//
// The builtin support for map-like types introduced in Aeson 1.0 has broken
// the mechanism for representing Map in this schema. In order to minimise the
// disruption and preserve the existing schema representation we have renamed
// all of the types in the schema that contain Map types. In the model these
// types are reconstructed just as they would have been in previous KeyStore
// editions and mapping functions have been introduced to convert between the
// two representations. The PasswordStore gets read with this representation,
// matching the representation of past keystore packages and gets
// converted into the internal type representation (with the maps) that the
// rest of the keystore code base expects.
z_ps :: PasswordStore_
= record
comment :: PasswordStoreComment
map :: PasswordMap_
setup :: UTC
z_pm :: PasswordMap_
// the password map, represented internally with a Map
// from PasswordName to Password
= record
map :: [NamePasswordAssoc_]
z_npa :: NamePasswordAssoc_
= record
name :: PasswordName
password :: Password_
z_pw :: Password_
// passwords may be simple, or be a collection of 'sessions',
// one of which is selected
= record
name :: PasswordName
text :: PasswordText
sessions :: SessionMap_
isOneShot :: Bool
primed :: boolean
setup :: UTC
z_smp :: SessionMap_
// collections of sessions are represented internally as a Map
// from SessionName to PasswordText
= record
map :: [SessionPasswordAssoc_]
z_spa :: SessionPasswordAssoc_
= record
name :: SessionName
session :: Session
//
// Classic Schema Definitions
//
ssn :: Session
// a session just consists of a password and the stup time
= record
name :: SessionName
password :: PasswordText
isOneShot :: Bool
setup :: UTC
pwsc :: PasswordStoreComment
// a short comment on the PasswordStore
= basic string
pnm :: PasswordName
// used to identify a password in the store
= basic string
ptx :: PasswordText
// used to contain the secret text of a Password
= basic string
snm :: SessionName
// used to identify the different sessions in a session password
= basic string
changes
// Initial version
version "0.0.0.1"
|]
| null | https://raw.githubusercontent.com/cdornan/keystore/e16103e75cf067baa3a939a1d9e79bd7af6942e7/src/Data/KeyStore/Types/PasswordStoreSchema.hs | haskell | # LANGUAGE QuasiQuotes #
# LANGUAGE DeriveGeneric #
# LANGUAGE DeriveDataTypeable #
# LANGUAGE TemplateHaskell #
# LANGUAGE OverloadedStrings #
# LANGUAGE DeriveDataTypeable #
# LANGUAGE DeriveGeneric #
# LANGUAGE OverloadedStrings # | # LANGUAGE RecordWildCards #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE BangPatterns #
# LANGUAGE StandaloneDeriving #
# LANGUAGE ExistentialQuantification #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE RecordWildCards #
# LANGUAGE StandaloneDeriving #
# OPTIONS_GHC -fno - warn - orphans #
module Data.KeyStore.Types.PasswordStoreSchema
( passwordStoreSchema
, passwordStoreChangelog
) where
import Data.API.Parse
import Data.API.Types
import Data.API.Changes
passwordStoreSchema :: API
passwordStoreChangelog :: APIChangelog
(passwordStoreSchema, passwordStoreChangelog) = [apiWithChangelog|
//
// External Representation Only
//
// The builtin support for map-like types introduced in Aeson 1.0 has broken
// the mechanism for representing Map in this schema. In order to minimise the
// disruption and preserve the existing schema representation we have renamed
// all of the types in the schema that contain Map types. In the model these
// types are reconstructed just as they would have been in previous KeyStore
// editions and mapping functions have been introduced to convert between the
// two representations. The PasswordStore gets read with this representation,
// matching the representation of past keystore packages and gets
// converted into the internal type representation (with the maps) that the
// rest of the keystore code base expects.
z_ps :: PasswordStore_
= record
comment :: PasswordStoreComment
map :: PasswordMap_
setup :: UTC
z_pm :: PasswordMap_
// the password map, represented internally with a Map
// from PasswordName to Password
= record
map :: [NamePasswordAssoc_]
z_npa :: NamePasswordAssoc_
= record
name :: PasswordName
password :: Password_
z_pw :: Password_
// passwords may be simple, or be a collection of 'sessions',
// one of which is selected
= record
name :: PasswordName
text :: PasswordText
sessions :: SessionMap_
isOneShot :: Bool
primed :: boolean
setup :: UTC
z_smp :: SessionMap_
// collections of sessions are represented internally as a Map
// from SessionName to PasswordText
= record
map :: [SessionPasswordAssoc_]
z_spa :: SessionPasswordAssoc_
= record
name :: SessionName
session :: Session
//
// Classic Schema Definitions
//
ssn :: Session
// a session just consists of a password and the stup time
= record
name :: SessionName
password :: PasswordText
isOneShot :: Bool
setup :: UTC
pwsc :: PasswordStoreComment
// a short comment on the PasswordStore
= basic string
pnm :: PasswordName
// used to identify a password in the store
= basic string
ptx :: PasswordText
// used to contain the secret text of a Password
= basic string
snm :: SessionName
// used to identify the different sessions in a session password
= basic string
changes
// Initial version
version "0.0.0.1"
|]
|
e41d66294fbc01ea8a68bf8f8f416d9150e469a2c0075214af01903442990f47 | dongcarl/guix | utils.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2012 , 2013 , 2018 , 2019 , 2020 < >
Copyright © 2016 < >
Copyright © 2016 < >
Copyright © 2017 , 2019 , 2020 < >
Copyright © 2018 < >
Copyright © 2019 < >
Copyright © 2020 < 0x2b3bfa0+ >
Copyright © 2020 < >
Copyright © 2021 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (guix import utils)
#:use-module (guix base32)
#:use-module ((guix build download) #:prefix build:)
#:use-module ((gcrypt hash) #:hide (sha256))
#:use-module (guix http-client)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix utils)
#:use-module (guix packages)
#:use-module (guix discovery)
#:use-module (guix build-system)
#:use-module (guix gexp)
#:use-module (guix store)
#:use-module (guix download)
#:use-module (guix sets)
#:use-module (gnu packages)
#:use-module (ice-9 match)
#:use-module (ice-9 rdelim)
#:use-module (ice-9 receive)
#:use-module (ice-9 regex)
#:use-module (srfi srfi-1)
#:use-module (srfi srfi-9)
#:use-module (srfi srfi-11)
#:use-module (srfi srfi-26)
#:use-module (srfi srfi-71)
#:export (factorize-uri
flatten
url-fetch
guix-hash-url
package-names->package-inputs
maybe-inputs
maybe-native-inputs
maybe-propagated-inputs
package->definition
spdx-string->license
license->symbol
snake-case
beautify-description
alist->package
read-lines
chunk-lines
guix-name
recursive-import))
(define (factorize-uri uri version)
"Factorize URI, a package tarball URI as a string, such that any occurrences
of the string VERSION is replaced by the symbol 'version."
(let ((version-rx (make-regexp (regexp-quote version))))
(match (regexp-exec version-rx uri)
(#f
uri)
(_
(let ((indices (fold-matches version-rx uri
'((0))
(lambda (m result)
(match result
(((start) rest ...)
`((,(match:end m))
(,start . ,(match:start m))
,@rest)))))))
(fold (lambda (index result)
(match index
((start)
(cons (substring uri start)
result))
((start . end)
(cons* (substring uri start end)
'version
result))))
'()
indices))))))
(define (flatten lst)
"Return a list that recursively concatenates all sub-lists of LST."
(fold-right
(match-lambda*
(((sub-list ...) memo)
(append (flatten sub-list) memo))
((elem memo)
(cons elem memo)))
'() lst))
(define (url-fetch url file-name)
"Save the contents of URL to FILE-NAME. Return #f on failure."
(parameterize ((current-output-port (current-error-port)))
(build:url-fetch url file-name)))
(define (guix-hash-url filename)
"Return the hash of FILENAME in nix-base32 format."
(bytevector->nix-base32-string (file-sha256 filename)))
(define (spdx-string->license str)
"Convert STR, a SPDX formatted license identifier, to a license object.
Return #f if STR does not match any known identifiers."
;; /
;; The psfl, gfl1.0, nmap, repoze
licenses does n't have SPDX identifiers
;;
;; Please update guix/licenses.scm when modifying
;; this list to avoid mismatches.
(match str
("AGPL-1.0" 'license:agpl1)
("AGPL-3.0" 'license:agpl3)
("Apache-1.1" 'license:asl1.1)
("Apache-2.0" 'license:asl2.0)
("BSL-1.0" 'license:boost1.0)
("0BSD" 'license:bsd-0)
("BSD-2-Clause-FreeBSD" 'license:bsd-2)
("BSD-3-Clause" 'license:bsd-3)
("BSD-4-Clause" 'license:bsd-4)
("CC0-1.0" 'license:cc0)
("CC-BY-2.0" 'license:cc-by2.0)
("CC-BY-3.0" 'license:cc-by3.0)
("CC-BY-SA-2.0" 'license:cc-by-sa2.0)
("CC-BY-SA-3.0" 'license:cc-by-sa3.0)
("CC-BY-SA-4.0" 'license:cc-by-sa4.0)
("CDDL-1.0" 'license:cddl1.0)
("CECILL-C" 'license:cecill-c)
("Artistic-2.0" 'license:artistic2.0)
("ClArtistic" 'license:clarified-artistic)
("CPL-1.0" 'license:cpl1.0)
("EPL-1.0" 'license:epl1.0)
("MIT" 'license:expat)
("FTL" 'license:freetype)
("GFDL-1.1" 'license:fdl1.1+)
("GFDL-1.2" 'license:fdl1.2+)
("GFDL-1.3" 'license:fdl1.3+)
("Giftware" 'license:giftware)
("GPL-1.0" 'license:gpl1)
("GPL-1.0+" 'license:gpl1+)
("GPL-2.0" 'license:gpl2)
("GPL-2.0+" 'license:gpl2+)
("GPL-3.0" 'license:gpl3)
("GPL-3.0+" 'license:gpl3+)
("ISC" 'license:isc)
("IJG" 'license:ijg)
("Imlib2" 'license:imlib2)
("IPA" 'license:ipa)
("IPL-1.0" 'license:ibmpl1.0)
("LAL-1.3" 'license:lal1.3)
("LGPL-2.0" 'license:lgpl2.0)
("LGPL-2.0+" 'license:lgpl2.0+)
("LGPL-2.1" 'license:lgpl2.1)
("LGPL-2.1+" 'license:lgpl2.1+)
("LGPL-3.0" 'license:lgpl3)
("LGPL-3.0+" 'license:lgpl3+)
("MPL-1.0" 'license:mpl1.0)
("MPL-1.1" 'license:mpl1.1)
("MPL-2.0" 'license:mpl2.0)
("MS-PL" 'license:ms-pl)
("NCSA" 'license:ncsa)
("OpenSSL" 'license:openssl)
("OLDAP-2.8" 'license:openldap2.8)
("CUA-OPL-1.0" 'license:cua-opl1.0)
("QPL-1.0" 'license:qpl)
("Ruby" 'license:ruby)
("SGI-B-2.0" 'license:sgifreeb2.0)
("OFL-1.1" 'license:silofl1.1)
("Sleepycat" 'license:sleepycat)
("TCL" 'license:tcl/tk)
("Unlicense" 'license:unlicense)
("Vim" 'license:vim)
("X11" 'license:x11)
("ZPL-2.1" 'license:zpl2.1)
("Zlib" 'license:zlib)
(_ #f)))
(define (license->symbol license)
"Convert license to a symbol representing the variable the object is bound
to in the (guix licenses) module, or #f if there is no such known license."
(define licenses
(module-map (lambda (sym var) `(,(variable-ref var) . ,sym))
(resolve-interface '(guix licenses) #:prefix 'license:)))
(assoc-ref licenses license))
(define (snake-case str)
"Return a downcased version of the string STR where underscores are replaced
with dashes."
(string-join (string-split (string-downcase str) #\_) "-"))
(define (beautify-description description)
"Improve the package DESCRIPTION by turning a beginning sentence fragment
into a proper sentence and by using two spaces between sentences."
(let ((cleaned (cond
((string-prefix? "A " description)
(string-append "This package provides a"
(substring description 1)))
((string-prefix? "Provides " description)
(string-append "This package provides"
(substring description
(string-length "Provides"))))
((string-prefix? "Functions " description)
(string-append "This package provides functions"
(substring description
(string-length "Functions"))))
(else description))))
;; Use double spacing between sentences
(regexp-substitute/global #f "\\. \\b"
cleaned 'pre ". " 'post)))
(define* (package-names->package-inputs names #:optional (output #f))
"Given a list of PACKAGE-NAMES or (PACKAGE-NAME VERSION) pairs, and an
optional OUTPUT, tries to generate a quoted list of inputs, as suitable to
use in an 'inputs' field of a package definition."
(define (make-input input version)
(cons* input (list 'unquote (string->symbol
(if version
(string-append input "-" version)
input)))
(or (and output (list output))
'())))
(map (match-lambda
((input version) (make-input input version))
(input (make-input input #f)))
names))
(define* (maybe-inputs package-names #:optional (output #f)
#:key (type #f))
"Given a list of PACKAGE-NAMES, tries to generate the 'inputs' field of a
package definition. TYPE can be used to specify the type of the inputs;
either the 'native or 'propagated symbols are accepted. Left unspecified, the
snippet generated is for regular inputs."
(let ((field-name (match type
('native 'native-inputs)
('propagated 'propagated-inputs)
(_ 'inputs))))
(match (package-names->package-inputs package-names output)
(()
'())
((package-inputs ...)
`((,field-name (,'quasiquote ,package-inputs)))))))
(define* (maybe-native-inputs package-names #:optional (output #f))
"Same as MAYBE-INPUTS, but for native inputs."
(maybe-inputs package-names output #:type 'native))
(define* (maybe-propagated-inputs package-names #:optional (output #f))
"Same as MAYBE-INPUTS, but for propagated inputs."
(maybe-inputs package-names output #:type 'propagated))
(define* (package->definition guix-package #:optional append-version?/string)
"If APPEND-VERSION?/STRING is #t, append the package's major+minor version.
If it is the symbol 'full, append the package's complete version. If
APPEND-VERSION?/string is a string, append this string."
(match guix-package
((or
('package ('name name) ('version version) . rest)
('let _ ('package ('name name) ('version version) . rest)))
`(define-public ,(string->symbol
(cond
((string? append-version?/string)
(string-append name "-" append-version?/string))
((eq? append-version?/string #t)
(string-append name "-" (version-major+minor version)))
((eq? 'full append-version?/string)
(string-append name "-" version))
(else name)))
,guix-package))))
(define (build-system-modules)
(all-modules (map (lambda (entry)
`(,entry . "guix/build-system"))
%load-path)))
(define (lookup-build-system-by-name name)
"Return a <build-system> value for the symbol NAME, representing the name of
the build system."
(fold-module-public-variables (lambda (obj result)
(if (and (build-system? obj)
(eq? name (build-system-name obj)))
obj result))
#f
(build-system-modules)))
(define (specs->package-lists specs)
"Convert each string in the SPECS list to a list of a package label and a
package value."
(map (lambda (spec)
(let-values (((pkg out) (specification->package+output spec)))
(match out
("out" (list (package-name pkg) pkg))
(_ (list (package-name pkg) pkg out)))))
specs))
(define (source-spec->object source)
"Generate an <origin> object from a SOURCE specification. The SOURCE can
either be a simple URL string, #F, or an alist containing entries for each of
the expected fields of an <origin> object."
(match source
((? string? source-url)
(let ((tarball (with-store store (download-to-store store source-url))))
(origin
(method url-fetch)
(uri source-url)
(sha256 (base32 (guix-hash-url tarball))))))
(#f #f)
(orig (let ((sha (match (assoc-ref orig "sha256")
((("base32" . value))
(base32 value))
(_ #f))))
(origin
(method (match (assoc-ref orig "method")
("url-fetch" (@ (guix download) url-fetch))
("git-fetch" (@ (guix git-download) git-fetch))
("svn-fetch" (@ (guix svn-download) svn-fetch))
("hg-fetch" (@ (guix hg-download) hg-fetch))
(_ #f)))
(uri (assoc-ref orig "uri"))
(sha256 sha))))))
(define* (alist->package meta #:optional (known-inputs '()))
"Return a package value generated from the alist META. If the list of
strings KNOWN-INPUTS is provided, do not treat the mentioned inputs as
specifications to look up and replace them with plain symbols instead."
(define (process-inputs which)
(let-values (((regular known)
(lset-diff+intersection
string=?
(vector->list (or (assoc-ref meta which) #()))
known-inputs)))
(append (specs->package-lists regular)
(map string->symbol known))))
(define (process-arguments arguments)
(append-map (match-lambda
((key . value)
(list (symbol->keyword (string->symbol key)) value)))
arguments))
(package
(name (assoc-ref meta "name"))
(version (assoc-ref meta "version"))
(source (source-spec->object (assoc-ref meta "source")))
(build-system
(lookup-build-system-by-name
(string->symbol (assoc-ref meta "build-system"))))
(arguments
(or (and=> (assoc-ref meta "arguments")
process-arguments)
'()))
(native-inputs (process-inputs "native-inputs"))
(inputs (process-inputs "inputs"))
(propagated-inputs (process-inputs "propagated-inputs"))
(home-page
(assoc-ref meta "home-page"))
(synopsis
(assoc-ref meta "synopsis"))
(description
(assoc-ref meta "description"))
(license
(match (assoc-ref meta "license")
(#f #f)
(l
(or (false-if-exception
(module-ref (resolve-interface '(guix licenses))
(string->symbol l)))
(false-if-exception
(module-ref (resolve-interface '(guix licenses) #:prefix 'license:)
(spdx-string->license l)))
(license:fsdg-compatible l)))))))
(define* (read-lines #:optional (port (current-input-port)))
"Read lines from PORT and return them as a list."
(let loop ((line (read-line port))
(lines '()))
(if (eof-object? line)
(reverse lines)
(loop (read-line port)
(cons line lines)))))
(define* (chunk-lines lines #:optional (pred string-null?))
"Return a list of chunks, each of which is a list of lines. The chunks are
separated by PRED."
(let loop ((rest lines)
(parts '()))
(receive (before after)
(break pred rest)
(let ((res (cons before parts)))
(if (null? after)
(reverse res)
(loop (cdr after) res))))))
(define (guix-name prefix name)
"Return a Guix package name for a given package name."
(string-append prefix (string-map (match-lambda
(#\_ #\-)
(#\. #\-)
(chr (char-downcase chr)))
name)))
(define (topological-sort nodes
node-dependencies
node-name)
"Perform a breadth-first traversal of the graph rooted at NODES, a list of
nodes, and return the list of nodes sorted in topological order. Call
NODE-DEPENDENCIES to obtain the dependencies of a node, and NODE-NAME to
obtain a node's uniquely identifying \"key\"."
(let loop ((nodes nodes)
(result '())
(visited (set)))
(match nodes
(()
result)
((head . tail)
(if (set-contains? visited (node-name head))
(loop tail result visited)
(let ((dependencies (node-dependencies head)))
(loop (append dependencies tail)
(cons head result)
(set-insert (node-name head) visited))))))))
(define* (recursive-import package-name
#:key repo->guix-package guix-name version repo
#:allow-other-keys)
"Return a list of package expressions for PACKAGE-NAME and all its
dependencies, sorted in topological order. For each package,
call (REPO->GUIX-PACKAGE NAME :KEYS version repo), which should return a
package expression and a list of dependencies; call (GUIX-NAME PACKAGE-NAME)
to obtain the Guix package name corresponding to the upstream name."
(define-record-type <node>
(make-node name version package dependencies)
node?
(name node-name)
(version node-version)
(package node-package)
(dependencies node-dependencies))
(define (exists? name version)
(not (null? (find-packages-by-name (guix-name name) version))))
(define (lookup-node name version)
(let* ((package dependencies (repo->guix-package name
#:version version
#:repo repo))
(normalized-deps (map (match-lambda
((name version) (list name version))
(name (list name #f))) dependencies)))
(make-node name version package normalized-deps)))
(map node-package
(topological-sort (list (lookup-node package-name version))
(lambda (node)
(map (lambda (name-version)
(apply lookup-node name-version))
(remove (lambda (name-version)
(apply exists? name-version))
(node-dependencies node))))
(lambda (node)
(string-append
(node-name node)
(or (node-version node) ""))))))
| null | https://raw.githubusercontent.com/dongcarl/guix/82543e9649da2da9a5285ede4ec4f718fd740fcb/guix/import/utils.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
/
The psfl, gfl1.0, nmap, repoze
Please update guix/licenses.scm when modifying
this list to avoid mismatches.
Use double spacing between sentences
call (GUIX-NAME PACKAGE-NAME) | Copyright © 2012 , 2013 , 2018 , 2019 , 2020 < >
Copyright © 2016 < >
Copyright © 2016 < >
Copyright © 2017 , 2019 , 2020 < >
Copyright © 2018 < >
Copyright © 2019 < >
Copyright © 2020 < 0x2b3bfa0+ >
Copyright © 2020 < >
Copyright © 2021 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (guix import utils)
#:use-module (guix base32)
#:use-module ((guix build download) #:prefix build:)
#:use-module ((gcrypt hash) #:hide (sha256))
#:use-module (guix http-client)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix utils)
#:use-module (guix packages)
#:use-module (guix discovery)
#:use-module (guix build-system)
#:use-module (guix gexp)
#:use-module (guix store)
#:use-module (guix download)
#:use-module (guix sets)
#:use-module (gnu packages)
#:use-module (ice-9 match)
#:use-module (ice-9 rdelim)
#:use-module (ice-9 receive)
#:use-module (ice-9 regex)
#:use-module (srfi srfi-1)
#:use-module (srfi srfi-9)
#:use-module (srfi srfi-11)
#:use-module (srfi srfi-26)
#:use-module (srfi srfi-71)
#:export (factorize-uri
flatten
url-fetch
guix-hash-url
package-names->package-inputs
maybe-inputs
maybe-native-inputs
maybe-propagated-inputs
package->definition
spdx-string->license
license->symbol
snake-case
beautify-description
alist->package
read-lines
chunk-lines
guix-name
recursive-import))
(define (factorize-uri uri version)
"Factorize URI, a package tarball URI as a string, such that any occurrences
of the string VERSION is replaced by the symbol 'version."
(let ((version-rx (make-regexp (regexp-quote version))))
(match (regexp-exec version-rx uri)
(#f
uri)
(_
(let ((indices (fold-matches version-rx uri
'((0))
(lambda (m result)
(match result
(((start) rest ...)
`((,(match:end m))
(,start . ,(match:start m))
,@rest)))))))
(fold (lambda (index result)
(match index
((start)
(cons (substring uri start)
result))
((start . end)
(cons* (substring uri start end)
'version
result))))
'()
indices))))))
(define (flatten lst)
"Return a list that recursively concatenates all sub-lists of LST."
(fold-right
(match-lambda*
(((sub-list ...) memo)
(append (flatten sub-list) memo))
((elem memo)
(cons elem memo)))
'() lst))
(define (url-fetch url file-name)
"Save the contents of URL to FILE-NAME. Return #f on failure."
(parameterize ((current-output-port (current-error-port)))
(build:url-fetch url file-name)))
(define (guix-hash-url filename)
"Return the hash of FILENAME in nix-base32 format."
(bytevector->nix-base32-string (file-sha256 filename)))
(define (spdx-string->license str)
"Convert STR, a SPDX formatted license identifier, to a license object.
Return #f if STR does not match any known identifiers."
licenses does n't have SPDX identifiers
(match str
("AGPL-1.0" 'license:agpl1)
("AGPL-3.0" 'license:agpl3)
("Apache-1.1" 'license:asl1.1)
("Apache-2.0" 'license:asl2.0)
("BSL-1.0" 'license:boost1.0)
("0BSD" 'license:bsd-0)
("BSD-2-Clause-FreeBSD" 'license:bsd-2)
("BSD-3-Clause" 'license:bsd-3)
("BSD-4-Clause" 'license:bsd-4)
("CC0-1.0" 'license:cc0)
("CC-BY-2.0" 'license:cc-by2.0)
("CC-BY-3.0" 'license:cc-by3.0)
("CC-BY-SA-2.0" 'license:cc-by-sa2.0)
("CC-BY-SA-3.0" 'license:cc-by-sa3.0)
("CC-BY-SA-4.0" 'license:cc-by-sa4.0)
("CDDL-1.0" 'license:cddl1.0)
("CECILL-C" 'license:cecill-c)
("Artistic-2.0" 'license:artistic2.0)
("ClArtistic" 'license:clarified-artistic)
("CPL-1.0" 'license:cpl1.0)
("EPL-1.0" 'license:epl1.0)
("MIT" 'license:expat)
("FTL" 'license:freetype)
("GFDL-1.1" 'license:fdl1.1+)
("GFDL-1.2" 'license:fdl1.2+)
("GFDL-1.3" 'license:fdl1.3+)
("Giftware" 'license:giftware)
("GPL-1.0" 'license:gpl1)
("GPL-1.0+" 'license:gpl1+)
("GPL-2.0" 'license:gpl2)
("GPL-2.0+" 'license:gpl2+)
("GPL-3.0" 'license:gpl3)
("GPL-3.0+" 'license:gpl3+)
("ISC" 'license:isc)
("IJG" 'license:ijg)
("Imlib2" 'license:imlib2)
("IPA" 'license:ipa)
("IPL-1.0" 'license:ibmpl1.0)
("LAL-1.3" 'license:lal1.3)
("LGPL-2.0" 'license:lgpl2.0)
("LGPL-2.0+" 'license:lgpl2.0+)
("LGPL-2.1" 'license:lgpl2.1)
("LGPL-2.1+" 'license:lgpl2.1+)
("LGPL-3.0" 'license:lgpl3)
("LGPL-3.0+" 'license:lgpl3+)
("MPL-1.0" 'license:mpl1.0)
("MPL-1.1" 'license:mpl1.1)
("MPL-2.0" 'license:mpl2.0)
("MS-PL" 'license:ms-pl)
("NCSA" 'license:ncsa)
("OpenSSL" 'license:openssl)
("OLDAP-2.8" 'license:openldap2.8)
("CUA-OPL-1.0" 'license:cua-opl1.0)
("QPL-1.0" 'license:qpl)
("Ruby" 'license:ruby)
("SGI-B-2.0" 'license:sgifreeb2.0)
("OFL-1.1" 'license:silofl1.1)
("Sleepycat" 'license:sleepycat)
("TCL" 'license:tcl/tk)
("Unlicense" 'license:unlicense)
("Vim" 'license:vim)
("X11" 'license:x11)
("ZPL-2.1" 'license:zpl2.1)
("Zlib" 'license:zlib)
(_ #f)))
(define (license->symbol license)
"Convert license to a symbol representing the variable the object is bound
to in the (guix licenses) module, or #f if there is no such known license."
(define licenses
(module-map (lambda (sym var) `(,(variable-ref var) . ,sym))
(resolve-interface '(guix licenses) #:prefix 'license:)))
(assoc-ref licenses license))
(define (snake-case str)
"Return a downcased version of the string STR where underscores are replaced
with dashes."
(string-join (string-split (string-downcase str) #\_) "-"))
(define (beautify-description description)
"Improve the package DESCRIPTION by turning a beginning sentence fragment
into a proper sentence and by using two spaces between sentences."
(let ((cleaned (cond
((string-prefix? "A " description)
(string-append "This package provides a"
(substring description 1)))
((string-prefix? "Provides " description)
(string-append "This package provides"
(substring description
(string-length "Provides"))))
((string-prefix? "Functions " description)
(string-append "This package provides functions"
(substring description
(string-length "Functions"))))
(else description))))
(regexp-substitute/global #f "\\. \\b"
cleaned 'pre ". " 'post)))
(define* (package-names->package-inputs names #:optional (output #f))
"Given a list of PACKAGE-NAMES or (PACKAGE-NAME VERSION) pairs, and an
optional OUTPUT, tries to generate a quoted list of inputs, as suitable to
use in an 'inputs' field of a package definition."
(define (make-input input version)
(cons* input (list 'unquote (string->symbol
(if version
(string-append input "-" version)
input)))
(or (and output (list output))
'())))
(map (match-lambda
((input version) (make-input input version))
(input (make-input input #f)))
names))
(define* (maybe-inputs package-names #:optional (output #f)
#:key (type #f))
"Given a list of PACKAGE-NAMES, tries to generate the 'inputs' field of a
either the 'native or 'propagated symbols are accepted. Left unspecified, the
snippet generated is for regular inputs."
(let ((field-name (match type
('native 'native-inputs)
('propagated 'propagated-inputs)
(_ 'inputs))))
(match (package-names->package-inputs package-names output)
(()
'())
((package-inputs ...)
`((,field-name (,'quasiquote ,package-inputs)))))))
(define* (maybe-native-inputs package-names #:optional (output #f))
"Same as MAYBE-INPUTS, but for native inputs."
(maybe-inputs package-names output #:type 'native))
(define* (maybe-propagated-inputs package-names #:optional (output #f))
"Same as MAYBE-INPUTS, but for propagated inputs."
(maybe-inputs package-names output #:type 'propagated))
(define* (package->definition guix-package #:optional append-version?/string)
"If APPEND-VERSION?/STRING is #t, append the package's major+minor version.
If it is the symbol 'full, append the package's complete version. If
APPEND-VERSION?/string is a string, append this string."
(match guix-package
((or
('package ('name name) ('version version) . rest)
('let _ ('package ('name name) ('version version) . rest)))
`(define-public ,(string->symbol
(cond
((string? append-version?/string)
(string-append name "-" append-version?/string))
((eq? append-version?/string #t)
(string-append name "-" (version-major+minor version)))
((eq? 'full append-version?/string)
(string-append name "-" version))
(else name)))
,guix-package))))
(define (build-system-modules)
(all-modules (map (lambda (entry)
`(,entry . "guix/build-system"))
%load-path)))
(define (lookup-build-system-by-name name)
"Return a <build-system> value for the symbol NAME, representing the name of
the build system."
(fold-module-public-variables (lambda (obj result)
(if (and (build-system? obj)
(eq? name (build-system-name obj)))
obj result))
#f
(build-system-modules)))
(define (specs->package-lists specs)
"Convert each string in the SPECS list to a list of a package label and a
package value."
(map (lambda (spec)
(let-values (((pkg out) (specification->package+output spec)))
(match out
("out" (list (package-name pkg) pkg))
(_ (list (package-name pkg) pkg out)))))
specs))
(define (source-spec->object source)
"Generate an <origin> object from a SOURCE specification. The SOURCE can
either be a simple URL string, #F, or an alist containing entries for each of
the expected fields of an <origin> object."
(match source
((? string? source-url)
(let ((tarball (with-store store (download-to-store store source-url))))
(origin
(method url-fetch)
(uri source-url)
(sha256 (base32 (guix-hash-url tarball))))))
(#f #f)
(orig (let ((sha (match (assoc-ref orig "sha256")
((("base32" . value))
(base32 value))
(_ #f))))
(origin
(method (match (assoc-ref orig "method")
("url-fetch" (@ (guix download) url-fetch))
("git-fetch" (@ (guix git-download) git-fetch))
("svn-fetch" (@ (guix svn-download) svn-fetch))
("hg-fetch" (@ (guix hg-download) hg-fetch))
(_ #f)))
(uri (assoc-ref orig "uri"))
(sha256 sha))))))
(define* (alist->package meta #:optional (known-inputs '()))
"Return a package value generated from the alist META. If the list of
strings KNOWN-INPUTS is provided, do not treat the mentioned inputs as
specifications to look up and replace them with plain symbols instead."
(define (process-inputs which)
(let-values (((regular known)
(lset-diff+intersection
string=?
(vector->list (or (assoc-ref meta which) #()))
known-inputs)))
(append (specs->package-lists regular)
(map string->symbol known))))
(define (process-arguments arguments)
(append-map (match-lambda
((key . value)
(list (symbol->keyword (string->symbol key)) value)))
arguments))
(package
(name (assoc-ref meta "name"))
(version (assoc-ref meta "version"))
(source (source-spec->object (assoc-ref meta "source")))
(build-system
(lookup-build-system-by-name
(string->symbol (assoc-ref meta "build-system"))))
(arguments
(or (and=> (assoc-ref meta "arguments")
process-arguments)
'()))
(native-inputs (process-inputs "native-inputs"))
(inputs (process-inputs "inputs"))
(propagated-inputs (process-inputs "propagated-inputs"))
(home-page
(assoc-ref meta "home-page"))
(synopsis
(assoc-ref meta "synopsis"))
(description
(assoc-ref meta "description"))
(license
(match (assoc-ref meta "license")
(#f #f)
(l
(or (false-if-exception
(module-ref (resolve-interface '(guix licenses))
(string->symbol l)))
(false-if-exception
(module-ref (resolve-interface '(guix licenses) #:prefix 'license:)
(spdx-string->license l)))
(license:fsdg-compatible l)))))))
(define* (read-lines #:optional (port (current-input-port)))
"Read lines from PORT and return them as a list."
(let loop ((line (read-line port))
(lines '()))
(if (eof-object? line)
(reverse lines)
(loop (read-line port)
(cons line lines)))))
(define* (chunk-lines lines #:optional (pred string-null?))
"Return a list of chunks, each of which is a list of lines. The chunks are
separated by PRED."
(let loop ((rest lines)
(parts '()))
(receive (before after)
(break pred rest)
(let ((res (cons before parts)))
(if (null? after)
(reverse res)
(loop (cdr after) res))))))
(define (guix-name prefix name)
"Return a Guix package name for a given package name."
(string-append prefix (string-map (match-lambda
(#\_ #\-)
(#\. #\-)
(chr (char-downcase chr)))
name)))
(define (topological-sort nodes
node-dependencies
node-name)
"Perform a breadth-first traversal of the graph rooted at NODES, a list of
nodes, and return the list of nodes sorted in topological order. Call
NODE-DEPENDENCIES to obtain the dependencies of a node, and NODE-NAME to
obtain a node's uniquely identifying \"key\"."
(let loop ((nodes nodes)
(result '())
(visited (set)))
(match nodes
(()
result)
((head . tail)
(if (set-contains? visited (node-name head))
(loop tail result visited)
(let ((dependencies (node-dependencies head)))
(loop (append dependencies tail)
(cons head result)
(set-insert (node-name head) visited))))))))
(define* (recursive-import package-name
#:key repo->guix-package guix-name version repo
#:allow-other-keys)
"Return a list of package expressions for PACKAGE-NAME and all its
dependencies, sorted in topological order. For each package,
call (REPO->GUIX-PACKAGE NAME :KEYS version repo), which should return a
to obtain the Guix package name corresponding to the upstream name."
(define-record-type <node>
(make-node name version package dependencies)
node?
(name node-name)
(version node-version)
(package node-package)
(dependencies node-dependencies))
(define (exists? name version)
(not (null? (find-packages-by-name (guix-name name) version))))
(define (lookup-node name version)
(let* ((package dependencies (repo->guix-package name
#:version version
#:repo repo))
(normalized-deps (map (match-lambda
((name version) (list name version))
(name (list name #f))) dependencies)))
(make-node name version package normalized-deps)))
(map node-package
(topological-sort (list (lookup-node package-name version))
(lambda (node)
(map (lambda (name-version)
(apply lookup-node name-version))
(remove (lambda (name-version)
(apply exists? name-version))
(node-dependencies node))))
(lambda (node)
(string-append
(node-name node)
(or (node-version node) ""))))))
|
19b5651c2f41fc55f4e73406e2e88ca4e787f3e3b5140f4de923eb827f8a3125 | BillHallahan/G2 | Test.hs | # LANGUAGE CPP #
{-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE FlexibleContexts #
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Test.Tasty
import Test.Tasty.HUnit ( testCase, assertBool, assertFailure )
import Test.Tasty.Options
import Test.Tasty.Runners
import G2.Config
import G2.Interface
import G2.Language as G2
import Control.Exception
import Data.Maybe
import Data.Proxy
import Data.Tagged
import qualified Data.Text as T
import System.Environment
import System.FilePath
import Type.Reflection (Typeable)
import PeanoTest
import HigherOrderMathTest
import GetNthTest
import DefuncTest
import CaseTest
import Expr
import Simplifications
import Typing
import UnionFindTests
import UFMapTests
import RewriteVerify.RewriteVerifyTest
import G2.Translation
import InputOutputTest
import Reqs
import TestUtils
import qualified Data.Map.Lazy as M
-- Run with no arguments for default test cases.
-- All default test cases should pass.
-- Run with flag '--test-options="todo yes"' to run test cases corresponding to to-be-fixed bugs.
-- | Entry point.  With no arguments the default (passing) suite runs;
-- passing @--test-options="todo yes"@ selects the to-be-fixed bug suite.
main :: IO ()
main = do
    args <- getArgs
    let runTodo = "--todo" `elem` args
        -- Register the ToDo option alongside Tasty's stock ingredients so
        -- "todo" is recognized in --test-options.
        ingredients =
            defaultIngredients ++
                [TestReporter
                    [ Option (Proxy :: Proxy ToDo) ]
                    (\_ _ -> Just (\_ -> return (\_ -> return False)))
                ]
    defaultMainWithIngredients ingredients (if runTodo then todoTests else tests)
-- | Every test group that is expected to pass by default.
tests :: TestTree
tests = testGroup "Tests"
        [ sampleTests
        , testFileTests
        , baseTests
        , primTests
        , exprTests
        , typingTests
        , simplificationTests
        , ufMapQuickcheck
        , unionFindQuickcheck
        , rewriteTests
        ]
-- | A Tasty timeout value.
-- NOTE(review): Tasty's 'mkTimeout' takes its argument in microseconds, so
-- this is 1 us rather than 1 s -- confirm the intended unit.  It does not
-- appear to be referenced elsewhere in this module.
timeout :: Timeout
timeout = mkTimeout 1
-- Test based on examples that are also good for demos
-- | Tests based on examples that are also good for demos.
-- (Fixed here: a disabled test block had lost its @--@ comment markers,
-- which made the module fail to parse; the markers are restored below.)
sampleTests :: TestTree
sampleTests = testGroup "Samples"
    [
      checkExprAssert "tests/Samples/Peano.hs" 900 (Just "equalsFour") "add"
        [RForAll $ not . peano_4_out, AtLeast 10]
    , checkExprAssumeAssert "tests/Samples/Peano.hs" 900 (Just "fstIsEvenAddToFour") (Just "fstIsTwo") "add"
        [RExists peano_0_4, RExists peano_4_0, Exactly 2]
    , checkExprAssumeAssert "tests/Samples/Peano.hs" 1200 (Just "multiplyToFour") (Just "equalsFour") "add"
        [RExists peano_1_4_5, RExists peano_4_1_5, Exactly 2]
    , checkExprAssumeAssert "tests/Samples/Peano.hs" 750 (Just "eqEachOtherAndAddTo4") Nothing "add"
        [RForAll peano_2_2, Exactly 1]
    , checkExprAssumeAssert "tests/Samples/Peano.hs" 600 (Just "equalsFour") Nothing "add"
        [ RExists peano_0_4
        , RExists peano_1_3
        , RExists peano_2_2
        , RExists peano_3_1
        , RExists peano_4_0
        , Exactly 5]
    , checkExprAssumeAssert "tests/Samples/Peano.hs" 750 (Just "equalsFour") Nothing "multiply"
        [ RExists peano_1_4
        , RExists peano_2_2
        , RExists peano_4_1
        , Exactly 3]
    , checkExprAssume "tests/Samples/HigherOrderMath.hs" 800 (Just "isTrue0") "notNegativeAt0NegativeAt1"
        [RExists negativeSquareRes, AtLeast 1]
    , checkExprAssume "tests/Samples/HigherOrderMath.hs" 600 (Just "isTrue1") "fixed"
        [ RExists abs2NonNeg
        , RExists squareRes
        , RExists fourthPowerRes
        , RForAll allabs2NonNeg
        , AtLeast 4]
    , checkExpr "tests/Samples/HigherOrderMath.hs" 600 "fixed" [ RExists abs2NonNeg
                                                               , RExists squareRes
                                                               , RExists fourthPowerRes
                                                               , AtLeast 4]
    , checkExprAssumeAssert "tests/Samples/HigherOrderMath.hs" 600 (Just "isTrue2") Nothing "sameFloatArgLarger"
        [ RExists addRes
        , RExists subRes
        , AtLeast 2]
    , checkExpr "tests/Samples/HigherOrderMath.hs" 600 "functionSatisfies" [RExists functionSatisfiesRes, AtLeast 1]
    , checkExpr "tests/Samples/HigherOrderMath.hs" 1000 "approxSqrt" [AtLeast 2]
    -- The below test fails because Z3 returns unknown.
    -- , checkExprAssume "tests/Samples/HigherOrderMath.hs" 1200 (Just "isTrue2") "sameFloatArgLarger" 2
    --     [ RExists approxSqrtRes
    --     , RExists pythagoreanRes
    --     , AtLeast 2]
    , checkExprAssumeAssert "tests/Samples/McCarthy91.hs" 1000 (Just "lessThan91") Nothing "mccarthy"
        [ RForAll (\[App _ (Lit (LitInt x)), _] -> x <= 100)
        , AtLeast 1]
    , checkExprAssumeAssert "tests/Samples/McCarthy91.hs" 400 (Just "greaterThan10Less") Nothing "mccarthy"
        [ RForAll (\[App _ (Lit (LitInt x)), _] -> x > 100)
        , AtLeast 1]
    , checkExprAssumeAssert "tests/Samples/McCarthy91.hs" 1000 (Just "lessThanNot91") Nothing "mccarthy" [Exactly 0]
    , checkExprAssumeAssert "tests/Samples/McCarthy91.hs" 1000 (Just "greaterThanNot10Less") Nothing "mccarthy"
        [Exactly 0]
    , checkInputOutput "tests/Samples/GetNth.hs" "getNth" 600 [AtLeast 10]
    , checkInputOutputs "tests/Samples/GetNthPoly.hs" [ ("getNthInt", 600, [AtLeast 10])
                                                      , ("getNthX", 600, [AtLeast 10])
                                                      , ("getNthPeano", 600, [AtLeast 10])
                                                      , ("getNthCListInt", 600, [AtLeast 10])
                                                      , ("getNthCListX", 600, [AtLeast 10])
                                                      , ("getNth", 1000, [AtLeast 10])
                                                      , ("cfmapInt", 1000, [AtLeast 10])
                                                      , ("cfmapIntX", 1600, [AtLeast 10])
                                                      , ("cfmapIntCListInt", 600, [AtLeast 2]) ]
    , checkExprReaches "tests/Samples/GetNthErr.hs" 800 Nothing Nothing (Just "error") "getNth"
        [AtLeast 8, RForAll errors]
    , checkInputOutputs "tests/Samples/FoldlUses.hs" [ ("sum_foldl", 1600, [AtLeast 3])
                                                     , ("dotProd", 1000, [AtLeast 3]) ]
    , checkInputOutputs "tests/Samples/FoldlUsesPoly.hs" [ ("sumMinAndMax", 600, [AtLeast 10])
                                                         , ("maxes", 400, [AtLeast 10])
                                                         , ("switchInt", 400, [AtLeast 1])
                                                         , ("getInInt", 400, [AtLeast 1])
                                                         , ("switchP", 400, [AtLeast 1]) ]
    ]
-- Tests that are intended to ensure a specific feature works, but that are not neccessarily interesting beyond that
-- | Tests that are intended to ensure a specific feature works, but that
-- are not necessarily interesting beyond that.
-- (Fixed here: disabled trailing test entries had lost their @--@ comment
-- markers, which made the module fail to parse; the markers are restored.)
testFileTests :: TestTree
testFileTests = testGroup "TestFiles"
    [
      checkExpr "tests/TestFiles/IfTest.hs" 400 "f"
        [ RForAll (\[App _ (Lit (LitInt x)), App _ (Lit (LitInt y)), App _ (Lit (LitInt r))] ->
            if x == y then r == x + y else r == y)
        , AtLeast 2]
    , checkExprAssert "tests/TestFiles/AssumeAssert.hs" 400 (Just "assertGt5") "outShouldBeGt5" [Exactly 0]
    , checkExprAssert "tests/TestFiles/AssumeAssert.hs" 400 (Just "assertGt5") "outShouldBeGe5" [AtLeast 1]
    , checkExprAssumeAssert "tests/TestFiles/AssumeAssert.hs" 400
        (Just "assumeGt5") (Just "assertGt5") "outShouldBeGt5" [Exactly 0]
    , checkExprAssumeAssert "tests/TestFiles/AssumeAssert.hs" 400
        (Just "assumeGt5") (Just "assertGt5") "outShouldBeGe5" [Exactly 0]
    , checkExpr "tests/TestFiles/CheckSq.hs" 400 "checkSq"
        [AtLeast 2, RExists (\[x, _] -> isInt x (\x' -> x' == 3 || x' == -3))]
    , checkExpr "tests/TestFiles/Defunc1.hs" 400 "f"
        [RExists defunc1Add1, RExists defunc1Multiply2, RExists defuncB, AtLeast 3]
    , checkInputOutputs "tests/TestFiles/Defunc1.hs" [ ("x", 400, [AtLeast 1])
                                                     , ("mapYInt", 600, [AtLeast 1])
                                                     , ("makeMoney", 600, [AtLeast 2])
                                                     , ("compZZ", 1600, [AtLeast 2])
                                                     , ("compZZ2", 1600, [AtLeast 2]) ]
    , checkInputOutput "tests/TestFiles/Defunc2.hs" "funcMap" 400 [AtLeast 30]
    , checkExpr "tests/TestFiles/MultCase.hs" 400 "f"
        [ RExists (\[App _ (Lit (LitInt x)), y] -> x == 2 && getBoolB y id)
        , RExists (\[App _ (Lit (LitInt x)), y] -> x == 1 && getBoolB y id)
        , RExists (\[App _ (Lit (LitInt x)), y] -> x /= 2 && x /= 1 && getBoolB y not)]
    , checkExprAssumeAssert "tests/TestFiles/LetFloating/LetFloating.hs" 400 (Just "output6") Nothing "f"
        [AtLeast 1, RExists (\[App _ (Lit (LitInt x)), _] -> x == 6)]
    , checkExprAssumeAssert "tests/TestFiles/LetFloating/LetFloating2.hs" 400 (Just "output16") Nothing "f"
        [AtLeast 1, RExists (\[App _ (Lit (LitInt x)), _] -> x == 15)]
    , checkExprAssumeAssert "tests/TestFiles/LetFloating/LetFloating3.hs" 600 (Just "output32") Nothing "f"
        [AtLeast 1, RExists (\[App _ (Lit (LitInt x)), _] -> x == 4)]
    , checkExprAssumeAssert "tests/TestFiles/LetFloating/LetFloating4.hs" 400 (Just "output12") Nothing "f"
        [AtLeast 1, RExists (\[App _ (Lit (LitInt x)), _] -> x == 11)]
    , checkExprAssumeAssert "tests/TestFiles/LetFloating/LetFloating5.hs" 400 (Just "output19") Nothing "f"
        [AtLeast 1, RForAll (\[App _ (Lit (LitInt x)), App _ (Lit (LitInt y)), _] -> x + y + 1 == 19)]
    , checkExprAssumeAssert "tests/TestFiles/LetFloating/LetFloating6.hs" 400 (Just "output32") Nothing "f"
        [AtLeast 1, RExists (\[App _ (Lit (LitInt x)), _] -> x == 25)]
    , checkExpr "tests/TestFiles/TypeClass/TypeClass1.hs" 400 "f" [RExists (\[x, y] -> x == y), Exactly 1]
    , checkExpr "tests/TestFiles/TypeClass/TypeClass2.hs" 400 "f" [RExists (\[x, y] -> x == y), Exactly 1]
    , checkExpr "tests/TestFiles/TypeClass/TypeClass3.hs" 400 "f"
        [RExists (\[x, y] -> getIntB x $ \x' -> getIntB y $ \y' -> x' + 8 == y'), Exactly 1]
    , checkExpr "tests/TestFiles/TypeClass/TypeClass4.hs" 1000 "f" [AtLeast 1]
    , checkExprAssumeAssert "tests/TestFiles/TypeClass/HKTypeClass1.hs" 400 (Just "largeJ") Nothing "extractJ"
        [RForAll (\[x, ly@(App _ (Lit (LitInt y)))] -> appNthArgIs x (ly ==) 2 && y > 100), Exactly 1]
    , checkExprAssumeAssert "tests/TestFiles/TypeClass/HKTypeClass1.hs" 400 (Just "largeE") Nothing "extractE"
        [RForAll (\[x, ly@(App _ (Lit (LitInt y)))] -> appNthArgIs x (ly ==) 4 && y > 100), Exactly 1]
    , checkExpr "tests/TestFiles/TypeClass/HKTypeClass1.hs" 400 "changeJ"
        [RForAll (\[_, x, y] -> dcInAppHasName "J" x 2 && (dcInAppHasName "J" y 2 || isError y)), AtLeast 2]
    , checkExpr "tests/TestFiles/Case1.hs" 400 "f"
        [ RExists (\[App _ (Lit (LitInt x)), y] -> x < 0 && dcHasName "A" y)
        , RExists (\[App _ (Lit (LitInt x)), y] -> x >= 0 && dcHasName "C" y), Exactly 2]
    , checkExpr "tests/TestFiles/Case2.hs" 400 "f"
        [ RExists exists1
        , RExists exists2
        , RExists exists3
        , RExists exists4
        , AtLeast 4]
    , checkExprAssumeAssert "tests/TestFiles/Guards.hs" 400 (Just "g") Nothing "f"
        [AtLeast 1, RExists (\[dc, _] -> getBoolB dc id)]
    , checkExprAssumeAssert "tests/TestFiles/Infinite.hs" 400 (Just "g") Nothing "f"
        [AtLeast 1, RExists (\[App _ (Lit (LitInt x)), _] -> x <= 100 && x /= 80)]
    , checkExpr "tests/TestFiles/Strictness1.hs" 400 "f"
        [AtLeast 1, RExists (\[(App x (App _ (Lit (LitInt y))))] -> dcHasName "A" x && y == 9)]
    , checkExpr "tests/TestFiles/Where1.hs" 400 "f"
        [ RExists (\[App _ (Lit (LitInt x)), App _ (Lit (LitInt y))] -> x == 4 && y == 1)
        , RExists (\[App _ (Lit (LitInt x)), App _ (Lit (LitInt y))] -> x /= 4 && y == 1) ]
    , checkInputOutputs "tests/TestFiles/Error/Error1.hs" [ ("f", 400, [AtLeast 1])
                                                          , ("g", 400, [AtLeast 1])
                                                          , ("f", 400, [AtLeast 1])
                                                          , ("f", 400, [AtLeast 1])
                                                          , ("g", 400, [AtLeast 1]) ]
    , checkInputOutputs "tests/TestFiles/Error/Undefined1.hs" [ ("undefined1", 400, [AtLeast 1])
                                                              , ("undefined2", 400, [AtLeast 1])]
    , checkInputOutput "tests/TestFiles/Error/IrrefutError.hs" "f" 400 [AtLeast 2]
    , checkInputOutputs "tests/TestFiles/BadNames1.hs" [ ("abs'", 400, [Exactly 2])
                                                       , ("xswitch", 400, [AtLeast 10]) ]
    , checkExpr "tests/TestFiles/PolyDataTy1.hs" 400 "f"
        [Exactly 2, RExists (\[x, _, y] -> x == y), RExists (\[_, App _ x, y] -> x == y)]
    , checkExpr "tests/TestFiles/PolyDataTy1.hs" 400 "getFstXIntInt"
        [AtLeast 2, RExists (\[x, y] -> isApp x && isError y)]
    , checkExpr "tests/TestFiles/PolyDataTy1.hs" 400 "sum" [AtLeast 3, RExists (\[x, y] -> isApp x && isError y)]
    , checkExprAssumeAssert "tests/TestFiles/MultiSplit.hs" 1000 (Just "equals1") Nothing "f" [Exactly 0]
    , checkExpr "tests/TestFiles/MatchesFunc1.hs" 400 "f"
        [RExists (\[x, y] -> getIntB x $ \x' -> getIntB y $ \y' -> y' == 6 + x'), AtLeast 1]
    , checkExpr "tests/TestFiles/RecordFields1.hs" 400 "f"
        [ RExists
            (\[x, y] -> appNthArgIs x notCast 0
                     && appNthArgIs x (\x' -> getIntB x' $ \x'' -> getIntB y $ \y' -> x'' + 1 == y') 1)
        , Exactly 1]
    , checkExpr "tests/TestFiles/RecordFields1.hs" 400 "fCall" [RExists (\[x] -> isInt x ((==) 35)), Exactly 1]
    , checkExpr "tests/TestFiles/RecordFields1.hs" 400 "g"
        [ RExists (\[x, y] -> appNthArgIs x (dcHasName "A") 2 && appNthArgIs y (dcHasName "B") 2)
        , RExists (\[x, y] -> appNthArgIs x (dcHasName "B") 2 && appNthArgIs y (dcHasName "C") 2)
        , RExists (\[x, y] -> appNthArgIs x (dcHasName "C") 2 && appNthArgIs y (dcHasName "A") 2)
        , Exactly 3]
    , checkInputOutputs "tests/TestFiles/Deriving/DerivingSimple.hs" [ ("eq", 400, [AtLeast 2])
                                                                     , ("lt", 400, [AtLeast 2]) ]
    , checkInputOutputs "tests/TestFiles/Deriving/DerivingComp.hs" [ ("eq", 800, [AtLeast 2])
                                                                   , ("lt", 800, [AtLeast 2]) ]
    , checkInputOutputs "tests/TestFiles/Coercions/Age.hs" [ ("born", 400, [Exactly 1])
                                                           , ("yearPasses", 400, [AtLeast 1])
                                                           , ("age", 400, [AtLeast 1])
                                                           , ("diffAge", 400, [AtLeast 1])
                                                           , ("yearBefore", 400, [AtLeast 5])]
    , checkInputOutputs "tests/TestFiles/Coercions/NewType1.hs" [ ("add1N4", 400, [Exactly 1])
                                                                , ("f", 400, [Exactly 1])
                                                                , ("g", 400, [Exactly 1])
                                                                , ("mapWInt", 400, [AtLeast 2])
                                                                , ("appLeftFloat", 400, [AtLeast 2])
                                                                , ("getLIntFloat", 400, [AtLeast 2])
                                                                , ("getRIntFloat", 400, [AtLeast 2])
                                                                , ("getCIntFloatDouble", 400, [AtLeast 2])
                                                                , ("getRIntFloatX'", 400, [AtLeast 2])]
    , checkInputOutput "tests/TestFiles/Coercions/BadCoerce.hs" "f" 400 [AtLeast 1]
    , checkInputOutput "tests/TestFiles/Expr.hs" "leadingLams" 400 [AtLeast 5]
    , checkExprAssume "tests/TestFiles/Subseq.hs" 1200 (Just "assume") "subseqTest" [AtLeast 1]
    , checkInputOutputs "tests/TestFiles/Strings/Strings1.hs" [ ("con", 300, [AtLeast 10])
                                                              , ("eq", 700, [AtLeast 10])
                                                              , ("eqGt1", 700, [AtLeast 10])
                                                              , ("capABC", 150, [AtLeast 10])
                                                              , ("appendEq", 500, [AtLeast 5]) ]
    , checkExpr "tests/TestFiles/Strings/Strings1.hs" 1000 "exclaimEq"
        [AtLeast 5, RExists (\[_, _, r] -> dcHasName "True" r)]
    , checkExpr "tests/TestFiles/Sets/SetInsert.hs" 700 "prop" [AtLeast 3]
    , checkInputOutputs "tests/TestFiles/BadDC.hs" [ ("f", 400, [AtLeast 5])
                                                   , ("g", 400, [AtLeast 3]) ]
    -- , checkInputOutput "tests/TestFiles/BadBool.hs" "BadBool" "f" 1400 [AtLeast 1]
    -- , "tests/TestFiles/Coercions/GADT.hs" 400 Nothing Nothing "g" 2
    --     [ AtLeast 2
    --     , RExists (\[x, y] -> x == Lit (LitInt 0) && y == App (Data (PrimCon I)) (Lit (LitInt 0)))
    --     , RExists (\[x, _] -> x /= Lit (LitInt 0))]
    -- , "tests/TestFiles/HigherOrderList.hs" 400 Nothing Nothing "g" [AtLeast 10]
    ]
-- | Tests exercising G2 on functions from the base libraries
-- (lists, tuples, Maybe).
baseTests :: TestTree
baseTests = testGroup "Base"
    [
      checkInputOutput "tests/Samples/Peano.hs" "add" 400 [AtLeast 4]
    , checkInputOutputs "tests/BaseTests/ListTests.hs" [ ("test", 1000, [AtLeast 1])
                                                       , ("maxMap", 1000, [AtLeast 4])
                                                       , ("minTest", 1000, [AtLeast 2])
                                                       , ("foldrTest2", 1000, [AtLeast 1]) ]
    , checkInputOutput "tests/BaseTests/Tuples.hs" "addTupleElems" 1000 [AtLeast 2]
    , checkInputOutputs "tests/BaseTests/MaybeTest.hs" [ ("headMaybeInt", 1000, [AtLeast 2])
                                                       , ("sumN", 1000, [AtLeast 6])
                                                       , ("lengthN", 1000, [AtLeast 6]) ]
    , checkInputOutput "tests/BaseTests/Other.hs" "check4VeryEasy2" 600 [AtLeast 1]
    ]
-- | Tests exercising G2's handling of primitive operations:
-- quot/rem/div, numeric conversions, dataToTag#/tagToEnum#, and Chars.
primTests :: TestTree
primTests = testGroup "Prims"
    [
      checkInputOutputs "tests/Prim/Prim2.hs" [ ("quotI1", 1000, [AtLeast 4])
                                              , ("quotI2", 1000, [AtLeast 4])
                                              , ("remI1", 1000, [AtLeast 4])
                                              , ("remI2", 1000, [AtLeast 3])
                                              , ("remI3", 1000, [AtLeast 1])
                                              , ("remI4", 1000, [AtLeast 1])
                                              , ("p1List", 300000, [AtLeast 1])
                                              , ("p2List", 700000, [AtLeast 1])
                                              , ("integerToFloatList", 150000, [AtLeast 1]) ]
    , checkInputOutputs "tests/Prim/Prim3.hs" [ ("int2FloatTest", 1000, [AtLeast 1])
                                              , ("int2DoubleTest", 1000, [AtLeast 1]) ]
    , checkInputOutputs "tests/Prim/Prim4.hs" [ ("divIntTest", 1500, [AtLeast 4])
                                              , ("divIntegerTest", 1500, [AtLeast 1])
                                              , ("divIntegerTest2", 1500, [AtLeast 4])
                                              , ("divFloatTest", 1500, [AtLeast 1]) ]
    , checkInputOutputs "tests/Prim/DataTag.hs" [ ("dataToTag1", 1000, [Exactly 1])
                                                , ("dataToTag2", 1000, [AtLeast 1])
                                                , ("dataToTag3", 1000, [Exactly 5])
                                                , ("tagToEnum1", 1000, [AtLeast 1])
                                                , ("tagToEnum3", 1000, [AtLeast 4])
                                                , ("tagToEnum4", 1000, [AtLeast 4])
                                                , ("tagToEnum5", 1000, [Exactly 1])
                                                , ("tagToEnum6", 1000, [AtLeast 4]) ]
    -- tagToEnum2 is expected to reduce to an error value.
    , checkExpr "tests/Prim/DataTag.hs" 1000 "tagToEnum2" [Exactly 1, RForAll (\[r] -> isError r)]
    , checkInputOutputs "tests/Prim/Chr.hs" [ ("lowerLetters", 9000, [AtLeast 1])
                                            , ("allLetters", 9000, [AtLeast 1])
                                            , ("printBasedOnChr", 1500, [AtLeast 7])
                                            , ("printBasedOnOrd", 1500, [AtLeast 7]) ]
    ]
-- To Do Tests
--------------
-- | Tests for known, yet-to-be-fixed bugs.  Selected at run time with
-- @--test-options="todo yes"@; these are expected to fail today.
todoTests :: TestTree
todoTests = testGroup "To Do"
    [
      checkExpr "tests/TestFiles/TypeClass/TypeClass5.hs" 800 "run" [AtLeast 1]
    , checkExpr "tests/TestFiles/TypeClass/TypeClass5.hs" 800 "run2" [AtLeast 0]
    , checkInputOutput "tests/Prim/Prim2.hs" "sqrtList" 10000 [AtLeast 1]
    , checkInputOutputs "tests/BaseTests/MaybeTest.hs" [ ("average", 2000, [AtLeast 6])
                                                       , ("averageF", 2000, [AtLeast 6])
                                                       , ("maybeAvg", 200, [AtLeast 6])
                                                       ]
    , checkInputOutputs "tests/Prim/Prim3.hs" [ ("float2IntTest", 1000, [AtLeast 1])
                                              , ("double2IntTest", 1000, [AtLeast 1])]
    ]
-- | Selects which suite 'main' runs: 'RunMain' is the default, passing
-- suite; 'RunToDo' runs the tests for known, yet-to-be-fixed bugs.
data ToDo = RunMain
          | RunToDo
          deriving (Eq, Typeable)
-- | Tasty option used to select the todo suite, e.g.
-- @--test-options="todo yes"@.  Any word equal to "y" or "yes" enables it.
instance IsOption ToDo where
    defaultValue = RunMain
    parseValue s
        | any (`elem` ["y", "yes"]) (words s) = Just RunToDo
        | otherwise = Nothing
    optionName = Tagged "todo"
    optionHelp = Tagged "Specifies whether to run the main, passing tests, or the todo tests."
-- Generic helpers for tests
----------------------------
-- | Symbolically execute @entry@ with no assume, assert, or reaches
-- function, then check the results against the requirements.
checkExpr :: String -> Int -> String -> [Reqs ([Expr] -> Bool)] -> TestTree
checkExpr src stps = checkExprReaches src stps Nothing Nothing Nothing
-- | Like 'checkExpr', but with an optional assume function constraining
-- the inputs considered.
checkExprAssume :: String -> Int -> Maybe String -> String -> [Reqs ([Expr] -> Bool)] -> TestTree
checkExprAssume src stps m_assume = checkExprReaches src stps m_assume Nothing Nothing
-- | Like 'checkExpr', but with an optional assert function that the
-- outputs must violate to be reported.
checkExprAssert :: String -> Int -> Maybe String -> String -> [Reqs ([Expr] -> Bool)] -> TestTree
checkExprAssert src stps m_assert = checkExprReaches src stps Nothing m_assert Nothing
-- | Like 'checkExpr', but with both an optional assume and an optional
-- assert function.
checkExprAssumeAssert :: String
                      -> Int
                      -> Maybe String
                      -> Maybe String
                      -> String
                      -> [Reqs ([Expr] -> Bool)]
                      -> TestTree
checkExprAssumeAssert src stps m_assume m_assert =
    checkExprReaches src stps m_assume m_assert Nothing
-- | Most general of the checkExpr* helpers: optional assume, assert, and
-- reaches functions, with the step bound @stps@ written into the default
-- test configuration.
checkExprReaches :: String
                 -> Int
                 -> Maybe String
                 -> Maybe String
                 -> Maybe String
                 -> String
                 -> [Reqs ([Expr] -> Bool)]
                 -> TestTree
checkExprReaches src stps m_assume m_assert m_reaches entry reqList =
    checkExprWithConfig src m_assume m_assert m_reaches entry reqList
        (fmap (\config -> config { steps = stps }) mkConfigTestIO)
-- | As 'checkExprReaches', but using the Map-enabled test configuration.
checkExprWithMap :: String
                 -> Int
                 -> Maybe String
                 -> Maybe String
                 -> Maybe String
                 -> String
                 -> [Reqs ([Expr] -> Bool)]
                 -> TestTree
checkExprWithMap src stps m_assume m_assert m_reaches entry reqList =
    checkExprWithConfig src m_assume m_assert m_reaches entry reqList
        (fmap (\config -> config { steps = stps }) mkConfigTestWithMapIO)
-- | As 'checkExprReaches', but using the Set-enabled test configuration.
checkExprWithSet :: String
                 -> Int
                 -> Maybe String
                 -> Maybe String
                 -> Maybe String
                 -> String
                 -> [Reqs ([Expr] -> Bool)]
                 -> TestTree
checkExprWithSet src stps m_assume m_assert m_reaches entry reqList =
    checkExprWithConfig src m_assume m_assert m_reaches entry reqList
        (fmap (\config -> config { steps = stps }) mkConfigTestWithSetIO)
-- | Build a test case that runs G2 on @entry@ in @src@ with the 'Config'
-- produced by @config_f@, then checks every (inputs, output) pair found
-- against @reqList@.  A 'Left' from 'testFile' (an escaped exception)
-- fails the test.
-- (Fixed here: a stale, commented-out alternative body had lost its @--@
-- markers, which made the module fail to parse; markers restored below.)
checkExprWithConfig :: String
                    -> Maybe String
                    -> Maybe String
                    -> Maybe String
                    -> String
                    -> [Reqs ([Expr] -> Bool)]
                    -> IO Config
                    -> TestTree
checkExprWithConfig src m_assume m_assert m_reaches entry reqList config_f = do
    testCase src (do
        config <- config_f
        res <- testFile src m_assume m_assert m_reaches entry config
        -- checkExprGen returns the violated requirements; null means all met.
        let ch = case res of
                    Left _ -> False
                    Right exprs -> null $ checkExprGen (map (\(inp, out) -> inp ++ [out]) exprs) reqList
        assertBool ("Assume/Assert for file " ++ src
                        ++ " with functions [" ++ (fromMaybe "" m_assume) ++ "] "
                        ++ "[" ++ (fromMaybe "" m_assert) ++ "] "
                        ++ entry ++ " failed.\n")
                   ch
        )
    -- return . testCase src
    --     $ assertBool ("Assume/Assert for file " ++ src ++
    --         " with functions [" ++ (fromMaybe "" m_assume) ++ "] " ++
    --         "[" ++ (fromMaybe "" m_assert) ++ "] " ++
    --         entry ++ " failed.\n") ch
-- | Run G2 on @entry@ in @src@, capturing any exception thrown during
-- translation or execution as a 'Left'.
testFile :: String
         -> Maybe String
         -> Maybe String
         -> Maybe String
         -> String
         -> Config
         -> IO (Either SomeException [([Expr], Expr)])
testFile src m_assume m_assert m_reaches entry config =
    try $ testFileWithConfig src m_assume m_assert m_reaches entry config
-- | Translate @src@ (using its directory as the project root) and
-- symbolically execute @entry@ under @config@, returning the concrete
-- (arguments, output) pair for every state G2 finds.  Raises
-- @error "Timeout"@ if the run exceeds the config's time limit.
testFileWithConfig :: String
                   -> Maybe String
                   -> Maybe String
                   -> Maybe String
                   -> String
                   -> Config
                   -> IO [([Expr], Expr)]
testFileWithConfig src m_assume m_assert m_reaches entry config = do
    let proj = takeDirectory src
    r <- doTimeout (timeLimit config)
            $ runG2FromFile
                [proj]
                [src]
                (fmap T.pack m_assume)
                (fmap T.pack m_assert)
                (fmap T.pack m_reaches)
                -- assert/reaches runs require error checking to be on;
                -- NOTE(review): confirm this Bool's meaning against
                -- runG2FromFile's signature.
                (isJust m_assert || isJust m_reaches)
                (T.pack entry)
                simplTranslationConfig
                config
    -- doTimeout yields Nothing on timeout; otherwise keep the found states.
    let (states, _) = maybe (error "Timeout") fst r
    return $ map (\(ExecRes { conc_args = i, conc_out = o}) -> (i, o)) states
-- For mergeState unit tests
-- | Wrap a pure pass/fail result as a Tasty test case: a 'Left' message
-- becomes a failure, any 'Right' passes.
checkFn :: Either String Bool -> String -> IO TestTree
checkFn f testName =
    return $ testCase testName (either assertFailure (const (return ())) f)
-- | As 'checkFn', but the pass/fail result is produced by an IO action.
checkFnIO :: IO (Either String Bool) -> String -> IO TestTree
checkFnIO f testName = do
    res <- f
    return $ testCase testName (either assertFailure (const (return ())) res)
-- | True iff the final expression in the list is an application of G2's
-- Error primitive.  (Partial: errors on an empty list, as before.)
errors :: [Expr] -> Bool
errors es
    | Prim Error _ <- last es = True
    | otherwise = False
| null | https://raw.githubusercontent.com/BillHallahan/G2/43e7a9f1e8f5131d91e28b54ce669e5e6782412b/tests/Test.hs | haskell | # LANGUAGE DeriveDataTypeable #
# LANGUAGE OverloadedStrings #
Run with no arguments for default test cases.
All default test cases should pass.
Run with flag '--test-options="todo yes"' to run test cases corresponding to to-be-fixed bugs.
Test based on examples that are also good for demos
[ RExists approxSqrtRes
, RExists pythagoreanRes
Tests that are intended to ensure a specific feature works, but that are not neccessarily interesting beyond that
, checkInputOutput "tests/TestFiles/BadBool.hs" "BadBool" "f" 1400 [AtLeast 1]
[ AtLeast 2
To Do Tests
------------
--------------------------
return . testCase src
For mergeState unit tests | # LANGUAGE CPP #
# LANGUAGE FlexibleContexts #
module Main where
import Test.Tasty
import Test.Tasty.HUnit ( testCase, assertBool, assertFailure )
import Test.Tasty.Options
import Test.Tasty.Runners
import G2.Config
import G2.Interface
import G2.Language as G2
import Control.Exception
import Data.Maybe
import Data.Proxy
import Data.Tagged
import qualified Data.Text as T
import System.Environment
import System.FilePath
import Type.Reflection (Typeable)
import PeanoTest
import HigherOrderMathTest
import GetNthTest
import DefuncTest
import CaseTest
import Expr
import Simplifications
import Typing
import UnionFindTests
import UFMapTests
import RewriteVerify.RewriteVerifyTest
import G2.Translation
import InputOutputTest
import Reqs
import TestUtils
import qualified Data.Map.Lazy as M
main :: IO ()
main = do
as <- getArgs
let todo = "--todo" `elem` as
defaultMainWithIngredients
(defaultIngredients ++
[TestReporter
[ Option (Proxy :: Proxy ToDo) ]
(\_ _ -> Just (\_ -> return (\_ -> return False)))
])
(if todo then todoTests else tests)
tests :: TestTree
tests = testGroup "Tests"
[ sampleTests
, testFileTests
, baseTests
, primTests
, exprTests
, typingTests
, simplificationTests
, ufMapQuickcheck
, unionFindQuickcheck
, rewriteTests
]
timeout :: Timeout
timeout = mkTimeout 1
sampleTests :: TestTree
sampleTests = testGroup "Samples"
[
checkExprAssert "tests/Samples/Peano.hs" 900 (Just "equalsFour") "add"
[RForAll $ not . peano_4_out, AtLeast 10]
, checkExprAssumeAssert "tests/Samples/Peano.hs" 900 (Just "fstIsEvenAddToFour") (Just "fstIsTwo") "add"
[RExists peano_0_4, RExists peano_4_0, Exactly 2]
, checkExprAssumeAssert "tests/Samples/Peano.hs" 1200 (Just "multiplyToFour") (Just "equalsFour") "add"
[RExists peano_1_4_5, RExists peano_4_1_5, Exactly 2]
, checkExprAssumeAssert "tests/Samples/Peano.hs" 750 (Just "eqEachOtherAndAddTo4") Nothing "add"
[RForAll peano_2_2, Exactly 1]
, checkExprAssumeAssert "tests/Samples/Peano.hs" 600 (Just "equalsFour") Nothing "add"
[ RExists peano_0_4
, RExists peano_1_3
, RExists peano_2_2
, RExists peano_3_1
, RExists peano_4_0
, Exactly 5]
, checkExprAssumeAssert "tests/Samples/Peano.hs" 750 (Just "equalsFour") Nothing "multiply"
[ RExists peano_1_4
, RExists peano_2_2
, RExists peano_4_1
, Exactly 3]
, checkExprAssume "tests/Samples/HigherOrderMath.hs" 800 (Just "isTrue0") "notNegativeAt0NegativeAt1"
[RExists negativeSquareRes, AtLeast 1]
, checkExprAssume "tests/Samples/HigherOrderMath.hs" 600 (Just "isTrue1") "fixed"
[ RExists abs2NonNeg
, RExists squareRes
, RExists fourthPowerRes
, RForAll allabs2NonNeg
, AtLeast 4]
, checkExpr "tests/Samples/HigherOrderMath.hs" 600 "fixed" [ RExists abs2NonNeg
, RExists squareRes
, RExists fourthPowerRes
, AtLeast 4]
, checkExprAssumeAssert "tests/Samples/HigherOrderMath.hs" 600 (Just "isTrue2") Nothing "sameFloatArgLarger"
[ RExists addRes
, RExists subRes
, AtLeast 2]
, checkExpr "tests/Samples/HigherOrderMath.hs" 600 "functionSatisfies" [RExists functionSatisfiesRes, AtLeast 1]
, checkExpr "tests/Samples/HigherOrderMath.hs" 1000 "approxSqrt" [AtLeast 2]
The below test fails because Z3 returns unknown .
, checkExprAssume " tests / Samples / HigherOrderMath.hs " 1200 ( Just " isTrue2 " ) " sameFloatArgLarger " 2
, AtLeast 2 ]
, checkExprAssumeAssert "tests/Samples/McCarthy91.hs" 1000 (Just "lessThan91") Nothing "mccarthy"
[ RForAll (\[App _ (Lit (LitInt x)), _] -> x <= 100)
, AtLeast 1]
, checkExprAssumeAssert "tests/Samples/McCarthy91.hs" 400 (Just "greaterThan10Less") Nothing "mccarthy"
[ RForAll (\[App _ (Lit (LitInt x)), _] -> x > 100)
, AtLeast 1]
, checkExprAssumeAssert "tests/Samples/McCarthy91.hs" 1000 (Just "lessThanNot91") Nothing "mccarthy" [Exactly 0]
, checkExprAssumeAssert "tests/Samples/McCarthy91.hs" 1000 (Just "greaterThanNot10Less") Nothing "mccarthy"
[Exactly 0]
, checkInputOutput "tests/Samples/GetNth.hs" "getNth" 600 [AtLeast 10]
, checkInputOutputs "tests/Samples/GetNthPoly.hs" [ ("getNthInt", 600, [AtLeast 10])
, ("getNthX", 600, [AtLeast 10])
, ("getNthPeano", 600, [AtLeast 10])
, ("getNthCListInt", 600, [AtLeast 10])
, ("getNthCListX", 600, [AtLeast 10])
, ("getNth", 1000, [AtLeast 10])
, ("cfmapInt", 1000, [AtLeast 10])
, ("cfmapIntX", 1600, [AtLeast 10])
, ("cfmapIntCListInt", 600, [AtLeast 2]) ]
, checkExprReaches "tests/Samples/GetNthErr.hs" 800 Nothing Nothing (Just "error") "getNth"
[AtLeast 8, RForAll errors]
, checkInputOutputs "tests/Samples/FoldlUses.hs" [ ("sum_foldl", 1600, [AtLeast 3])
, ("dotProd", 1000, [AtLeast 3]) ]
, checkInputOutputs "tests/Samples/FoldlUsesPoly.hs" [ ("sumMinAndMax", 600, [AtLeast 10])
, ("maxes", 400, [AtLeast 10])
, ("switchInt", 400, [AtLeast 1])
, ("getInInt", 400, [AtLeast 1])
, ("switchP", 400, [AtLeast 1]) ]
]
testFileTests :: TestTree
testFileTests = testGroup "TestFiles"
[
checkExpr "tests/TestFiles/IfTest.hs" 400 "f"
[ RForAll (\[App _ (Lit (LitInt x)), App _ (Lit (LitInt y)), App _ (Lit (LitInt r))] ->
if x == y then r == x + y else r == y)
, AtLeast 2]
, checkExprAssert "tests/TestFiles/AssumeAssert.hs" 400 (Just "assertGt5") "outShouldBeGt5" [Exactly 0]
, checkExprAssert "tests/TestFiles/AssumeAssert.hs" 400 (Just "assertGt5") "outShouldBeGe5" [AtLeast 1]
, checkExprAssumeAssert "tests/TestFiles/AssumeAssert.hs" 400
(Just "assumeGt5") (Just "assertGt5") "outShouldBeGt5" [Exactly 0]
, checkExprAssumeAssert "tests/TestFiles/AssumeAssert.hs" 400
(Just "assumeGt5") (Just "assertGt5") "outShouldBeGe5" [Exactly 0]
, checkExpr "tests/TestFiles/CheckSq.hs" 400 "checkSq"
[AtLeast 2, RExists (\[x, _] -> isInt x (\x' -> x' == 3 || x' == -3))]
, checkExpr "tests/TestFiles/Defunc1.hs" 400 "f"
[RExists defunc1Add1, RExists defunc1Multiply2, RExists defuncB, AtLeast 3]
, checkInputOutputs "tests/TestFiles/Defunc1.hs" [ ("x", 400, [AtLeast 1])
, ("mapYInt", 600, [AtLeast 1])
, ("makeMoney", 600, [AtLeast 2])
, ("compZZ", 1600, [AtLeast 2])
, ("compZZ2", 1600, [AtLeast 2]) ]
, checkInputOutput "tests/TestFiles/Defunc2.hs" "funcMap" 400 [AtLeast 30]
, checkExpr "tests/TestFiles/MultCase.hs" 400 "f"
[ RExists (\[App _ (Lit (LitInt x)), y] -> x == 2 && getBoolB y id)
, RExists (\[App _ (Lit (LitInt x)), y] -> x == 1 && getBoolB y id)
, RExists (\[App _ (Lit (LitInt x)), y] -> x /= 2 && x /= 1 && getBoolB y not)]
, checkExprAssumeAssert "tests/TestFiles/LetFloating/LetFloating.hs" 400 (Just "output6") Nothing "f"
[AtLeast 1, RExists (\[App _ (Lit (LitInt x)), _] -> x == 6)]
, checkExprAssumeAssert "tests/TestFiles/LetFloating/LetFloating2.hs" 400 (Just "output16") Nothing "f"
[AtLeast 1, RExists (\[App _ (Lit (LitInt x)), _] -> x == 15)]
, checkExprAssumeAssert "tests/TestFiles/LetFloating/LetFloating3.hs" 600 (Just "output32") Nothing "f"
[AtLeast 1, RExists (\[App _ (Lit (LitInt x)), _] -> x == 4)]
, checkExprAssumeAssert "tests/TestFiles/LetFloating/LetFloating4.hs" 400 (Just "output12") Nothing "f"
[AtLeast 1, RExists (\[App _ (Lit (LitInt x)), _] -> x == 11)]
, checkExprAssumeAssert "tests/TestFiles/LetFloating/LetFloating5.hs" 400 (Just "output19") Nothing "f"
[AtLeast 1, RForAll (\[App _ (Lit (LitInt x)), App _ (Lit (LitInt y)), _] -> x + y + 1 == 19)]
, checkExprAssumeAssert "tests/TestFiles/LetFloating/LetFloating6.hs" 400 (Just "output32") Nothing "f"
[AtLeast 1, RExists (\[App _ (Lit (LitInt x)), _] -> x == 25)]
, checkExpr "tests/TestFiles/TypeClass/TypeClass1.hs" 400 "f" [RExists (\[x, y] -> x == y), Exactly 1]
, checkExpr "tests/TestFiles/TypeClass/TypeClass2.hs" 400 "f" [RExists (\[x, y] -> x == y), Exactly 1]
, checkExpr "tests/TestFiles/TypeClass/TypeClass3.hs" 400 "f"
[RExists (\[x, y] -> getIntB x $ \x' -> getIntB y $ \y' -> x' + 8 == y'), Exactly 1]
, checkExpr "tests/TestFiles/TypeClass/TypeClass4.hs" 1000 "f" [AtLeast 1]
, checkExprAssumeAssert "tests/TestFiles/TypeClass/HKTypeClass1.hs" 400 (Just "largeJ") Nothing "extractJ"
[RForAll (\[x, ly@(App _ (Lit (LitInt y)))] -> appNthArgIs x (ly ==) 2 && y > 100), Exactly 1]
, checkExprAssumeAssert "tests/TestFiles/TypeClass/HKTypeClass1.hs" 400 (Just "largeE") Nothing "extractE"
[RForAll (\[x, ly@(App _ (Lit (LitInt y)))] -> appNthArgIs x (ly ==) 4 && y > 100), Exactly 1]
, checkExpr "tests/TestFiles/TypeClass/HKTypeClass1.hs" 400 "changeJ"
[RForAll (\[_, x, y] -> dcInAppHasName "J" x 2 && (dcInAppHasName "J" y 2 || isError y)), AtLeast 2]
, checkExpr "tests/TestFiles/Case1.hs" 400 "f"
[ RExists (\[App _ (Lit (LitInt x)), y] -> x < 0 && dcHasName "A" y)
, RExists (\[App _ (Lit (LitInt x)), y] -> x >= 0 && dcHasName "C" y), Exactly 2]
, checkExpr "tests/TestFiles/Case2.hs" 400 "f"
[ RExists exists1
, RExists exists2
, RExists exists3
, RExists exists4
, AtLeast 4]
, checkExprAssumeAssert "tests/TestFiles/Guards.hs" 400 (Just "g") Nothing "f"
[AtLeast 1, RExists (\[dc, _] -> getBoolB dc id)]
, checkExprAssumeAssert "tests/TestFiles/Infinite.hs" 400 (Just "g") Nothing "f"
[AtLeast 1, RExists (\[App _ (Lit (LitInt x)), _] -> x <= 100 && x /= 80)]
, checkExpr "tests/TestFiles/Strictness1.hs" 400 "f"
[AtLeast 1, RExists (\[(App x (App _ (Lit (LitInt y))))] -> dcHasName "A" x && y == 9)]
, checkExpr "tests/TestFiles/Where1.hs" 400 "f"
[ RExists (\[App _ (Lit (LitInt x)), App _ (Lit (LitInt y))] -> x == 4 && y == 1)
, RExists (\[App _ (Lit (LitInt x)), App _ (Lit (LitInt y))] -> x /= 4 && y == 1) ]
, checkInputOutputs "tests/TestFiles/Error/Error1.hs" [ ("f", 400, [AtLeast 1])
, ("g", 400, [AtLeast 1])
, ("f", 400, [AtLeast 1])
, ("f", 400, [AtLeast 1])
, ("g", 400, [AtLeast 1]) ]
, checkInputOutputs "tests/TestFiles/Error/Undefined1.hs" [ ("undefined1", 400, [AtLeast 1])
, ("undefined2", 400, [AtLeast 1])]
, checkInputOutput "tests/TestFiles/Error/IrrefutError.hs" "f" 400 [AtLeast 2]
, checkInputOutputs "tests/TestFiles/BadNames1.hs" [ ("abs'", 400, [Exactly 2])
, ("xswitch", 400, [AtLeast 10]) ]
, checkExpr "tests/TestFiles/PolyDataTy1.hs" 400 "f"
[Exactly 2, RExists (\[x, _, y] -> x == y), RExists (\[_, App _ x, y] -> x == y)]
, checkExpr "tests/TestFiles/PolyDataTy1.hs" 400 "getFstXIntInt"
[AtLeast 2, RExists (\[x, y] -> isApp x && isError y)]
, checkExpr "tests/TestFiles/PolyDataTy1.hs" 400 "sum" [AtLeast 3, RExists (\[x, y] -> isApp x && isError y)]
, checkExprAssumeAssert "tests/TestFiles/MultiSplit.hs" 1000 (Just "equals1") Nothing "f" [Exactly 0]
, checkExpr "tests/TestFiles/MatchesFunc1.hs" 400 "f"
[RExists (\[x, y] -> getIntB x $ \x' -> getIntB y $ \y' -> y' == 6 + x'), AtLeast 1]
, checkExpr "tests/TestFiles/RecordFields1.hs" 400 "f"
[ RExists
(\[x, y] -> appNthArgIs x notCast 0
&& appNthArgIs x (\x' -> getIntB x' $ \x'' -> getIntB y $ \y' -> x'' + 1 == y') 1)
, Exactly 1]
, checkExpr "tests/TestFiles/RecordFields1.hs" 400 "fCall" [RExists (\[x] -> isInt x ((==) 35)), Exactly 1]
, checkExpr "tests/TestFiles/RecordFields1.hs" 400 "g"
[ RExists (\[x, y] -> appNthArgIs x (dcHasName "A") 2 && appNthArgIs y (dcHasName "B") 2)
, RExists (\[x, y] -> appNthArgIs x (dcHasName "B") 2 && appNthArgIs y (dcHasName "C") 2)
, RExists (\[x, y] -> appNthArgIs x (dcHasName "C") 2 && appNthArgIs y (dcHasName "A") 2)
, Exactly 3]
, checkInputOutputs "tests/TestFiles/Deriving/DerivingSimple.hs" [ ("eq", 400, [AtLeast 2])
, ("lt", 400, [AtLeast 2]) ]
, checkInputOutputs "tests/TestFiles/Deriving/DerivingComp.hs" [ ("eq", 800, [AtLeast 2])
, ("lt", 800, [AtLeast 2]) ]
, checkInputOutputs "tests/TestFiles/Coercions/Age.hs" [ ("born", 400, [Exactly 1])
, ("yearPasses", 400, [AtLeast 1])
, ("age", 400, [AtLeast 1])
, ("diffAge", 400, [AtLeast 1])
, ("yearBefore", 400, [AtLeast 5])]
, checkInputOutputs "tests/TestFiles/Coercions/NewType1.hs" [ ("add1N4", 400, [Exactly 1])
, ("f", 400, [Exactly 1])
, ("g", 400, [Exactly 1])
, ("mapWInt", 400, [AtLeast 2])
, ("appLeftFloat", 400, [AtLeast 2])
, ("getLIntFloat", 400, [AtLeast 2])
, ("getRIntFloat", 400, [AtLeast 2])
, ("getCIntFloatDouble", 400, [AtLeast 2])
, ("getRIntFloatX'", 400, [AtLeast 2])]
, checkInputOutput "tests/TestFiles/Coercions/BadCoerce.hs" "f" 400 [AtLeast 1]
, checkInputOutput "tests/TestFiles/Expr.hs" "leadingLams" 400 [AtLeast 5]
, checkExprAssume "tests/TestFiles/Subseq.hs" 1200 (Just "assume") "subseqTest" [AtLeast 1]
, checkInputOutputs "tests/TestFiles/Strings/Strings1.hs" [ ("con", 300, [AtLeast 10])
, ("eq", 700, [AtLeast 10])
, ("eqGt1", 700, [AtLeast 10])
, ("capABC", 150, [AtLeast 10])
, ("appendEq", 500, [AtLeast 5]) ]
, checkExpr "tests/TestFiles/Strings/Strings1.hs" 1000 "exclaimEq"
[AtLeast 5, RExists (\[_, _, r] -> dcHasName "True" r)]
, checkExpr "tests/TestFiles/Sets/SetInsert.hs" 700 "prop" [AtLeast 3]
, checkInputOutputs "tests/TestFiles/BadDC.hs" [ ("f", 400, [AtLeast 5])
, ("g", 400, [AtLeast 3]) ]
, " tests / TestFiles / Coercions / GADT.hs " 400 Nothing Nothing " g " 2
, RExists ( \[x , y ] - > x = = Lit ( LitInt 0 ) & & y = = App ( Data ( PrimCon I ) ) ( Lit ( LitInt 0 ) ) )
, RExists ( \[x , _ ] - > x /= Lit ( LitInt 0 ) ) ]
, " tests / TestFiles / HigherOrderList.hs " 400 Nothing Nothing " g " [ AtLeast 10 ]
]
baseTests :: TestTree
baseTests = testGroup "Base"
[
checkInputOutput "tests/Samples/Peano.hs" "add" 400 [AtLeast 4]
, checkInputOutputs "tests/BaseTests/ListTests.hs" [ ("test", 1000, [AtLeast 1])
, ("maxMap", 1000, [AtLeast 4])
, ("minTest", 1000, [AtLeast 2])
, ("foldrTest2", 1000, [AtLeast 1]) ]
, checkInputOutput "tests/BaseTests/Tuples.hs" "addTupleElems" 1000 [AtLeast 2]
, checkInputOutputs "tests/BaseTests/MaybeTest.hs" [ ("headMaybeInt", 1000, [AtLeast 2])
, ("sumN", 1000, [AtLeast 6])
, ("lengthN", 1000, [AtLeast 6]) ]
, checkInputOutput "tests/BaseTests/Other.hs" "check4VeryEasy2" 600 [AtLeast 1]
]
primTests :: TestTree
primTests = testGroup "Prims"
[
checkInputOutputs "tests/Prim/Prim2.hs" [ ("quotI1", 1000, [AtLeast 4])
, ("quotI2", 1000, [AtLeast 4])
, ("remI1", 1000, [AtLeast 4])
, ("remI2", 1000, [AtLeast 3])
, ("remI3", 1000, [AtLeast 1])
, ("remI4", 1000, [AtLeast 1])
, ("p1List", 300000, [AtLeast 1])
, ("p2List", 700000, [AtLeast 1])
, ("integerToFloatList", 150000, [AtLeast 1]) ]
, checkInputOutputs "tests/Prim/Prim3.hs" [ ("int2FloatTest", 1000, [AtLeast 1])
, ("int2DoubleTest", 1000, [AtLeast 1]) ]
, checkInputOutputs "tests/Prim/Prim4.hs" [ ("divIntTest", 1500, [AtLeast 4])
, ("divIntegerTest", 1500, [AtLeast 1])
, ("divIntegerTest2", 1500, [AtLeast 4])
, ("divFloatTest", 1500, [AtLeast 1]) ]
, checkInputOutputs "tests/Prim/DataTag.hs" [ ("dataToTag1", 1000, [Exactly 1])
, ("dataToTag2", 1000, [AtLeast 1])
, ("dataToTag3", 1000, [Exactly 5])
, ("tagToEnum1", 1000, [AtLeast 1])
, ("tagToEnum3", 1000, [AtLeast 4])
, ("tagToEnum4", 1000, [AtLeast 4])
, ("tagToEnum5", 1000, [Exactly 1])
, ("tagToEnum6", 1000, [AtLeast 4]) ]
, checkExpr "tests/Prim/DataTag.hs" 1000 "tagToEnum2" [Exactly 1, RForAll (\[r] -> isError r)]
, checkInputOutputs "tests/Prim/Chr.hs" [ ("lowerLetters", 9000, [AtLeast 1])
, ("allLetters", 9000, [AtLeast 1])
, ("printBasedOnChr", 1500, [AtLeast 7])
, ("printBasedOnOrd", 1500, [AtLeast 7]) ]
]
todoTests :: TestTree
todoTests = testGroup "To Do"
[
checkExpr "tests/TestFiles/TypeClass/TypeClass5.hs" 800 "run" [AtLeast 1]
, checkExpr "tests/TestFiles/TypeClass/TypeClass5.hs" 800 "run2" [AtLeast 0]
, checkInputOutput "tests/Prim/Prim2.hs" "sqrtList" 10000 [AtLeast 1]
, checkInputOutputs "tests/BaseTests/MaybeTest.hs" [ ("average", 2000, [AtLeast 6])
, ("averageF", 2000, [AtLeast 6])
, ("maybeAvg", 200, [AtLeast 6])
]
, checkInputOutputs "tests/Prim/Prim3.hs" [ ("float2IntTest", 1000, [AtLeast 1])
, ("double2IntTest", 1000, [AtLeast 1])]
]
data ToDo = RunMain
| RunToDo
deriving (Eq, Typeable)
instance IsOption ToDo where
defaultValue = RunMain
parseValue s =
let
ws = words s
in
if "y" `elem` ws || "yes" `elem` ws then Just RunToDo else Nothing
optionName = Tagged "todo"
optionHelp = Tagged "Specifies whether to run the main, passing tests, or the todo tests."
Generic helpers for tests
checkExpr :: String -> Int -> String -> [Reqs ([Expr] -> Bool)] -> TestTree
checkExpr src stps entry reqList =
checkExprReaches src stps Nothing Nothing Nothing entry reqList
checkExprAssume :: String -> Int -> Maybe String -> String -> [Reqs ([Expr] -> Bool)] -> TestTree
checkExprAssume src stps m_assume entry reqList =
checkExprReaches src stps m_assume Nothing Nothing entry reqList
checkExprAssert :: String -> Int -> Maybe String -> String -> [Reqs ([Expr] -> Bool)] -> TestTree
checkExprAssert src stps m_assert entry reqList =
checkExprReaches src stps Nothing m_assert Nothing entry reqList
checkExprAssumeAssert :: String
-> Int
-> Maybe String
-> Maybe String
-> String
-> [Reqs ([Expr] -> Bool)]
-> TestTree
checkExprAssumeAssert src stps m_assume m_assert entry reqList =
checkExprReaches src stps m_assume m_assert Nothing entry reqList
checkExprReaches :: String
-> Int
-> Maybe String
-> Maybe String
-> Maybe String
-> String
-> [Reqs ([Expr] -> Bool)]
-> TestTree
checkExprReaches src stps m_assume m_assert m_reaches entry reqList = do
checkExprWithConfig src m_assume m_assert m_reaches entry reqList
(do
config <- mkConfigTestIO
return $ config {steps = stps})
checkExprWithMap :: String
-> Int
-> Maybe String
-> Maybe String
-> Maybe String
-> String
-> [Reqs ([Expr] -> Bool)]
-> TestTree
checkExprWithMap src stps m_assume m_assert m_reaches entry reqList = do
checkExprWithConfig src m_assume m_assert m_reaches entry reqList
(do
config <- mkConfigTestWithMapIO
return $ config {steps = stps})
checkExprWithSet :: String
-> Int
-> Maybe String
-> Maybe String
-> Maybe String
-> String
-> [Reqs ([Expr] -> Bool)]
-> TestTree
checkExprWithSet src stps m_assume m_assert m_reaches entry reqList = do
checkExprWithConfig src m_assume m_assert m_reaches entry reqList
(do
config <- mkConfigTestWithSetIO
return $ config {steps = stps})
checkExprWithConfig :: String
-> Maybe String
-> Maybe String
-> Maybe String
-> String
-> [Reqs ([Expr] -> Bool)]
-> IO Config
-> TestTree
checkExprWithConfig src m_assume m_assert m_reaches entry reqList config_f = do
testCase src (do
config <- config_f
res <- testFile src m_assume m_assert m_reaches entry config
let ch = case res of
Left _ -> False
Right exprs -> null $ checkExprGen (map (\(inp, out) -> inp ++ [out]) exprs) reqList
assertBool ("Assume/Assert for file " ++ src
++ " with functions [" ++ (fromMaybe "" m_assume) ++ "] "
++ "[" ++ (fromMaybe "" m_assert) ++ "] "
++ entry ++ " failed.\n")
ch
)
$ assertBool ( " Assume / Assert for file " + + src + +
" with functions [ " + + ( fromMaybe " " m_assume ) + + " ] " + +
" [ " + + ( fromMaybe " " m_assert ) + + " ] " + +
entry + + " failed.\n " ) ch
testFile :: String
-> Maybe String
-> Maybe String
-> Maybe String
-> String
-> Config
-> IO (Either SomeException [([Expr], Expr)])
testFile src m_assume m_assert m_reaches entry config =
try (testFileWithConfig src m_assume m_assert m_reaches entry config)
testFileWithConfig :: String
-> Maybe String
-> Maybe String
-> Maybe String
-> String
-> Config
-> IO [([Expr], Expr)]
testFileWithConfig src m_assume m_assert m_reaches entry config = do
let proj = takeDirectory src
r <- doTimeout (timeLimit config)
$ runG2FromFile
[proj]
[src]
(fmap T.pack m_assume)
(fmap T.pack m_assert)
(fmap T.pack m_reaches)
(isJust m_assert || isJust m_reaches)
(T.pack entry)
simplTranslationConfig
config
let (states, _) = maybe (error "Timeout") fst r
return $ map (\(ExecRes { conc_args = i, conc_out = o}) -> (i, o)) states
checkFn :: Either String Bool -> String -> IO TestTree
checkFn f testName = do
let res = f
case res of
Left e -> return . testCase testName $ assertFailure e
Right _ -> return . testCase testName $ return ()
checkFnIO :: IO (Either String Bool) -> String -> IO TestTree
checkFnIO f testName = do
res <- f
case res of
Left e -> return . testCase testName $ assertFailure e
Right _ -> return . testCase testName $ return ()
errors :: [Expr] -> Bool
errors e =
case last e of
Prim Error _ -> True
_ -> False
|
76ed4da0ccba357d294522cbdb03b36700b414226cf906ebc0c740dd80697795 | LesleyLai/PFPL-Interpreters | e.mli | include module type of Types
val pp_typ: Format.formatter -> typ -> unit
val pp_expr: Format.formatter -> expr -> unit
val expr_typ: expr -> typ option
val is_val: expr -> bool
val step: expr -> expr
| null | https://raw.githubusercontent.com/LesleyLai/PFPL-Interpreters/5da05555b8590aa039933eb131cafd095290bde2/lib/e/e.mli | ocaml | include module type of Types
val pp_typ: Format.formatter -> typ -> unit
val pp_expr: Format.formatter -> expr -> unit
val expr_typ: expr -> typ option
val is_val: expr -> bool
val step: expr -> expr
| |
9b10b61fb50c6afed827f960a4d7c7b97eef9dde59c8bcf50141f4e154577d32 | ftovagliari/ocamleditor | text_options.ml |
OCamlEditor
Copyright ( C ) 2010 - 2014
This file is part of OCamlEditor .
OCamlEditor is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
OCamlEditor is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program . If not , see < / > .
OCamlEditor
Copyright (C) 2010-2014 Francesco Tovagliari
This file is part of OCamlEditor.
OCamlEditor is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OCamlEditor is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see </>.
*)
open GUtil
class options () =
object (self)
val mutable text_color : GDraw.color = `BLACK
val mutable base_color : GDraw.color = `WHITE
val mutable indent_lines_color_solid : GDraw.color = `BLACK
val mutable indent_lines_color_dashed : GDraw.color = `BLACK
val mutable highlight_current_line : string option = None
val mutable current_line_border_enabled = true;
val mutable current_line_border_color : GDraw.color = `NAME "#d0d0d0"
val mutable current_line_bg_color : GDraw.color = `NAME "#f0f0f0"
val mutable show_line_numbers = true
val mutable show_indent_lines = true
val mutable show_dot_leaders = true
val mutable show_whitespace_chars = false
val mutable show_markers = true
val mutable smart_home = true;
val mutable smart_end = true;
val mutable word_wrap = false
val mutable mark_occurrences : (bool * bool * string) = false, false , ""
val mutable visible_right_margin : (int * GDraw.color) option = None;
val mutable line_numbers_font = ""
val mutable mark_occurrences_changed = new mark_occurrences_changed ()
val mutable line_numbers_changed = new line_numbers_changed ()
val mutable highlight_current_line_changed = new highlight_current_line_changed ()
val mutable line_numbers_font_changed = new line_numbers_font_changed ()
val mutable show_markers_changed = new show_markers_changed ()
val mutable word_wrap_changed = new word_wrap_changed ()
initializer
ignore (self#connect#line_numbers_changed ~callback:(fun x -> show_line_numbers <- x));
ignore (self#connect#line_numbers_font_changed ~callback:(fun x -> line_numbers_font <- x));
ignore (self#connect#highlight_current_line_changed ~callback:(fun x -> highlight_current_line <- x));
ignore (self#connect#show_markers_changed ~callback:(fun x -> show_markers <- x));
ignore (self#connect#word_wrap_changed ~callback:(fun x -> word_wrap <- x));
ignore (self#connect#mark_occurrences_changed ~callback:(fun x -> mark_occurrences <- x));
method set_base_color x = base_color <- `NAME x
method base_color = base_color
method text_color = text_color
method set_text_color color = text_color <- `NAME color;
method set_indent_lines_color_solid x = indent_lines_color_solid <- x
method indent_lines_color_solid = indent_lines_color_solid
method set_indent_lines_color_dashed x = indent_lines_color_dashed <- x
method indent_lines_color_dashed = indent_lines_color_dashed
method set_highlight_current_line = highlight_current_line_changed#call
method highlight_current_line = highlight_current_line
method current_line_border_enabled = current_line_border_enabled
method set_current_line_border_enabled x = current_line_border_enabled <- x
method current_line_border_color = current_line_border_color
method set_current_line_border_color x = current_line_border_color <- x
method set_current_line_bg_color x = current_line_bg_color <- x
method current_line_bg_color = current_line_bg_color
method set_show_line_numbers = line_numbers_changed#call;
method show_line_numbers = show_line_numbers
method set_show_indent_lines x = show_indent_lines <- x
method show_indent_lines = show_indent_lines
method smart_home = smart_home
method set_smart_home x = smart_home <- x
method smart_end = smart_end
method set_smart_end x = smart_end <- x
method word_wrap = word_wrap
method set_word_wrap = word_wrap_changed#call
method show_whitespace_chars = show_whitespace_chars
method set_show_whitespace_chars x = show_whitespace_chars <- x
method set_show_markers = show_markers_changed#call
method show_markers = show_markers
method show_dot_leaders = show_dot_leaders
method set_show_dot_leaders x = show_dot_leaders <- x
method mark_occurrences = mark_occurrences
method set_mark_occurrences = mark_occurrences_changed#call
method set_line_numbers_font = line_numbers_font_changed#call
method line_numbers_font = line_numbers_font
method visible_right_margin = visible_right_margin
method set_visible_right_margin x = visible_right_margin <- x
method connect = new signals
~line_numbers_changed
~line_numbers_font_changed
~highlight_current_line_changed
~show_markers_changed
~word_wrap_changed
~mark_occurrences_changed
end
(** Signals *)
and line_numbers_changed () = object inherit [bool] signal () end
and line_numbers_font_changed () = object inherit [string] signal () end
and highlight_current_line_changed () = object inherit [string option] signal () end
and show_markers_changed () = object inherit [bool] signal () end
and word_wrap_changed () = object inherit [bool] signal () end
and mark_occurrences_changed () = object inherit [bool * bool * string] signal () end
and signals
~line_numbers_changed
~line_numbers_font_changed
~highlight_current_line_changed
~show_markers_changed
~word_wrap_changed
~mark_occurrences_changed =
object
inherit ml_signals [
line_numbers_changed#disconnect;
line_numbers_font_changed#disconnect;
highlight_current_line_changed#disconnect;
show_markers_changed#disconnect;
word_wrap_changed#disconnect;
mark_occurrences_changed#disconnect
]
method line_numbers_changed = line_numbers_changed#connect ~after
method line_numbers_font_changed = line_numbers_font_changed#connect ~after
method highlight_current_line_changed = highlight_current_line_changed#connect ~after
method show_markers_changed = show_markers_changed#connect ~after
method word_wrap_changed = word_wrap_changed#connect ~after
method mark_occurrences_changed = mark_occurrences_changed#connect ~after
end
| null | https://raw.githubusercontent.com/ftovagliari/ocamleditor/937d6120ac48de511294f913a78ab6cd82dca92c/src/text_options.ml | ocaml | * Signals |
OCamlEditor
Copyright ( C ) 2010 - 2014
This file is part of OCamlEditor .
OCamlEditor is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
OCamlEditor is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program . If not , see < / > .
OCamlEditor
Copyright (C) 2010-2014 Francesco Tovagliari
This file is part of OCamlEditor.
OCamlEditor is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OCamlEditor is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see </>.
*)
open GUtil
class options () =
object (self)
val mutable text_color : GDraw.color = `BLACK
val mutable base_color : GDraw.color = `WHITE
val mutable indent_lines_color_solid : GDraw.color = `BLACK
val mutable indent_lines_color_dashed : GDraw.color = `BLACK
val mutable highlight_current_line : string option = None
val mutable current_line_border_enabled = true;
val mutable current_line_border_color : GDraw.color = `NAME "#d0d0d0"
val mutable current_line_bg_color : GDraw.color = `NAME "#f0f0f0"
val mutable show_line_numbers = true
val mutable show_indent_lines = true
val mutable show_dot_leaders = true
val mutable show_whitespace_chars = false
val mutable show_markers = true
val mutable smart_home = true;
val mutable smart_end = true;
val mutable word_wrap = false
val mutable mark_occurrences : (bool * bool * string) = false, false , ""
val mutable visible_right_margin : (int * GDraw.color) option = None;
val mutable line_numbers_font = ""
val mutable mark_occurrences_changed = new mark_occurrences_changed ()
val mutable line_numbers_changed = new line_numbers_changed ()
val mutable highlight_current_line_changed = new highlight_current_line_changed ()
val mutable line_numbers_font_changed = new line_numbers_font_changed ()
val mutable show_markers_changed = new show_markers_changed ()
val mutable word_wrap_changed = new word_wrap_changed ()
initializer
ignore (self#connect#line_numbers_changed ~callback:(fun x -> show_line_numbers <- x));
ignore (self#connect#line_numbers_font_changed ~callback:(fun x -> line_numbers_font <- x));
ignore (self#connect#highlight_current_line_changed ~callback:(fun x -> highlight_current_line <- x));
ignore (self#connect#show_markers_changed ~callback:(fun x -> show_markers <- x));
ignore (self#connect#word_wrap_changed ~callback:(fun x -> word_wrap <- x));
ignore (self#connect#mark_occurrences_changed ~callback:(fun x -> mark_occurrences <- x));
method set_base_color x = base_color <- `NAME x
method base_color = base_color
method text_color = text_color
method set_text_color color = text_color <- `NAME color;
method set_indent_lines_color_solid x = indent_lines_color_solid <- x
method indent_lines_color_solid = indent_lines_color_solid
method set_indent_lines_color_dashed x = indent_lines_color_dashed <- x
method indent_lines_color_dashed = indent_lines_color_dashed
method set_highlight_current_line = highlight_current_line_changed#call
method highlight_current_line = highlight_current_line
method current_line_border_enabled = current_line_border_enabled
method set_current_line_border_enabled x = current_line_border_enabled <- x
method current_line_border_color = current_line_border_color
method set_current_line_border_color x = current_line_border_color <- x
method set_current_line_bg_color x = current_line_bg_color <- x
method current_line_bg_color = current_line_bg_color
method set_show_line_numbers = line_numbers_changed#call;
method show_line_numbers = show_line_numbers
method set_show_indent_lines x = show_indent_lines <- x
method show_indent_lines = show_indent_lines
method smart_home = smart_home
method set_smart_home x = smart_home <- x
method smart_end = smart_end
method set_smart_end x = smart_end <- x
method word_wrap = word_wrap
method set_word_wrap = word_wrap_changed#call
method show_whitespace_chars = show_whitespace_chars
method set_show_whitespace_chars x = show_whitespace_chars <- x
method set_show_markers = show_markers_changed#call
method show_markers = show_markers
method show_dot_leaders = show_dot_leaders
method set_show_dot_leaders x = show_dot_leaders <- x
method mark_occurrences = mark_occurrences
method set_mark_occurrences = mark_occurrences_changed#call
method set_line_numbers_font = line_numbers_font_changed#call
method line_numbers_font = line_numbers_font
method visible_right_margin = visible_right_margin
method set_visible_right_margin x = visible_right_margin <- x
method connect = new signals
~line_numbers_changed
~line_numbers_font_changed
~highlight_current_line_changed
~show_markers_changed
~word_wrap_changed
~mark_occurrences_changed
end
and line_numbers_changed () = object inherit [bool] signal () end
and line_numbers_font_changed () = object inherit [string] signal () end
and highlight_current_line_changed () = object inherit [string option] signal () end
and show_markers_changed () = object inherit [bool] signal () end
and word_wrap_changed () = object inherit [bool] signal () end
and mark_occurrences_changed () = object inherit [bool * bool * string] signal () end
and signals
~line_numbers_changed
~line_numbers_font_changed
~highlight_current_line_changed
~show_markers_changed
~word_wrap_changed
~mark_occurrences_changed =
object
inherit ml_signals [
line_numbers_changed#disconnect;
line_numbers_font_changed#disconnect;
highlight_current_line_changed#disconnect;
show_markers_changed#disconnect;
word_wrap_changed#disconnect;
mark_occurrences_changed#disconnect
]
method line_numbers_changed = line_numbers_changed#connect ~after
method line_numbers_font_changed = line_numbers_font_changed#connect ~after
method highlight_current_line_changed = highlight_current_line_changed#connect ~after
method show_markers_changed = show_markers_changed#connect ~after
method word_wrap_changed = word_wrap_changed#connect ~after
method mark_occurrences_changed = mark_occurrences_changed#connect ~after
end
|
4f2f01d49dfeabaccd0a69ad0aac9d944aebda2931b31608965de4a866c9775c | anmonteiro/piaf | request_info.ml | type t =
{ scheme : Scheme.t
; version : Versions.HTTP.t
; client_address : Eio.Net.Sockaddr.stream
; sw : Eio.Switch.t
}
| null | https://raw.githubusercontent.com/anmonteiro/piaf/782a60390b172cf1122105717bdb5c2e9924f89b/lib/request_info.ml | ocaml | type t =
{ scheme : Scheme.t
; version : Versions.HTTP.t
; client_address : Eio.Net.Sockaddr.stream
; sw : Eio.Switch.t
}
| |
34b7549c5ffa2a7c1d2319d4f410e85956f0295ec47066f00465dec650ee8f2b | gregwebs/Shelly.hs | Base.hs | # LANGUAGE ScopedTypeVariables #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE CPP #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE UndecidableInstances #
{-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE InstanceSigs #
module Shelly.Base
(
Sh(..), ShIO, runSh, State(..), ReadOnlyState(..), StdHandle(..),
HandleInitializer, StdInit(..),
FilePath, Text,
relPath, path, absPath, canonic, canonicalize,
test_d, test_s,
unpack, gets, get, modify, trace,
ls, lsRelAbs,
toTextIgnore,
echo, echo_n, echo_err, echo_n_err, inspect, inspect_err,
catchany,
liftIO, (>=>),
eitherRelativeTo, relativeTo, maybeRelativeTo,
whenM
-- * utilities not yet exported
, addTrailingSlash
) where
import Data.Text (Text)
import System.Process( StdStream(..) )
import System.IO ( Handle, hFlush, stderr, stdout )
import Control.Monad ( when, (>=>) )
#if !MIN_VERSION_base(4,13,0)
import Control.Monad.Fail (MonadFail)
import Control.Applicative (Applicative, (<$>))
import Data.Monoid (mappend)
#endif
import Control.Monad.Base
import Control.Monad.Trans.Control
import System.Directory( doesDirectoryExist, listDirectory)
import System.PosixCompat.Files( getSymbolicLinkStatus, isSymbolicLink )
import System.FilePath ( isRelative)
import qualified System.FilePath as FP
import qualified System.Directory as FS
import Data.IORef (readIORef, modifyIORef, IORef)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Control.Exception (SomeException, catch, throwIO, Exception)
import Data.Maybe (fromMaybe)
import qualified Control.Monad.Catch as Catch
import Control.Monad.Trans ( MonadIO, liftIO )
import Control.Monad.Reader.Class (MonadReader, ask)
import Control.Monad.Trans.Reader (runReaderT, ReaderT(..))
import qualified Data.Set as S
import Data.Typeable (Typeable)
-- | ShIO is Deprecated in favor of 'Sh', which is easier to type.
type ShIO a = Sh a
{-# DEPRECATED ShIO "Use Sh instead of ShIO" #-}
newtype Sh a = Sh {
unSh :: ReaderT (IORef State) IO a
} deriving (Applicative, Monad, MonadFail, MonadIO, MonadReader (IORef State), Functor, Catch.MonadMask)
instance MonadBase IO Sh where
liftBase = Sh . ReaderT . const
instance MonadBaseControl IO Sh where
#if MIN_VERSION_monad_control(1,0,0)
type StM Sh a = StM (ReaderT (IORef State) IO) a
liftBaseWith f =
Sh $ liftBaseWith $ \runInBase -> f $ \k ->
runInBase $ unSh k
restoreM = Sh . restoreM
#else
newtype StM Sh a = StMSh (StM (ReaderT (IORef State) IO) a)
liftBaseWith f =
Sh $ liftBaseWith $ \runInBase -> f $ \k ->
liftM StMSh $ runInBase $ unSh k
restoreM (StMSh m) = Sh . restoreM $ m
#endif
instance Catch.MonadThrow Sh where
throwM = liftIO . Catch.throwM
instance Catch.MonadCatch Sh where
catch (Sh (ReaderT m)) c =
Sh $ ReaderT $ \r -> m r `Catch.catch` \e -> runSh (c e) r
runSh :: Sh a -> IORef State -> IO a
runSh = runReaderT . unSh
data ReadOnlyState = ReadOnlyState { rosFailToDir :: Bool }
data State = State
{ sCode :: Int -- ^ exit code for command that ran
^ stdin for the command to be run
, sStderr :: Text -- ^ stderr for command that ran
, sDirectory :: FilePath -- ^ working directory
^ by default , hPutStrLn stdout
, sPrintStdout :: Bool -- ^ print stdout of command that is executed
, sPutStderr :: Text -> IO () -- ^ by default, hPutStrLn stderr
, sPrintStderr :: Bool -- ^ print stderr of command that is executed
, sPrintCommands :: Bool -- ^ print command that is executed
, sInitCommandHandles :: StdInit -- ^ initializers for the standard process handles
-- when running a command
, sCommandEscaping :: Bool -- ^ when running a command, escape shell characters such as '*' rather
-- than passing to the shell for expansion
, sEnvironment :: [(String, String)]
, sPathExecutables :: Maybe [(FilePath, S.Set FilePath)] -- ^ cache of executables in the PATH
, sTracing :: Bool -- ^ should we trace command execution
, sTrace :: Text -- ^ the trace of command execution
, sErrExit :: Bool -- ^ should we exit immediately on any error
, sReadOnly :: ReadOnlyState
, sFollowSymlink :: Bool -- ^ 'find'-command follows symlinks.
}
data StdHandle = InHandle StdStream
| OutHandle StdStream
| ErrorHandle StdStream
-- | Initialize a handle before using it.
type HandleInitializer = Handle -> IO ()
| A collection of initializers for the three standard process handles .
data StdInit =
StdInit {
inInit :: HandleInitializer,
outInit :: HandleInitializer,
errInit :: HandleInitializer
}
-- | A monadic-conditional version of the 'when' guard.
whenM :: Monad m => m Bool -> m () -> m ()
whenM c a = c >>= \res -> when res a
-- | Makes a relative path relative to the current 'Sh' working directory.
-- An absolute path is returned as is.
-- To create an absolute path, use 'absPath'.
relPath :: FilePath -> Sh FilePath
relPath fp = do
wd <- gets sDirectory
rel <- eitherRelativeTo wd fp
return $ case rel of
Right p -> p
Left p -> p
eitherRelativeTo
:: FilePath -- ^ Anchor path, the prefix.
-> FilePath -- ^ Make this relative to anchor path.
^ ' Left ' is canonic of second path .
eitherRelativeTo relativeFP fp = do
let fullFp = relativeFP FP.</> fp
let relDir = addTrailingSlash relativeFP
stripIt relativeFP fp $
stripIt relativeFP fullFp $
stripIt relDir fp $
stripIt relDir fullFp $ do
relCan <- canonic relDir
fpCan <- canonic fullFp
stripIt relCan fpCan $ return $ Left fpCan
where
stripIt
:: FilePath
-> FilePath
-> Sh (Either FilePath FilePath)
-> Sh (Either FilePath FilePath)
stripIt rel toStrip nada =
let stripped = FP.makeRelative rel toStrip
in if stripped == toStrip
then nada
else return $ Right stripped
| Make the second path relative to the first .
-- Will canonicalize the paths if necessary.
relativeTo :: FilePath -- ^ Anchor path, the prefix.
-> FilePath -- ^ Make this relative to anchor path.
-> Sh FilePath
relativeTo relativeFP fp =
fmap (fromMaybe fp) $ maybeRelativeTo relativeFP fp
maybeRelativeTo :: FilePath -- ^ Anchor path, the prefix.
-> FilePath -- ^ Make this relative to anchor path.
-> Sh (Maybe FilePath)
maybeRelativeTo relativeFP fp = do
epath <- eitherRelativeTo relativeFP fp
return $ case epath of
Right p -> Just p
Left _ -> Nothing
-- | Add a trailing slash to ensure the path indicates a directory.
addTrailingSlash :: FilePath -> FilePath
addTrailingSlash = FP.addTrailingPathSeparator
-- | Make an absolute path.
-- Like 'canonicalize', but on an exception returns 'absPath'.
canonic :: FilePath -> Sh FilePath
canonic fp = do
p <- absPath fp
liftIO $ canonicalizePath p `catchany` \_ -> return p
-- | Obtain a (reasonably) canonic file path to a filesystem object. Based on
-- 'canonicalizePath'.
canonicalize :: FilePath -> Sh FilePath
canonicalize = absPath >=> liftIO . canonicalizePath
-- | Version of 'FS.canonicalizePath' that keeps a trailing slash.
canonicalizePath :: FilePath -> IO FilePath
canonicalizePath p = let was_dir = null (FP.takeFileName p) in
if not was_dir then FS.canonicalizePath p
else addTrailingSlash `fmap` FS.canonicalizePath p
data EmptyFilePathError = EmptyFilePathError deriving Typeable
instance Show EmptyFilePathError where
show _ = "Empty filepath"
instance Exception EmptyFilePathError
-- | Make a relative path absolute by combining with the working directory.
-- An absolute path is returned as is.
-- To create a relative path, use 'relPath'.
absPath :: FilePath -> Sh FilePath
absPath p | null p = liftIO $ throwIO EmptyFilePathError
| isRelative p = do
cwd <- gets sDirectory
return (cwd FP.</> p)
| otherwise = return p
path :: FilePath -> Sh FilePath
path = absPath
{-# DEPRECATED path "use absPath, canonic, or relPath instead" #-}
-- | Does a path point to an existing directory?
test_d :: FilePath -> Sh Bool
test_d = absPath >=> liftIO . doesDirectoryExist
-- | Does a path point to a symlink?
test_s :: FilePath -> Sh Bool
test_s = absPath >=> liftIO . \f -> do
stat <- getSymbolicLinkStatus f
return $ isSymbolicLink stat
unpack :: FilePath -> String
unpack = id
gets :: (State -> a) -> Sh a
gets f = f <$> get
get :: Sh State
get = do
stateVar <- ask
liftIO (readIORef stateVar)
modify :: (State -> State) -> Sh ()
modify f = do
state <- ask
liftIO (modifyIORef state f)
-- | Internally log what occurred.
-- Log will be re-played on failure.
trace :: Text -> Sh ()
trace msg =
whenM (gets sTracing) $ modify $
\st -> st { sTrace = sTrace st `mappend` msg `mappend` "\n" }
| List directory contents . Does /not/ include @.@ and @ .. @ , but it does
-- include (other) hidden files.
ls :: FilePath -> Sh [FilePath]
-- it is important to use path and not absPath so that the listing can remain relative
ls fp = do
trace $ "ls " `mappend` toTextIgnore fp
fmap fst $ lsRelAbs fp
lsRelAbs :: FilePath -> Sh ([FilePath], [FilePath])
lsRelAbs f = absPath f >>= \fp -> do
files <- liftIO $ listDirectory fp
let absolute = map (fp FP.</>) files
let relativized = map (\p -> FP.joinPath [f, p]) files
return (relativized, absolute)
toTextIgnore :: FilePath -> Text
toTextIgnore = T.pack
-- | 'print' lifted into 'Sh'.
inspect :: Show s => s -> Sh ()
inspect x = do
trace $ T.pack s
liftIO $ putStrLn s
where s = show x
-- | A 'print' lifted into 'Sh' using stderr.
inspect_err :: Show s => s -> Sh ()
inspect_err x = do
let shown = T.pack $ show x
trace shown
echo_err shown
| Echo text to standard ( error , when using @_err@ variants ) output . The @_n@
-- variants do not print a final newline.
echo, echo_n, echo_err, echo_n_err :: Text -> Sh ()
echo msg = traceEcho msg >> liftIO (TIO.putStrLn msg >> hFlush stdout)
echo_n msg = traceEcho msg >> liftIO (TIO.putStr msg >> hFlush stdout)
echo_err msg = traceEcho msg >> liftIO (TIO.hPutStrLn stderr msg >> hFlush stdout)
echo_n_err msg = traceEcho msg >> liftIO (TIO.hPutStr stderr msg >> hFlush stderr)
traceEcho :: Text -> Sh ()
traceEcho msg = trace ("echo " `mappend` "'" `mappend` msg `mappend` "'")
-- | A helper to catch any exception (same as
@ ... ` catch ` \(e : : SomeException ) - > ... @ ) .
catchany :: IO a -> (SomeException -> IO a) -> IO a
catchany = catch
| null | https://raw.githubusercontent.com/gregwebs/Shelly.hs/fda3a8fb49d906c78ed95a6cbf2be80964f9ee03/src/Shelly/Base.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE DeriveDataTypeable #
* utilities not yet exported
| ShIO is Deprecated in favor of 'Sh', which is easier to type.
# DEPRECATED ShIO "Use Sh instead of ShIO" #
^ exit code for command that ran
^ stderr for command that ran
^ working directory
^ print stdout of command that is executed
^ by default, hPutStrLn stderr
^ print stderr of command that is executed
^ print command that is executed
^ initializers for the standard process handles
when running a command
^ when running a command, escape shell characters such as '*' rather
than passing to the shell for expansion
^ cache of executables in the PATH
^ should we trace command execution
^ the trace of command execution
^ should we exit immediately on any error
^ 'find'-command follows symlinks.
| Initialize a handle before using it.
| A monadic-conditional version of the 'when' guard.
| Makes a relative path relative to the current 'Sh' working directory.
An absolute path is returned as is.
To create an absolute path, use 'absPath'.
^ Anchor path, the prefix.
^ Make this relative to anchor path.
Will canonicalize the paths if necessary.
^ Anchor path, the prefix.
^ Make this relative to anchor path.
^ Anchor path, the prefix.
^ Make this relative to anchor path.
| Add a trailing slash to ensure the path indicates a directory.
| Make an absolute path.
Like 'canonicalize', but on an exception returns 'absPath'.
| Obtain a (reasonably) canonic file path to a filesystem object. Based on
'canonicalizePath'.
| Version of 'FS.canonicalizePath' that keeps a trailing slash.
| Make a relative path absolute by combining with the working directory.
An absolute path is returned as is.
To create a relative path, use 'relPath'.
# DEPRECATED path "use absPath, canonic, or relPath instead" #
| Does a path point to an existing directory?
| Does a path point to a symlink?
| Internally log what occurred.
Log will be re-played on failure.
include (other) hidden files.
it is important to use path and not absPath so that the listing can remain relative
| 'print' lifted into 'Sh'.
| A 'print' lifted into 'Sh' using stderr.
variants do not print a final newline.
| A helper to catch any exception (same as | # LANGUAGE ScopedTypeVariables #
# LANGUAGE CPP #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE UndecidableInstances #
# LANGUAGE InstanceSigs #
module Shelly.Base
(
Sh(..), ShIO, runSh, State(..), ReadOnlyState(..), StdHandle(..),
HandleInitializer, StdInit(..),
FilePath, Text,
relPath, path, absPath, canonic, canonicalize,
test_d, test_s,
unpack, gets, get, modify, trace,
ls, lsRelAbs,
toTextIgnore,
echo, echo_n, echo_err, echo_n_err, inspect, inspect_err,
catchany,
liftIO, (>=>),
eitherRelativeTo, relativeTo, maybeRelativeTo,
whenM
, addTrailingSlash
) where
import Data.Text (Text)
import System.Process( StdStream(..) )
import System.IO ( Handle, hFlush, stderr, stdout )
import Control.Monad ( when, (>=>) )
#if !MIN_VERSION_base(4,13,0)
import Control.Monad.Fail (MonadFail)
import Control.Applicative (Applicative, (<$>))
import Data.Monoid (mappend)
#endif
import Control.Monad.Base
import Control.Monad.Trans.Control
import System.Directory( doesDirectoryExist, listDirectory)
import System.PosixCompat.Files( getSymbolicLinkStatus, isSymbolicLink )
import System.FilePath ( isRelative)
import qualified System.FilePath as FP
import qualified System.Directory as FS
import Data.IORef (readIORef, modifyIORef, IORef)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Control.Exception (SomeException, catch, throwIO, Exception)
import Data.Maybe (fromMaybe)
import qualified Control.Monad.Catch as Catch
import Control.Monad.Trans ( MonadIO, liftIO )
import Control.Monad.Reader.Class (MonadReader, ask)
import Control.Monad.Trans.Reader (runReaderT, ReaderT(..))
import qualified Data.Set as S
import Data.Typeable (Typeable)
type ShIO a = Sh a
newtype Sh a = Sh {
unSh :: ReaderT (IORef State) IO a
} deriving (Applicative, Monad, MonadFail, MonadIO, MonadReader (IORef State), Functor, Catch.MonadMask)
instance MonadBase IO Sh where
liftBase = Sh . ReaderT . const
instance MonadBaseControl IO Sh where
#if MIN_VERSION_monad_control(1,0,0)
type StM Sh a = StM (ReaderT (IORef State) IO) a
liftBaseWith f =
Sh $ liftBaseWith $ \runInBase -> f $ \k ->
runInBase $ unSh k
restoreM = Sh . restoreM
#else
newtype StM Sh a = StMSh (StM (ReaderT (IORef State) IO) a)
liftBaseWith f =
Sh $ liftBaseWith $ \runInBase -> f $ \k ->
liftM StMSh $ runInBase $ unSh k
restoreM (StMSh m) = Sh . restoreM $ m
#endif
instance Catch.MonadThrow Sh where
throwM = liftIO . Catch.throwM
instance Catch.MonadCatch Sh where
catch (Sh (ReaderT m)) c =
Sh $ ReaderT $ \r -> m r `Catch.catch` \e -> runSh (c e) r
runSh :: Sh a -> IORef State -> IO a
runSh = runReaderT . unSh
data ReadOnlyState = ReadOnlyState { rosFailToDir :: Bool }
data State = State
^ stdin for the command to be run
^ by default , hPutStrLn stdout
, sEnvironment :: [(String, String)]
, sReadOnly :: ReadOnlyState
}
data StdHandle = InHandle StdStream
| OutHandle StdStream
| ErrorHandle StdStream
type HandleInitializer = Handle -> IO ()
| A collection of initializers for the three standard process handles .
data StdInit =
StdInit {
inInit :: HandleInitializer,
outInit :: HandleInitializer,
errInit :: HandleInitializer
}
whenM :: Monad m => m Bool -> m () -> m ()
whenM c a = c >>= \res -> when res a
relPath :: FilePath -> Sh FilePath
relPath fp = do
wd <- gets sDirectory
rel <- eitherRelativeTo wd fp
return $ case rel of
Right p -> p
Left p -> p
eitherRelativeTo
^ ' Left ' is canonic of second path .
eitherRelativeTo relativeFP fp = do
let fullFp = relativeFP FP.</> fp
let relDir = addTrailingSlash relativeFP
stripIt relativeFP fp $
stripIt relativeFP fullFp $
stripIt relDir fp $
stripIt relDir fullFp $ do
relCan <- canonic relDir
fpCan <- canonic fullFp
stripIt relCan fpCan $ return $ Left fpCan
where
stripIt
:: FilePath
-> FilePath
-> Sh (Either FilePath FilePath)
-> Sh (Either FilePath FilePath)
stripIt rel toStrip nada =
let stripped = FP.makeRelative rel toStrip
in if stripped == toStrip
then nada
else return $ Right stripped
| Make the second path relative to the first .
-> Sh FilePath
relativeTo relativeFP fp =
fmap (fromMaybe fp) $ maybeRelativeTo relativeFP fp
-> Sh (Maybe FilePath)
maybeRelativeTo relativeFP fp = do
epath <- eitherRelativeTo relativeFP fp
return $ case epath of
Right p -> Just p
Left _ -> Nothing
addTrailingSlash :: FilePath -> FilePath
addTrailingSlash = FP.addTrailingPathSeparator
canonic :: FilePath -> Sh FilePath
canonic fp = do
p <- absPath fp
liftIO $ canonicalizePath p `catchany` \_ -> return p
canonicalize :: FilePath -> Sh FilePath
canonicalize = absPath >=> liftIO . canonicalizePath
canonicalizePath :: FilePath -> IO FilePath
canonicalizePath p = let was_dir = null (FP.takeFileName p) in
if not was_dir then FS.canonicalizePath p
else addTrailingSlash `fmap` FS.canonicalizePath p
data EmptyFilePathError = EmptyFilePathError deriving Typeable
instance Show EmptyFilePathError where
show _ = "Empty filepath"
instance Exception EmptyFilePathError
absPath :: FilePath -> Sh FilePath
absPath p | null p = liftIO $ throwIO EmptyFilePathError
| isRelative p = do
cwd <- gets sDirectory
return (cwd FP.</> p)
| otherwise = return p
path :: FilePath -> Sh FilePath
path = absPath
test_d :: FilePath -> Sh Bool
test_d = absPath >=> liftIO . doesDirectoryExist
test_s :: FilePath -> Sh Bool
test_s = absPath >=> liftIO . \f -> do
stat <- getSymbolicLinkStatus f
return $ isSymbolicLink stat
unpack :: FilePath -> String
unpack = id
gets :: (State -> a) -> Sh a
gets f = f <$> get
get :: Sh State
get = do
stateVar <- ask
liftIO (readIORef stateVar)
modify :: (State -> State) -> Sh ()
modify f = do
state <- ask
liftIO (modifyIORef state f)
trace :: Text -> Sh ()
trace msg =
whenM (gets sTracing) $ modify $
\st -> st { sTrace = sTrace st `mappend` msg `mappend` "\n" }
| List directory contents . Does /not/ include @.@ and @ .. @ , but it does
ls :: FilePath -> Sh [FilePath]
ls fp = do
trace $ "ls " `mappend` toTextIgnore fp
fmap fst $ lsRelAbs fp
lsRelAbs :: FilePath -> Sh ([FilePath], [FilePath])
lsRelAbs f = absPath f >>= \fp -> do
files <- liftIO $ listDirectory fp
let absolute = map (fp FP.</>) files
let relativized = map (\p -> FP.joinPath [f, p]) files
return (relativized, absolute)
toTextIgnore :: FilePath -> Text
toTextIgnore = T.pack
inspect :: Show s => s -> Sh ()
inspect x = do
trace $ T.pack s
liftIO $ putStrLn s
where s = show x
inspect_err :: Show s => s -> Sh ()
inspect_err x = do
let shown = T.pack $ show x
trace shown
echo_err shown
| Echo text to standard ( error , when using @_err@ variants ) output . The @_n@
echo, echo_n, echo_err, echo_n_err :: Text -> Sh ()
echo msg = traceEcho msg >> liftIO (TIO.putStrLn msg >> hFlush stdout)
echo_n msg = traceEcho msg >> liftIO (TIO.putStr msg >> hFlush stdout)
echo_err msg = traceEcho msg >> liftIO (TIO.hPutStrLn stderr msg >> hFlush stdout)
echo_n_err msg = traceEcho msg >> liftIO (TIO.hPutStr stderr msg >> hFlush stderr)
traceEcho :: Text -> Sh ()
traceEcho msg = trace ("echo " `mappend` "'" `mappend` msg `mappend` "'")
@ ... ` catch ` \(e : : SomeException ) - > ... @ ) .
catchany :: IO a -> (SomeException -> IO a) -> IO a
catchany = catch
|
b8ba25d8a7602018e67e9b95298b19eaee36bc12aa83d97a8bf874446c3fcc7e | knupfer/type-of-html | Custom.hs | # LANGUAGE TypeFamilies #
{-# LANGUAGE DataKinds #-}
module Custom where
import Html
newtype instance Attribute "hx-post" 'True v = HxPostA v
| null | https://raw.githubusercontent.com/knupfer/type-of-html/102bd92e7570354489bcd6d7fdeadd29c28e82a1/test/Custom.hs | haskell | # LANGUAGE DataKinds # | # LANGUAGE TypeFamilies #
module Custom where
import Html
newtype instance Attribute "hx-post" 'True v = HxPostA v
|
0f2ee49830768f4ce9e32c99f9f7a32561b81b9ac62a9807588edbdfa70eeb58 | tmattio/spin | spin_std.ml | module Glob = Glob
module Hashtbl = Hashtbl
module List = List
module Result = Result
module String = String
module Filename = Filename
module Sys = Sys
module Spawn = struct
include Spawn
let resolve_in_path prog =
Do not try to resolve in the path if the program is something like
* ./this.exe
* ./this.exe *)
if String.split_on_char '/' prog |> List.length <> 1 then
Some prog
else
let paths = Sys.getenv "PATH" |> String.split_on_char ':' in
List.map (fun d -> Filename.concat d prog) paths
|> List.find_opt Sys.file_exists
let resolve_in_path_exn prog =
match resolve_in_path prog with
| None ->
failwith (Printf.sprintf "no program in path %s" prog)
| Some prog ->
prog
let spawn ?env ?cwd ?stdin ?stdout ?stderr prog argv =
let prog = resolve_in_path_exn prog in
let argv = prog :: argv in
spawn ~prog ~argv ?env ?cwd ?stdin ?stdout ?stderr ()
let exec ?env ?cwd ?stdin ?stdout ?stderr prog argv =
let pid = spawn ?env ?cwd ?stdin ?stdout ?stderr prog argv in
match snd (Unix.waitpid [] pid) with
| WEXITED 0 ->
Ok ()
| WEXITED n ->
Error (Printf.sprintf "exited with code %d" n)
| WSIGNALED n ->
Error (Printf.sprintf "exited with signal %d" n)
| WSTOPPED n ->
Error (Printf.sprintf "stopped with code %d" n)
end
| null | https://raw.githubusercontent.com/tmattio/spin/092ab5979d4f1d01f538743443b70465a2d5ed23/lib/spin_std/spin_std.ml | ocaml | module Glob = Glob
module Hashtbl = Hashtbl
module List = List
module Result = Result
module String = String
module Filename = Filename
module Sys = Sys
module Spawn = struct
include Spawn
let resolve_in_path prog =
Do not try to resolve in the path if the program is something like
* ./this.exe
* ./this.exe *)
if String.split_on_char '/' prog |> List.length <> 1 then
Some prog
else
let paths = Sys.getenv "PATH" |> String.split_on_char ':' in
List.map (fun d -> Filename.concat d prog) paths
|> List.find_opt Sys.file_exists
let resolve_in_path_exn prog =
match resolve_in_path prog with
| None ->
failwith (Printf.sprintf "no program in path %s" prog)
| Some prog ->
prog
let spawn ?env ?cwd ?stdin ?stdout ?stderr prog argv =
let prog = resolve_in_path_exn prog in
let argv = prog :: argv in
spawn ~prog ~argv ?env ?cwd ?stdin ?stdout ?stderr ()
let exec ?env ?cwd ?stdin ?stdout ?stderr prog argv =
let pid = spawn ?env ?cwd ?stdin ?stdout ?stderr prog argv in
match snd (Unix.waitpid [] pid) with
| WEXITED 0 ->
Ok ()
| WEXITED n ->
Error (Printf.sprintf "exited with code %d" n)
| WSIGNALED n ->
Error (Printf.sprintf "exited with signal %d" n)
| WSTOPPED n ->
Error (Printf.sprintf "stopped with code %d" n)
end
| |
7f1f450a2df6a0f2cd774b6b349c0a45758d9ac53d180f015ecba3ece52587b0 | nuvla/api-server | data_record_test.cljc | (ns sixsq.nuvla.server.resources.spec.data-record-test
(:require
[clojure.test :refer [deftest]]
[sixsq.nuvla.server.resources.data-record :as data-record-resource]
[sixsq.nuvla.server.resources.spec.data-record :as data-record]
[sixsq.nuvla.server.resources.spec.spec-test-utils :as stu]))
(def valid-acl {:owners ["group/nuvla-admin"]
:view-acl ["group/nuvla-anon"]})
(deftest check-data-record
(let [timestamp "1964-08-25T10:00:00.00Z"
location [6.143158 46.204391 373.0]
data-record {:id (str data-record-resource/resource-type "/uuid")
:resource-type data-record-resource/resource-type
:created timestamp
:updated timestamp
:acl valid-acl
:infrastructure-service "infrastructure-service/my-service-uuid"
:other "value"
:content-type "text/html; charset=utf-8"
:bytes 10234
:md5sum "abcde"
:timestamp timestamp
:location location
:mount {:mount-type "volume"
:target "/mnt/bucket"
:volume-options {:o "addr=127.0.0.1"
:type "nfs"
:device ":/data/bucket"}}}]
(stu/is-valid ::data-record/schema data-record)
;; mandatory keywords
(doseq [k #{:created :updated :acl :infrastructure-service}]
(stu/is-invalid ::data-record/schema (dissoc data-record k)))
;; optional keywords
(doseq [k #{:other :content-type :bytes :md5sum :timestamp :location :mount}]
(stu/is-valid ::data-record/schema (dissoc data-record k)))))
| null | https://raw.githubusercontent.com/nuvla/api-server/a64a61b227733f1a0a945003edf5abaf5150a15c/code/test/sixsq/nuvla/server/resources/spec/data_record_test.cljc | clojure | mandatory keywords
optional keywords | (ns sixsq.nuvla.server.resources.spec.data-record-test
(:require
[clojure.test :refer [deftest]]
[sixsq.nuvla.server.resources.data-record :as data-record-resource]
[sixsq.nuvla.server.resources.spec.data-record :as data-record]
[sixsq.nuvla.server.resources.spec.spec-test-utils :as stu]))
(def valid-acl {:owners ["group/nuvla-admin"]
:view-acl ["group/nuvla-anon"]})
(deftest check-data-record
(let [timestamp "1964-08-25T10:00:00.00Z"
location [6.143158 46.204391 373.0]
data-record {:id (str data-record-resource/resource-type "/uuid")
:resource-type data-record-resource/resource-type
:created timestamp
:updated timestamp
:acl valid-acl
:infrastructure-service "infrastructure-service/my-service-uuid"
:other "value"
:content-type "text/html; charset=utf-8"
:bytes 10234
:md5sum "abcde"
:timestamp timestamp
:location location
:mount {:mount-type "volume"
:target "/mnt/bucket"
:volume-options {:o "addr=127.0.0.1"
:type "nfs"
:device ":/data/bucket"}}}]
(stu/is-valid ::data-record/schema data-record)
(doseq [k #{:created :updated :acl :infrastructure-service}]
(stu/is-invalid ::data-record/schema (dissoc data-record k)))
(doseq [k #{:other :content-type :bytes :md5sum :timestamp :location :mount}]
(stu/is-valid ::data-record/schema (dissoc data-record k)))))
|
ef474c21f6c9be93ea9a09c095a7486960c80c367538e8e1bd91c8f2aeefac91 | Elzair/nazghul | timer.scm | ;; A generic timer mech
(define (tmr-mk targ timeout sig)
(list targ timeout 0 #f sig))
(define (tmr-targ tmr) (car tmr))
(define (tmr-timeout tmr) (cadr tmr))
(define (tmr-count tmr) (caddr tmr))
(define (tmr-on? tmr) (cadddr tmr))
(define (tmr-sig tmr) (list-ref tmr 4))
(define (tmr-set-count! tmr val) (set-car! (cddr tmr) val))
(define (tmr-set-start! tmr val) (set-car! (cdddr tmr) val))
(define (tmr-expired? tmr) (>= (tmr-count tmr) (tmr-timeout tmr)))
(define (tmr-stop! tmr)
(tmr-set-count! tmr 0)
(tmr-set-start! tmr #f))
(define (tmr-inc! tmr) (tmr-set-count! tmr (+ 1 (tmr-count tmr))))
(define (ktmr-start! ktmr)
(let ((tmr (kobj-gob-data ktmr)))
(tmr-set-count! tmr 0)
(tmr-set-start! tmr #t)))
(define (ktmr-exec ktmr)
(let ((tmr (kobj-gob-data ktmr)))
(display "tmr-exec")(newline)
(if (tmr-on? tmr)
(begin
(display "tmr-on")(newline)
(tmr-inc! tmr)
(if (tmr-expired? tmr)
(let* ((tag (tmr-targ tmr))
(targ (safe-eval tag)))
(display "timer-expired")(newline)
(display "timer-sig:")(display (tmr-sig tmr))(newline)
(tmr-stop! tmr)
(if (notnull? tag)
(signal-kobj targ (tmr-sig tmr) targ ktmr))))))))
(define timer-ifc
(ifc nil
(method 'exec ktmr-exec)
(method 'start ktmr-start!)
))
(mk-obj-type 't_timer "timer" '() layer-mechanism timer-ifc)
(define (mk-timer target-tag timeout sig)
(bind (kern-mk-obj t_timer 1)
(tmr-mk target-tag timeout sig)))
| null | https://raw.githubusercontent.com/Elzair/nazghul/8f3a45ed6289cd9f469c4ff618d39366f2fbc1d8/worlds/haxima-1.002/timer.scm | scheme | A generic timer mech |
(define (tmr-mk targ timeout sig)
(list targ timeout 0 #f sig))
(define (tmr-targ tmr) (car tmr))
(define (tmr-timeout tmr) (cadr tmr))
(define (tmr-count tmr) (caddr tmr))
(define (tmr-on? tmr) (cadddr tmr))
(define (tmr-sig tmr) (list-ref tmr 4))
(define (tmr-set-count! tmr val) (set-car! (cddr tmr) val))
(define (tmr-set-start! tmr val) (set-car! (cdddr tmr) val))
(define (tmr-expired? tmr) (>= (tmr-count tmr) (tmr-timeout tmr)))
(define (tmr-stop! tmr)
(tmr-set-count! tmr 0)
(tmr-set-start! tmr #f))
(define (tmr-inc! tmr) (tmr-set-count! tmr (+ 1 (tmr-count tmr))))
(define (ktmr-start! ktmr)
(let ((tmr (kobj-gob-data ktmr)))
(tmr-set-count! tmr 0)
(tmr-set-start! tmr #t)))
(define (ktmr-exec ktmr)
(let ((tmr (kobj-gob-data ktmr)))
(display "tmr-exec")(newline)
(if (tmr-on? tmr)
(begin
(display "tmr-on")(newline)
(tmr-inc! tmr)
(if (tmr-expired? tmr)
(let* ((tag (tmr-targ tmr))
(targ (safe-eval tag)))
(display "timer-expired")(newline)
(display "timer-sig:")(display (tmr-sig tmr))(newline)
(tmr-stop! tmr)
(if (notnull? tag)
(signal-kobj targ (tmr-sig tmr) targ ktmr))))))))
(define timer-ifc
(ifc nil
(method 'exec ktmr-exec)
(method 'start ktmr-start!)
))
(mk-obj-type 't_timer "timer" '() layer-mechanism timer-ifc)
(define (mk-timer target-tag timeout sig)
(bind (kern-mk-obj t_timer 1)
(tmr-mk target-tag timeout sig)))
|
a226865ee7408059620a7aaaaa57e00dbb3ec74ebe0ca305978b48198b0138a2 | glondu/belenios | mails_voter.ml | (**************************************************************************)
(* BELENIOS *)
(* *)
Copyright © 2012 - 2023
(* *)
(* This program is free software: you can redistribute it and/or modify *)
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation , either version 3 of the
(* License, or (at your option) any later version, with the additional *)
exemption that compiling , linking , and/or using OpenSSL is allowed .
(* *)
(* This program is distributed in the hope that it will be useful, but *)
(* WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *)
(* Affero General Public License for more details. *)
(* *)
You should have received a copy of the GNU Affero General Public
(* License along with this program. If not, see *)
(* </>. *)
(**************************************************************************)
open Lwt
open Lwt.Syntax
open Belenios_core
open Serializable_j
open Common
open Web_serializable_j
open Web_common
let contact_footer l contact =
let open (val l : Belenios_ui.I18n.GETTEXT) in
match contact with
| None -> fun _ -> ()
| Some x ->
fun b ->
let open Belenios_ui.Mail_formatter in
add_newline b;
add_newline b;
add_sentence b (s_ "To get more information, please contact:");
add_newline b;
add_string b " ";
add_string b x
let mail_password l title login password weight url contact =
let open (val l : Belenios_ui.I18n.GETTEXT) in
let open Belenios_ui.Mail_formatter in
let b = create () in
add_sentence b (s_ "Please find below your login and password for the election"); add_newline b;
add_newline b;
add_string b " "; add_string b title; add_newline b;
add_newline b;
add_sentence b (s_ "Note that you also need a credential, sent in a separate email, to start voting.");
add_newline b;
add_newline b;
add_string b (s_ "Username:"); add_string b " "; add_string b login; add_newline b;
add_string b (s_ "Password:"); add_string b " "; add_string b password; add_newline b;
add_newline b;
(match weight with
| Some weight ->
add_string b (s_ "Number of votes:"); add_string b " "; add_string b (Weight.to_string weight); add_newline b
| None -> ()
);
add_string b (s_ "Page of the election:"); add_string b " "; add_string b url; add_newline b;
add_newline b;
add_sentence b (s_ "You are allowed to vote several times.");
add_sentence b (s_ "Only the last vote counts.");
contact_footer l contact b;
contents b
let format_password_email (x : password_email) =
let url = get_election_home_url x.uuid in
let* bodies =
Lwt_list.map_s (fun lang ->
let* l = Web_i18n.get ~component:"voter" ~lang in
return (mail_password l x.title x.login x.password x.weight url x.contact)
) x.langs
in
let body = String.concat "\n\n----------\n\n" bodies in
let body = body ^ "\n\n-- \nBelenios" in
let* subject =
let* l = Web_i18n.get ~component:"voter" ~lang:(List.hd x.langs) in
let open (val l) in
Printf.kprintf return (f_ "Your password for election %s") x.title
in
Lwt.return (subject, body)
open Belenios_platform.Platform
let generate_password_email metadata langs title uuid v show_weight =
let (_, {address; login; weight}) : Voter.t = v in
let weight = if show_weight then weight else None in
let salt = generate_token () in
let* password =
let x = generate_token ~length:15 () in
return (format_password x)
in
let hashed = sha256_hex (salt ^ password) in
let x : password_email = {
uuid;
title;
login = Option.value login ~default:address;
password;
weight;
contact = metadata.e_contact;
langs;
recipient = address;
}
in
return (`Password x, (salt, hashed))
let mail_credential l has_passwords title ~login cred weight url metadata =
let open (val l : Belenios_ui.I18n.GETTEXT) in
let open Belenios_ui.Mail_formatter in
let b = create () in
add_sentence b (s_ "You are listed as a voter for the election"); add_newline b;
add_newline b;
add_string b " "; add_string b title; add_newline b;
add_newline b;
add_sentence b (s_ "You will find below your credential.");
add_sentence b (s_ "You will be asked to enter your credential before entering the voting booth.");
if has_passwords then (
add_sentence b (s_ "To cast a vote, you will also need a password, sent in a separate email.");
);
add_newline b;
add_newline b;
add_string b (s_ "Credential:"); add_string b " "; add_string b cred; add_newline b;
add_newline b;
add_string b (s_ "Username:"); add_string b " "; add_string b login; add_newline b;
(match weight with
| Some weight ->
add_string b (s_ "Number of votes:"); add_string b " "; add_string b (Weight.to_string weight); add_newline b
| None -> ()
);
add_string b (s_ "Page of the election:"); add_string b " "; add_string b url; add_newline b;
add_newline b;
add_sentence b (s_ "You are allowed to vote several times.");
add_sentence b (s_ "Only the last vote counts.");
contact_footer l metadata b;
contents b
let format_credential_email (x : credential_email) =
let url = get_election_home_url x.uuid in
let* bodies =
Lwt_list.map_s
(fun lang ->
let* l = Web_i18n.get ~component:"voter" ~lang in
return (mail_credential l x.has_passwords x.title ~login:x.login x.credential x.weight url x.contact)
) x.langs
in
let body = String.concat "\n\n----------\n\n" bodies in
let body = body ^ "\n\n-- \nBelenios" in
let* subject =
let* l = Web_i18n.get ~component:"voter" ~lang:(List.hd x.langs) in
let open (val l) in
Printf.ksprintf return (f_ "Your credential for election %s") x.title
in
return (subject, body)
let generate_credential_email uuid se =
let title = se.se_questions.t_name in
let show_weight = has_explicit_weights se.se_voters in
let has_passwords =
match se.se_metadata.e_auth_config with
| Some [{auth_system = "password"; _}] -> true
| _ -> false
in
let langs = get_languages se.se_metadata.e_languages in
fun ~recipient ~login ~weight ~credential ->
let oweight = if show_weight then Some weight else None in
let x : credential_email = {
uuid;
title;
login;
credential;
weight = oweight;
contact = se.se_metadata.e_contact;
langs;
has_passwords;
recipient;
}
in
Lwt.return @@ `Credential x
let send_bulk_email = function
| `Password x ->
let* subject, body = format_password_email x in
send_email (MailPassword x.uuid) ~recipient:x.recipient ~subject ~body
| `Credential x ->
let* subject, body = format_credential_email x in
send_email (MailCredential x.uuid) ~recipient:x.recipient ~subject ~body
module Bulk_processor = struct
type t = {
mutable locked : bool;
mutable queue : bulk_emails option;
submitters : unit Lwt.u Queue.t;
processors : unit Lwt.u Queue.t;
}
let create () = {
locked = false;
queue = None;
submitters = Queue.create ();
processors = Queue.create ();
}
let lock ~is_submitter m =
if m.locked then (
let q = if is_submitter then m.submitters else m.processors in
let t, u = Lwt.wait () in
Queue.push u q;
t
) else (
m.locked <- true;
Lwt.return_unit
)
let unlock m =
if m.locked then (
match Queue.take_opt m.submitters with
| None ->
begin
match Queue.take_opt m.processors with
| None -> m.locked <- false
| Some u -> Lwt.wakeup_later u ()
end
| Some u -> Lwt.wakeup_later u ()
)
let with_lock ~is_submitter m f =
let* () = lock ~is_submitter m in
Lwt.finalize f (fun () -> unlock m; Lwt.return_unit)
end
module Ocsipersist_bulk = struct
module F = Ocsipersist.Functorial
module T = F.Table (struct let name = "belenios_bulk_emails" end) (F.Column.String) (F.Column.String)
module type SerializableInput = sig
type t
val name : string
val default : t
val of_string : string -> t
val to_string : t -> string
end
module type SerializableOutput = sig
type t
val get : unit -> t Lwt.t
val set : t -> unit Lwt.t
end
module MakeSerializable (I : SerializableInput) : SerializableOutput with type t := I.t = struct
let default = I.to_string I.default
let var = T.Variable.make ~name:I.name ~default
let get () =
let* x = T.Variable.get var in
Lwt.return (I.of_string x)
let set x =
T.Variable.set var (I.to_string x)
end
module PrimaryQueueInput = struct
type t = bulk_emails
let name = "primary_queue"
let default = [||]
let of_string = bulk_emails_of_string
let to_string x = string_of_bulk_emails x
end
module SecondaryQueueInput = struct
type t = bulk_emails
let name = "secondary_queue"
let default = [||]
let of_string = bulk_emails_of_string
let to_string x = string_of_bulk_emails x
end
module ProcessedInput = struct
type t = bulk_processed
let name = "processed"
let default = {mode = `Primary; processed = 0}
let of_string = bulk_processed_of_string
let to_string x = string_of_bulk_processed x
end
module PrimaryQueue = MakeSerializable (PrimaryQueueInput)
module SecondaryQueue = MakeSerializable (SecondaryQueueInput)
module Processed = MakeSerializable (ProcessedInput)
let m = Bulk_processor.create ()
let get_queue () =
let* p = Processed.get () in
match m.queue with
| Some x -> Lwt.return (p, x)
| None ->
let* x =
match p.mode with
| `Primary -> PrimaryQueue.get ()
| `Secondary -> SecondaryQueue.get ()
in
m.queue <- Some x;
Lwt.return (p, x)
let submit jobs =
let jobs = Array.of_list jobs in
let@ () = Bulk_processor.with_lock ~is_submitter:true m in
let* p, current = get_queue () in
let newset, newmode, oldset =
match p.mode with
| `Primary -> SecondaryQueue.set, `Secondary, PrimaryQueue.set
| `Secondary -> PrimaryQueue.set, `Primary, SecondaryQueue.set
in
let current = Array.sub current p.processed (Array.length current - p.processed) in
let newqueue = Array.append current jobs in
let* () = newset newqueue in
let* () = Processed.set {mode = newmode; processed = 0} in
m.queue <- Some newqueue;
let* () = oldset [||] in
Lwt.return_unit
let process_one () =
let@ () = Bulk_processor.with_lock ~is_submitter:false m in
let* p, current = get_queue () in
let i = p.processed in
if i < Array.length current then (
let* () = send_bulk_email current.(i) in
let* () = Processed.set {p with processed = i + 1} in
Lwt.return_true
) else (
Lwt.return_false
)
let rec process () =
let* continue = process_one () in
if continue then process () else submit []
end
let process_bulk_emails = Ocsipersist_bulk.process
let submit_bulk_emails jobs =
let* () = Ocsipersist_bulk.submit jobs in
Lwt.async process_bulk_emails;
Lwt.return_unit
let mail_confirmation l user title weight hash revote url1 url2 contact =
let open (val l : Belenios_ui.I18n.GETTEXT) in
let open Belenios_ui.Mail_formatter in
let b = create () in
add_sentence b (Printf.sprintf (f_ "Dear %s,") user); add_newline b;
add_newline b;
add_sentence b (s_ "Your vote for election"); add_newline b;
add_newline b;
add_string b " "; add_string b title; add_newline b;
add_newline b;
add_sentence b (s_ "has been recorded.");
(match weight with
| Some weight ->
add_sentence b (Printf.sprintf (f_ "Your weight is %s.") (Weight.to_string weight))
| None -> ()
);
add_sentence b (s_ "Your smart ballot tracker is"); add_newline b;
add_newline b;
add_string b " "; add_string b hash; add_newline b;
if revote then (
add_newline b;
add_sentence b (s_ "This vote replaces any previous vote.");
add_newline b;
);
add_newline b;
add_sentence b (s_ "You can check its presence in the ballot box, accessible at");
add_newline b;
add_string b " "; add_string b url1; add_newline b;
add_newline b;
add_sentence b (s_ "Results will be published on the election page");
add_newline b;
add_string b " "; add_string b url2;
contact_footer l contact b;
add_newline b;
add_newline b;
add_string b "-- "; add_newline b;
add_string b "Belenios";
contents b
| null | https://raw.githubusercontent.com/glondu/belenios/a1f9e4cc8c9aa823f3d0f9ba1e21b8c700cd5522/src/web/server/common/mails_voter.ml | ocaml | ************************************************************************
BELENIOS
This program is free software: you can redistribute it and/or modify
License, or (at your option) any later version, with the additional
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Affero General Public License for more details.
License along with this program. If not, see
</>.
************************************************************************ | Copyright © 2012 - 2023
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation , either version 3 of the
exemption that compiling , linking , and/or using OpenSSL is allowed .
You should have received a copy of the GNU Affero General Public
open Lwt
open Lwt.Syntax
open Belenios_core
open Serializable_j
open Common
open Web_serializable_j
open Web_common
let contact_footer l contact =
let open (val l : Belenios_ui.I18n.GETTEXT) in
match contact with
| None -> fun _ -> ()
| Some x ->
fun b ->
let open Belenios_ui.Mail_formatter in
add_newline b;
add_newline b;
add_sentence b (s_ "To get more information, please contact:");
add_newline b;
add_string b " ";
add_string b x
let mail_password l title login password weight url contact =
let open (val l : Belenios_ui.I18n.GETTEXT) in
let open Belenios_ui.Mail_formatter in
let b = create () in
add_sentence b (s_ "Please find below your login and password for the election"); add_newline b;
add_newline b;
add_string b " "; add_string b title; add_newline b;
add_newline b;
add_sentence b (s_ "Note that you also need a credential, sent in a separate email, to start voting.");
add_newline b;
add_newline b;
add_string b (s_ "Username:"); add_string b " "; add_string b login; add_newline b;
add_string b (s_ "Password:"); add_string b " "; add_string b password; add_newline b;
add_newline b;
(match weight with
| Some weight ->
add_string b (s_ "Number of votes:"); add_string b " "; add_string b (Weight.to_string weight); add_newline b
| None -> ()
);
add_string b (s_ "Page of the election:"); add_string b " "; add_string b url; add_newline b;
add_newline b;
add_sentence b (s_ "You are allowed to vote several times.");
add_sentence b (s_ "Only the last vote counts.");
contact_footer l contact b;
contents b
let format_password_email (x : password_email) =
let url = get_election_home_url x.uuid in
let* bodies =
Lwt_list.map_s (fun lang ->
let* l = Web_i18n.get ~component:"voter" ~lang in
return (mail_password l x.title x.login x.password x.weight url x.contact)
) x.langs
in
let body = String.concat "\n\n----------\n\n" bodies in
let body = body ^ "\n\n-- \nBelenios" in
let* subject =
let* l = Web_i18n.get ~component:"voter" ~lang:(List.hd x.langs) in
let open (val l) in
Printf.kprintf return (f_ "Your password for election %s") x.title
in
Lwt.return (subject, body)
open Belenios_platform.Platform
let generate_password_email metadata langs title uuid v show_weight =
let (_, {address; login; weight}) : Voter.t = v in
let weight = if show_weight then weight else None in
let salt = generate_token () in
let* password =
let x = generate_token ~length:15 () in
return (format_password x)
in
let hashed = sha256_hex (salt ^ password) in
let x : password_email = {
uuid;
title;
login = Option.value login ~default:address;
password;
weight;
contact = metadata.e_contact;
langs;
recipient = address;
}
in
return (`Password x, (salt, hashed))
let mail_credential l has_passwords title ~login cred weight url metadata =
let open (val l : Belenios_ui.I18n.GETTEXT) in
let open Belenios_ui.Mail_formatter in
let b = create () in
add_sentence b (s_ "You are listed as a voter for the election"); add_newline b;
add_newline b;
add_string b " "; add_string b title; add_newline b;
add_newline b;
add_sentence b (s_ "You will find below your credential.");
add_sentence b (s_ "You will be asked to enter your credential before entering the voting booth.");
if has_passwords then (
add_sentence b (s_ "To cast a vote, you will also need a password, sent in a separate email.");
);
add_newline b;
add_newline b;
add_string b (s_ "Credential:"); add_string b " "; add_string b cred; add_newline b;
add_newline b;
add_string b (s_ "Username:"); add_string b " "; add_string b login; add_newline b;
(match weight with
| Some weight ->
add_string b (s_ "Number of votes:"); add_string b " "; add_string b (Weight.to_string weight); add_newline b
| None -> ()
);
add_string b (s_ "Page of the election:"); add_string b " "; add_string b url; add_newline b;
add_newline b;
add_sentence b (s_ "You are allowed to vote several times.");
add_sentence b (s_ "Only the last vote counts.");
contact_footer l metadata b;
contents b
let format_credential_email (x : credential_email) =
let url = get_election_home_url x.uuid in
let* bodies =
Lwt_list.map_s
(fun lang ->
let* l = Web_i18n.get ~component:"voter" ~lang in
return (mail_credential l x.has_passwords x.title ~login:x.login x.credential x.weight url x.contact)
) x.langs
in
let body = String.concat "\n\n----------\n\n" bodies in
let body = body ^ "\n\n-- \nBelenios" in
let* subject =
let* l = Web_i18n.get ~component:"voter" ~lang:(List.hd x.langs) in
let open (val l) in
Printf.ksprintf return (f_ "Your credential for election %s") x.title
in
return (subject, body)
let generate_credential_email uuid se =
let title = se.se_questions.t_name in
let show_weight = has_explicit_weights se.se_voters in
let has_passwords =
match se.se_metadata.e_auth_config with
| Some [{auth_system = "password"; _}] -> true
| _ -> false
in
let langs = get_languages se.se_metadata.e_languages in
fun ~recipient ~login ~weight ~credential ->
let oweight = if show_weight then Some weight else None in
let x : credential_email = {
uuid;
title;
login;
credential;
weight = oweight;
contact = se.se_metadata.e_contact;
langs;
has_passwords;
recipient;
}
in
Lwt.return @@ `Credential x
let send_bulk_email = function
| `Password x ->
let* subject, body = format_password_email x in
send_email (MailPassword x.uuid) ~recipient:x.recipient ~subject ~body
| `Credential x ->
let* subject, body = format_credential_email x in
send_email (MailCredential x.uuid) ~recipient:x.recipient ~subject ~body
module Bulk_processor = struct
type t = {
mutable locked : bool;
mutable queue : bulk_emails option;
submitters : unit Lwt.u Queue.t;
processors : unit Lwt.u Queue.t;
}
let create () = {
locked = false;
queue = None;
submitters = Queue.create ();
processors = Queue.create ();
}
let lock ~is_submitter m =
if m.locked then (
let q = if is_submitter then m.submitters else m.processors in
let t, u = Lwt.wait () in
Queue.push u q;
t
) else (
m.locked <- true;
Lwt.return_unit
)
let unlock m =
if m.locked then (
match Queue.take_opt m.submitters with
| None ->
begin
match Queue.take_opt m.processors with
| None -> m.locked <- false
| Some u -> Lwt.wakeup_later u ()
end
| Some u -> Lwt.wakeup_later u ()
)
let with_lock ~is_submitter m f =
let* () = lock ~is_submitter m in
Lwt.finalize f (fun () -> unlock m; Lwt.return_unit)
end
module Ocsipersist_bulk = struct
module F = Ocsipersist.Functorial
module T = F.Table (struct let name = "belenios_bulk_emails" end) (F.Column.String) (F.Column.String)
module type SerializableInput = sig
type t
val name : string
val default : t
val of_string : string -> t
val to_string : t -> string
end
module type SerializableOutput = sig
type t
val get : unit -> t Lwt.t
val set : t -> unit Lwt.t
end
module MakeSerializable (I : SerializableInput) : SerializableOutput with type t := I.t = struct
let default = I.to_string I.default
let var = T.Variable.make ~name:I.name ~default
let get () =
let* x = T.Variable.get var in
Lwt.return (I.of_string x)
let set x =
T.Variable.set var (I.to_string x)
end
module PrimaryQueueInput = struct
type t = bulk_emails
let name = "primary_queue"
let default = [||]
let of_string = bulk_emails_of_string
let to_string x = string_of_bulk_emails x
end
module SecondaryQueueInput = struct
type t = bulk_emails
let name = "secondary_queue"
let default = [||]
let of_string = bulk_emails_of_string
let to_string x = string_of_bulk_emails x
end
module ProcessedInput = struct
type t = bulk_processed
let name = "processed"
let default = {mode = `Primary; processed = 0}
let of_string = bulk_processed_of_string
let to_string x = string_of_bulk_processed x
end
module PrimaryQueue = MakeSerializable (PrimaryQueueInput)
module SecondaryQueue = MakeSerializable (SecondaryQueueInput)
module Processed = MakeSerializable (ProcessedInput)
let m = Bulk_processor.create ()
let get_queue () =
let* p = Processed.get () in
match m.queue with
| Some x -> Lwt.return (p, x)
| None ->
let* x =
match p.mode with
| `Primary -> PrimaryQueue.get ()
| `Secondary -> SecondaryQueue.get ()
in
m.queue <- Some x;
Lwt.return (p, x)
let submit jobs =
let jobs = Array.of_list jobs in
let@ () = Bulk_processor.with_lock ~is_submitter:true m in
let* p, current = get_queue () in
let newset, newmode, oldset =
match p.mode with
| `Primary -> SecondaryQueue.set, `Secondary, PrimaryQueue.set
| `Secondary -> PrimaryQueue.set, `Primary, SecondaryQueue.set
in
let current = Array.sub current p.processed (Array.length current - p.processed) in
let newqueue = Array.append current jobs in
let* () = newset newqueue in
let* () = Processed.set {mode = newmode; processed = 0} in
m.queue <- Some newqueue;
let* () = oldset [||] in
Lwt.return_unit
let process_one () =
let@ () = Bulk_processor.with_lock ~is_submitter:false m in
let* p, current = get_queue () in
let i = p.processed in
if i < Array.length current then (
let* () = send_bulk_email current.(i) in
let* () = Processed.set {p with processed = i + 1} in
Lwt.return_true
) else (
Lwt.return_false
)
let rec process () =
let* continue = process_one () in
if continue then process () else submit []
end
let process_bulk_emails = Ocsipersist_bulk.process
let submit_bulk_emails jobs =
let* () = Ocsipersist_bulk.submit jobs in
Lwt.async process_bulk_emails;
Lwt.return_unit
let mail_confirmation l user title weight hash revote url1 url2 contact =
let open (val l : Belenios_ui.I18n.GETTEXT) in
let open Belenios_ui.Mail_formatter in
let b = create () in
add_sentence b (Printf.sprintf (f_ "Dear %s,") user); add_newline b;
add_newline b;
add_sentence b (s_ "Your vote for election"); add_newline b;
add_newline b;
add_string b " "; add_string b title; add_newline b;
add_newline b;
add_sentence b (s_ "has been recorded.");
(match weight with
| Some weight ->
add_sentence b (Printf.sprintf (f_ "Your weight is %s.") (Weight.to_string weight))
| None -> ()
);
add_sentence b (s_ "Your smart ballot tracker is"); add_newline b;
add_newline b;
add_string b " "; add_string b hash; add_newline b;
if revote then (
add_newline b;
add_sentence b (s_ "This vote replaces any previous vote.");
add_newline b;
);
add_newline b;
add_sentence b (s_ "You can check its presence in the ballot box, accessible at");
add_newline b;
add_string b " "; add_string b url1; add_newline b;
add_newline b;
add_sentence b (s_ "Results will be published on the election page");
add_newline b;
add_string b " "; add_string b url2;
contact_footer l contact b;
add_newline b;
add_newline b;
add_string b "-- "; add_newline b;
add_string b "Belenios";
contents b
|
50695b07736aeff7ddc48391e0ad74583f88e863ed01af0f1659d626c0e53946 | LaurentMazare/ocaml-minipy | bc_code.ml | open Base
open Import
module Opcode = struct
type t =
| POP_TOP
| ROT_TWO
| ROT_THREE
| DUP_TOP
| DUP_TOP_TWO
| NOP
| UNARY_POSITIVE
| UNARY_NEGATIVE
| UNARY_NOT
| UNARY_INVERT
| BINARY_MATRIX_MULTIPLY
| INPLACE_MATRIX_MULTIPLY
| BINARY_POWER
| BINARY_MULTIPLY
| BINARY_MODULO
| BINARY_ADD
| BINARY_SUBTRACT
| BINARY_SUBSCR
| BINARY_FLOOR_DIVIDE
| BINARY_TRUE_DIVIDE
| INPLACE_FLOOR_DIVIDE
| INPLACE_TRUE_DIVIDE
| GET_AITER
| GET_ANEXT
| BEFORE_ASYNC_WITH
| INPLACE_ADD
| INPLACE_SUBTRACT
| INPLACE_MULTIPLY
| INPLACE_MODULO
| STORE_SUBSCR
| DELETE_SUBSCR
| BINARY_LSHIFT
| BINARY_RSHIFT
| BINARY_AND
| BINARY_XOR
| BINARY_OR
| INPLACE_POWER
| GET_ITER
| GET_YIELD_FROM_ITER
| PRINT_EXPR
| LOAD_BUILD_CLASS
| YIELD_FROM
| GET_AWAITABLE
| INPLACE_LSHIFT
| INPLACE_RSHIFT
| INPLACE_AND
| INPLACE_XOR
| INPLACE_OR
| BREAK_LOOP
| WITH_CLEANUP_START
| WITH_CLEANUP_FINISH
| RETURN_VALUE
| IMPORT_STAR
| SETUP_ANNOTATIONS
| YIELD_VALUE
| POP_BLOCK
| END_FINALLY
| POP_EXCEPT
| STORE_NAME
| DELETE_NAME
| UNPACK_SEQUENCE
| FOR_ITER
| UNPACK_EX
| STORE_ATTR
| DELETE_ATTR
| STORE_GLOBAL
| DELETE_GLOBAL
| LOAD_CONST
| LOAD_NAME
| BUILD_TUPLE
| BUILD_LIST
| BUILD_SET
| BUILD_MAP
| LOAD_ATTR
| COMPARE_OP
| IMPORT_NAME
| IMPORT_FROM
| JUMP_FORWARD
| JUMP_IF_FALSE_OR_POP
| JUMP_IF_TRUE_OR_POP
| JUMP_ABSOLUTE
| POP_JUMP_IF_FALSE
| POP_JUMP_IF_TRUE
| LOAD_GLOBAL
| CONTINUE_LOOP
| SETUP_LOOP
| SETUP_EXCEPT
| SETUP_FINALLY
| LOAD_FAST
| STORE_FAST
| DELETE_FAST
| RAISE_VARARGS
| CALL_FUNCTION
| MAKE_FUNCTION
| BUILD_SLICE
| LOAD_CLOSURE
| LOAD_DEREF
| STORE_DEREF
| DELETE_DEREF
| CALL_FUNCTION_KW
| CALL_FUNCTION_EX
| SETUP_WITH
| EXTENDED_ARG
| LIST_APPEND
| SET_ADD
| MAP_ADD
| LOAD_CLASSDEREF
| BUILD_LIST_UNPACK
| BUILD_MAP_UNPACK
| BUILD_MAP_UNPACK_WITH_CALL
| BUILD_TUPLE_UNPACK
| BUILD_SET_UNPACK
| SETUP_ASYNC_WITH
| FORMAT_VALUE
| BUILD_CONST_KEY_MAP
| BUILD_STRING
| BUILD_TUPLE_UNPACK_WITH_CALL
| LOAD_METHOD
| CALL_METHOD
borrowed from RustPython
[@@deriving sexp]
let of_int opcode =
match opcode with
| 1 -> POP_TOP
| 2 -> ROT_TWO
| 3 -> ROT_THREE
| 4 -> DUP_TOP
| 5 -> DUP_TOP_TWO
| 9 -> NOP
| 10 -> UNARY_POSITIVE
| 11 -> UNARY_NEGATIVE
| 12 -> UNARY_NOT
| 15 -> UNARY_INVERT
| 16 -> BINARY_MATRIX_MULTIPLY
| 17 -> INPLACE_MATRIX_MULTIPLY
| 19 -> BINARY_POWER
| 20 -> BINARY_MULTIPLY
| 22 -> BINARY_MODULO
| 23 -> BINARY_ADD
| 24 -> BINARY_SUBTRACT
| 25 -> BINARY_SUBSCR
| 26 -> BINARY_FLOOR_DIVIDE
| 27 -> BINARY_TRUE_DIVIDE
| 28 -> INPLACE_FLOOR_DIVIDE
| 29 -> INPLACE_TRUE_DIVIDE
| 50 -> GET_AITER
| 51 -> GET_ANEXT
| 52 -> BEFORE_ASYNC_WITH
| 55 -> INPLACE_ADD
| 56 -> INPLACE_SUBTRACT
| 57 -> INPLACE_MULTIPLY
| 59 -> INPLACE_MODULO
| 60 -> STORE_SUBSCR
| 61 -> DELETE_SUBSCR
| 62 -> BINARY_LSHIFT
| 63 -> BINARY_RSHIFT
| 64 -> BINARY_AND
| 65 -> BINARY_XOR
| 66 -> BINARY_OR
| 67 -> INPLACE_POWER
| 68 -> GET_ITER
| 69 -> GET_YIELD_FROM_ITER
| 70 -> PRINT_EXPR
| 71 -> LOAD_BUILD_CLASS
| 72 -> YIELD_FROM
| 73 -> GET_AWAITABLE
| 75 -> INPLACE_LSHIFT
| 76 -> INPLACE_RSHIFT
| 77 -> INPLACE_AND
| 78 -> INPLACE_XOR
| 79 -> INPLACE_OR
| 80 -> BREAK_LOOP
| 81 -> WITH_CLEANUP_START
| 82 -> WITH_CLEANUP_FINISH
| 83 -> RETURN_VALUE
| 84 -> IMPORT_STAR
| 85 -> SETUP_ANNOTATIONS
| 86 -> YIELD_VALUE
| 87 -> POP_BLOCK
| 88 -> END_FINALLY
| 89 -> POP_EXCEPT
| 90 -> STORE_NAME
| 91 -> DELETE_NAME
| 92 -> UNPACK_SEQUENCE
| 93 -> FOR_ITER
| 94 -> UNPACK_EX
| 95 -> STORE_ATTR
| 96 -> DELETE_ATTR
| 97 -> STORE_GLOBAL
| 98 -> DELETE_GLOBAL
| 100 -> LOAD_CONST
| 101 -> LOAD_NAME
| 102 -> BUILD_TUPLE
| 103 -> BUILD_LIST
| 104 -> BUILD_SET
| 105 -> BUILD_MAP
| 106 -> LOAD_ATTR
| 107 -> COMPARE_OP
| 108 -> IMPORT_NAME
| 109 -> IMPORT_FROM
| 110 -> JUMP_FORWARD
| 111 -> JUMP_IF_FALSE_OR_POP
| 112 -> JUMP_IF_TRUE_OR_POP
| 113 -> JUMP_ABSOLUTE
| 114 -> POP_JUMP_IF_FALSE
| 115 -> POP_JUMP_IF_TRUE
| 116 -> LOAD_GLOBAL
| 119 -> CONTINUE_LOOP
| 120 -> SETUP_LOOP
| 121 -> SETUP_EXCEPT
| 122 -> SETUP_FINALLY
| 124 -> LOAD_FAST
| 125 -> STORE_FAST
| 126 -> DELETE_FAST
| 130 -> RAISE_VARARGS
| 131 -> CALL_FUNCTION
| 132 -> MAKE_FUNCTION
| 133 -> BUILD_SLICE
| 135 -> LOAD_CLOSURE
| 136 -> LOAD_DEREF
| 137 -> STORE_DEREF
| 138 -> DELETE_DEREF
| 141 -> CALL_FUNCTION_KW
| 142 -> CALL_FUNCTION_EX
| 143 -> SETUP_WITH
| 144 -> EXTENDED_ARG
| 145 -> LIST_APPEND
| 146 -> SET_ADD
| 147 -> MAP_ADD
| 148 -> LOAD_CLASSDEREF
| 149 -> BUILD_LIST_UNPACK
| 150 -> BUILD_MAP_UNPACK
| 151 -> BUILD_MAP_UNPACK_WITH_CALL
| 152 -> BUILD_TUPLE_UNPACK
| 153 -> BUILD_SET_UNPACK
| 154 -> SETUP_ASYNC_WITH
| 155 -> FORMAT_VALUE
| 156 -> BUILD_CONST_KEY_MAP
| 157 -> BUILD_STRING
| 158 -> BUILD_TUPLE_UNPACK_WITH_CALL
| 160 -> LOAD_METHOD
| 161 -> CALL_METHOD
| 250 -> ENTER_FINALLY
| i -> Printf.failwithf "unknown opcode %d" i ()
end
type opcode_with_arg =
{ opcode : Opcode.t
; arg : int
; lineno : int
}
[@@deriving sexp]
type 'const t =
{ opcodes : opcode_with_arg array
; consts : 'const array
; varnames : string array
; names : string array
; filename : string
}
[@@deriving sexp]
let cmpop_of_int = function
| 0 -> Ast.Lt
| 1 -> LtE
| 2 -> Eq
| 3 -> NotEq
| 4 -> Gt
| 5 -> GtE
| 6 -> In
| 7 -> NotIn
| 8 -> Is
| 9 -> IsNot
| id -> errorf "unknown comparison id %d" id
let int_of_cmpop = function
| Ast.Lt -> 0
| LtE -> 1
| Eq -> 2
| NotEq -> 3
| Gt -> 4
| GtE -> 5
| In -> 6
| NotIn -> 7
| Is -> 8
| IsNot -> 9
| null | https://raw.githubusercontent.com/LaurentMazare/ocaml-minipy/e83d4bfad55819a27195109d401437faa0f65f69/src/bc_code.ml | ocaml | open Base
open Import
module Opcode = struct
type t =
| POP_TOP
| ROT_TWO
| ROT_THREE
| DUP_TOP
| DUP_TOP_TWO
| NOP
| UNARY_POSITIVE
| UNARY_NEGATIVE
| UNARY_NOT
| UNARY_INVERT
| BINARY_MATRIX_MULTIPLY
| INPLACE_MATRIX_MULTIPLY
| BINARY_POWER
| BINARY_MULTIPLY
| BINARY_MODULO
| BINARY_ADD
| BINARY_SUBTRACT
| BINARY_SUBSCR
| BINARY_FLOOR_DIVIDE
| BINARY_TRUE_DIVIDE
| INPLACE_FLOOR_DIVIDE
| INPLACE_TRUE_DIVIDE
| GET_AITER
| GET_ANEXT
| BEFORE_ASYNC_WITH
| INPLACE_ADD
| INPLACE_SUBTRACT
| INPLACE_MULTIPLY
| INPLACE_MODULO
| STORE_SUBSCR
| DELETE_SUBSCR
| BINARY_LSHIFT
| BINARY_RSHIFT
| BINARY_AND
| BINARY_XOR
| BINARY_OR
| INPLACE_POWER
| GET_ITER
| GET_YIELD_FROM_ITER
| PRINT_EXPR
| LOAD_BUILD_CLASS
| YIELD_FROM
| GET_AWAITABLE
| INPLACE_LSHIFT
| INPLACE_RSHIFT
| INPLACE_AND
| INPLACE_XOR
| INPLACE_OR
| BREAK_LOOP
| WITH_CLEANUP_START
| WITH_CLEANUP_FINISH
| RETURN_VALUE
| IMPORT_STAR
| SETUP_ANNOTATIONS
| YIELD_VALUE
| POP_BLOCK
| END_FINALLY
| POP_EXCEPT
| STORE_NAME
| DELETE_NAME
| UNPACK_SEQUENCE
| FOR_ITER
| UNPACK_EX
| STORE_ATTR
| DELETE_ATTR
| STORE_GLOBAL
| DELETE_GLOBAL
| LOAD_CONST
| LOAD_NAME
| BUILD_TUPLE
| BUILD_LIST
| BUILD_SET
| BUILD_MAP
| LOAD_ATTR
| COMPARE_OP
| IMPORT_NAME
| IMPORT_FROM
| JUMP_FORWARD
| JUMP_IF_FALSE_OR_POP
| JUMP_IF_TRUE_OR_POP
| JUMP_ABSOLUTE
| POP_JUMP_IF_FALSE
| POP_JUMP_IF_TRUE
| LOAD_GLOBAL
| CONTINUE_LOOP
| SETUP_LOOP
| SETUP_EXCEPT
| SETUP_FINALLY
| LOAD_FAST
| STORE_FAST
| DELETE_FAST
| RAISE_VARARGS
| CALL_FUNCTION
| MAKE_FUNCTION
| BUILD_SLICE
| LOAD_CLOSURE
| LOAD_DEREF
| STORE_DEREF
| DELETE_DEREF
| CALL_FUNCTION_KW
| CALL_FUNCTION_EX
| SETUP_WITH
| EXTENDED_ARG
| LIST_APPEND
| SET_ADD
| MAP_ADD
| LOAD_CLASSDEREF
| BUILD_LIST_UNPACK
| BUILD_MAP_UNPACK
| BUILD_MAP_UNPACK_WITH_CALL
| BUILD_TUPLE_UNPACK
| BUILD_SET_UNPACK
| SETUP_ASYNC_WITH
| FORMAT_VALUE
| BUILD_CONST_KEY_MAP
| BUILD_STRING
| BUILD_TUPLE_UNPACK_WITH_CALL
| LOAD_METHOD
| CALL_METHOD
borrowed from RustPython
[@@deriving sexp]
let of_int opcode =
match opcode with
| 1 -> POP_TOP
| 2 -> ROT_TWO
| 3 -> ROT_THREE
| 4 -> DUP_TOP
| 5 -> DUP_TOP_TWO
| 9 -> NOP
| 10 -> UNARY_POSITIVE
| 11 -> UNARY_NEGATIVE
| 12 -> UNARY_NOT
| 15 -> UNARY_INVERT
| 16 -> BINARY_MATRIX_MULTIPLY
| 17 -> INPLACE_MATRIX_MULTIPLY
| 19 -> BINARY_POWER
| 20 -> BINARY_MULTIPLY
| 22 -> BINARY_MODULO
| 23 -> BINARY_ADD
| 24 -> BINARY_SUBTRACT
| 25 -> BINARY_SUBSCR
| 26 -> BINARY_FLOOR_DIVIDE
| 27 -> BINARY_TRUE_DIVIDE
| 28 -> INPLACE_FLOOR_DIVIDE
| 29 -> INPLACE_TRUE_DIVIDE
| 50 -> GET_AITER
| 51 -> GET_ANEXT
| 52 -> BEFORE_ASYNC_WITH
| 55 -> INPLACE_ADD
| 56 -> INPLACE_SUBTRACT
| 57 -> INPLACE_MULTIPLY
| 59 -> INPLACE_MODULO
| 60 -> STORE_SUBSCR
| 61 -> DELETE_SUBSCR
| 62 -> BINARY_LSHIFT
| 63 -> BINARY_RSHIFT
| 64 -> BINARY_AND
| 65 -> BINARY_XOR
| 66 -> BINARY_OR
| 67 -> INPLACE_POWER
| 68 -> GET_ITER
| 69 -> GET_YIELD_FROM_ITER
| 70 -> PRINT_EXPR
| 71 -> LOAD_BUILD_CLASS
| 72 -> YIELD_FROM
| 73 -> GET_AWAITABLE
| 75 -> INPLACE_LSHIFT
| 76 -> INPLACE_RSHIFT
| 77 -> INPLACE_AND
| 78 -> INPLACE_XOR
| 79 -> INPLACE_OR
| 80 -> BREAK_LOOP
| 81 -> WITH_CLEANUP_START
| 82 -> WITH_CLEANUP_FINISH
| 83 -> RETURN_VALUE
| 84 -> IMPORT_STAR
| 85 -> SETUP_ANNOTATIONS
| 86 -> YIELD_VALUE
| 87 -> POP_BLOCK
| 88 -> END_FINALLY
| 89 -> POP_EXCEPT
| 90 -> STORE_NAME
| 91 -> DELETE_NAME
| 92 -> UNPACK_SEQUENCE
| 93 -> FOR_ITER
| 94 -> UNPACK_EX
| 95 -> STORE_ATTR
| 96 -> DELETE_ATTR
| 97 -> STORE_GLOBAL
| 98 -> DELETE_GLOBAL
| 100 -> LOAD_CONST
| 101 -> LOAD_NAME
| 102 -> BUILD_TUPLE
| 103 -> BUILD_LIST
| 104 -> BUILD_SET
| 105 -> BUILD_MAP
| 106 -> LOAD_ATTR
| 107 -> COMPARE_OP
| 108 -> IMPORT_NAME
| 109 -> IMPORT_FROM
| 110 -> JUMP_FORWARD
| 111 -> JUMP_IF_FALSE_OR_POP
| 112 -> JUMP_IF_TRUE_OR_POP
| 113 -> JUMP_ABSOLUTE
| 114 -> POP_JUMP_IF_FALSE
| 115 -> POP_JUMP_IF_TRUE
| 116 -> LOAD_GLOBAL
| 119 -> CONTINUE_LOOP
| 120 -> SETUP_LOOP
| 121 -> SETUP_EXCEPT
| 122 -> SETUP_FINALLY
| 124 -> LOAD_FAST
| 125 -> STORE_FAST
| 126 -> DELETE_FAST
| 130 -> RAISE_VARARGS
| 131 -> CALL_FUNCTION
| 132 -> MAKE_FUNCTION
| 133 -> BUILD_SLICE
| 135 -> LOAD_CLOSURE
| 136 -> LOAD_DEREF
| 137 -> STORE_DEREF
| 138 -> DELETE_DEREF
| 141 -> CALL_FUNCTION_KW
| 142 -> CALL_FUNCTION_EX
| 143 -> SETUP_WITH
| 144 -> EXTENDED_ARG
| 145 -> LIST_APPEND
| 146 -> SET_ADD
| 147 -> MAP_ADD
| 148 -> LOAD_CLASSDEREF
| 149 -> BUILD_LIST_UNPACK
| 150 -> BUILD_MAP_UNPACK
| 151 -> BUILD_MAP_UNPACK_WITH_CALL
| 152 -> BUILD_TUPLE_UNPACK
| 153 -> BUILD_SET_UNPACK
| 154 -> SETUP_ASYNC_WITH
| 155 -> FORMAT_VALUE
| 156 -> BUILD_CONST_KEY_MAP
| 157 -> BUILD_STRING
| 158 -> BUILD_TUPLE_UNPACK_WITH_CALL
| 160 -> LOAD_METHOD
| 161 -> CALL_METHOD
| 250 -> ENTER_FINALLY
| i -> Printf.failwithf "unknown opcode %d" i ()
end
type opcode_with_arg =
{ opcode : Opcode.t
; arg : int
; lineno : int
}
[@@deriving sexp]
type 'const t =
{ opcodes : opcode_with_arg array
; consts : 'const array
; varnames : string array
; names : string array
; filename : string
}
[@@deriving sexp]
let cmpop_of_int = function
| 0 -> Ast.Lt
| 1 -> LtE
| 2 -> Eq
| 3 -> NotEq
| 4 -> Gt
| 5 -> GtE
| 6 -> In
| 7 -> NotIn
| 8 -> Is
| 9 -> IsNot
| id -> errorf "unknown comparison id %d" id
let int_of_cmpop = function
| Ast.Lt -> 0
| LtE -> 1
| Eq -> 2
| NotEq -> 3
| Gt -> 4
| GtE -> 5
| In -> 6
| NotIn -> 7
| Is -> 8
| IsNot -> 9
| |
64597c47e99d31d59cc8544c9d086fedf5e98327fea1883e3af81a7495b4e56f | polysemy-research/polysemy | Common.hs | {-# LANGUAGE CPP #-}
# LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
{-# LANGUAGE ViewPatterns #-}
# OPTIONS_HADDOCK not - home , prune #
-- | Description: TH utilities for generating effect constructors
module Polysemy.Internal.TH.Common
( ConLiftInfo (..)
, getEffectMetadata
, makeMemberConstraint
, makeMemberConstraint'
, makeSemType
, makeInterpreterType
, makeEffectType
, makeUnambiguousSend
, checkExtensions
, foldArrowTs
, splitArrowTs
, pattern (:->)
) where
import Control.Arrow ((>>>))
import Control.Monad
import Data.Bifunctor
import Data.Char (toLower)
import Data.Generics hiding (Fixity)
import Data.List
import qualified Data.Map.Strict as M
import Data.Tuple
import Language.Haskell.TH
import Language.Haskell.TH.Datatype
import Language.Haskell.TH.PprLib
import Polysemy.Internal (Sem, send)
import Polysemy.Internal.Union (Member)
#if __GLASGOW_HASKELL__ >= 804
import Prelude hiding ((<>))
#endif
------------------------------------------------------------------------------
-- Effects TH ----------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Info about constructor being lifted ; use ' ' to create one .
data ConLiftInfo = CLInfo
{ -- | Name of effect's type constructor
cliEffName :: Name
, -- | Effect-specific type arguments
cliEffArgs :: [Type]
, -- | Result type specific to action
cliEffRes :: Type
, -- | Name of action constructor
cliConName :: Name
, -- | Name of final function
cliFunName :: Name
, -- | Fixity of function used as an operator
cliFunFixity :: Maybe Fixity
, -- | Final function arguments
cliFunArgs :: [(Name, Type)]
, -- | Constraints of final function
cliFunCxt :: Cxt
, -- | Name of type variable parameterizing 'Sem'
cliUnionName :: Name
} deriving Show
------------------------------------------------------------------------------
-- | Given an name of datatype or some of it's constructors/fields, return
-- datatype's name together with info about it's constructors.
getEffectMetadata :: Name -> Q [ConLiftInfo]
getEffectMetadata type_name = do
dt_info <- reifyDatatype type_name
cl_infos <- traverse makeCLInfo $ constructorName <$> datatypeCons dt_info
pure cl_infos
------------------------------------------------------------------------------
-- | Creates name of lifting function from action name.
liftFunNameFromCon :: Name -> Name
liftFunNameFromCon n = mkName $
case nameBase n of
':' : cs -> cs
c : cs -> toLower c : cs
"" -> error "liftFunNameFromCon: empty constructor name"
------------------------------------------------------------------------------
-- | Creates info about smart constructor being created from name of the
-- original one.
makeCLInfo :: Name -> Q ConLiftInfo
makeCLInfo cliConName = do
(con_type, cliEffName) <- reify cliConName >>= \case
DataConI _ t p -> pure (t, p)
_ -> notDataCon cliConName
let (con_args, [con_return_type]) = splitAtEnd 1
$ splitArrowTs con_type
(ty_con_args, [monad_arg, res_arg]) <-
case splitAtEnd 2 $ tail $ splitAppTs $ con_return_type of
r@(_, [_, _]) -> pure r
_ -> missingEffArgs cliEffName
monad_name <- maybe (argNotVar cliEffName monad_arg)
pure
(tVarName monad_arg)
cliUnionName <- newName "r"
let normalize_types :: (TypeSubstitution t, Data t) => t -> t
normalize_types = replaceMArg monad_name cliUnionName
. simplifyKinds
cliEffArgs = normalize_types ty_con_args
cliEffRes = normalize_types res_arg
cliFunName = liftFunNameFromCon cliConName
cliFunFixity <- reifyFixity cliConName
fun_arg_names <- replicateM (length con_args) $ newName "x"
let cliFunArgs = zip fun_arg_names $ normalize_types con_args
GADTs seem to forbid constraints further in signature , so top level
-- ones should be fine.
cliFunCxt = topLevelConstraints con_type
pure CLInfo{..}
------------------------------------------------------------------------------
-- | Given a 'ConLiftInfo', get the corresponding effect type.
makeEffectType :: ConLiftInfo -> Type
makeEffectType cli = foldl' AppT (ConT $ cliEffName cli) $ cliEffArgs cli
------------------------------------------------------------------------------
-- | @'makeInterpreterType' con r a@ will produce a @'Polysemy.Sem' (Effect ':
r ) a - > ' Polysemy . Sem ' r a@ type , where @Effect@ is the effect
corresponding to the ' ConLiftInfo ' for
makeInterpreterType :: ConLiftInfo -> Name -> Type -> Type
makeInterpreterType cli r result = sem_with_eff :-> makeSemType r result where
sem_with_eff = ConT ''Sem `AppT` r_with_eff `AppT` result
r_with_eff = PromotedConsT `AppT` makeEffectType cli `AppT` VarT r
------------------------------------------------------------------------------
| Turn a ' ConLiftInfo ' for @Foo@ into a r@ constraint .
makeMemberConstraint :: Name -> ConLiftInfo -> Pred
makeMemberConstraint r cli = makeMemberConstraint' r $ makeEffectType cli
------------------------------------------------------------------------------
| @'makeMemberConstraint '' r type@ will produce a @Member type r@
-- constraint.
makeMemberConstraint' :: Name -> Type -> Pred
makeMemberConstraint' r eff = classPred ''Member [eff, VarT r]
------------------------------------------------------------------------------
-- | @'makeSemType' r a@ will produce a @'Polysemy.Sem' r a@ type.
makeSemType :: Name -> Type -> Type
makeSemType r result = ConT ''Sem `AppT` VarT r `AppT` result
------------------------------------------------------------------------------
-- | Given a 'ConLiftInfo', this will produce an action for it. It's arguments
-- will come from any variables in scope that correspond to the 'cliEffArgs'
-- of the 'ConLiftInfo'.
makeUnambiguousSend :: Bool -> ConLiftInfo -> Exp
makeUnambiguousSend should_make_sigs cli =
let fun_args_names = fst <$> cliFunArgs cli
action = foldl1' AppE
$ ConE (cliConName cli) : (VarE <$> fun_args_names)
eff = foldl' AppT (ConT $ cliEffName cli) $ args
see NOTE(makeSem _ )
args = (if should_make_sigs then id else map capturableTVars)
$ cliEffArgs cli ++ [sem, cliEffRes cli]
sem = ConT ''Sem `AppT` VarT (cliUnionName cli)
in AppE (VarE 'send) $ SigE action eff
-- Error messages and checks -------------------------------------------------
argNotVar :: Name -> Type -> Q a
argNotVar eff_name arg = fail $ show
$ text "Argument ‘" <> ppr arg <> text "’ in effect ‘" <> ppr eff_name
<> text "’ is not a type variable"
-- | Fail the 'Q' monad whenever the given 'Extension's aren't enabled in the
-- current module.
checkExtensions :: [Extension] -> Q ()
checkExtensions exts = do
states <- zip exts <$> traverse isExtEnabled exts
maybe (pure ())
(\(ext, _) -> fail $ show
$ char '‘' <> text (show ext) <> char '’'
<+> text "extension needs to be enabled for Polysemy's Template Haskell to work")
(find (not . snd) states)
missingEffArgs :: Name -> Q a
missingEffArgs name = fail $ show
$ text "Effect ‘" <> ppr name
<> text "’ has not enough type arguments"
$+$ nest 4
( text "At least monad and result argument are required, e.g.:"
$+$ nest 4
( text ""
$+$ ppr (DataD [] base args Nothing [] []) <+> text "..."
$+$ text ""
)
)
where
base = capturableBase name
#if MIN_VERSION_template_haskell(2,17,0)
args = flip PlainTV () . mkName <$> ["m", "a"]
#else
args = PlainTV . mkName <$> ["m", "a"]
#endif
notDataCon :: Name -> Q a
notDataCon name = fail $ show
$ char '‘' <> ppr name <> text "’ is not a data constructor"
------------------------------------------------------------------------------
-- TH utilities --------------------------------------------------------------
------------------------------------------------------------------------------
arrows :: Type -> Bool
arrows = \case
ArrowT -> True
#if MIN_VERSION_template_haskell(2,17,0)
AppT MulArrowT _ -> True
#endif
_ -> False
------------------------------------------------------------------------------
-- | Pattern constructing function type and matching on one that may contain
-- type annotations on arrow itself.
infixr 1 :->
pattern (:->) :: Type -> Type -> Type
pattern a :-> b <- (arrows . removeTyAnns -> True) `AppT` a `AppT` b where
a :-> b = ArrowT `AppT` a `AppT` b
------------------------------------------------------------------------------
-- | Constructs capturable name from base of input name.
capturableBase :: Name -> Name
capturableBase = mkName . nameBase
------------------------------------------------------------------------------
-- | Converts names of all type variables in type to capturable ones based on
-- original name base. Use with caution, may create name conflicts!
capturableTVars :: Type -> Type
capturableTVars = everywhere $ mkT $ \case
VarT n -> VarT $ capturableBase n
ForallT bs cs t -> ForallT (goBndr <$> bs) (capturableTVars <$> cs) t
where
#if MIN_VERSION_template_haskell(2,17,0)
goBndr (PlainTV n flag) = PlainTV (capturableBase n) flag
goBndr (KindedTV n flag k) = KindedTV (capturableBase n) flag $ capturableTVars k
#else
goBndr (PlainTV n ) = PlainTV $ capturableBase n
goBndr (KindedTV n k) = KindedTV (capturableBase n) $ capturableTVars k
#endif
t -> t
------------------------------------------------------------------------------
-- | Folds a list of 'Type's into a right-associative arrow 'Type'.
foldArrowTs :: Type -> [Type] -> Type
foldArrowTs = foldr (:->)
------------------------------------------------------------------------------
-- | Replaces use of @m@ in type with @Sem r@.
replaceMArg :: TypeSubstitution t => Name -> Name -> t -> t
replaceMArg m r = applySubstitution $ M.singleton m $ ConT ''Sem `AppT` VarT r
------------------------------------------------------------------------------
-- Removes 'Type' and variable kind signatures from type.
simplifyKinds :: Data t => t -> t
-- Bottom-up SYB traversal that drops kind annotations carrying no useful
-- information: signatures of kind 'StarT' (i.e. @*@ / @Type@) or of a bare
-- kind variable.  'ForallT' binders get the same treatment, turning kinded
-- binders into plain ones; other binders are left alone.  Constraints are
-- re-processed explicitly, which is redundant under the bottom-up traversal
-- but harmless since the rewrite is idempotent.
simplifyKinds = everywhere $ mkT $ \case
  SigT t StarT -> t
  SigT t VarT{} -> t
  ForallT bs cs t -> ForallT (goBndr <$> bs) (simplifyKinds <$> cs) t
    where
#if MIN_VERSION_template_haskell(2,17,0)
      -- template-haskell >= 2.17: binders carry a specificity/unit flag.
      goBndr (KindedTV n flag StarT) = PlainTV n flag
      goBndr (KindedTV n flag VarT{}) = PlainTV n flag
#else
      goBndr (KindedTV n StarT) = PlainTV n
      goBndr (KindedTV n VarT{}) = PlainTV n
#endif
      goBndr b = b
  t -> t
------------------------------------------------------------------------------
splitAppTs :: Type -> [Type]
-- Flatten a left-nested type application into head-first list form:
-- @splitAppTs (f \`AppT\` a \`AppT\` b) == [f, a, b]@.  Annotations and
-- foralls around each spine node are stripped before matching.
splitAppTs t = case removeTyAnns t of
  AppT fun arg -> splitAppTs fun ++ [arg]
  other        -> [other]
------------------------------------------------------------------------------
splitArrowTs :: Type -> [Type]
-- Split a function type into its argument types followed by the return
-- type: @splitArrowTs (a :-> b :-> r) == [a, b, r]@.  Annotations and
-- foralls are stripped at each step before matching the arrow.
splitArrowTs t = case removeTyAnns t of
  arg :-> rest -> arg : splitArrowTs rest
  final        -> [final]
------------------------------------------------------------------------------
-- | Extracts name from type variable (possibly nested in signature and/or
-- some context), returns 'Nothing' otherwise.
tVarName :: Type -> Maybe Name
-- Unwrap signatures/foralls/parens first, then check whether a bare type
-- variable remains.
tVarName t = case removeTyAnns t of
  VarT n -> Just n
  _      -> Nothing
------------------------------------------------------------------------------
-- | Returns the constraints of an outermost 'ForallT', or the empty context
-- for any other type.
topLevelConstraints :: Type -> Cxt
topLevelConstraints (ForallT _ cs _) = cs
topLevelConstraints _                = []
------------------------------------------------------------------------------
-- | Strips outer quantifiers, kind signatures and parentheses from a type,
-- repeating until none remain at the head.
removeTyAnns :: Type -> Type
removeTyAnns (ForallT _ _ t) = removeTyAnns t
removeTyAnns (SigT t _)      = removeTyAnns t
removeTyAnns (ParensT t)     = removeTyAnns t
removeTyAnns t               = t
------------------------------------------------------------------------------
-- Miscellaneous -------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
-- | 'splitAt' counting from the end.
splitAtEnd :: Int -> [a] -> ([a], [a])
-- Equivalent to reversing, splitting at @n@ and reversing back: the second
-- component holds the last @n@ elements (clamped to the list's bounds, as
-- 'splitAt' clamps its index).
splitAtEnd n xs = splitAt (length xs - n) xs
| null | https://raw.githubusercontent.com/polysemy-research/polysemy/10a1336f32438c2d308313f8ad55ac736145390f/src/Polysemy/Internal/TH/Common.hs | haskell | # LANGUAGE CPP #
# LANGUAGE ViewPatterns #
| Description: TH utilities for generating effect constructors
----------------------------------------------------------------------------
Effects TH ----------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| Name of effect's type constructor
| Effect-specific type arguments
| Result type specific to action
| Name of action constructor
| Name of final function
| Fixity of function used as an operator
| Final function arguments
| Constraints of final function
| Name of type variable parameterizing 'Sem'
----------------------------------------------------------------------------
| Given an name of datatype or some of it's constructors/fields, return
datatype's name together with info about it's constructors.
----------------------------------------------------------------------------
| Creates name of lifting function from action name.
----------------------------------------------------------------------------
| Creates info about smart constructor being created from name of the
original one.
ones should be fine.
----------------------------------------------------------------------------
| Given a 'ConLiftInfo', get the corresponding effect type.
----------------------------------------------------------------------------
| @'makeInterpreterType' con r a@ will produce a @'Polysemy.Sem' (Effect ':
----------------------------------------------------------------------------
----------------------------------------------------------------------------
constraint.
----------------------------------------------------------------------------
| @'makeSemType' r a@ will produce a @'Polysemy.Sem' r a@ type.
----------------------------------------------------------------------------
| Given a 'ConLiftInfo', this will produce an action for it. It's arguments
will come from any variables in scope that correspond to the 'cliEffArgs'
of the 'ConLiftInfo'.
Error messages and checks -------------------------------------------------
| Fail the 'Q' monad whenever the given 'Extension's aren't enabled in the
current module.
----------------------------------------------------------------------------
TH utilities --------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| Pattern constructing function type and matching on one that may contain
type annotations on arrow itself.
----------------------------------------------------------------------------
| Constructs capturable name from base of input name.
----------------------------------------------------------------------------
| Converts names of all type variables in type to capturable ones based on
original name base. Use with caution, may create name conflicts!
----------------------------------------------------------------------------
| Folds a list of 'Type's into a right-associative arrow 'Type'.
----------------------------------------------------------------------------
| Replaces use of @m@ in type with @Sem r@.
----------------------------------------------------------------------------
Removes 'Type' and variable kind signatures from type.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| Extracts name from type variable (possibly nested in signature and/or
some context), returns 'Nothing' otherwise.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
Miscellaneous -------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| 'splitAt' counting from the end. | # LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
# OPTIONS_HADDOCK not - home , prune #
module Polysemy.Internal.TH.Common
( ConLiftInfo (..)
, getEffectMetadata
, makeMemberConstraint
, makeMemberConstraint'
, makeSemType
, makeInterpreterType
, makeEffectType
, makeUnambiguousSend
, checkExtensions
, foldArrowTs
, splitArrowTs
, pattern (:->)
) where
import Control.Arrow ((>>>))
import Control.Monad
import Data.Bifunctor
import Data.Char (toLower)
import Data.Generics hiding (Fixity)
import Data.List
import qualified Data.Map.Strict as M
import Data.Tuple
import Language.Haskell.TH
import Language.Haskell.TH.Datatype
import Language.Haskell.TH.PprLib
import Polysemy.Internal (Sem, send)
import Polysemy.Internal.Union (Member)
#if __GLASGOW_HASKELL__ >= 804
import Prelude hiding ((<>))
#endif
| Info about constructor being lifted ; use ' ' to create one .
data ConLiftInfo = CLInfo
cliEffName :: Name
cliEffArgs :: [Type]
cliEffRes :: Type
cliConName :: Name
cliFunName :: Name
cliFunFixity :: Maybe Fixity
cliFunArgs :: [(Name, Type)]
cliFunCxt :: Cxt
cliUnionName :: Name
} deriving Show
getEffectMetadata :: Name -> Q [ConLiftInfo]
getEffectMetadata type_name = do
dt_info <- reifyDatatype type_name
cl_infos <- traverse makeCLInfo $ constructorName <$> datatypeCons dt_info
pure cl_infos
liftFunNameFromCon :: Name -> Name
liftFunNameFromCon n = mkName $
case nameBase n of
':' : cs -> cs
c : cs -> toLower c : cs
"" -> error "liftFunNameFromCon: empty constructor name"
makeCLInfo :: Name -> Q ConLiftInfo
makeCLInfo cliConName = do
(con_type, cliEffName) <- reify cliConName >>= \case
DataConI _ t p -> pure (t, p)
_ -> notDataCon cliConName
let (con_args, [con_return_type]) = splitAtEnd 1
$ splitArrowTs con_type
(ty_con_args, [monad_arg, res_arg]) <-
case splitAtEnd 2 $ tail $ splitAppTs $ con_return_type of
r@(_, [_, _]) -> pure r
_ -> missingEffArgs cliEffName
monad_name <- maybe (argNotVar cliEffName monad_arg)
pure
(tVarName monad_arg)
cliUnionName <- newName "r"
let normalize_types :: (TypeSubstitution t, Data t) => t -> t
normalize_types = replaceMArg monad_name cliUnionName
. simplifyKinds
cliEffArgs = normalize_types ty_con_args
cliEffRes = normalize_types res_arg
cliFunName = liftFunNameFromCon cliConName
cliFunFixity <- reifyFixity cliConName
fun_arg_names <- replicateM (length con_args) $ newName "x"
let cliFunArgs = zip fun_arg_names $ normalize_types con_args
GADTs seem to forbid constraints further in signature , so top level
cliFunCxt = topLevelConstraints con_type
pure CLInfo{..}
makeEffectType :: ConLiftInfo -> Type
makeEffectType cli = foldl' AppT (ConT $ cliEffName cli) $ cliEffArgs cli
r ) a - > ' Polysemy . Sem ' r a@ type , where @Effect@ is the effect
corresponding to the ' ConLiftInfo ' for
makeInterpreterType :: ConLiftInfo -> Name -> Type -> Type
makeInterpreterType cli r result = sem_with_eff :-> makeSemType r result where
sem_with_eff = ConT ''Sem `AppT` r_with_eff `AppT` result
r_with_eff = PromotedConsT `AppT` makeEffectType cli `AppT` VarT r
| Turn a ' ConLiftInfo ' for @Foo@ into a r@ constraint .
makeMemberConstraint :: Name -> ConLiftInfo -> Pred
makeMemberConstraint r cli = makeMemberConstraint' r $ makeEffectType cli
| @'makeMemberConstraint '' r type@ will produce a @Member type r@
makeMemberConstraint' :: Name -> Type -> Pred
makeMemberConstraint' r eff = classPred ''Member [eff, VarT r]
makeSemType :: Name -> Type -> Type
makeSemType r result = ConT ''Sem `AppT` VarT r `AppT` result
makeUnambiguousSend :: Bool -> ConLiftInfo -> Exp
makeUnambiguousSend should_make_sigs cli =
let fun_args_names = fst <$> cliFunArgs cli
action = foldl1' AppE
$ ConE (cliConName cli) : (VarE <$> fun_args_names)
eff = foldl' AppT (ConT $ cliEffName cli) $ args
see NOTE(makeSem _ )
args = (if should_make_sigs then id else map capturableTVars)
$ cliEffArgs cli ++ [sem, cliEffRes cli]
sem = ConT ''Sem `AppT` VarT (cliUnionName cli)
in AppE (VarE 'send) $ SigE action eff
argNotVar :: Name -> Type -> Q a
argNotVar eff_name arg = fail $ show
$ text "Argument ‘" <> ppr arg <> text "’ in effect ‘" <> ppr eff_name
<> text "’ is not a type variable"
checkExtensions :: [Extension] -> Q ()
checkExtensions exts = do
states <- zip exts <$> traverse isExtEnabled exts
maybe (pure ())
(\(ext, _) -> fail $ show
$ char '‘' <> text (show ext) <> char '’'
<+> text "extension needs to be enabled for Polysemy's Template Haskell to work")
(find (not . snd) states)
missingEffArgs :: Name -> Q a
missingEffArgs name = fail $ show
$ text "Effect ‘" <> ppr name
<> text "’ has not enough type arguments"
$+$ nest 4
( text "At least monad and result argument are required, e.g.:"
$+$ nest 4
( text ""
$+$ ppr (DataD [] base args Nothing [] []) <+> text "..."
$+$ text ""
)
)
where
base = capturableBase name
#if MIN_VERSION_template_haskell(2,17,0)
args = flip PlainTV () . mkName <$> ["m", "a"]
#else
args = PlainTV . mkName <$> ["m", "a"]
#endif
notDataCon :: Name -> Q a
notDataCon name = fail $ show
$ char '‘' <> ppr name <> text "’ is not a data constructor"
arrows :: Type -> Bool
arrows = \case
ArrowT -> True
#if MIN_VERSION_template_haskell(2,17,0)
AppT MulArrowT _ -> True
#endif
_ -> False
infixr 1 :->
pattern (:->) :: Type -> Type -> Type
pattern a :-> b <- (arrows . removeTyAnns -> True) `AppT` a `AppT` b where
a :-> b = ArrowT `AppT` a `AppT` b
capturableBase :: Name -> Name
capturableBase = mkName . nameBase
capturableTVars :: Type -> Type
capturableTVars = everywhere $ mkT $ \case
VarT n -> VarT $ capturableBase n
ForallT bs cs t -> ForallT (goBndr <$> bs) (capturableTVars <$> cs) t
where
#if MIN_VERSION_template_haskell(2,17,0)
goBndr (PlainTV n flag) = PlainTV (capturableBase n) flag
goBndr (KindedTV n flag k) = KindedTV (capturableBase n) flag $ capturableTVars k
#else
goBndr (PlainTV n ) = PlainTV $ capturableBase n
goBndr (KindedTV n k) = KindedTV (capturableBase n) $ capturableTVars k
#endif
t -> t
foldArrowTs :: Type -> [Type] -> Type
foldArrowTs = foldr (:->)
replaceMArg :: TypeSubstitution t => Name -> Name -> t -> t
replaceMArg m r = applySubstitution $ M.singleton m $ ConT ''Sem `AppT` VarT r
simplifyKinds :: Data t => t -> t
simplifyKinds = everywhere $ mkT $ \case
SigT t StarT -> t
SigT t VarT{} -> t
ForallT bs cs t -> ForallT (goBndr <$> bs) (simplifyKinds <$> cs) t
where
#if MIN_VERSION_template_haskell(2,17,0)
goBndr (KindedTV n flag StarT) = PlainTV n flag
goBndr (KindedTV n flag VarT{}) = PlainTV n flag
#else
goBndr (KindedTV n StarT) = PlainTV n
goBndr (KindedTV n VarT{}) = PlainTV n
#endif
goBndr b = b
t -> t
splitAppTs :: Type -> [Type]
splitAppTs = removeTyAnns >>> \case
t `AppT` arg -> splitAppTs t ++ [arg]
t -> [t]
splitArrowTs :: Type -> [Type]
splitArrowTs = removeTyAnns >>> \case
t :-> ts -> t : splitArrowTs ts
t -> [t]
tVarName :: Type -> Maybe Name
tVarName = removeTyAnns >>> \case
VarT n -> Just n
_ -> Nothing
topLevelConstraints :: Type -> Cxt
topLevelConstraints = \case
ForallT _ cs _ -> cs
_ -> []
removeTyAnns :: Type -> Type
removeTyAnns = \case
ForallT _ _ t -> removeTyAnns t
SigT t _ -> removeTyAnns t
ParensT t -> removeTyAnns t
t -> t
splitAtEnd :: Int -> [a] -> ([a], [a])
splitAtEnd n = swap . join bimap reverse . splitAt n . reverse
|
790f2f94c6b883be1f7c7d4925c38f4e18534cbf715fcaa68427ff8fdfead288 | zotonic/zotonic | zotonic_filehandler_mappers.erl | @author < >
2014 - 2017
%%
%% @doc Handle changed files
Copyright 2014 - 2017
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(zotonic_filehandler_mappers).
-author("Arjan Scherpenisse <>").
-export([
mappers/0
]).
%% Actions, do not use from other modules.
-export([
compile_yecc/1,
compile_sass/2,
compile_coffee/2,
compile_less/2,
run_build/2
]).
-include_lib("zotonic_notifier/include/zotonic_notifier.hrl").
-include_lib("kernel/include/logger.hrl").
-spec mappers() -> [ function() ].
mappers() ->
Builtin = [
fun drop_dirs/7,
fun temp_beam/7,
fun beam_file/7,
fun app_file/7,
fun header_file/7,
fun erlang_file/7,
fun yecc/7,
fun lib_src_build/7,
fun sass_file/7,
fun coffee_file/7,
fun less_file/7
],
zotonic_notifier:foldl(
?SYSTEM_NOTIFIER, zotonic_filehandler_mappers,
zotonic_filehandler_mappers, Builtin,
undefined).
%% ------------------------------ Map files to actions ------------------------------
drop_dirs(delete, _Application, _What, _Ext, _Root, _Split, _Filename) ->
false;
drop_dirs(_Verb, _Application, _What, _Ext, _Root, _Split, Filename) ->
case filelib:is_dir(Filename) of
true -> ok;
false -> false
end.
%% @doc Recompile Erlang files on the fly
temp_beam(_Verb, _Application, _What, <<".bea#">>, _Root, _Split, _Filename) ->
ok;
temp_beam(_Verb, _Application, _What, _Ext, _Root, _Split, _Filename) ->
false.
beam_file(delete, _Application, _What, <<".beam">>, _Root, _Split, _Filename) ->
ok;
beam_file(create, _Application, {ebin, _EbinFile}, <<".beam">>, Root, _Split, _Filename) ->
case is_indexed_beam_file(Root) of
true ->
{ok, [
{zotonic_filehandler_compile, ld, [erlang:binary_to_atom(Root, utf8)]},
{z_module_indexer, reindex, []}
]};
false ->
{ok, [
{zotonic_filehandler_compile, ld, [erlang:binary_to_atom(Root, utf8)]}
]}
end;
beam_file(_Verb, _Application, {ebin, _EbinFile}, <<".beam">>, Root, _Split, _Filename) ->
{ok, [
{zotonic_filehandler_compile, ld, [erlang:binary_to_atom(Root, utf8)]}
]};
beam_file(_Verb, _Application, _What, _Ext, _Root, _Split, _Filename) ->
false.
is_indexed_beam_file(<<"mod_", _/binary>>) -> true;
is_indexed_beam_file(<<"m_", _/binary>>) -> true;
is_indexed_beam_file(<<"scomp_", _/binary>>) -> true;
is_indexed_beam_file(<<"filter_", _/binary>>) -> true;
is_indexed_beam_file(_) -> false.
@doc Check for newly created / added Erlang applications
app_file(create, _Application, {app, _AppFile}, <<".app">>, _Root, _Split, Filename) ->
{ok, [
{zotonic_filehandler_compile, code_path_check, [Filename]}
]};
app_file(_Verb, _Application, _What, _Ext, _Root, _Split, _Filename) ->
false.
header_file(delete, _Basename, _What, <<".hrl">>, _Root, _Split, _Filename) ->
ok;
header_file(_Verb, _Basename, _What, <<".hrl">>, _Root, _Split, _Filename) ->
{ok, [
{zotonic_filehandler_compile, all, []}
]};
header_file(_Verb, _Basename, _What, _Ext, _Root, _Split, _Filename) ->
false.
erlang_file(delete, _Application, _What, <<".erl">>, _Root, _Split, _Filename) ->
% Should delete the beam file
ok;
erlang_file(_Verb, _Application, {src, _Path}, <<".erl">>, _Root, _Split, Filename) ->
{ok, [
{zotonic_filehandler_compile, recompile, [Filename]}
]};
erlang_file(_Verb, _Application, {test, _Path}, <<".erl">>, _Root, _Split, Filename) ->
{ok, [
{zotonic_filehandler_compile, recompile, [Filename]}
]};
erlang_file(_Verb, _Application, _What, _Ext, _Root, _Split, _Filename) ->
false.
yecc(delete, _Application, _What, <<".yrl">>, _Root, _Split, _Filename) ->
% Should delete the erlang file
ok;
yecc(_Verb, _Application, {src, _Path}, <<".yrl">>, _Root, _Split, Filename) ->
{ok, [
{?MODULE, compile_yecc, [Filename]}
]};
yecc(_Verb, _Application, _What, _Ext, _Root, _Split, _Filename) ->
false.
lib_src_build(_Verb, Application, {priv, <<"lib-src">>, Path}, _Ext, _Root, _Split, _Filename) ->
case build_command(Application, Path) of
{ok, BuildCmd} ->
{ok, [
{?MODULE, run_build, [Application, BuildCmd]}
]};
false ->
false
end;
lib_src_build(_Verb, _Application, _Path, _Ext, _Root, _Split, _Filename) ->
false.
sass_file(delete, _Application, _What, Ext, _Root, _Split, _Filename)
when Ext =:= <<".sass">>; Ext =:= <<".scss">> ->
% Should delete the css file
ok;
sass_file(_Verb, Application, {priv, <<"lib-src">>, Path}, Ext, _Root, _Split, _Filename)
when Ext =:= <<".sass">>; Ext =:= <<".scss">> ->
{ok, [
{?MODULE, compile_sass, [Application, Path]}
]};
sass_file(_Verb, _Application, _What, _Ext, _Root, _Split, _Filename) ->
false.
less_file(delete, _Application, _What, <<".less">>, _Root, _Split, _Filename) ->
% Should delete the css file
ok;
less_file(_Verb, Application, {priv, <<"lib-src">>, Path}, <<".less">>, _Root, _Split, _Filename) ->
{ok, [
{?MODULE, compile_less, [Application, Path]}
]};
less_file(_Verb, _Application, _What, _Ext, _Root, _Split, _Filename) ->
false.
coffee_file(delete, _Application, _What, <<".coffee">>, _Root, _Split, _Filename) ->
% Should delete the js file
ok;
coffee_file(_Verb, Application, {priv, <<"lib-src">>, Path}, <<".coffee">>, _Root, _Split, _Filename) ->
{ok, [
{?MODULE, compile_coffee, [Application, Path]}
]};
coffee_file(_Verb, _Application, _What, _Ext, _Root, _Split, _Filename) ->
false.
%% ------------------------------ Action callbacks -----------------------------------
compile_yecc(Filename) ->
% "Rebuilding yecc file: " ++ filename:basename(Filename);
InPath = unicode:characters_to_list(Filename, utf8),
OutPath = filename:rootname(InPath) ++ ".erl",
case is_newer(InPath, OutPath) of
true ->
zotonic_filehandler:terminal_notifier("Yecc: " ++ filename:basename(InPath)),
Cmd = "erlc -o "
++ z_filelib:os_escape(filename:dirname(InPath))
++ " "
++ z_filelib:os_escape(InPath),
zotonic_filehandler_compile:run_cmd(Cmd);
false ->
ok
end.
%% @doc SCSS / SASS files from priv/lib-src/css/.../foo.sass -> priv/lib/css/.../foo.css
compile_sass(Application, SrcPath) ->
AppPriv = code:priv_dir(Application),
SrcFile = filename:join([ AppPriv, "lib-src", SrcPath]),
SassExt = z_convert:to_list( filename:extension(SrcPath) ),
MainScss = case filename:basename(SrcPath) of
<<"_", _/binary>> ->
find_main_sass_files(AppPriv, filename:dirname(SrcPath), SassExt);
<<_/binary>> ->
[ SrcPath ]
end,
lists:map(
fun(MainFile) ->
InFile = filename:join([ AppPriv, "lib-src", MainFile]),
OutPath = filename:join([ AppPriv, "lib", MainFile]),
OutFile = iolist_to_binary([ filename:rootname(OutPath), ".css" ]),
case is_newer(SrcFile, OutFile) of
true ->
case z_filelib:ensure_dir(OutPath) of
ok ->
zotonic_filehandler:terminal_notifier("Sass: " ++ MainFile),
Cmd = [
sass_command(),
z_filelib:os_escape(InFile),
" ",
z_filelib:os_escape(OutFile)
],
zotonic_filehandler_compile:run_cmd(Cmd);
{error, Reason} = Error ->
?LOG_ERROR(#{
text => <<"Could not create directory">>,
in => zotonic_filehandler,
path => OutPath,
result => error,
reason => Reason
}),
Error
end;
false ->
ok
end
end,
MainScss).
sass_command() ->
case os:find_executable("sassc") of
false ->
"sass --sourcemap=none --unix-newlines ";
_Sassc ->
"sassc --omit-map-comment "
end.
find_main_sass_files(AppPriv, SrcPath, SassExt) when is_binary(SrcPath) ->
InPath = filename:join([AppPriv, "lib-src", SrcPath]),
{ok, Files} = file:list_dir(InPath),
MainScss = lists:filter(
fun
([$_|_]) -> false;
(E) -> lists:suffix(SassExt, E)
end,
Files),
case MainScss of
[] ->
case SrcPath of
<<".">> -> [];
_ -> find_main_sass_files(AppPriv, filename:dirname(SrcPath), SassExt)
end;
_ ->
lists:map(
fun(File) ->
filename:join(SrcPath, File)
end,
MainScss)
end.
%% @doc LESS files from priv/lib-src/css/.../foo.less -> priv/lib/css/.../foo.css
%% Check for a 'config' file on the path, if present then that file is used
%% for the compilation of the less files.
compile_less(Application, SrcPath) ->
AppPriv = code:priv_dir(Application),
InPath = filename:join([AppPriv, "lib-src", SrcPath]),
DstPath = unicode:characters_to_list(filename:rootname(SrcPath)) ++ ".css",
OutPath = filename:join([ AppPriv, "lib", DstPath ]),
case is_newer(InPath, OutPath) of
true ->
zotonic_filehandler:terminal_notifier("Lessc: " ++ filename:basename(InPath)),
case z_filelib:ensure_dir(OutPath) of
ok ->
Cmd = [
"lessc ",
z_filelib:os_escape(InPath),
" > ",
z_filelib:os_escape(OutPath)
],
zotonic_filehandler_compile:run_cmd(Cmd);
{error, Reason} = Error ->
?LOG_ERROR(#{
text => <<"Could not create directory">>,
in => zotonic_filehandler,
path => OutPath,
result => error,
reason => Reason
}),
Error
end;
false ->
ok
end.
@doc coffee files from priv / lib - src / js/ ... /foo.coffee - > priv / lib / js/ ...
compile_coffee(Application, SrcPath) ->
AppPriv = code:priv_dir(Application),
InPath = filename:join([AppPriv, "lib-src", SrcPath]),
DstPath = unicode:characters_to_list(filename:rootname(SrcPath)) ++ ".js",
OutPath = filename:join([ AppPriv, "lib", DstPath ]),
case is_newer(InPath, OutPath) of
true ->
case z_filelib:ensure_dir(OutPath) of
ok ->
zotonic_filehandler:terminal_notifier("Coffee: " ++ filename:basename(InPath)),
Cmd = [
"coffee -o ",
z_filelib:os_escape(OutPath),
" -c ",
z_filelib:os_escape(InPath)
],
zotonic_filehandler_compile:run_cmd(Cmd);
{error, Reason} = Error ->
?LOG_ERROR(#{
text => <<"Could not create directory">>,
in => zotonic_filehandler,
path => OutPath,
result => error,
reason => Reason
}),
Error
end;
false ->
ok
end.
%% @doc Run the build command in a lib-src directory
run_build(Application, {make, Makefile}) ->
zotonic_filehandler:terminal_notifier("Make: " ++ app_path(Application, Makefile)),
CmdOpts = [
{env, [
{"APP_DIR", code:lib_dir(Application)},
{"ZOTONIC_LIB", "1"}
]}
],
BuildDir = filename:dirname(Makefile),
MakeCmd = "cd " ++ z_filelib:os_escape(BuildDir) ++ "; sh -c make " ++ z_filelib:os_escape(Makefile),
zotonic_filehandler_compile:run_cmd(MakeCmd, CmdOpts, #{ ignore_dir => BuildDir }).
%% ---------------------------------------- Support routines ------------------------------
app_path(Application, BuildCmd) ->
AppB = atom_to_binary(Application, utf8),
case binary:split(BuildCmd, <<"/", AppB/binary, "/">>) of
[_, X] ->
unicode:characters_to_list(iolist_to_binary([ AppB, "/", X]));
[_] ->
unicode:characters_to_list(BuildCmd)
end.
is_newer(In, Out) ->
InMod = filelib:last_modified(In),
OutMod = filelib:last_modified(Out),
is_newer_1(InMod, OutMod).
is_newer_1(0, _) -> false;
is_newer_1(_, 0) -> true;
is_newer_1(A, B) -> A > B.
%% @doc Find a Makefile on the path to the changed lib-src file.
We look for a Makefile to build the targets in ' priv / lib ' . This Makefile will
%% be executed in the next step (after deduplication).
%% TODO: Support buildwatchers, maybe with "make watcher"
build_command(Application, SrcPath) ->
LibSrcDir = filename:join([code:priv_dir(Application), "lib-src"]),
case find_build(LibSrcDir, filename:split(filename:dirname(SrcPath))) of
{ok, {make, _Makefile} = BuildCmd} ->
{ok, BuildCmd};
false ->
false
end.
find_build(LibSrcDir, Dir) ->
case lists:member(<<"dist">>, Dir) of
true -> false;
false ->
Dirname = filename:join([LibSrcDir] ++ Dir),
Makefile = filename:join(Dirname, <<"Makefile">>),
case filelib:is_file(Makefile) of
true ->
{ok, {make, Makefile}};
false when Dir =/= [] ->
Up = lists:reverse(tl(lists:reverse(Dir))),
find_build(LibSrcDir, Up);
false ->
false
end
end.
| null | https://raw.githubusercontent.com/zotonic/zotonic/1bb4aa8a0688d007dd8ec8ba271546f658312da8/apps/zotonic_filehandler/src/zotonic_filehandler_mappers.erl | erlang |
@doc Handle changed files
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Actions, do not use from other modules.
------------------------------ Map files to actions ------------------------------
@doc Recompile Erlang files on the fly
Should delete the beam file
Should delete the erlang file
Should delete the css file
Should delete the css file
Should delete the js file
------------------------------ Action callbacks -----------------------------------
"Rebuilding yecc file: " ++ filename:basename(Filename);
@doc SCSS / SASS files from priv/lib-src/css/.../foo.sass -> priv/lib/css/.../foo.css
@doc LESS files from priv/lib-src/css/.../foo.less -> priv/lib/css/.../foo.css
Check for a 'config' file on the path, if present then that file is used
for the compilation of the less files.
@doc Run the build command in a lib-src directory
---------------------------------------- Support routines ------------------------------
@doc Find a Makefile on the path to the changed lib-src file.
be executed in the next step (after deduplication).
TODO: Support buildwatchers, maybe with "make watcher" | @author < >
2014 - 2017
Copyright 2014 - 2017
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(zotonic_filehandler_mappers).
-author("Arjan Scherpenisse <>").
-export([
mappers/0
]).
-export([
compile_yecc/1,
compile_sass/2,
compile_coffee/2,
compile_less/2,
run_build/2
]).
-include_lib("zotonic_notifier/include/zotonic_notifier.hrl").
-include_lib("kernel/include/logger.hrl").
-spec mappers() -> [ function() ].
mappers() ->
Builtin = [
fun drop_dirs/7,
fun temp_beam/7,
fun beam_file/7,
fun app_file/7,
fun header_file/7,
fun erlang_file/7,
fun yecc/7,
fun lib_src_build/7,
fun sass_file/7,
fun coffee_file/7,
fun less_file/7
],
zotonic_notifier:foldl(
?SYSTEM_NOTIFIER, zotonic_filehandler_mappers,
zotonic_filehandler_mappers, Builtin,
undefined).
drop_dirs(delete, _Application, _What, _Ext, _Root, _Split, _Filename) ->
false;
drop_dirs(_Verb, _Application, _What, _Ext, _Root, _Split, Filename) ->
case filelib:is_dir(Filename) of
true -> ok;
false -> false
end.
temp_beam(_Verb, _Application, _What, <<".bea#">>, _Root, _Split, _Filename) ->
ok;
temp_beam(_Verb, _Application, _What, _Ext, _Root, _Split, _Filename) ->
false.
beam_file(delete, _Application, _What, <<".beam">>, _Root, _Split, _Filename) ->
ok;
beam_file(create, _Application, {ebin, _EbinFile}, <<".beam">>, Root, _Split, _Filename) ->
case is_indexed_beam_file(Root) of
true ->
{ok, [
{zotonic_filehandler_compile, ld, [erlang:binary_to_atom(Root, utf8)]},
{z_module_indexer, reindex, []}
]};
false ->
{ok, [
{zotonic_filehandler_compile, ld, [erlang:binary_to_atom(Root, utf8)]}
]}
end;
beam_file(_Verb, _Application, {ebin, _EbinFile}, <<".beam">>, Root, _Split, _Filename) ->
{ok, [
{zotonic_filehandler_compile, ld, [erlang:binary_to_atom(Root, utf8)]}
]};
beam_file(_Verb, _Application, _What, _Ext, _Root, _Split, _Filename) ->
false.
is_indexed_beam_file(<<"mod_", _/binary>>) -> true;
is_indexed_beam_file(<<"m_", _/binary>>) -> true;
is_indexed_beam_file(<<"scomp_", _/binary>>) -> true;
is_indexed_beam_file(<<"filter_", _/binary>>) -> true;
is_indexed_beam_file(_) -> false.
%% @doc Check for newly created / added Erlang applications.
%% @doc A newly created .app file signals a new Erlang application: check
%% the code path so the application becomes loadable. All other events pass.
app_file(create, _Application, {app, _AppFile}, <<".app">>, _Root, _Split, Filename) ->
    Actions = [ {zotonic_filehandler_compile, code_path_check, [Filename]} ],
    {ok, Actions};
app_file(_Verb, _Application, _What, _Ext, _Root, _Split, _Filename) ->
    false.
%% @doc A changed .hrl header can affect any module that includes it, so
%% recompile everything. Deleting a header is absorbed without action.
header_file(delete, _Basename, _What, <<".hrl">>, _Root, _Split, _Filename) ->
    ok;
header_file(_Verb, _Basename, _What, <<".hrl">>, _Root, _Split, _Filename) ->
    RecompileAll = {zotonic_filehandler_compile, all, []},
    {ok, [ RecompileAll ]};
header_file(_Verb, _Basename, _What, _Ext, _Root, _Split, _Filename) ->
    false.
%% @doc Recompile a changed .erl file found in a src or test directory;
%% deletions are absorbed, everything else passes to the next mapper.
erlang_file(delete, _Application, _What, <<".erl">>, _Root, _Split, _Filename) ->
    ok;
erlang_file(_Verb, _Application, {Dir, _Path}, <<".erl">>, _Root, _Split, Filename)
    when Dir =:= src; Dir =:= test ->
    {ok, [ {zotonic_filehandler_compile, recompile, [Filename]} ]};
erlang_file(_Verb, _Application, _What, _Ext, _Root, _Split, _Filename) ->
    false.
%% @doc Compile a changed yecc grammar (.yrl) in a src directory to its
%% parser module; deleted grammars are absorbed without action.
yecc(delete, _Application, _What, <<".yrl">>, _Root, _Split, _Filename) ->
    ok;
yecc(_Verb, _Application, {src, _Path}, <<".yrl">>, _Root, _Split, Filename) ->
    {ok, [ {?MODULE, compile_yecc, [Filename]} ]};
yecc(_Verb, _Application, _What, _Ext, _Root, _Split, _Filename) ->
    false.
%% @doc Any change below priv/lib-src triggers the nearest applicable build
%% command (currently a Makefile), when one can be found.
lib_src_build(_Verb, Application, {priv, <<"lib-src">>, Path}, _Ext, _Root, _Split, _Filename) ->
    case build_command(Application, Path) of
        {ok, BuildCmd} ->
            {ok, [ {?MODULE, run_build, [Application, BuildCmd]} ]};
        false ->
            false
    end;
lib_src_build(_Verb, _Application, _Path, _Ext, _Root, _Split, _Filename) ->
    false.
%% @doc Handle sass/scss files: deletions are absorbed, changes below
%% priv/lib-src are compiled to css; anything else passes on.
sass_file(Verb, Application, What, Ext, _Root, _Split, _Filename)
    when Ext =:= <<".sass">>; Ext =:= <<".scss">> ->
    case {Verb, What} of
        {delete, _} ->
            ok;
        {_, {priv, <<"lib-src">>, Path}} ->
            {ok, [ {?MODULE, compile_sass, [Application, Path]} ]};
        _Other ->
            false
    end;
sass_file(_Verb, _Application, _What, _Ext, _Root, _Split, _Filename) ->
    false.
%% @doc Handle .less files: deletions are absorbed, changes below
%% priv/lib-src are compiled to css; anything else passes on.
less_file(Verb, Application, What, <<".less">>, _Root, _Split, _Filename) ->
    case {Verb, What} of
        {delete, _} ->
            ok;
        {_, {priv, <<"lib-src">>, Path}} ->
            {ok, [ {?MODULE, compile_less, [Application, Path]} ]};
        _Other ->
            false
    end;
less_file(_Verb, _Application, _What, _Ext, _Root, _Split, _Filename) ->
    false.
%% @doc Handle .coffee files: deletions are absorbed, changes below
%% priv/lib-src are compiled to javascript; anything else passes on.
coffee_file(Verb, Application, What, <<".coffee">>, _Root, _Split, _Filename) ->
    case {Verb, What} of
        {delete, _} ->
            ok;
        {_, {priv, <<"lib-src">>, Path}} ->
            {ok, [ {?MODULE, compile_coffee, [Application, Path]} ]};
        _Other ->
            false
    end;
coffee_file(_Verb, _Application, _What, _Ext, _Root, _Split, _Filename) ->
    false.
%% @doc Compile a yecc grammar (.yrl) to its .erl parser with erlc, writing
%% the generated module next to the grammar, but only when the grammar is
%% newer than the generated file.
compile_yecc(Filename) ->
    InPath = unicode:characters_to_list(Filename, utf8),
    OutPath = filename:rootname(InPath) ++ ".erl",
    case is_newer(InPath, OutPath) of
        true ->
            zotonic_filehandler:terminal_notifier("Yecc: " ++ filename:basename(InPath)),
            %% erlc -o <dir> writes foo.erl beside foo.yrl.
            Cmd = "erlc -o "
                ++ z_filelib:os_escape(filename:dirname(InPath))
                ++ " "
                ++ z_filelib:os_escape(InPath),
            zotonic_filehandler_compile:run_cmd(Cmd);
        false ->
            ok
    end.
%% @doc Compile a sass/scss file below priv/lib-src to css below priv/lib.
%% Partials (basename starting with "_") cannot be compiled on their own;
%% instead the enclosing non-partial "main" sass files are recompiled.
compile_sass(Application, SrcPath) ->
    AppPriv = code:priv_dir(Application),
    SrcFile = filename:join([ AppPriv, "lib-src", SrcPath]),
    SassExt = z_convert:to_list( filename:extension(SrcPath) ),
    MainScss = case filename:basename(SrcPath) of
        <<"_", _/binary>> ->
            %% A partial: locate the main files that (presumably) include it.
            find_main_sass_files(AppPriv, filename:dirname(SrcPath), SassExt);
        <<_/binary>> ->
            [ SrcPath ]
    end,
    lists:map(
        fun(MainFile) ->
            InFile = filename:join([ AppPriv, "lib-src", MainFile]),
            OutPath = filename:join([ AppPriv, "lib", MainFile]),
            OutFile = iolist_to_binary([ filename:rootname(OutPath), ".css" ]),
            %% NOTE(review): freshness is checked against the *changed* file
            %% (SrcFile), not InFile, so editing a partial recompiles its
            %% main files -- confirm this is the intended semantics.
            case is_newer(SrcFile, OutFile) of
                true ->
                    case z_filelib:ensure_dir(OutPath) of
                        ok ->
                            zotonic_filehandler:terminal_notifier("Sass: " ++ MainFile),
                            Cmd = [
                                sass_command(),
                                z_filelib:os_escape(InFile),
                                " ",
                                z_filelib:os_escape(OutFile)
                            ],
                            zotonic_filehandler_compile:run_cmd(Cmd);
                        {error, Reason} = Error ->
                            ?LOG_ERROR(#{
                                text => <<"Could not create directory">>,
                                in => zotonic_filehandler,
                                path => OutPath,
                                result => error,
                                reason => Reason
                            }),
                            Error
                    end;
                false ->
                    ok
            end
        end,
        MainScss).
%% @doc Select the sass compiler command line: prefer the compiled "sassc"
%% binary when it is on the PATH, otherwise fall back to the "sass" tool.
sass_command() ->
    case os:find_executable("sassc") of
        false ->
            "sass --sourcemap=none --unix-newlines ";
        _SasscPath ->
            "sassc --omit-map-comment "
    end.
%% @doc Find the non-partial ("main") sass files with the given extension in
%% SrcPath (relative to priv/lib-src), walking up parent directories until a
%% match is found or the lib-src root (".") is reached.
find_main_sass_files(AppPriv, SrcPath, SassExt) when is_binary(SrcPath) ->
    InPath = filename:join([AppPriv, "lib-src", SrcPath]),
    {ok, Files} = file:list_dir(InPath),
    MainScss = lists:filter(
        fun
            %% Partials (leading underscore) are never compiled directly.
            ([$_|_]) -> false;
            (E) -> lists:suffix(SassExt, E)
        end,
        Files),
    case MainScss of
        [] ->
            case SrcPath of
                %% Reached the lib-src root: nothing to compile.
                <<".">> -> [];
                _ -> find_main_sass_files(AppPriv, filename:dirname(SrcPath), SassExt)
            end;
        _ ->
            lists:map(
                fun(File) ->
                    filename:join(SrcPath, File)
                end,
                MainScss)
    end.
%% @doc Compile a .less file below priv/lib-src to css below priv/lib, when
%% the source is newer than the generated css. Output is produced via a
%% shell redirect of lessc's stdout.
compile_less(Application, SrcPath) ->
    AppPriv = code:priv_dir(Application),
    InPath = filename:join([AppPriv, "lib-src", SrcPath]),
    DstPath = unicode:characters_to_list(filename:rootname(SrcPath)) ++ ".css",
    OutPath = filename:join([ AppPriv, "lib", DstPath ]),
    case is_newer(InPath, OutPath) of
        true ->
            zotonic_filehandler:terminal_notifier("Lessc: " ++ filename:basename(InPath)),
            case z_filelib:ensure_dir(OutPath) of
                ok ->
                    Cmd = [
                        "lessc ",
                        z_filelib:os_escape(InPath),
                        " > ",
                        z_filelib:os_escape(OutPath)
                    ],
                    zotonic_filehandler_compile:run_cmd(Cmd);
                {error, Reason} = Error ->
                    ?LOG_ERROR(#{
                        text => <<"Could not create directory">>,
                        in => zotonic_filehandler,
                        path => OutPath,
                        result => error,
                        reason => Reason
                    }),
                    Error
            end;
        false ->
            ok
    end.
%% @doc Compile coffee files from priv/lib-src/js/.../foo.coffee to priv/lib/js/...
%% @doc Compile a .coffee file below priv/lib-src to javascript below
%% priv/lib, when the source is newer than the generated file.
compile_coffee(Application, SrcPath) ->
    AppPriv = code:priv_dir(Application),
    InPath = filename:join([AppPriv, "lib-src", SrcPath]),
    DstPath = unicode:characters_to_list(filename:rootname(SrcPath)) ++ ".js",
    OutPath = filename:join([ AppPriv, "lib", DstPath ]),
    case is_newer(InPath, OutPath) of
        true ->
            case z_filelib:ensure_dir(OutPath) of
                ok ->
                    zotonic_filehandler:terminal_notifier("Coffee: " ++ filename:basename(InPath)),
                    %% NOTE(review): coffee's -o option normally names an
                    %% output *directory*; here a file path is passed --
                    %% confirm the generated .js lands where expected.
                    Cmd = [
                        "coffee -o ",
                        z_filelib:os_escape(OutPath),
                        " -c ",
                        z_filelib:os_escape(InPath)
                    ],
                    zotonic_filehandler_compile:run_cmd(Cmd);
                {error, Reason} = Error ->
                    ?LOG_ERROR(#{
                        text => <<"Could not create directory">>,
                        in => zotonic_filehandler,
                        path => OutPath,
                        result => error,
                        reason => Reason
                    }),
                    Error
            end;
        false ->
            ok
    end.
%% @doc Run a build command found for a changed file below priv/lib-src.
%% Currently only {make, Makefile} commands are produced by build_command/2.
run_build(Application, {make, Makefile}) ->
    zotonic_filehandler:terminal_notifier("Make: " ++ app_path(Application, Makefile)),
    CmdOpts = [
        {env, [
            {"APP_DIR", code:lib_dir(Application)},
            {"ZOTONIC_LIB", "1"}
        ]}
    ],
    BuildDir = filename:dirname(Makefile),
    %% NOTE(review): `sh -c make <file>` passes the escaped Makefile path as
    %% $0 of the shell, not as an argument to make; make still finds the
    %% Makefile because it runs inside BuildDir -- confirm this is intended.
    MakeCmd = "cd " ++ z_filelib:os_escape(BuildDir) ++ "; sh -c make " ++ z_filelib:os_escape(Makefile),
    %% ignore_dir prevents the build's own output from re-triggering builds.
    zotonic_filehandler_compile:run_cmd(MakeCmd, CmdOpts, #{ ignore_dir => BuildDir }).
%% @doc Render a path relative to the given application for display in
%% notifications: everything up to (and including) "/<app>/" is replaced by
%% "<app>/". If the marker does not occur, the path is returned unchanged.
app_path(Application, BuildCmd) ->
    AppB = atom_to_binary(Application, utf8),
    Marker = <<"/", AppB/binary, "/">>,
    case binary:split(BuildCmd, Marker) of
        [_Prefix, Rest] ->
            unicode:characters_to_list(iolist_to_binary([ AppB, "/", Rest ]));
        [_NoMatch] ->
            unicode:characters_to_list(BuildCmd)
    end.
%% @doc Return true when In exists and its modification time is strictly
%% later than Out's; a missing Out always counts as outdated, a missing In
%% never triggers a rebuild.
is_newer(In, Out) ->
    newer_mtime(filelib:last_modified(In), filelib:last_modified(Out)).

%% filelib:last_modified/1 returns 0 for files that do not exist.
newer_mtime(0, _OutMod) -> false;
newer_mtime(_InMod, 0) -> true;
newer_mtime(InMod, OutMod) -> InMod > OutMod.
%% We look for a Makefile to build the targets in 'priv/lib'. This Makefile will
%% be run whenever a file below priv/lib-src changes.
%% @doc Find the build command applying to a path inside priv/lib-src of
%% the given application: the nearest Makefile at or above the path.
build_command(Application, SrcPath) ->
    LibSrcDir = filename:join([ code:priv_dir(Application), "lib-src" ]),
    DirParts = filename:split(filename:dirname(SrcPath)),
    case find_build(LibSrcDir, DirParts) of
        {ok, {make, _Makefile}} = Found ->
            Found;
        false ->
            false
    end.
%% @doc Walk up from Dir (a list of path segments below LibSrcDir) looking
%% for a Makefile; paths containing a "dist" segment are never built.
find_build(LibSrcDir, Dir) ->
    case lists:member(<<"dist">>, Dir) of
        true ->
            %% Never run builds inside generated 'dist' directories.
            false;
        false ->
            Dirname = filename:join([ LibSrcDir | Dir ]),
            Makefile = filename:join(Dirname, <<"Makefile">>),
            case filelib:is_file(Makefile) of
                true ->
                    {ok, {make, Makefile}};
                false when Dir =/= [] ->
                    %% Not here: retry one directory up.
                    find_build(LibSrcDir, lists:droplast(Dir));
                false ->
                    false
            end
    end.
|
05d9f11b6eba392ddd388dbc20b8abf6ecb1a2cc3cb5d3ba17bb727ff1f6e6c6 | facebook/pyre-check | locationBasedLookupProcessor.ml |
(* Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)
(* locationBasedLookupProcessor is a wrapper around LocationBasedLookup.create_of_module that finds
the path and creates a lookup. It is used in both hover and expression-level coverage. *)
open Core
open Ast
open Analysis
(* Why a source path could not be mapped to a type-checkable module. *)
type error_reason =
  | StubShadowing (* the module is shadowed by a stub (PathLookup.ShadowedBy) *)
  | FileNotFound
[@@deriving sexp, show, compare, to_yojson]
(* All (location, resolved type) pairs for a path, or why they are unavailable. *)
type types_by_location = ((Location.t * Type.t) list, error_reason) Result.t
(* Expression-level coverage for a path, or why it is unavailable. *)
type coverage_by_location = (LocationBasedLookup.coverage_for_path, error_reason) Result.t
(* Resolve a user-visible relative path to an artifact, then to a module, and
   build the location-based lookup for that module. *)
let get_lookup ~build_system ~type_environment path =
  let lookup_of_module_path { ModulePath.qualifier; _ } =
    let timer = Timer.start () in
    let lookup = LocationBasedLookup.create_of_module type_environment qualifier in
    Log.log
      ~section:`Performance
      "locationBasedLookupProcessor: create_of_module: %d"
      (Timer.stop_in_ms timer);
    Result.Ok lookup
  in
  let source_path =
    let { Configuration.Analysis.local_root = root; _ } =
      TypeEnvironment.ReadOnly.controls type_environment |> EnvironmentControls.configuration
    in
    SourcePath.create (PyrePath.create_relative ~root ~relative:path)
  in
  match BuildSystem.lookup_artifact build_system source_path with
  | [] -> Result.Error FileNotFound
  | artifact_path :: _ -> (
      (* If a source path corresponds to multiple artifacts, arbitrarily pick
         the first one and compute results for it. *)
      let module_tracker = TypeEnvironment.ReadOnly.module_tracker type_environment in
      match ModuleTracker.ReadOnly.lookup_path module_tracker artifact_path with
      | ModuleTracker.PathLookup.Found module_path -> lookup_of_module_path module_path
      | ModuleTracker.PathLookup.ShadowedBy _ -> Result.Error StubShadowing
      | ModuleTracker.PathLookup.NotFound -> Result.Error FileNotFound)
(* All resolved types in the module at [path], sorted by location. *)
let find_all_resolved_types_for_path ~type_environment ~build_system path =
  let open Result in
  let sorted_types lookup =
    LocationBasedLookup.get_all_nodes_and_coverage_data lookup
    |> List.map ~f:(fun (location, { LocationBasedLookup.type_; expression = _ }) -> location, type_)
    |> List.sort ~compare:[%compare: Location.t * Type.t]
  in
  get_lookup ~type_environment ~build_system path >>| sorted_types
(* Expression-level coverage for the module at [path]. *)
let find_expression_level_coverage_for_path ~type_environment ~build_system path =
  Result.map
    (get_lookup ~type_environment ~build_system path)
    ~f:LocationBasedLookup.get_expression_level_coverage
| null | https://raw.githubusercontent.com/facebook/pyre-check/2b1a9546553ac08675d475564550b82d2f3e862a/source/server/locationBasedLookupProcessor.ml | ocaml | locationBasedLookupProcessor is a wrapper around LocationBasedLookup.create_of_module that finds
the path and creates a lookup. It is used in both hover and expression-level coverage.
If a source path corresponds to multiple artifacts, randomly pick an artifact and compute
results for it. |
(* Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)
open Core
open Ast
open Analysis
(* Duplicate of the definitions above (the dataset's comment-stripped "code"
   column); kept byte-identical. *)
type error_reason =
  | StubShadowing
  | FileNotFound
[@@deriving sexp, show, compare, to_yojson]
type types_by_location = ((Location.t * Type.t) list, error_reason) Result.t
type coverage_by_location = (LocationBasedLookup.coverage_for_path, error_reason) Result.t
(* Map a relative source path to a module and build its lookup. *)
let get_lookup ~build_system ~type_environment path =
  let generate_lookup_for_existent_path { ModulePath.qualifier; _ } =
    let timer = Timer.start () in
    let lookup = LocationBasedLookup.create_of_module type_environment qualifier in
    Log.log
      ~section:`Performance
      "locationBasedLookupProcessor: create_of_module: %d"
      (Timer.stop_in_ms timer);
    Result.Ok lookup
  in
  let generate_lookup_for_nonexistent_path error_reason = Result.Error error_reason in
  let full_path =
    let { Configuration.Analysis.local_root = root; _ } =
      TypeEnvironment.ReadOnly.controls type_environment |> EnvironmentControls.configuration
    in
    PyrePath.create_relative ~root ~relative:path |> SourcePath.create
  in
  match BuildSystem.lookup_artifact build_system full_path with
  | [] -> generate_lookup_for_nonexistent_path FileNotFound
  | analysis_path :: _ -> (
      (* Several artifacts may match; the first is used. *)
      let module_tracker = TypeEnvironment.ReadOnly.module_tracker type_environment in
      match ModuleTracker.ReadOnly.lookup_path module_tracker analysis_path with
      | ModuleTracker.PathLookup.Found module_path -> generate_lookup_for_existent_path module_path
      | ModuleTracker.PathLookup.ShadowedBy _ -> generate_lookup_for_nonexistent_path StubShadowing
      | ModuleTracker.PathLookup.NotFound -> generate_lookup_for_nonexistent_path FileNotFound)
(* All resolved types in the module at [path], sorted by location. *)
let find_all_resolved_types_for_path ~type_environment ~build_system path =
  let open Result in
  get_lookup ~type_environment ~build_system path
  >>| LocationBasedLookup.get_all_nodes_and_coverage_data
  >>| List.map ~f:(fun (location, { LocationBasedLookup.type_; expression = _ }) -> location, type_)
  >>| List.sort ~compare:[%compare: Location.t * Type.t]
(* Expression-level coverage for the module at [path]. *)
let find_expression_level_coverage_for_path ~type_environment ~build_system path =
  let open Result in
  get_lookup ~type_environment ~build_system path
  >>| LocationBasedLookup.get_expression_level_coverage
|
b51f5f90c26b4e35a4f84656e21e81c12fbcf1e5e10a8a1cd506754cb1d8241d | ocaml/oasis | OASISPackage_intern.ml | (******************************************************************************)
OASIS : architecture for building OCaml libraries and applications
(* *)
Copyright ( C ) 2011 - 2016 ,
Copyright ( C ) 2008 - 2011 , OCamlCore SARL
(* *)
(* This library is free software; you can redistribute it and/or modify it *)
(* under the terms of the GNU Lesser General Public License as published by *)
the Free Software Foundation ; either version 2.1 of the License , or ( at
(* your option) any later version, with the OCaml static compilation *)
(* exception. *)
(* *)
(* This library is distributed in the hope that it will be useful, but *)
(* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *)
(* or FITNESS FOR A PARTICULAR PURPOSE. See the file COPYING for more *)
(* details. *)
(* *)
You should have received a copy of the GNU Lesser General Public License
along with this library ; if not , write to the Free Software Foundation ,
Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
(******************************************************************************)
* Package schema and generator
@author
@author Sylvain Le Gall
*)
open OASISTypes
(* END EXPORT *)
open OASISValues
open OASISUtils
open OASISSchema_intern
open OASISGettext
(** Push package-level build information (dependencies, tools and source
    patterns) down into every section that carries build settings; the
    package-level values are prepended to each section's own values. *)
let propagate_fields
      ~build_depends
      ~build_tools
      ~interface_patterns
      ~implementation_patterns
      pkg =
  let extend_build_section bs =
    {bs with
       bs_build_depends = build_depends @ bs.bs_build_depends;
       bs_build_tools = build_tools @ bs.bs_build_tools;
       bs_implementation_patterns =
         implementation_patterns @ bs.bs_implementation_patterns;
       bs_interface_patterns =
         interface_patterns @ bs.bs_interface_patterns}
  in
  let propagate = function
    | Library (cs, bs, lib) -> Library (cs, extend_build_section bs, lib)
    | Object (cs, bs, obj) -> Object (cs, extend_build_section bs, obj)
    | Executable (cs, bs, exec) -> Executable (cs, extend_build_section bs, exec)
    | Test (cs, test) ->
        (* Tests and docs only receive the build tools. *)
        Test (cs, {test with test_tools = build_tools @ test.test_tools})
    | Doc (cs, doc) ->
        Doc (cs, {doc with doc_build_tools = build_tools @ doc.doc_build_tools})
    | (Flag _ | SrcRepo _) as sct -> sct
  in
  {pkg with sections = List.map propagate pkg.sections}
(* Toplevel schema for the package; plugin data lives on the package record. *)
let schema =
  schema "Package" (fun pkg -> pkg.plugin_data)
(* The "OASISFormat" field: only the current format version plus a fixed set
   of older versions are accepted. *)
let oasis_version =
  let current_version =
    OASISConf.version_short
  in
  let extra_supported_versions =
    List.map OASISVersion.version_of_string ["0.3"]
  in
  new_field schema "OASISFormat"
    ~quickstart_level:(NoChoice current_version)
    {
      parse =
        (fun ~ctxt str ->
           let v =
             OASISVersion.value.parse ~ctxt str
           in
           (* Reject versions this release cannot handle. *)
           if not
               (List.mem
                  v
                  (current_version :: extra_supported_versions)) then
             failwithf
               (f_ "OASIS format version '%s' is not supported.")
               str;
           v);
      update = update_fail;
      print = OASISVersion.value.print;
    }
    (fun () ->
       s_ "OASIS format version used to write file `_oasis`.")
    (fun pkg -> pkg.oasis_version)
(* The "AlphaFeatures"/"BetaFeatures" fields: comma-separated feature names,
   each checked against the stage it was declared with in OASISFeatures. *)
let alpha_features, beta_features =
  let value_feature stage =
    {
      parse =
        (fun ~ctxt:_ feature_name ->
           match OASISFeatures.get_stage feature_name with
           | OASISFeatures.InDev feature_stage ->
               (* A feature may only be listed in the field of its own stage. *)
               if feature_stage <> stage then
                 failwithf (f_ "Feature %s is in stage %s and not %s.")
                   feature_name
                   (OASISFeatures.string_of_stage feature_stage)
                   (OASISFeatures.string_of_stage stage)
               else
                 feature_name
           | OASISFeatures.SinceVersion min_version ->
               (* Published features no longer belong in alpha/beta lists. *)
               failwithf
                 (f_ "Features %s has been published in OASISVersion %s.")
                 feature_name (OASISVersion.string_of_version min_version));
      update = update_fail;
      print = (fun s -> s)
    }
  in
  let alpha_features =
    new_field schema
      (OASISFeatures.field_of_stage OASISFeatures.Alpha)
      ~default:[]
      ~feature:OASISFeatures.features
      (comma_separated (value_feature OASISFeatures.Alpha))
      (fun () ->
         s_ "Experimental features in alpha stage \
             (frequent change, may never been shipped).")
      (fun pkg -> pkg.alpha_features)
  in
  let beta_features =
    new_field schema
      (OASISFeatures.field_of_stage OASISFeatures.Beta)
      ~default:[]
      ~feature:OASISFeatures.features
      (comma_separated (value_feature OASISFeatures.Beta))
      (fun () ->
         s_ "Experimental features in beta stage \
             (will ship, under review).")
      (fun pkg -> pkg.beta_features)
  in
  alpha_features, beta_features
(** Register all toplevel `_oasis` fields on {!schema} and return the
    function that, given parsed field data and the file's sections, builds
    the complete {!OASISTypes.package} record. *)
let generator =
  let schm = schema in
  let new_field ?quickstart_level ?quickstart_question ?default =
    new_field schm ?quickstart_level ?quickstart_question ?default
  in
  let new_field_plugin nm ?default ?quickstart_question value hlp sync =
    new_field_plugin schm nm ?default ?quickstart_question value hlp sync
  in
  let name =
    new_field "Name" string_not_empty
      (fun () ->
         s_ "Name of the package.")
      (fun pkg -> pkg.name)
  in
  let version =
    new_field "Version" OASISVersion.value
      (fun () ->
         s_ "Version of the package.")
      (fun pkg -> pkg.version)
  in
  let synopsis =
    new_field "Synopsis" string_not_empty
      (fun () ->
         s_ "Short description of the purpose of this package.")
      (fun pkg -> pkg.synopsis)
  in
  let description =
    new_field "Description"
      ~default:None
      (opt OASISText.value)
      (fun () ->
         s_ "Long description of the package purpose.")
      (fun pkg -> pkg.description)
  in
  let authors =
    new_field "Authors"
      (comma_separated string_not_empty)
      (fun () ->
         s_ "Real people that had contributed to the package.")
      (fun pkg -> pkg.authors)
  in
  let copyrights =
    new_field "Copyrights"
      ~default:[]
      (comma_separated copyright)
      (fun () ->
         s_ "Copyright owners.")
      (fun pkg -> pkg.copyrights)
  in
  let maintainers =
    new_field "Maintainers"
      ~default:[]
      (comma_separated string_not_empty)
      (fun () ->
         s_ "Current maintainers of the package.")
      (fun pkg -> pkg.maintainers)
  in
  let license_file =
    new_field "LicenseFile"
      ~default:None
      (opt file)
      (fun () ->
         s_ "File containing the license.")
      (fun pkg -> pkg.license_file)
  in
  let license =
    new_field "License"
      OASISLicense.value
      ~quickstart_question:(fun () ->
         ExclusiveChoices (OASISLicense.choices ()))
      (fun () ->
         (s_ "DEP-5 license of the package \
              (See [DEP-5](/#index6h3))."))
      (fun pkg -> pkg.license)
  in
  let ocaml_version =
    new_field "OCamlVersion"
      ~default:None
      (opt OASISVersion.comparator_value)
      (fun () ->
         s_ "Version constraint on OCaml.")
      (fun pkg -> pkg.ocaml_version)
  in
  let findlib_version =
    new_field "FindlibVersion"
      ~default:None
      (opt OASISVersion.comparator_value)
      (fun () ->
         s_ "Version constraint on Finblib.")
      (fun pkg -> pkg.findlib_version)
  in
  (* The four plugin-selection fields: configure/build/install systems and
     extra plugins. *)
  let conf_type =
    new_field_plugin "ConfType"
      ~default:(OASISPlugin.builtin `Configure "internal")
      ~quickstart_question:OASISPlugin.Configure.quickstart_question
      `Configure
      OASISPlugin.Configure.value
      (fun () ->
         s_ "Configuration system.")
      (fun pkg -> pkg.conf_type)
  in
  let conf_custom =
    OASISCustom.add_fields schm "Conf"
      (fun () -> s_ "Command to run before configuration.")
      (fun () -> s_ "Command to run after configuration.")
      (fun pkg -> pkg.conf_custom)
  in
  let build_type =
    new_field_plugin "BuildType"
      ~default:(OASISPlugin.builtin `Build "ocamlbuild")
      ~quickstart_question:OASISPlugin.Build.quickstart_question
      `Build
      OASISPlugin.Build.value
      (fun () ->
         s_ "Build system.")
      (fun pkg -> pkg.build_type)
  in
  let build_custom =
    OASISCustom.add_fields schm "Build"
      (fun () -> s_ "Command to run before build.")
      (fun () -> s_ "Command to run after build.")
      (fun pkg -> pkg.build_custom)
  in
  let install_type =
    new_field_plugin "InstallType"
      ~default:(OASISPlugin.builtin `Install "internal")
      ~quickstart_question:OASISPlugin.Install.quickstart_question
      `Install
      OASISPlugin.Install.value
      (fun () ->
         s_ "Install/uninstall system.")
      (fun pkg -> pkg.install_type)
  in
  let install_custom =
    OASISCustom.add_fields schm "Install"
      (fun () -> s_ "Command to run before install.")
      (fun () -> s_ "Command to run after install.")
      (fun pkg -> pkg.install_custom)
  in
  let uninstall_custom =
    OASISCustom.add_fields schm "Uninstall"
      (fun () -> s_ "Command to run before uninstall.")
      (fun () -> s_ "Command to run after uninstall.")
      (fun pkg -> pkg.uninstall_custom)
  in
  let clean_custom =
    OASISCustom.add_fields schm "Clean"
      (fun () -> s_ "Command to run before clean.")
      (fun () -> s_ "Command to run after clean.")
      (fun pkg -> pkg.clean_custom)
  in
  let distclean_custom =
    OASISCustom.add_fields schm "Distclean"
      (fun () -> s_ "Command to run before distclean.")
      (fun () -> s_ "Command to run after distclean.")
      (fun pkg -> pkg.distclean_custom)
  in
  let homepage =
    new_field "Homepage"
      ~default:None
      (opt url)
      (fun () ->
         s_ "URL of the package homepage.")
      (fun pkg -> pkg.homepage)
  in
  let bugreports =
    new_field "BugReports"
      ~default:None
      (opt url)
      (fun () ->
         s_ "URL of the page to report bugs about the package.")
      (fun pkg -> pkg.bugreports)
  in
  let tags =
    new_field "Tags"
      ~default:[]
      (comma_separated string_not_empty)
      (fun () ->
         s_ "List of semantic tags to classify the package.")
      (fun pkg -> pkg.tags)
  in
  let categories =
    new_field "Categories"
      ~default:[]
      categories
      (fun () ->
         s_ "URL(s) describing categories of the package.")
      (fun pkg -> pkg.categories)
  in
  let files_ab =
    new_field "FilesAB"
      ~default:[]
      (* TODO: check that filenames end with .ab *)
      (comma_separated file)
      (fun () -> s_ "Files to generate using environment variable \
                     substitution.")
      (fun pkg -> pkg.files_ab)
  in
  let plugins =
    (* Extra plugins allow several answers in quickstart, so relax an
       exclusive choice into a multi-choice. *)
    let quickstart_question () =
      match OASISPlugin.Extra.quickstart_question () with
      | ExclusiveChoices lst ->
          Choices lst
      | Choices _ | Field | Text as q ->
          q
    in
    new_field_plugins schm "Plugins"
      ~default:[]
      ~quickstart_level:Beginner
      ~quickstart_question
      `Extra
      OASISPlugin.Extra.value
      (fun () ->
         s_ "Extra plugins to use.")
      (fun pkg -> pkg.plugins)
  in
  let disable_oasis_section =
    new_field "DisableOASISSection"
      ~default:[]
      ~feature:OASISFeatures.disable_oasis_section
      (comma_separated (expandable file))
      (fun () -> s_ "Files to generate without OASIS section comments or \
                     digest.")
      (fun pkg -> pkg.disable_oasis_section)
  in
  (* Package-level build fields that are later propagated into sections. *)
  let build_depends =
    OASISBuildSection_intern.build_depends_field schm (fun _ -> [])
  in
  let build_tools =
    OASISBuildSection_intern.build_tools_field schm (fun _ -> [])
  in
  let interface_patterns, implementation_patterns =
    OASISBuildSection_intern.source_patterns_fields schm
      [] (fun _ -> [])
      [] (fun _ -> [])
  in
  (* The actual generator: turn parsed field data + sections into a package. *)
  fun data sections ->
    let plugins = plugins data in
    let conf = conf_type data in
    let build = build_type data in
    let install = install_type data in
    (* Generate plugin data *)
    let set_plugin_data generator plugin_data data =
      let rplugin_data = ref plugin_data in
      List.iter
        (fun plg ->
           generator
             (plg :> plugin_kind plugin)
             rplugin_data
             data)
        plugins;
      generator
        (conf :> plugin_kind plugin)
        rplugin_data data;
      generator
        (build :> plugin_kind plugin)
        rplugin_data data;
      generator
        (install :> plugin_kind plugin)
        rplugin_data data;
      !rplugin_data
    in
    (* Plugin data for package *)
    let plugin_data =
      set_plugin_data
        OASISPlugin.generator_package
        []
        data
    in
    (* Fix plugin data for sections, set data from plugin
     * defined at package level
     *)
    let sections =
      List.map
        (fun sct ->
           let knd, cs =
             OASISSection.section_kind_common sct
           in
           let plugin_data =
             set_plugin_data
               (OASISPlugin.generator_section knd)
               cs.cs_plugin_data
               cs.cs_data
           in
           OASISSection.section_common_set
             {cs with cs_plugin_data = plugin_data}
             sct)
        sections
    in
    let oasis_version = oasis_version data in
    let alpha_features = alpha_features data in
    let beta_features = beta_features data in
    (* Alpha/beta feature fields require the current OASISFormat version. *)
    if (alpha_features <> [] || beta_features <> []) &&
       (OASISVersion.version_compare
          oasis_version OASISConf.version_short) <> 0 then begin
      failwithf
        (f_ "You need to use the latest OASISFormat to be able to use \
             fields %s and %s. Change 'OASISFormat: %s' to \
             'OASISFormat: %s'")
        (OASISFeatures.field_of_stage OASISFeatures.Alpha)
        (OASISFeatures.field_of_stage OASISFeatures.Beta)
        (OASISVersion.string_of_version oasis_version)
        (OASISVersion.string_of_version OASISConf.version_short)
    end;
    propagate_fields
      ~build_tools:(build_tools data)
      ~build_depends:(build_depends data)
      ~interface_patterns:(interface_patterns data)
      ~implementation_patterns:(implementation_patterns data)
      {
        oasis_version = oasis_version;
        ocaml_version = ocaml_version data;
        findlib_version = findlib_version data;
        alpha_features = alpha_features;
        beta_features = beta_features;
        name = name data;
        version = version data;
        license = license data;
        license_file = license_file data;
        copyrights = copyrights data;
        maintainers = maintainers data;
        authors = authors data;
        homepage = homepage data;
        bugreports = bugreports data;
        synopsis = synopsis data;
        description = description data;
        tags = tags data;
        categories = categories data;
        conf_type = conf;
        conf_custom = conf_custom data;
        build_type = build;
        build_custom = build_custom data;
        install_type = install;
        install_custom = install_custom data;
        uninstall_custom = uninstall_custom data;
        clean_custom = clean_custom data;
        distclean_custom = distclean_custom data;
        files_ab = files_ab data;
        plugins = plugins;
        disable_oasis_section = disable_oasis_section data;
        sections = sections;
        schema_data = data;
        plugin_data = plugin_data;
      }
| null | https://raw.githubusercontent.com/ocaml/oasis/3d1a9421db92a0882ebc58c5df219b18c1e5681d/src/oasis/OASISPackage_intern.ml | ocaml | ****************************************************************************
This library is free software; you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by
your option) any later version, with the OCaml static compilation
exception.
This library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the file COPYING for more
details.
****************************************************************************
END EXPORT
TODO: check that filenames end with .ab
Generate plugin data
Plugin data for package
Fix plugin data for sections, set data from plugin
* defined at package level
|
(* OASIS: architecture for building OCaml libraries and applications
   Copyright (C) 2011-2016, Sylvain Le Gall
   Copyright (C) 2008-2011, OCamlCore SARL

   Package schema and generator
   @author Sylvain Le Gall
*)
open OASISTypes
open OASISValues
open OASISUtils
open OASISSchema_intern
open OASISGettext
(* Duplicate of the definitions above (the dataset's comment-stripped "code"
   column); kept byte-identical. *)
let propagate_fields
      ~build_depends
      ~build_tools
      ~interface_patterns
      ~implementation_patterns
      pkg =
  let mod_bs bs =
    {bs with
       bs_build_depends = build_depends @ bs.bs_build_depends;
       bs_build_tools = build_tools @ bs.bs_build_tools;
       bs_implementation_patterns =
         implementation_patterns @ bs.bs_implementation_patterns;
       bs_interface_patterns =
         interface_patterns @ bs.bs_interface_patterns}
  in
  {pkg with
     sections =
       List.map
         (function
           | Library (cs, bs, lib) -> Library (cs, mod_bs bs, lib)
           | Object (cs, bs, obj) -> Object (cs, mod_bs bs, obj)
           | Executable (cs, bs, exec) -> Executable (cs, mod_bs bs, exec)
           | Test (cs, test) ->
               Test (cs, {test with test_tools = build_tools @ test.test_tools})
           | Doc (cs, doc) ->
               Doc (cs,
                    {doc with doc_build_tools = build_tools @ doc.doc_build_tools})
           | Flag _ | SrcRepo _ as sct -> sct)
         pkg.sections}
let schema =
  schema "Package" (fun pkg -> pkg.plugin_data)
let oasis_version =
  let current_version =
    OASISConf.version_short
  in
  let extra_supported_versions =
    List.map OASISVersion.version_of_string ["0.3"]
  in
  new_field schema "OASISFormat"
    ~quickstart_level:(NoChoice current_version)
    {
      parse =
        (fun ~ctxt str ->
           let v =
             OASISVersion.value.parse ~ctxt str
           in
           if not
               (List.mem
                  v
                  (current_version :: extra_supported_versions)) then
             failwithf
               (f_ "OASIS format version '%s' is not supported.")
               str;
           v);
      update = update_fail;
      print = OASISVersion.value.print;
    }
    (fun () ->
       s_ "OASIS format version used to write file `_oasis`.")
    (fun pkg -> pkg.oasis_version)
let alpha_features, beta_features =
  let value_feature stage =
    {
      parse =
        (fun ~ctxt:_ feature_name ->
           match OASISFeatures.get_stage feature_name with
           | OASISFeatures.InDev feature_stage ->
               if feature_stage <> stage then
                 failwithf (f_ "Feature %s is in stage %s and not %s.")
                   feature_name
                   (OASISFeatures.string_of_stage feature_stage)
                   (OASISFeatures.string_of_stage stage)
               else
                 feature_name
           | OASISFeatures.SinceVersion min_version ->
               failwithf
                 (f_ "Features %s has been published in OASISVersion %s.")
                 feature_name (OASISVersion.string_of_version min_version));
      update = update_fail;
      print = (fun s -> s)
    }
  in
  let alpha_features =
    new_field schema
      (OASISFeatures.field_of_stage OASISFeatures.Alpha)
      ~default:[]
      ~feature:OASISFeatures.features
      (comma_separated (value_feature OASISFeatures.Alpha))
      (fun () ->
         s_ "Experimental features in alpha stage \
             (frequent change, may never been shipped).")
      (fun pkg -> pkg.alpha_features)
  in
  let beta_features =
    new_field schema
      (OASISFeatures.field_of_stage OASISFeatures.Beta)
      ~default:[]
      ~feature:OASISFeatures.features
      (comma_separated (value_feature OASISFeatures.Beta))
      (fun () ->
         s_ "Experimental features in beta stage \
             (will ship, under review).")
      (fun pkg -> pkg.beta_features)
  in
  alpha_features, beta_features
let generator =
let schm = schema in
let new_field ?quickstart_level ?quickstart_question ?default =
new_field schm ?quickstart_level ?quickstart_question ?default
in
let new_field_plugin nm ?default ?quickstart_question value hlp sync =
new_field_plugin schm nm ?default ?quickstart_question value hlp sync
in
let name =
new_field "Name" string_not_empty
(fun () ->
s_ "Name of the package.")
(fun pkg -> pkg.name)
in
let version =
new_field "Version" OASISVersion.value
(fun () ->
s_ "Version of the package.")
(fun pkg -> pkg.version)
in
let synopsis =
new_field "Synopsis" string_not_empty
(fun () ->
s_ "Short description of the purpose of this package.")
(fun pkg -> pkg.synopsis)
in
let description =
new_field "Description"
~default:None
(opt OASISText.value)
(fun () ->
s_ "Long description of the package purpose.")
(fun pkg -> pkg.description)
in
let authors =
new_field "Authors"
(comma_separated string_not_empty)
(fun () ->
s_ "Real people that had contributed to the package.")
(fun pkg -> pkg.authors)
in
let copyrights =
new_field "Copyrights"
~default:[]
(comma_separated copyright)
(fun () ->
s_ "Copyright owners.")
(fun pkg -> pkg.copyrights)
in
let maintainers =
new_field "Maintainers"
~default:[]
(comma_separated string_not_empty)
(fun () ->
s_ "Current maintainers of the package.")
(fun pkg -> pkg.maintainers)
in
let license_file =
new_field "LicenseFile"
~default:None
(opt file)
(fun () ->
s_ "File containing the license.")
(fun pkg -> pkg.license_file)
in
let license =
new_field "License"
OASISLicense.value
~quickstart_question:(fun () ->
ExclusiveChoices (OASISLicense.choices ()))
(fun () ->
(s_ "DEP-5 license of the package \
(See [DEP-5](/#index6h3))."))
(fun pkg -> pkg.license)
in
let ocaml_version =
new_field "OCamlVersion"
~default:None
(opt OASISVersion.comparator_value)
(fun () ->
s_ "Version constraint on OCaml.")
(fun pkg -> pkg.ocaml_version)
in
let findlib_version =
new_field "FindlibVersion"
~default:None
(opt OASISVersion.comparator_value)
(fun () ->
s_ "Version constraint on Finblib.")
(fun pkg -> pkg.findlib_version)
in
let conf_type =
new_field_plugin "ConfType"
~default:(OASISPlugin.builtin `Configure "internal")
~quickstart_question:OASISPlugin.Configure.quickstart_question
`Configure
OASISPlugin.Configure.value
(fun () ->
s_ "Configuration system.")
(fun pkg -> pkg.conf_type)
in
let conf_custom =
OASISCustom.add_fields schm "Conf"
(fun () -> s_ "Command to run before configuration.")
(fun () -> s_ "Command to run after configuration.")
(fun pkg -> pkg.conf_custom)
in
let build_type =
new_field_plugin "BuildType"
~default:(OASISPlugin.builtin `Build "ocamlbuild")
~quickstart_question:OASISPlugin.Build.quickstart_question
`Build
OASISPlugin.Build.value
(fun () ->
s_ "Build system.")
(fun pkg -> pkg.build_type)
in
let build_custom =
OASISCustom.add_fields schm "Build"
(fun () -> s_ "Command to run before build.")
(fun () -> s_ "Command to run after build.")
(fun pkg -> pkg.build_custom)
in
let install_type =
new_field_plugin "InstallType"
~default:(OASISPlugin.builtin `Install "internal")
~quickstart_question:OASISPlugin.Install.quickstart_question
`Install
OASISPlugin.Install.value
(fun () ->
s_ "Install/uninstall system.")
(fun pkg -> pkg.install_type)
in
let install_custom =
OASISCustom.add_fields schm "Install"
(fun () -> s_ "Command to run before install.")
(fun () -> s_ "Command to run after install.")
(fun pkg -> pkg.install_custom)
in
let uninstall_custom =
OASISCustom.add_fields schm "Uninstall"
(fun () -> s_ "Command to run before uninstall.")
(fun () -> s_ "Command to run after uninstall.")
(fun pkg -> pkg.uninstall_custom)
in
let clean_custom =
OASISCustom.add_fields schm "Clean"
(fun () -> s_ "Command to run before clean.")
(fun () -> s_ "Command to run after clean.")
(fun pkg -> pkg.clean_custom)
in
let distclean_custom =
OASISCustom.add_fields schm "Distclean"
(fun () -> s_ "Command to run before distclean.")
(fun () -> s_ "Command to run after distclean.")
(fun pkg -> pkg.distclean_custom)
in
let homepage =
new_field "Homepage"
~default:None
(opt url)
(fun () ->
s_ "URL of the package homepage.")
(fun pkg -> pkg.homepage)
in
let bugreports =
new_field "BugReports"
~default:None
(opt url)
(fun () ->
s_ "URL of the page to report bugs about the package.")
(fun pkg -> pkg.bugreports)
in
let tags =
new_field "Tags"
~default:[]
(comma_separated string_not_empty)
(fun () ->
s_ "List of semantic tags to classify the package.")
(fun pkg -> pkg.tags)
in
let categories =
new_field "Categories"
~default:[]
categories
(fun () ->
s_ "URL(s) describing categories of the package.")
(fun pkg -> pkg.categories)
in
let files_ab =
new_field "FilesAB"
~default:[]
(comma_separated file)
(fun () -> s_ "Files to generate using environment variable \
substitution.")
(fun pkg -> pkg.files_ab)
in
let plugins =
let quickstart_question () =
match OASISPlugin.Extra.quickstart_question () with
| ExclusiveChoices lst ->
Choices lst
| Choices _ | Field | Text as q ->
q
in
new_field_plugins schm "Plugins"
~default:[]
~quickstart_level:Beginner
~quickstart_question
`Extra
OASISPlugin.Extra.value
(fun () ->
s_ "Extra plugins to use.")
(fun pkg -> pkg.plugins)
in
let disable_oasis_section =
new_field "DisableOASISSection"
~default:[]
~feature:OASISFeatures.disable_oasis_section
(comma_separated (expandable file))
(fun () -> s_ "Files to generate without OASIS section comments or \
digest.")
(fun pkg -> pkg.disable_oasis_section)
in
let build_depends =
OASISBuildSection_intern.build_depends_field schm (fun _ -> [])
in
let build_tools =
OASISBuildSection_intern.build_tools_field schm (fun _ -> [])
in
let interface_patterns, implementation_patterns =
OASISBuildSection_intern.source_patterns_fields schm
[] (fun _ -> [])
[] (fun _ -> [])
in
fun data sections ->
let plugins = plugins data in
let conf = conf_type data in
let build = build_type data in
let install = install_type data in
let set_plugin_data generator plugin_data data =
let rplugin_data = ref plugin_data in
List.iter
(fun plg ->
generator
(plg :> plugin_kind plugin)
rplugin_data
data)
plugins;
generator
(conf :> plugin_kind plugin)
rplugin_data data;
generator
(build :> plugin_kind plugin)
rplugin_data data;
generator
(install :> plugin_kind plugin)
rplugin_data data;
!rplugin_data
in
let plugin_data =
set_plugin_data
OASISPlugin.generator_package
[]
data
in
let sections =
List.map
(fun sct ->
let knd, cs =
OASISSection.section_kind_common sct
in
let plugin_data =
set_plugin_data
(OASISPlugin.generator_section knd)
cs.cs_plugin_data
cs.cs_data
in
OASISSection.section_common_set
{cs with cs_plugin_data = plugin_data}
sct)
sections
in
let oasis_version = oasis_version data in
let alpha_features = alpha_features data in
let beta_features = beta_features data in
if (alpha_features <> [] || beta_features <> []) &&
(OASISVersion.version_compare
oasis_version OASISConf.version_short) <> 0 then begin
failwithf
(f_ "You need to use the latest OASISFormat to be able to use \
fields %s and %s. Change 'OASISFormat: %s' to \
'OASISFormat: %s'")
(OASISFeatures.field_of_stage OASISFeatures.Alpha)
(OASISFeatures.field_of_stage OASISFeatures.Beta)
(OASISVersion.string_of_version oasis_version)
(OASISVersion.string_of_version OASISConf.version_short)
end;
propagate_fields
~build_tools:(build_tools data)
~build_depends:(build_depends data)
~interface_patterns:(interface_patterns data)
~implementation_patterns:(implementation_patterns data)
{
oasis_version = oasis_version;
ocaml_version = ocaml_version data;
findlib_version = findlib_version data;
alpha_features = alpha_features;
beta_features = beta_features;
name = name data;
version = version data;
license = license data;
license_file = license_file data;
copyrights = copyrights data;
maintainers = maintainers data;
authors = authors data;
homepage = homepage data;
bugreports = bugreports data;
synopsis = synopsis data;
description = description data;
tags = tags data;
categories = categories data;
conf_type = conf;
conf_custom = conf_custom data;
build_type = build;
build_custom = build_custom data;
install_type = install;
install_custom = install_custom data;
uninstall_custom = uninstall_custom data;
clean_custom = clean_custom data;
distclean_custom = distclean_custom data;
files_ab = files_ab data;
plugins = plugins;
disable_oasis_section = disable_oasis_section data;
sections = sections;
schema_data = data;
plugin_data = plugin_data;
}
|
35645d424cec16a34dfacb5132c4a4446ce8207ddf33e1d76e1285768de830a7 | marick/Midje | core.clj | (ns midje.checking.core
"Core ideas underlying all checking"
(:require [such.types :as types]
[such.sequences :as seq]))
;;; There is a notion of "extended falsehood", in which a false value may be a
;;; map containing information about what went wrong.
(defn data-laden-falsehood? [value]
(:midje/data-laden-falsehood (meta value)))
(defn as-data-laden-falsehood [value]
(vary-meta value assoc :midje/data-laden-falsehood true))
(defn data-laden-falsehood-to-map
"Used for testing Midje itself, this prevents a Midje
example of the expected creation of a data-laden falsehood
from being interpreted as a failure."
[value]
(with-meta value {}))
(defn extended-false? [value]
(or (not value)
(data-laden-falsehood? value)))
(defn extended-true? [value]
(not (extended-false? value)))
(defn user-friendly-falsehood [value]
"'downcast' a possible data-laden falsehood into
`false` if necessary."
(if (data-laden-falsehood? value)
false
value))
There is a notion of " extended equality " that powers the right - hand - side of .
(defn evaluate-checking-function
"Returns a sequence. The first value is either truthy or falsey.
If falsey, the second value is a map with
any additional information. (It may be empty.) If the
result is an exception, the second value contains it under
the :thrown key."
[function actual]
(try
(let [function-result (function actual)]
(if (data-laden-falsehood? function-result)
[false function-result]
[function-result {}]))
(catch Exception ex
[false {:thrown ex}])))
(defn raw-detailed-extended-= [actual expected]
(cond
(data-laden-falsehood? actual) [actual {}]
(data-laden-falsehood? expected) [expected {}]
(types/extended-fn? expected) (evaluate-checking-function expected actual)
(every? types/regex? [actual expected]) [(= (str actual) (str expected)) {}]
(types/regex? expected) [(re-find expected actual) {}]
(and (record? actual)
(types/classic-map? expected)) [(= (into {} actual) expected) {}]
(= (type expected) java.math.BigDecimal) [(= (compare actual expected) 0) {}]
:else [(= actual expected) {}]))
(defn detailed-extended-=
"Equality check that can handle checker functions and compare arguments of
different types. Returns a tuple containing the boolean result and a failure
detail map"
[actual expected]
(try
(raw-detailed-extended-= actual expected)
(catch Throwable ex [false {:thrown ex}])))
(defn extended-=
[actual expected]
(first (detailed-extended-= actual expected)))
(defn- raw-extended-=
[actual expected]
(first (raw-detailed-extended-= actual expected)))
(defn extended-list-=
"Element-by-element comparison, using extended-= for the right-hand-side values."
[actual-args checkers]
(and (= (count actual-args) (count checkers))
(every? (partial apply raw-extended-=)
(seq/vertical-slices actual-args checkers))))
;;; An element of extended-= is that an actual map cannot match an expected record (or type).
;;; That produces a plain `false` above. If client code wants to be more informative, it
;;; can use these functions.
(defn inherently-false-map-to-record-comparison? [actual expected]
(and (record? expected)
(map? actual)
(not= (class expected) (class actual))))
;; Leaving the args in case we decide to have a more explicit note.
(defn inherently-false-map-to-record-comparison-note [actual expected]
(str "A record on the right of the arrow means the value on the left must be of the same type."))
| null | https://raw.githubusercontent.com/marick/Midje/2b9bcb117442d3bd2d16446b47540888d683c717/src/midje/checking/core.clj | clojure | There is a notion of "extended falsehood", in which a false value may be a
map containing information about what went wrong.
An element of extended-= is that an actual map cannot match an expected record (or type).
That produces a plain `false` above. If client code wants to be more informative, it
can use these functions.
Leaving the args in case we decide to have a more explicit note. | (ns midje.checking.core
"Core ideas underlying all checking"
(:require [such.types :as types]
[such.sequences :as seq]))
(defn data-laden-falsehood? [value]
(:midje/data-laden-falsehood (meta value)))
(defn as-data-laden-falsehood [value]
(vary-meta value assoc :midje/data-laden-falsehood true))
(defn data-laden-falsehood-to-map
"Used for testing Midje itself, this prevents a Midje
example of the expected creation of a data-laden falsehood
from being interpreted as a failure."
[value]
(with-meta value {}))
(defn extended-false? [value]
(or (not value)
(data-laden-falsehood? value)))
(defn extended-true? [value]
(not (extended-false? value)))
(defn user-friendly-falsehood [value]
"'downcast' a possible data-laden falsehood into
`false` if necessary."
(if (data-laden-falsehood? value)
false
value))
There is a notion of " extended equality " that powers the right - hand - side of .
(defn evaluate-checking-function
"Returns a sequence. The first value is either truthy or falsey.
If falsey, the second value is a map with
any additional information. (It may be empty.) If the
result is an exception, the second value contains it under
the :thrown key."
[function actual]
(try
(let [function-result (function actual)]
(if (data-laden-falsehood? function-result)
[false function-result]
[function-result {}]))
(catch Exception ex
[false {:thrown ex}])))
(defn raw-detailed-extended-= [actual expected]
(cond
(data-laden-falsehood? actual) [actual {}]
(data-laden-falsehood? expected) [expected {}]
(types/extended-fn? expected) (evaluate-checking-function expected actual)
(every? types/regex? [actual expected]) [(= (str actual) (str expected)) {}]
(types/regex? expected) [(re-find expected actual) {}]
(and (record? actual)
(types/classic-map? expected)) [(= (into {} actual) expected) {}]
(= (type expected) java.math.BigDecimal) [(= (compare actual expected) 0) {}]
:else [(= actual expected) {}]))
(defn detailed-extended-=
"Equality check that can handle checker functions and compare arguments of
different types. Returns a tuple containing the boolean result and a failure
detail map"
[actual expected]
(try
(raw-detailed-extended-= actual expected)
(catch Throwable ex [false {:thrown ex}])))
(defn extended-=
[actual expected]
(first (detailed-extended-= actual expected)))
(defn- raw-extended-=
[actual expected]
(first (raw-detailed-extended-= actual expected)))
(defn extended-list-=
"Element-by-element comparison, using extended-= for the right-hand-side values."
[actual-args checkers]
(and (= (count actual-args) (count checkers))
(every? (partial apply raw-extended-=)
(seq/vertical-slices actual-args checkers))))
(defn inherently-false-map-to-record-comparison? [actual expected]
(and (record? expected)
(map? actual)
(not= (class expected) (class actual))))
(defn inherently-false-map-to-record-comparison-note [actual expected]
(str "A record on the right of the arrow means the value on the left must be of the same type."))
|
4884a9cb5ed35b084a5c196e026020ac5e69d81297a2bf155745269e5cd824bc | cr-org/supernova | Connection.hs | # LANGUAGE LambdaCase , OverloadedStrings #
module Pulsar.Connection where
import Control.Applicative ( (<|>) )
import Control.Concurrent ( forkIO
, killThread
, threadDelay
)
import Control.Concurrent.Async ( async
, concurrently_
)
import Control.Concurrent.Chan.Unagi
import Control.Concurrent.MVar
import Control.Exception ( throwIO )
import Control.Monad ( forever
, when
)
import Control.Monad.Catch ( MonadThrow
, bracket
)
import Control.Monad.IO.Class
import Control.Monad.Managed ( MonadManaged
, managed
, runManaged
)
import Data.Foldable ( traverse_ )
import Data.Functor ( void )
import Data.IORef
import Lens.Family
import qualified Network.Socket as NS
import qualified Network.Socket.ByteString.Lazy
as SBL
import Proto.PulsarApi ( BaseCommand
, MessageMetadata
)
import qualified Proto.PulsarApi_Fields as F
import Pulsar.AppState
import Pulsar.Internal.Logger
import Pulsar.Internal.TCPClient ( acquireSocket )
import qualified Pulsar.Protocol.Commands as P
import Pulsar.Protocol.Decoder ( decodeBaseCommand )
import Pulsar.Protocol.Encoder ( encodeBaseCommand )
import Pulsar.Protocol.Frame ( Payload
, Response(..)
, frameMaxSize
, getCommand
)
import System.Timeout ( timeout )
newtype Connection = Conn NS.Socket
{- | Connection details: host and port. -}
data ConnectData = ConnData
{ connHost :: NS.HostName
, connPort :: NS.ServiceName
} deriving Show
| Internal Pulsar context . You will never need to access its content ( not exported ) but might need to take it as argument .
data PulsarCtx = Ctx
{ ctxConn :: Connection
, ctxState :: IORef AppState
, ctxConnWorker :: Worker
}
{- | Default connection data: "127.0.0.1:6650" -}
defaultConnectData :: ConnectData
defaultConnectData = ConnData { connHost = "127.0.0.1", connPort = "6650" }
| Starts a Pulsar connection with the supplied ' ConnectData '
connect
:: (MonadIO m, MonadThrow m, MonadManaged m) => ConnectData -> m PulsarCtx
connect (ConnData h p) = do
socket <- acquireSocket h p
liftIO $ sendSimpleCmd socket P.connect
checkConnection socket
app <- liftIO initAppState
(cin, cout) <- liftIO newChan
var <- liftIO newEmptyMVar
let
dispatcher = recvDispatch socket app cin
task = concurrently_ dispatcher (keepAlive socket cout)
handler =
managed (bracket (forkIO task) (\i -> readMVar var >> killThread i))
worker <- liftIO $ async (runManaged $ void handler)
return $ Ctx (Conn socket) app (worker, var)
checkConnection :: (MonadIO m, MonadThrow m) => NS.Socket -> m ()
checkConnection socket = do
resp <- receive socket
case getCommand resp ^. F.maybe'connected of
Just _ -> logResponse resp
Nothing -> liftIO . throwIO $ userError "Could not connect"
initAppState :: MonadIO m => m (IORef AppState)
initAppState = liftIO . newIORef $ AppState [] 0 0 0 [] [] []
responseForRequest :: BaseCommand -> Maybe ReqId
responseForRequest cmd =
let cmd1 = view F.requestId <$> cmd ^. F.maybe'success
cmd2 = view F.requestId <$> cmd ^. F.maybe'producerSuccess
cmd3 = view F.requestId <$> cmd ^. F.maybe'lookupTopicResponse
in ReqId <$> (cmd1 <|> cmd2 <|> cmd3)
responseForSendReceipt :: BaseCommand -> Maybe (ProducerId, SeqId)
responseForSendReceipt cmd =
let cmd' = cmd ^. F.maybe'sendReceipt
pid = PId . view F.producerId <$> cmd'
sid = SeqId . view F.sequenceId <$> cmd'
in (,) <$> pid <*> sid
pongResponse :: BaseCommand -> InChan BaseCommand -> IO (Maybe ())
pongResponse cmd inChan =
traverse (const $ writeChan inChan cmd) (cmd ^. F.maybe'pong)
messageResponse :: BaseCommand -> Maybe ConsumerId
messageResponse cmd =
let cmd' = cmd ^. F.maybe'message
cid = view F.consumerId <$> cmd'
in CId <$> cid
{- | It listens to incoming messages directly from the network socket and it writes them to all the
- consumers and producers' communication channels. -}
recvDispatch :: NS.Socket -> IORef AppState -> InChan BaseCommand -> IO ()
recvDispatch s ref inChan = forever $ do
resp <- receive s
cs <- _appConsumers <$> readIORef ref
let
f = \rid -> registerReqResponse ref rid resp
g = (\(pid, sid) -> registerSendReceipt ref pid sid resp)
h = \cid ->
traverse (\(cid', cn) -> when (cid == cid') (writeChan cn resp)) cs
cmd = getCommand resp
traverse_ f (responseForRequest cmd)
traverse_ g (responseForSendReceipt cmd)
traverse_ h (messageResponse cmd)
pongResponse cmd inChan
Emit a PING and expect a PONG every 29 seconds . If a PONG is not received , interrupt connection
keepAlive :: NS.Socket -> OutChan BaseCommand -> IO ()
keepAlive s outChan = forever $ do
threadDelay (29 * 1000000)
logRequest P.ping
sendSimpleCmd s P.ping
timeout (2 * 1000000) (readChan outChan) >>= \case
Just cmd -> logResponse cmd
Nothing -> throwIO $ userError "Keep Alive interruption"
sendSimpleCmd :: MonadIO m => NS.Socket -> BaseCommand -> m ()
sendSimpleCmd s cmd =
liftIO . SBL.sendAll s $ encodeBaseCommand Nothing Nothing cmd
sendPayloadCmd
:: MonadIO m
=> NS.Socket
-> BaseCommand
-> MessageMetadata
-> Maybe Payload
-> m ()
sendPayloadCmd s cmd meta payload =
liftIO . SBL.sendAll s $ encodeBaseCommand (Just meta) payload cmd
receive :: MonadIO m => NS.Socket -> m Response
receive s = liftIO $ do
msg <- SBL.recv s $ fromIntegral frameMaxSize
case decodeBaseCommand msg of
Left e -> fail $ "Decoding error: " <> e
Right resp -> resp <$ traverse_ (const $ f resp) (getCommand resp ^. F.maybe'ping)
where
f resp = do
logResponse $ getCommand resp
logRequest P.pong
sendSimpleCmd s P.pong
| null | https://raw.githubusercontent.com/cr-org/supernova/602409a18f47a38541ba24f5e885199efd383f48/lib/src/Pulsar/Connection.hs | haskell | | Connection details: host and port.
| Default connection data: "127.0.0.1:6650"
| It listens to incoming messages directly from the network socket and it writes them to all the
- consumers and producers' communication channels. | # LANGUAGE LambdaCase , OverloadedStrings #
module Pulsar.Connection where
import Control.Applicative ( (<|>) )
import Control.Concurrent ( forkIO
, killThread
, threadDelay
)
import Control.Concurrent.Async ( async
, concurrently_
)
import Control.Concurrent.Chan.Unagi
import Control.Concurrent.MVar
import Control.Exception ( throwIO )
import Control.Monad ( forever
, when
)
import Control.Monad.Catch ( MonadThrow
, bracket
)
import Control.Monad.IO.Class
import Control.Monad.Managed ( MonadManaged
, managed
, runManaged
)
import Data.Foldable ( traverse_ )
import Data.Functor ( void )
import Data.IORef
import Lens.Family
import qualified Network.Socket as NS
import qualified Network.Socket.ByteString.Lazy
as SBL
import Proto.PulsarApi ( BaseCommand
, MessageMetadata
)
import qualified Proto.PulsarApi_Fields as F
import Pulsar.AppState
import Pulsar.Internal.Logger
import Pulsar.Internal.TCPClient ( acquireSocket )
import qualified Pulsar.Protocol.Commands as P
import Pulsar.Protocol.Decoder ( decodeBaseCommand )
import Pulsar.Protocol.Encoder ( encodeBaseCommand )
import Pulsar.Protocol.Frame ( Payload
, Response(..)
, frameMaxSize
, getCommand
)
import System.Timeout ( timeout )
newtype Connection = Conn NS.Socket
data ConnectData = ConnData
{ connHost :: NS.HostName
, connPort :: NS.ServiceName
} deriving Show
| Internal Pulsar context . You will never need to access its content ( not exported ) but might need to take it as argument .
data PulsarCtx = Ctx
{ ctxConn :: Connection
, ctxState :: IORef AppState
, ctxConnWorker :: Worker
}
defaultConnectData :: ConnectData
defaultConnectData = ConnData { connHost = "127.0.0.1", connPort = "6650" }
| Starts a Pulsar connection with the supplied ' ConnectData '
connect
:: (MonadIO m, MonadThrow m, MonadManaged m) => ConnectData -> m PulsarCtx
connect (ConnData h p) = do
socket <- acquireSocket h p
liftIO $ sendSimpleCmd socket P.connect
checkConnection socket
app <- liftIO initAppState
(cin, cout) <- liftIO newChan
var <- liftIO newEmptyMVar
let
dispatcher = recvDispatch socket app cin
task = concurrently_ dispatcher (keepAlive socket cout)
handler =
managed (bracket (forkIO task) (\i -> readMVar var >> killThread i))
worker <- liftIO $ async (runManaged $ void handler)
return $ Ctx (Conn socket) app (worker, var)
checkConnection :: (MonadIO m, MonadThrow m) => NS.Socket -> m ()
checkConnection socket = do
resp <- receive socket
case getCommand resp ^. F.maybe'connected of
Just _ -> logResponse resp
Nothing -> liftIO . throwIO $ userError "Could not connect"
initAppState :: MonadIO m => m (IORef AppState)
initAppState = liftIO . newIORef $ AppState [] 0 0 0 [] [] []
responseForRequest :: BaseCommand -> Maybe ReqId
responseForRequest cmd =
let cmd1 = view F.requestId <$> cmd ^. F.maybe'success
cmd2 = view F.requestId <$> cmd ^. F.maybe'producerSuccess
cmd3 = view F.requestId <$> cmd ^. F.maybe'lookupTopicResponse
in ReqId <$> (cmd1 <|> cmd2 <|> cmd3)
responseForSendReceipt :: BaseCommand -> Maybe (ProducerId, SeqId)
responseForSendReceipt cmd =
let cmd' = cmd ^. F.maybe'sendReceipt
pid = PId . view F.producerId <$> cmd'
sid = SeqId . view F.sequenceId <$> cmd'
in (,) <$> pid <*> sid
pongResponse :: BaseCommand -> InChan BaseCommand -> IO (Maybe ())
pongResponse cmd inChan =
traverse (const $ writeChan inChan cmd) (cmd ^. F.maybe'pong)
messageResponse :: BaseCommand -> Maybe ConsumerId
messageResponse cmd =
let cmd' = cmd ^. F.maybe'message
cid = view F.consumerId <$> cmd'
in CId <$> cid
recvDispatch :: NS.Socket -> IORef AppState -> InChan BaseCommand -> IO ()
recvDispatch s ref inChan = forever $ do
resp <- receive s
cs <- _appConsumers <$> readIORef ref
let
f = \rid -> registerReqResponse ref rid resp
g = (\(pid, sid) -> registerSendReceipt ref pid sid resp)
h = \cid ->
traverse (\(cid', cn) -> when (cid == cid') (writeChan cn resp)) cs
cmd = getCommand resp
traverse_ f (responseForRequest cmd)
traverse_ g (responseForSendReceipt cmd)
traverse_ h (messageResponse cmd)
pongResponse cmd inChan
Emit a PING and expect a PONG every 29 seconds . If a PONG is not received , interrupt connection
keepAlive :: NS.Socket -> OutChan BaseCommand -> IO ()
keepAlive s outChan = forever $ do
threadDelay (29 * 1000000)
logRequest P.ping
sendSimpleCmd s P.ping
timeout (2 * 1000000) (readChan outChan) >>= \case
Just cmd -> logResponse cmd
Nothing -> throwIO $ userError "Keep Alive interruption"
sendSimpleCmd :: MonadIO m => NS.Socket -> BaseCommand -> m ()
sendSimpleCmd s cmd =
liftIO . SBL.sendAll s $ encodeBaseCommand Nothing Nothing cmd
sendPayloadCmd
:: MonadIO m
=> NS.Socket
-> BaseCommand
-> MessageMetadata
-> Maybe Payload
-> m ()
sendPayloadCmd s cmd meta payload =
liftIO . SBL.sendAll s $ encodeBaseCommand (Just meta) payload cmd
receive :: MonadIO m => NS.Socket -> m Response
receive s = liftIO $ do
msg <- SBL.recv s $ fromIntegral frameMaxSize
case decodeBaseCommand msg of
Left e -> fail $ "Decoding error: " <> e
Right resp -> resp <$ traverse_ (const $ f resp) (getCommand resp ^. F.maybe'ping)
where
f resp = do
logResponse $ getCommand resp
logRequest P.pong
sendSimpleCmd s P.pong
|
435938181b6fcbd3f665ef0d4390c86ccff54639a252c5ba0a4a8743cab1163a | achirkin/vulkan | VK_KHR_sampler_ycbcr_conversion.hs | # OPTIONS_GHC -fno - warn - missing - pattern - synonym - signatures #
# OPTIONS_GHC -fno - warn - orphans #
# OPTIONS_HADDOCK not - home #
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE ForeignFunctionInterface #
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE Strict #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
module Graphics.Vulkan.Ext.VK_KHR_sampler_ycbcr_conversion
* Vulkan extension : @VK_KHR_sampler_ycbcr_conversion@
-- |
--
-- supported: @vulkan@
--
contact :
--
-- author: @KHR@
--
-- type: @device@
--
-- Extension number: @157@
--
-- Required extensions: 'VK_KHR_maintenance1', 'VK_KHR_bind_memory2', 'VK_KHR_get_memory_requirements2', 'VK_KHR_get_physical_device_properties2'.
--
-- ** Required extensions: 'VK_KHR_maintenance1', 'VK_KHR_bind_memory2', 'VK_KHR_get_memory_requirements2', 'VK_KHR_get_physical_device_properties2'.
VkBindImagePlaneMemoryInfoKHR, VkChromaLocation(..),
VkChromaLocationKHR(..), VkImagePlaneMemoryRequirementsInfoKHR,
VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR,
VkSamplerYcbcrConversionCreateInfoKHR,
VkSamplerYcbcrConversionImageFormatPropertiesKHR,
VkSamplerYcbcrConversionInfoKHR, VkSamplerAddressMode(..),
VkSamplerMipmapMode(..), VkSamplerReductionMode(..),
VkSamplerYcbcrModelConversion(..), VkSamplerYcbcrRange(..),
VkSamplerCreateBitmask(..), VkSamplerCreateFlagBits(),
VkSamplerCreateFlags(), VkSamplerReductionModeEXT(..),
VkSamplerYcbcrModelConversionKHR(..), VkSamplerYcbcrRangeKHR(..),
VkCreateSamplerYcbcrConversionKHR,
pattern VkCreateSamplerYcbcrConversionKHR,
HS_vkCreateSamplerYcbcrConversionKHR,
PFN_vkCreateSamplerYcbcrConversionKHR,
VkDestroySamplerYcbcrConversionKHR,
pattern VkDestroySamplerYcbcrConversionKHR,
HS_vkDestroySamplerYcbcrConversionKHR,
PFN_vkDestroySamplerYcbcrConversionKHR,
module Graphics.Vulkan.Marshal, AHardwareBuffer(), ANativeWindow(),
CAMetalLayer(), VkBool32(..), VkDeviceAddress(..),
VkDeviceSize(..), VkFlags(..), VkSampleMask(..),
VkComponentSwizzle(..), VkComponentTypeNV(..), VkFilter(..),
VkFormat(..), VkFormatFeatureBitmask(..),
VkFormatFeatureFlagBits(), VkFormatFeatureFlags(),
VkInternalAllocationType(..), VkResult(..), VkStructureType(..),
VkSystemAllocationScope(..), newVkAllocationFunction,
newVkDebugReportCallbackEXT, newVkDebugUtilsMessengerCallbackEXT,
newVkFreeFunction, newVkInternalAllocationNotification,
newVkInternalFreeNotification, newVkReallocationFunction,
newVkVoidFunction, unwrapVkAllocationFunction,
unwrapVkDebugReportCallbackEXT,
unwrapVkDebugUtilsMessengerCallbackEXT, unwrapVkFreeFunction,
unwrapVkInternalAllocationNotification,
unwrapVkInternalFreeNotification, unwrapVkReallocationFunction,
unwrapVkVoidFunction, HS_vkAllocationFunction,
HS_vkDebugReportCallbackEXT, HS_vkDebugUtilsMessengerCallbackEXT,
HS_vkFreeFunction, HS_vkInternalAllocationNotification,
HS_vkInternalFreeNotification, HS_vkReallocationFunction,
HS_vkVoidFunction, PFN_vkAllocationFunction,
PFN_vkDebugReportCallbackEXT, PFN_vkDebugUtilsMessengerCallbackEXT,
PFN_vkFreeFunction, PFN_vkInternalAllocationNotification,
PFN_vkInternalFreeNotification, PFN_vkReallocationFunction,
PFN_vkVoidFunction, VkAccelerationStructureKHR,
VkAccelerationStructureKHR_T(), VkAccelerationStructureNV,
VkAccelerationStructureNV_T(), VkBuffer, VkBufferView,
VkBufferView_T(), VkBuffer_T(), VkCommandBuffer,
VkCommandBuffer_T(), VkCommandPool, VkCommandPool_T(),
VkDebugReportCallbackEXT, VkDebugReportCallbackEXT_T(),
VkDebugUtilsMessengerEXT, VkDebugUtilsMessengerEXT_T(),
VkDeferredOperationKHR, VkDeferredOperationKHR_T(),
VkDescriptorPool, VkDescriptorPool_T(), VkDescriptorSet,
VkDescriptorSetLayout, VkDescriptorSetLayout_T(),
VkDescriptorSet_T(), VkDescriptorUpdateTemplate,
VkDescriptorUpdateTemplateKHR, VkDescriptorUpdateTemplateKHR_T(),
VkDescriptorUpdateTemplate_T(), VkDevice, VkDeviceMemory,
VkDeviceMemory_T(), VkDevice_T(), VkDisplayKHR, VkDisplayKHR_T(),
VkDisplayModeKHR, VkDisplayModeKHR_T(), VkEvent, VkEvent_T(),
VkFence, VkFence_T(), VkFramebuffer, VkFramebuffer_T(), VkImage,
VkImageView, VkImageView_T(), VkImage_T(),
VkIndirectCommandsLayoutNV, VkIndirectCommandsLayoutNV_T(),
VkInstance, VkInstance_T(), VkPerformanceConfigurationINTEL,
VkPerformanceConfigurationINTEL_T(), VkPhysicalDevice,
VkPhysicalDevice_T(), VkPipeline, VkPipelineCache,
VkPipelineCache_T(), VkPipelineLayout, VkPipelineLayout_T(),
VkPipeline_T(), VkPrivateDataSlotEXT, VkPrivateDataSlotEXT_T(),
VkQueryPool, VkQueryPool_T(), VkQueue, VkQueue_T(), VkRenderPass,
VkRenderPass_T(), VkSampler, VkSamplerYcbcrConversion,
VkSamplerYcbcrConversionKHR, VkSamplerYcbcrConversionKHR_T(),
VkSamplerYcbcrConversion_T(), VkSampler_T(), VkSemaphore,
VkSemaphore_T(), VkShaderModule, VkShaderModule_T(), VkSurfaceKHR,
VkSurfaceKHR_T(), VkSwapchainKHR, VkSwapchainKHR_T(),
VkValidationCacheEXT, VkValidationCacheEXT_T(),
VkAllocationCallbacks, VkComponentMapping, VkSamplerCreateInfo,
VkSamplerCustomBorderColorCreateInfoEXT,
VkSamplerReductionModeCreateInfo,
VkSamplerReductionModeCreateInfoEXT,
VkSamplerYcbcrConversionCreateInfo,
VkSamplerYcbcrConversionImageFormatProperties,
VkSamplerYcbcrConversionInfo,
VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION,
pattern VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION,
VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
pattern VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR,
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR,
pattern VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR,
pattern VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR,
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR,
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR,
pattern VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT,
pattern VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR,
pattern VK_FORMAT_G8B8G8R8_422_UNORM_KHR,
pattern VK_FORMAT_B8G8R8G8_422_UNORM_KHR,
pattern VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR,
pattern VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
pattern VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR,
pattern VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR,
pattern VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR,
pattern VK_FORMAT_R10X6_UNORM_PACK16_KHR,
pattern VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR,
pattern VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR,
pattern VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR,
pattern VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR,
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR,
pattern VK_FORMAT_R12X4_UNORM_PACK16_KHR,
pattern VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR,
pattern VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR,
pattern VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR,
pattern VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR,
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G16B16G16R16_422_UNORM_KHR,
pattern VK_FORMAT_B16G16R16G16_422_UNORM_KHR,
pattern VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR,
pattern VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR,
pattern VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR,
pattern VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR,
pattern VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR,
pattern VK_IMAGE_ASPECT_PLANE_0_BIT_KHR,
pattern VK_IMAGE_ASPECT_PLANE_1_BIT_KHR,
pattern VK_IMAGE_ASPECT_PLANE_2_BIT_KHR,
pattern VK_IMAGE_CREATE_DISJOINT_BIT_KHR,
pattern VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR,
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR,
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR,
pattern VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
pattern VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR,
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR,
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR,
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR,
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR,
pattern VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR,
pattern VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR,
pattern VK_CHROMA_LOCATION_COSITED_EVEN_KHR,
pattern VK_CHROMA_LOCATION_MIDPOINT_KHR,
-- ** Required extensions: 'VK_EXT_debug_report', 'VK_KHR_maintenance1', 'VK_KHR_bind_memory2', 'VK_KHR_get_memory_requirements2', 'VK_KHR_get_physical_device_properties2'.
pattern VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT)
where
import GHC.Ptr (Ptr (..))
import Graphics.Vulkan.Core_1_1 (pattern VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
pattern VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
pattern VK_FORMAT_B16G16R16G16_422_UNORM,
pattern VK_FORMAT_B8G8R8G8_422_UNORM,
pattern VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
pattern VK_FORMAT_FEATURE_DISJOINT_BIT,
pattern VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
pattern VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
pattern VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
pattern VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
pattern VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
pattern VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
pattern VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
pattern VK_FORMAT_G16B16G16R16_422_UNORM,
pattern VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
pattern VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
pattern VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
pattern VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
pattern VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
pattern VK_FORMAT_G8B8G8R8_422_UNORM,
pattern VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
pattern VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
pattern VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
pattern VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
pattern VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
pattern VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
pattern VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
pattern VK_FORMAT_R10X6_UNORM_PACK16,
pattern VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
pattern VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
pattern VK_FORMAT_R12X4_UNORM_PACK16,
pattern VK_IMAGE_ASPECT_PLANE_0_BIT,
pattern VK_IMAGE_ASPECT_PLANE_1_BIT,
pattern VK_IMAGE_ASPECT_PLANE_2_BIT,
pattern VK_IMAGE_CREATE_DISJOINT_BIT,
pattern VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
pattern VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
pattern VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO)
import Graphics.Vulkan.Ext.VK_EXT_debug_report (pattern VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT)
import Graphics.Vulkan.Marshal
import Graphics.Vulkan.Marshal.Proc (VulkanProc (..))
import Graphics.Vulkan.Types.BaseTypes
import Graphics.Vulkan.Types.Enum.ChromaLocation
import Graphics.Vulkan.Types.Enum.Component
import Graphics.Vulkan.Types.Enum.Filter
import Graphics.Vulkan.Types.Enum.Format
import Graphics.Vulkan.Types.Enum.InternalAllocationType
import Graphics.Vulkan.Types.Enum.Result
import Graphics.Vulkan.Types.Enum.Sampler
import Graphics.Vulkan.Types.Enum.StructureType
import Graphics.Vulkan.Types.Enum.SystemAllocationScope
import Graphics.Vulkan.Types.Funcpointers
import Graphics.Vulkan.Types.Handles
import Graphics.Vulkan.Types.Struct.AllocationCallbacks
import Graphics.Vulkan.Types.Struct.Bind (VkBindImagePlaneMemoryInfoKHR)
import Graphics.Vulkan.Types.Struct.ComponentMapping
import Graphics.Vulkan.Types.Struct.Image (VkImagePlaneMemoryRequirementsInfoKHR)
import Graphics.Vulkan.Types.Struct.PhysicalDevice (VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR)
import Graphics.Vulkan.Types.Struct.Sampler
-- | The name of the @vkCreateSamplerYcbcrConversionKHR@ command as a
--   NUL-terminated 'CString'; usable for dynamic procedure lookup and
--   matchable against any 'CString' via the view pattern below.
pattern VkCreateSamplerYcbcrConversionKHR :: CString

pattern VkCreateSamplerYcbcrConversionKHR <-
        (is_VkCreateSamplerYcbcrConversionKHR -> True)
  where
    VkCreateSamplerYcbcrConversionKHR
        = _VkCreateSamplerYcbcrConversionKHR

{-# INLINE _VkCreateSamplerYcbcrConversionKHR #-}

-- | The raw NUL-terminated primitive string backing the pattern above.
_VkCreateSamplerYcbcrConversionKHR :: CString
_VkCreateSamplerYcbcrConversionKHR
  = Ptr "vkCreateSamplerYcbcrConversionKHR\NUL"#

-- NOTE(review): the INLINE pragma below had been garbled to "# INLINE ... #"
-- (braces stripped); restored to valid pragma syntax.
{-# INLINE is_VkCreateSamplerYcbcrConversionKHR #-}

-- | True iff 'cmpCStrings' reports 'EQ' against the canonical name.
is_VkCreateSamplerYcbcrConversionKHR :: CString -> Bool
is_VkCreateSamplerYcbcrConversionKHR
  = (EQ ==) . cmpCStrings _VkCreateSamplerYcbcrConversionKHR
-- | Type-level name of the procedure, used as the 'VulkanProc' index.
type VkCreateSamplerYcbcrConversionKHR =
     "vkCreateSamplerYcbcrConversionKHR"

-- | This is an alias for `vkCreateSamplerYcbcrConversion`.
--
--   Success codes: 'VK_SUCCESS'.
--
--   Error codes: 'VK_ERROR_OUT_OF_HOST_MEMORY', 'VK_ERROR_OUT_OF_DEVICE_MEMORY'.
--
--   > VkResult vkCreateSamplerYcbcrConversionKHR
--   >     ( VkDevice device
--   >     , const VkSamplerYcbcrConversionCreateInfo* pCreateInfo
--   >     , const VkAllocationCallbacks* pAllocator
--   >     , VkSamplerYcbcrConversion* pYcbcrConversion
--   >     )
--
--   <https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR registry at www.khronos.org>
type HS_vkCreateSamplerYcbcrConversionKHR
   = VkDevice                                 -- ^ device
  -> Ptr VkSamplerYcbcrConversionCreateInfo   -- ^ pCreateInfo
  -> Ptr VkAllocationCallbacks                -- ^ pAllocator
  -> Ptr VkSamplerYcbcrConversion             -- ^ pYcbcrConversion
  -> IO VkResult
-- | C-side function pointer to a @vkCreateSamplerYcbcrConversionKHR@
--   implementation (as obtained from a proc-address lookup).
type PFN_vkCreateSamplerYcbcrConversionKHR =
     FunPtr HS_vkCreateSamplerYcbcrConversionKHR

-- Convert the raw function pointer into a Haskell call.
-- "unsafe" variant: lower call overhead, but must not call back into
-- the Haskell runtime or block for long.
foreign import ccall unsafe "dynamic"
               unwrapVkCreateSamplerYcbcrConversionKHRUnsafe ::
               PFN_vkCreateSamplerYcbcrConversionKHR ->
                 HS_vkCreateSamplerYcbcrConversionKHR

-- "safe" variant of the same wrapper: allows the foreign call to
-- re-enter the runtime / run concurrently with Haskell threads.
foreign import ccall safe "dynamic"
               unwrapVkCreateSamplerYcbcrConversionKHRSafe ::
               PFN_vkCreateSamplerYcbcrConversionKHR ->
                 HS_vkCreateSamplerYcbcrConversionKHR
-- | Wires this command into the generic 'VulkanProc' lookup machinery:
--   the symbol name plus the unsafe/safe FunPtr unwrappers.
--   (INLINE pragmas below were garbled to "# INLINE ... #" by a tooling
--   pass; restored to valid pragma syntax.)
instance VulkanProc "vkCreateSamplerYcbcrConversionKHR" where
        type VkProcType "vkCreateSamplerYcbcrConversionKHR" =
             HS_vkCreateSamplerYcbcrConversionKHR
        vkProcSymbol = _VkCreateSamplerYcbcrConversionKHR
        {-# INLINE vkProcSymbol #-}
        unwrapVkProcPtrUnsafe
          = unwrapVkCreateSamplerYcbcrConversionKHRUnsafe
        {-# INLINE unwrapVkProcPtrUnsafe #-}
        unwrapVkProcPtrSafe = unwrapVkCreateSamplerYcbcrConversionKHRSafe
        {-# INLINE unwrapVkProcPtrSafe #-}
-- | The name of the @vkDestroySamplerYcbcrConversionKHR@ command as a
--   NUL-terminated 'CString'; usable for dynamic procedure lookup and
--   matchable against any 'CString' via the view pattern below.
pattern VkDestroySamplerYcbcrConversionKHR :: CString

pattern VkDestroySamplerYcbcrConversionKHR <-
        (is_VkDestroySamplerYcbcrConversionKHR -> True)
  where
    VkDestroySamplerYcbcrConversionKHR
        = _VkDestroySamplerYcbcrConversionKHR

-- NOTE(review): the two INLINE pragmas in this block were garbled
-- ("# INLINE _ VkDestroy... #", with braces stripped and a stray space
-- inside the identifier); restored to valid pragma syntax.
{-# INLINE _VkDestroySamplerYcbcrConversionKHR #-}

-- | The raw NUL-terminated primitive string backing the pattern above.
_VkDestroySamplerYcbcrConversionKHR :: CString
_VkDestroySamplerYcbcrConversionKHR
  = Ptr "vkDestroySamplerYcbcrConversionKHR\NUL"#

{-# INLINE is_VkDestroySamplerYcbcrConversionKHR #-}

-- | True iff 'cmpCStrings' reports 'EQ' against the canonical name.
is_VkDestroySamplerYcbcrConversionKHR :: CString -> Bool
is_VkDestroySamplerYcbcrConversionKHR
  = (EQ ==) . cmpCStrings _VkDestroySamplerYcbcrConversionKHR
-- | Type-level name of the procedure, used as the 'VulkanProc' index.
type VkDestroySamplerYcbcrConversionKHR =
     "vkDestroySamplerYcbcrConversionKHR"

-- | This is an alias for `vkDestroySamplerYcbcrConversion`.
--
--   > void vkDestroySamplerYcbcrConversionKHR
--   >     ( VkDevice device
--   >     , VkSamplerYcbcrConversion ycbcrConversion
--   >     , const VkAllocationCallbacks* pAllocator
--   >     )
--
--   <https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR registry at www.khronos.org>
type HS_vkDestroySamplerYcbcrConversionKHR
   = VkDevice                     -- ^ device
  -> VkSamplerYcbcrConversion     -- ^ ycbcrConversion
  -> Ptr VkAllocationCallbacks    -- ^ pAllocator
  -> IO ()
-- | C-side function pointer to a @vkDestroySamplerYcbcrConversionKHR@
--   implementation (as obtained from a proc-address lookup).
type PFN_vkDestroySamplerYcbcrConversionKHR =
     FunPtr HS_vkDestroySamplerYcbcrConversionKHR

-- Convert the raw function pointer into a Haskell call.
-- "unsafe" variant: lower call overhead, but must not call back into
-- the Haskell runtime or block for long.
foreign import ccall unsafe "dynamic"
               unwrapVkDestroySamplerYcbcrConversionKHRUnsafe ::
               PFN_vkDestroySamplerYcbcrConversionKHR ->
                 HS_vkDestroySamplerYcbcrConversionKHR

-- "safe" variant of the same wrapper: allows the foreign call to
-- re-enter the runtime / run concurrently with Haskell threads.
foreign import ccall safe "dynamic"
               unwrapVkDestroySamplerYcbcrConversionKHRSafe ::
               PFN_vkDestroySamplerYcbcrConversionKHR ->
                 HS_vkDestroySamplerYcbcrConversionKHR
-- | Wires this command into the generic 'VulkanProc' lookup machinery:
--   the symbol name plus the unsafe/safe FunPtr unwrappers.
--   (INLINE pragmas below were garbled to "# INLINE ... #" by a tooling
--   pass; restored to valid pragma syntax.)
instance VulkanProc "vkDestroySamplerYcbcrConversionKHR" where
        type VkProcType "vkDestroySamplerYcbcrConversionKHR" =
             HS_vkDestroySamplerYcbcrConversionKHR
        vkProcSymbol = _VkDestroySamplerYcbcrConversionKHR
        {-# INLINE vkProcSymbol #-}
        unwrapVkProcPtrUnsafe
          = unwrapVkDestroySamplerYcbcrConversionKHRUnsafe
        {-# INLINE unwrapVkProcPtrUnsafe #-}
        unwrapVkProcPtrSafe = unwrapVkDestroySamplerYcbcrConversionKHRSafe
        {-# INLINE unwrapVkProcPtrSafe #-}
-- | Revision number of the @VK_KHR_sampler_ycbcr_conversion@ spec
--   wrapped by this module (value-level; polymorphic in any Num/Eq type).
pattern VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION ::
        (Num a, Eq a) => a

pattern VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION = 14

-- | Type-level counterpart of the spec version above.
type VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION = 14
-- | Canonical extension name @"VK_KHR_sampler_ycbcr_conversion"@ as a
--   NUL-terminated 'CString'; matchable against any 'CString' via the
--   view pattern below.
pattern VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME :: CString

pattern VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME <-
        (is_VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME -> True)
  where
    VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME
        = _VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME

{-# INLINE _VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME #-}

-- | The raw NUL-terminated primitive string backing the pattern above.
_VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME :: CString
_VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME
  = Ptr "VK_KHR_sampler_ycbcr_conversion\NUL"#

-- NOTE(review): the INLINE pragma below had been garbled to
-- "# INLINE ... #" (braces stripped); restored to valid pragma syntax.
{-# INLINE is_VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME #-}

-- | True iff 'cmpCStrings' reports 'EQ' against the canonical name.
is_VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME ::
        CString -> Bool
is_VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME
  = (EQ ==) .
      cmpCStrings _VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME

-- | Type-level counterpart of the extension name.
type VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME =
     "VK_KHR_sampler_ycbcr_conversion"
-- The remainder of this module is KHR-suffixed aliases for the enum
-- values that were promoted to Vulkan core 1.1; each pattern simply
-- re-exports the core constructor under its extension-era name.

-- Structure types --------------------------------------------------------
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR
        = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR =
        VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO
pattern VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR =
        VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO
pattern VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR
        = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR
        =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR
        =
        VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES
-- Debug-report / object types -------------------------------------------
pattern VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT
        = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT
pattern VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR =
        VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION
-- Multi-planar / subsampled formats -------------------------------------
pattern VK_FORMAT_G8B8G8R8_422_UNORM_KHR =
        VK_FORMAT_G8B8G8R8_422_UNORM
pattern VK_FORMAT_B8G8R8G8_422_UNORM_KHR =
        VK_FORMAT_B8G8R8G8_422_UNORM
pattern VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR =
        VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM
pattern VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR =
        VK_FORMAT_G8_B8R8_2PLANE_420_UNORM
pattern VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR =
        VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM
pattern VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR =
        VK_FORMAT_G8_B8R8_2PLANE_422_UNORM
pattern VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR =
        VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM
pattern VK_FORMAT_R10X6_UNORM_PACK16_KHR =
        VK_FORMAT_R10X6_UNORM_PACK16
pattern VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR =
        VK_FORMAT_R10X6G10X6_UNORM_2PACK16
pattern VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR =
        VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16
pattern VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR =
        VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16
pattern VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR =
        VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR =
        VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16
pattern VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR =
        VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR =
        VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16
pattern VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR =
        VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR =
        VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16
pattern VK_FORMAT_R12X4_UNORM_PACK16_KHR =
        VK_FORMAT_R12X4_UNORM_PACK16
pattern VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR =
        VK_FORMAT_R12X4G12X4_UNORM_2PACK16
pattern VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR =
        VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16
pattern VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR =
        VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16
pattern VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR =
        VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR =
        VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16
pattern VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR =
        VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR =
        VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16
pattern VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR =
        VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR =
        VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16
pattern VK_FORMAT_G16B16G16R16_422_UNORM_KHR =
        VK_FORMAT_G16B16G16R16_422_UNORM
pattern VK_FORMAT_B16G16R16G16_422_UNORM_KHR =
        VK_FORMAT_B16G16R16G16_422_UNORM
pattern VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR =
        VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM
pattern VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR =
        VK_FORMAT_G16_B16R16_2PLANE_420_UNORM
pattern VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR =
        VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM
pattern VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR =
        VK_FORMAT_G16_B16R16_2PLANE_422_UNORM
pattern VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR =
        VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM
-- Image aspect / create flags -------------------------------------------
pattern VK_IMAGE_ASPECT_PLANE_0_BIT_KHR =
        VK_IMAGE_ASPECT_PLANE_0_BIT
pattern VK_IMAGE_ASPECT_PLANE_1_BIT_KHR =
        VK_IMAGE_ASPECT_PLANE_1_BIT
pattern VK_IMAGE_ASPECT_PLANE_2_BIT_KHR =
        VK_IMAGE_ASPECT_PLANE_2_BIT
pattern VK_IMAGE_CREATE_DISJOINT_BIT_KHR =
        VK_IMAGE_CREATE_DISJOINT_BIT
-- Format feature flags --------------------------------------------------
pattern VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR =
        VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR
        =
        VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR
        =
        VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR
        =
        VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR
        =
        VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT
pattern VK_FORMAT_FEATURE_DISJOINT_BIT_KHR =
        VK_FORMAT_FEATURE_DISJOINT_BIT
pattern VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR =
        VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT
-- Ycbcr model conversion / range / chroma location ----------------------
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR =
        VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR =
        VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR =
        VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR =
        VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR =
        VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020
pattern VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR =
        VK_SAMPLER_YCBCR_RANGE_ITU_FULL
pattern VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR =
        VK_SAMPLER_YCBCR_RANGE_ITU_NARROW
pattern VK_CHROMA_LOCATION_COSITED_EVEN_KHR =
        VK_CHROMA_LOCATION_COSITED_EVEN
pattern VK_CHROMA_LOCATION_MIDPOINT_KHR =
        VK_CHROMA_LOCATION_MIDPOINT
| null | https://raw.githubusercontent.com/achirkin/vulkan/b2e0568c71b5135010f4bba939cd8dcf7a05c361/vulkan-api/src-gen/Graphics/Vulkan/Ext/VK_KHR_sampler_ycbcr_conversion.hs | haskell | # LANGUAGE CPP #
# LANGUAGE DataKinds #
# LANGUAGE MagicHash #
# LANGUAGE PatternSynonyms #
# LANGUAGE Strict #
# LANGUAGE TypeFamilies #
# LANGUAGE ViewPatterns #
|
supported: @vulkan@
author: @KHR@
type: @device@
Extension number: @157@
Required extensions: 'VK_KHR_maintenance1', 'VK_KHR_bind_memory2', 'VK_KHR_get_memory_requirements2', 'VK_KHR_get_physical_device_properties2'.
** Required extensions: 'VK_KHR_maintenance1', 'VK_KHR_bind_memory2', 'VK_KHR_get_memory_requirements2', 'VK_KHR_get_physical_device_properties2'.
** Required extensions: 'VK_EXT_debug_report', 'VK_KHR_maintenance1', 'VK_KHR_bind_memory2', 'VK_KHR_get_memory_requirements2', 'VK_KHR_get_physical_device_properties2'.
# INLINE _VkCreateSamplerYcbcrConversionKHR #
| This is an alias for `vkCreateSamplerYcbcrConversion`.
Error codes: 'VK_ERROR_OUT_OF_HOST_MEMORY', 'VK_ERROR_OUT_OF_DEVICE_MEMORY'.
> ( VkDevice device
> , const VkSamplerYcbcrConversionCreateInfo* pCreateInfo
> , const VkAllocationCallbacks* pAllocator
> , VkSamplerYcbcrConversion* pYcbcrConversion
> )
^ device
^ pCreateInfo
^ pAllocator
^ pYcbcrConversion
| This is an alias for `vkDestroySamplerYcbcrConversion`.
> void vkDestroySamplerYcbcrConversionKHR
> ( VkDevice device
> , VkSamplerYcbcrConversion ycbcrConversion
> , const VkAllocationCallbacks* pAllocator
> )
<-extensions/html/vkspec.html#vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR registry at www.khronos.org>
^ device
^ ycbcrConversion
^ pAllocator
# INLINE _VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME # | # OPTIONS_GHC -fno - warn - missing - pattern - synonym - signatures #
# OPTIONS_GHC -fno - warn - orphans #
# OPTIONS_HADDOCK not - home #
# LANGUAGE FlexibleInstances #
# LANGUAGE ForeignFunctionInterface #
module Graphics.Vulkan.Ext.VK_KHR_sampler_ycbcr_conversion
* Vulkan extension : @VK_KHR_sampler_ycbcr_conversion@
contact :
VkBindImagePlaneMemoryInfoKHR, VkChromaLocation(..),
VkChromaLocationKHR(..), VkImagePlaneMemoryRequirementsInfoKHR,
VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR,
VkSamplerYcbcrConversionCreateInfoKHR,
VkSamplerYcbcrConversionImageFormatPropertiesKHR,
VkSamplerYcbcrConversionInfoKHR, VkSamplerAddressMode(..),
VkSamplerMipmapMode(..), VkSamplerReductionMode(..),
VkSamplerYcbcrModelConversion(..), VkSamplerYcbcrRange(..),
VkSamplerCreateBitmask(..), VkSamplerCreateFlagBits(),
VkSamplerCreateFlags(), VkSamplerReductionModeEXT(..),
VkSamplerYcbcrModelConversionKHR(..), VkSamplerYcbcrRangeKHR(..),
VkCreateSamplerYcbcrConversionKHR,
pattern VkCreateSamplerYcbcrConversionKHR,
HS_vkCreateSamplerYcbcrConversionKHR,
PFN_vkCreateSamplerYcbcrConversionKHR,
VkDestroySamplerYcbcrConversionKHR,
pattern VkDestroySamplerYcbcrConversionKHR,
HS_vkDestroySamplerYcbcrConversionKHR,
PFN_vkDestroySamplerYcbcrConversionKHR,
module Graphics.Vulkan.Marshal, AHardwareBuffer(), ANativeWindow(),
CAMetalLayer(), VkBool32(..), VkDeviceAddress(..),
VkDeviceSize(..), VkFlags(..), VkSampleMask(..),
VkComponentSwizzle(..), VkComponentTypeNV(..), VkFilter(..),
VkFormat(..), VkFormatFeatureBitmask(..),
VkFormatFeatureFlagBits(), VkFormatFeatureFlags(),
VkInternalAllocationType(..), VkResult(..), VkStructureType(..),
VkSystemAllocationScope(..), newVkAllocationFunction,
newVkDebugReportCallbackEXT, newVkDebugUtilsMessengerCallbackEXT,
newVkFreeFunction, newVkInternalAllocationNotification,
newVkInternalFreeNotification, newVkReallocationFunction,
newVkVoidFunction, unwrapVkAllocationFunction,
unwrapVkDebugReportCallbackEXT,
unwrapVkDebugUtilsMessengerCallbackEXT, unwrapVkFreeFunction,
unwrapVkInternalAllocationNotification,
unwrapVkInternalFreeNotification, unwrapVkReallocationFunction,
unwrapVkVoidFunction, HS_vkAllocationFunction,
HS_vkDebugReportCallbackEXT, HS_vkDebugUtilsMessengerCallbackEXT,
HS_vkFreeFunction, HS_vkInternalAllocationNotification,
HS_vkInternalFreeNotification, HS_vkReallocationFunction,
HS_vkVoidFunction, PFN_vkAllocationFunction,
PFN_vkDebugReportCallbackEXT, PFN_vkDebugUtilsMessengerCallbackEXT,
PFN_vkFreeFunction, PFN_vkInternalAllocationNotification,
PFN_vkInternalFreeNotification, PFN_vkReallocationFunction,
PFN_vkVoidFunction, VkAccelerationStructureKHR,
VkAccelerationStructureKHR_T(), VkAccelerationStructureNV,
VkAccelerationStructureNV_T(), VkBuffer, VkBufferView,
VkBufferView_T(), VkBuffer_T(), VkCommandBuffer,
VkCommandBuffer_T(), VkCommandPool, VkCommandPool_T(),
VkDebugReportCallbackEXT, VkDebugReportCallbackEXT_T(),
VkDebugUtilsMessengerEXT, VkDebugUtilsMessengerEXT_T(),
VkDeferredOperationKHR, VkDeferredOperationKHR_T(),
VkDescriptorPool, VkDescriptorPool_T(), VkDescriptorSet,
VkDescriptorSetLayout, VkDescriptorSetLayout_T(),
VkDescriptorSet_T(), VkDescriptorUpdateTemplate,
VkDescriptorUpdateTemplateKHR, VkDescriptorUpdateTemplateKHR_T(),
VkDescriptorUpdateTemplate_T(), VkDevice, VkDeviceMemory,
VkDeviceMemory_T(), VkDevice_T(), VkDisplayKHR, VkDisplayKHR_T(),
VkDisplayModeKHR, VkDisplayModeKHR_T(), VkEvent, VkEvent_T(),
VkFence, VkFence_T(), VkFramebuffer, VkFramebuffer_T(), VkImage,
VkImageView, VkImageView_T(), VkImage_T(),
VkIndirectCommandsLayoutNV, VkIndirectCommandsLayoutNV_T(),
VkInstance, VkInstance_T(), VkPerformanceConfigurationINTEL,
VkPerformanceConfigurationINTEL_T(), VkPhysicalDevice,
VkPhysicalDevice_T(), VkPipeline, VkPipelineCache,
VkPipelineCache_T(), VkPipelineLayout, VkPipelineLayout_T(),
VkPipeline_T(), VkPrivateDataSlotEXT, VkPrivateDataSlotEXT_T(),
VkQueryPool, VkQueryPool_T(), VkQueue, VkQueue_T(), VkRenderPass,
VkRenderPass_T(), VkSampler, VkSamplerYcbcrConversion,
VkSamplerYcbcrConversionKHR, VkSamplerYcbcrConversionKHR_T(),
VkSamplerYcbcrConversion_T(), VkSampler_T(), VkSemaphore,
VkSemaphore_T(), VkShaderModule, VkShaderModule_T(), VkSurfaceKHR,
VkSurfaceKHR_T(), VkSwapchainKHR, VkSwapchainKHR_T(),
VkValidationCacheEXT, VkValidationCacheEXT_T(),
VkAllocationCallbacks, VkComponentMapping, VkSamplerCreateInfo,
VkSamplerCustomBorderColorCreateInfoEXT,
VkSamplerReductionModeCreateInfo,
VkSamplerReductionModeCreateInfoEXT,
VkSamplerYcbcrConversionCreateInfo,
VkSamplerYcbcrConversionImageFormatProperties,
VkSamplerYcbcrConversionInfo,
VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION,
pattern VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION,
VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
pattern VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR,
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR,
pattern VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR,
pattern VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR,
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR,
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR,
pattern VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT,
pattern VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR,
pattern VK_FORMAT_G8B8G8R8_422_UNORM_KHR,
pattern VK_FORMAT_B8G8R8G8_422_UNORM_KHR,
pattern VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR,
pattern VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
pattern VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR,
pattern VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR,
pattern VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR,
pattern VK_FORMAT_R10X6_UNORM_PACK16_KHR,
pattern VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR,
pattern VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR,
pattern VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR,
pattern VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR,
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR,
pattern VK_FORMAT_R12X4_UNORM_PACK16_KHR,
pattern VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR,
pattern VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR,
pattern VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR,
pattern VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR,
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR,
pattern VK_FORMAT_G16B16G16R16_422_UNORM_KHR,
pattern VK_FORMAT_B16G16R16G16_422_UNORM_KHR,
pattern VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR,
pattern VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR,
pattern VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR,
pattern VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR,
pattern VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR,
pattern VK_IMAGE_ASPECT_PLANE_0_BIT_KHR,
pattern VK_IMAGE_ASPECT_PLANE_1_BIT_KHR,
pattern VK_IMAGE_ASPECT_PLANE_2_BIT_KHR,
pattern VK_IMAGE_CREATE_DISJOINT_BIT_KHR,
pattern VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR,
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR,
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR,
pattern VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
pattern VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR,
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR,
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR,
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR,
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR,
pattern VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR,
pattern VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR,
pattern VK_CHROMA_LOCATION_COSITED_EVEN_KHR,
pattern VK_CHROMA_LOCATION_MIDPOINT_KHR,
pattern VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT)
where
import GHC.Ptr (Ptr (..))
import Graphics.Vulkan.Core_1_1 (pattern VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
pattern VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
pattern VK_FORMAT_B16G16R16G16_422_UNORM,
pattern VK_FORMAT_B8G8R8G8_422_UNORM,
pattern VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
pattern VK_FORMAT_FEATURE_DISJOINT_BIT,
pattern VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
pattern VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
pattern VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
pattern VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
pattern VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
pattern VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
pattern VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
pattern VK_FORMAT_G16B16G16R16_422_UNORM,
pattern VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
pattern VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
pattern VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
pattern VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
pattern VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
pattern VK_FORMAT_G8B8G8R8_422_UNORM,
pattern VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
pattern VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
pattern VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
pattern VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
pattern VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
pattern VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
pattern VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
pattern VK_FORMAT_R10X6_UNORM_PACK16,
pattern VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
pattern VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
pattern VK_FORMAT_R12X4_UNORM_PACK16,
pattern VK_IMAGE_ASPECT_PLANE_0_BIT,
pattern VK_IMAGE_ASPECT_PLANE_1_BIT,
pattern VK_IMAGE_ASPECT_PLANE_2_BIT,
pattern VK_IMAGE_CREATE_DISJOINT_BIT,
pattern VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
pattern VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
pattern VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO)
import Graphics.Vulkan.Ext.VK_EXT_debug_report (pattern VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT)
import Graphics.Vulkan.Marshal
import Graphics.Vulkan.Marshal.Proc (VulkanProc (..))
import Graphics.Vulkan.Types.BaseTypes
import Graphics.Vulkan.Types.Enum.ChromaLocation
import Graphics.Vulkan.Types.Enum.Component
import Graphics.Vulkan.Types.Enum.Filter
import Graphics.Vulkan.Types.Enum.Format
import Graphics.Vulkan.Types.Enum.InternalAllocationType
import Graphics.Vulkan.Types.Enum.Result
import Graphics.Vulkan.Types.Enum.Sampler
import Graphics.Vulkan.Types.Enum.StructureType
import Graphics.Vulkan.Types.Enum.SystemAllocationScope
import Graphics.Vulkan.Types.Funcpointers
import Graphics.Vulkan.Types.Handles
import Graphics.Vulkan.Types.Struct.AllocationCallbacks
import Graphics.Vulkan.Types.Struct.Bind (VkBindImagePlaneMemoryInfoKHR)
import Graphics.Vulkan.Types.Struct.ComponentMapping
import Graphics.Vulkan.Types.Struct.Image (VkImagePlaneMemoryRequirementsInfoKHR)
import Graphics.Vulkan.Types.Struct.PhysicalDevice (VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR)
import Graphics.Vulkan.Types.Struct.Sampler
pattern VkCreateSamplerYcbcrConversionKHR :: CString
pattern VkCreateSamplerYcbcrConversionKHR <-
(is_VkCreateSamplerYcbcrConversionKHR -> True)
where
VkCreateSamplerYcbcrConversionKHR
= _VkCreateSamplerYcbcrConversionKHR
_VkCreateSamplerYcbcrConversionKHR :: CString
_VkCreateSamplerYcbcrConversionKHR
= Ptr "vkCreateSamplerYcbcrConversionKHR\NUL"#
# INLINE is_VkCreateSamplerYcbcrConversionKHR #
is_VkCreateSamplerYcbcrConversionKHR :: CString -> Bool
is_VkCreateSamplerYcbcrConversionKHR
= (EQ ==) . cmpCStrings _VkCreateSamplerYcbcrConversionKHR
type VkCreateSamplerYcbcrConversionKHR =
"vkCreateSamplerYcbcrConversionKHR"
Success codes : ' VK_SUCCESS ' .
> VkResult vkCreateSamplerYcbcrConversionKHR
< vkCreateSamplerYcbcrConversionKHR registry at www.khronos.org >
type HS_vkCreateSamplerYcbcrConversionKHR =
->
->
->
-> IO VkResult
type PFN_vkCreateSamplerYcbcrConversionKHR =
FunPtr HS_vkCreateSamplerYcbcrConversionKHR
foreign import ccall unsafe "dynamic"
unwrapVkCreateSamplerYcbcrConversionKHRUnsafe ::
PFN_vkCreateSamplerYcbcrConversionKHR ->
HS_vkCreateSamplerYcbcrConversionKHR
foreign import ccall safe "dynamic"
unwrapVkCreateSamplerYcbcrConversionKHRSafe ::
PFN_vkCreateSamplerYcbcrConversionKHR ->
HS_vkCreateSamplerYcbcrConversionKHR
instance VulkanProc "vkCreateSamplerYcbcrConversionKHR" where
type VkProcType "vkCreateSamplerYcbcrConversionKHR" =
HS_vkCreateSamplerYcbcrConversionKHR
vkProcSymbol = _VkCreateSamplerYcbcrConversionKHR
# INLINE vkProcSymbol #
unwrapVkProcPtrUnsafe
= unwrapVkCreateSamplerYcbcrConversionKHRUnsafe
# INLINE unwrapVkProcPtrUnsafe #
unwrapVkProcPtrSafe = unwrapVkCreateSamplerYcbcrConversionKHRSafe
# INLINE unwrapVkProcPtrSafe #
pattern VkDestroySamplerYcbcrConversionKHR :: CString
pattern VkDestroySamplerYcbcrConversionKHR <-
(is_VkDestroySamplerYcbcrConversionKHR -> True)
where
VkDestroySamplerYcbcrConversionKHR
= _VkDestroySamplerYcbcrConversionKHR
# INLINE _ VkDestroySamplerYcbcrConversionKHR #
_VkDestroySamplerYcbcrConversionKHR :: CString
_VkDestroySamplerYcbcrConversionKHR
= Ptr "vkDestroySamplerYcbcrConversionKHR\NUL"#
# INLINE is_VkDestroySamplerYcbcrConversionKHR #
is_VkDestroySamplerYcbcrConversionKHR :: CString -> Bool
is_VkDestroySamplerYcbcrConversionKHR
= (EQ ==) . cmpCStrings _VkDestroySamplerYcbcrConversionKHR
type VkDestroySamplerYcbcrConversionKHR =
"vkDestroySamplerYcbcrConversionKHR"
type HS_vkDestroySamplerYcbcrConversionKHR =
->
-> IO ()
type PFN_vkDestroySamplerYcbcrConversionKHR =
FunPtr HS_vkDestroySamplerYcbcrConversionKHR
foreign import ccall unsafe "dynamic"
unwrapVkDestroySamplerYcbcrConversionKHRUnsafe ::
PFN_vkDestroySamplerYcbcrConversionKHR ->
HS_vkDestroySamplerYcbcrConversionKHR
foreign import ccall safe "dynamic"
unwrapVkDestroySamplerYcbcrConversionKHRSafe ::
PFN_vkDestroySamplerYcbcrConversionKHR ->
HS_vkDestroySamplerYcbcrConversionKHR
instance VulkanProc "vkDestroySamplerYcbcrConversionKHR" where
type VkProcType "vkDestroySamplerYcbcrConversionKHR" =
HS_vkDestroySamplerYcbcrConversionKHR
vkProcSymbol = _VkDestroySamplerYcbcrConversionKHR
# INLINE vkProcSymbol #
unwrapVkProcPtrUnsafe
= unwrapVkDestroySamplerYcbcrConversionKHRUnsafe
# INLINE unwrapVkProcPtrUnsafe #
unwrapVkProcPtrSafe = unwrapVkDestroySamplerYcbcrConversionKHRSafe
# INLINE unwrapVkProcPtrSafe #
pattern VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION ::
(Num a, Eq a) => a
pattern VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION = 14
type VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION = 14
pattern VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME :: CString
pattern VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME <-
(is_VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME -> True)
where
VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME
= _VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME
_VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME :: CString
_VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME
= Ptr "VK_KHR_sampler_ycbcr_conversion\NUL"#
# INLINE is_VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME #
is_VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME ::
CString -> Bool
is_VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME
= (EQ ==) .
cmpCStrings _VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME
type VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME =
"VK_KHR_sampler_ycbcr_conversion"
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR
= VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR =
VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO
pattern VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR =
VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO
pattern VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR
= VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO
pattern VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR
=
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES
pattern VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR
=
VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES
pattern VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT
= VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT
pattern VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR =
VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION
pattern VK_FORMAT_G8B8G8R8_422_UNORM_KHR =
VK_FORMAT_G8B8G8R8_422_UNORM
pattern VK_FORMAT_B8G8R8G8_422_UNORM_KHR =
VK_FORMAT_B8G8R8G8_422_UNORM
pattern VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR =
VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM
pattern VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR =
VK_FORMAT_G8_B8R8_2PLANE_420_UNORM
pattern VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR =
VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM
pattern VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR =
VK_FORMAT_G8_B8R8_2PLANE_422_UNORM
pattern VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR =
VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM
pattern VK_FORMAT_R10X6_UNORM_PACK16_KHR =
VK_FORMAT_R10X6_UNORM_PACK16
pattern VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR =
VK_FORMAT_R10X6G10X6_UNORM_2PACK16
pattern VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR =
VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16
pattern VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR =
VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16
pattern VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR =
VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR =
VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16
pattern VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR =
VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR =
VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16
pattern VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR =
VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16
pattern VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR =
VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16
pattern VK_FORMAT_R12X4_UNORM_PACK16_KHR =
VK_FORMAT_R12X4_UNORM_PACK16
pattern VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR =
VK_FORMAT_R12X4G12X4_UNORM_2PACK16
pattern VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR =
VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16
pattern VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR =
VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16
pattern VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR =
VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR =
VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16
pattern VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR =
VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR =
VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16
pattern VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR =
VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16
pattern VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR =
VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16
pattern VK_FORMAT_G16B16G16R16_422_UNORM_KHR =
VK_FORMAT_G16B16G16R16_422_UNORM
pattern VK_FORMAT_B16G16R16G16_422_UNORM_KHR =
VK_FORMAT_B16G16R16G16_422_UNORM
pattern VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR =
VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM
pattern VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR =
VK_FORMAT_G16_B16R16_2PLANE_420_UNORM
pattern VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR =
VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM
pattern VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR =
VK_FORMAT_G16_B16R16_2PLANE_422_UNORM
pattern VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR =
VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM
pattern VK_IMAGE_ASPECT_PLANE_0_BIT_KHR =
VK_IMAGE_ASPECT_PLANE_0_BIT
pattern VK_IMAGE_ASPECT_PLANE_1_BIT_KHR =
VK_IMAGE_ASPECT_PLANE_1_BIT
pattern VK_IMAGE_ASPECT_PLANE_2_BIT_KHR =
VK_IMAGE_ASPECT_PLANE_2_BIT
pattern VK_IMAGE_CREATE_DISJOINT_BIT_KHR =
VK_IMAGE_CREATE_DISJOINT_BIT
pattern VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR =
VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR
=
VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR
=
VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR
=
VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT
pattern VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR
=
VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT
pattern VK_FORMAT_FEATURE_DISJOINT_BIT_KHR =
VK_FORMAT_FEATURE_DISJOINT_BIT
pattern VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR =
VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR =
VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR =
VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR =
VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR =
VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601
pattern VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR =
VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020
pattern VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR =
VK_SAMPLER_YCBCR_RANGE_ITU_FULL
pattern VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR =
VK_SAMPLER_YCBCR_RANGE_ITU_NARROW
pattern VK_CHROMA_LOCATION_COSITED_EVEN_KHR =
VK_CHROMA_LOCATION_COSITED_EVEN
pattern VK_CHROMA_LOCATION_MIDPOINT_KHR =
VK_CHROMA_LOCATION_MIDPOINT
|
ce2fa6e46b0f2616db2f1f68f68f315e6c4822110405a2aa917b56218d712c1d | rbkmoney/cds | cds_maintenance_sup.erl | -module(cds_maintenance_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-spec start_link() -> {ok, pid()} | {error, Reason :: any()}.
start_link() ->
supervisor:start_link(?MODULE, []).
-spec init(_) -> {ok, {supervisor:sup_flags(), [supervisor:child_spec()]}}.
init(_) ->
Cleaner = spec(
[
#{
id => cds_session_cleaner,
start => {cds_session_cleaner, start_link, []}
}
],
session_cleaning
),
Recrypter = spec(
[
#{
id => cds_cvv_recrypter,
start => {cds_recrypter, start_link, [#{subject => session}]}
},
#{
id => cds_card_data_recrypter,
start => {cds_recrypter, start_link, [#{subject => carddata}]}
}
],
recrypting
),
{ok, {{one_for_one, 1, 5}, Cleaner ++ Recrypter}}.
spec(Spec, Name) ->
Config = application:get_env(cds, Name, #{}),
case maps:get(enabled, Config, false) of
true -> Spec;
false -> []
end.
| null | https://raw.githubusercontent.com/rbkmoney/cds/7daa88e44a95de6ba0404ff492344d73fae64a95/apps/cds/src/cds_maintenance_sup.erl | erlang | -module(cds_maintenance_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-spec start_link() -> {ok, pid()} | {error, Reason :: any()}.
start_link() ->
supervisor:start_link(?MODULE, []).
-spec init(_) -> {ok, {supervisor:sup_flags(), [supervisor:child_spec()]}}.
init(_) ->
Cleaner = spec(
[
#{
id => cds_session_cleaner,
start => {cds_session_cleaner, start_link, []}
}
],
session_cleaning
),
Recrypter = spec(
[
#{
id => cds_cvv_recrypter,
start => {cds_recrypter, start_link, [#{subject => session}]}
},
#{
id => cds_card_data_recrypter,
start => {cds_recrypter, start_link, [#{subject => carddata}]}
}
],
recrypting
),
{ok, {{one_for_one, 1, 5}, Cleaner ++ Recrypter}}.
spec(Spec, Name) ->
Config = application:get_env(cds, Name, #{}),
case maps:get(enabled, Config, false) of
true -> Spec;
false -> []
end.
| |
802139c5344d1536aae4b11e365e08a58cfd334ea5bd3ccc296a3da712612324 | yrashk/erlang | CosNaming_NamingContext_NotFound.erl | %%------------------------------------------------------------
%%
%% Implementation stub file
%%
Target : CosNaming_NamingContext_NotFound
Source : /net / isildur / ldisk / daily_build / otp_prebuild_r13b01.2009 - 06 - 07_20 / otp_src_R13B01 / lib / orber / COSS / CosNaming / cos_naming.idl
IC vsn : 4.2.21
%%
%% This file is automatically generated. DO NOT EDIT IT.
%%
%%------------------------------------------------------------
-module('CosNaming_NamingContext_NotFound').
-ic_compiled("4_2_21").
-include("CosNaming_NamingContext.hrl").
-export([tc/0,id/0,name/0]).
%% returns type code
tc() -> {tk_except,"IDL:omg.org/CosNaming/NamingContext/NotFound:1.0",
"NotFound",
[{"why",
{tk_enum,
"IDL:omg.org/CosNaming/NamingContext/NotFoundReason:1.0",
"NotFoundReason",
["missing_node","not_context","not_object"]}},
{"rest_of_name",
{tk_sequence,
{tk_struct,"IDL:omg.org/CosNaming/NameComponent:1.0",
"NameComponent",
[{"id",{tk_string,0}},{"kind",{tk_string,0}}]},
0}}]}.
%% returns id
id() -> "IDL:omg.org/CosNaming/NamingContext/NotFound:1.0".
%% returns name
name() -> "CosNaming_NamingContext_NotFound".
| null | https://raw.githubusercontent.com/yrashk/erlang/e1282325ed75e52a98d58f5bd9fb0fa27896173f/lib/orber/COSS/CosNaming/CosNaming_NamingContext_NotFound.erl | erlang | ------------------------------------------------------------
Implementation stub file
This file is automatically generated. DO NOT EDIT IT.
------------------------------------------------------------
returns type code
returns id
returns name | Target : CosNaming_NamingContext_NotFound
Source : /net / isildur / ldisk / daily_build / otp_prebuild_r13b01.2009 - 06 - 07_20 / otp_src_R13B01 / lib / orber / COSS / CosNaming / cos_naming.idl
IC vsn : 4.2.21
-module('CosNaming_NamingContext_NotFound').
-ic_compiled("4_2_21").
-include("CosNaming_NamingContext.hrl").
-export([tc/0,id/0,name/0]).
tc() -> {tk_except,"IDL:omg.org/CosNaming/NamingContext/NotFound:1.0",
"NotFound",
[{"why",
{tk_enum,
"IDL:omg.org/CosNaming/NamingContext/NotFoundReason:1.0",
"NotFoundReason",
["missing_node","not_context","not_object"]}},
{"rest_of_name",
{tk_sequence,
{tk_struct,"IDL:omg.org/CosNaming/NameComponent:1.0",
"NameComponent",
[{"id",{tk_string,0}},{"kind",{tk_string,0}}]},
0}}]}.
id() -> "IDL:omg.org/CosNaming/NamingContext/NotFound:1.0".
name() -> "CosNaming_NamingContext_NotFound".
|
a30ed6c773b53bf4cdcc0cf999aa760dde52da1832147ac1fccfaecce369b148 | metosin/spec-tools | perf_test.clj | (ns spec-tools.perf-test
(:require [schema.core :as schema]
[schema.coerce :as coerce]
[spec-tools.core :as st]
[spec-tools.spec :as spec]
[criterium.core :as cc]
[clojure.spec.alpha :as s]))
;;
start repl with ` perf repl `
;; perf measured with the following setup:
;;
;; Model Name: MacBook Pro
;; Model Identifier: MacBookPro11,3
Processor Name : Intel Core i7
Processor Speed : 2,5 GHz
Number of Processors : 1
Total Number of Cores : 4
L2 Cache ( per Core ): 256 KB
L3 Cache : 6 MB
Memory : 16 GB
;;
(s/def ::age (s/and integer? #(> % 10)))
(s/def ::x-age (s/and spec/integer? #(> % 10)))
(def age (schema/constrained schema/Int #(> % 10)))
(defn raw-title [color s]
(println (str color (apply str (repeat (count s) "#")) "\u001B[0m"))
(println (str color s "\u001B[0m"))
(println (str color (apply str (repeat (count s) "#")) "\u001B[0m")))
(def title (partial raw-title "\u001B[35m"))
(def suite (partial raw-title "\u001B[32m"))
(schema/check age 12)
(defn valid-test []
(suite "valid?")
; 81ns (alpha12)
84ns ( alpha14 )
(title "spec: integer?")
(let [call #(s/valid? ::age 12)]
(assert (call))
(cc/quick-bench
(call)))
; 77ns (alpha12)
82ns ( alpha14 )
(title "spec: x-integer?")
(let [call #(s/valid? ::x-age 12)]
(assert (call))
(cc/quick-bench
(call)))
; 430ns
(title "schema: s/Int")
(let [call #(schema/check age 12)]
(assert (nil? (call)))
(cc/quick-bench
(call)))
31ns
(title "schema: s/Int (compiled)")
(let [checker (schema/checker age)
call #(checker 12)]
(assert (nil? (call)))
(cc/quick-bench
(call))))
(defn conform-test []
(suite "no-op conform")
; 1315ns
100ns ( alpha12 )
; 81ns (alpha14)
(title "spec: integer?")
(let [call #(s/conform ::age 12)]
(assert (= (call) 12))
(cc/quick-bench
(call)))
; 1430ns
; 95ns (alpha12)
; 83ns (alpha14)
(title "spec: x-integer?")
(let [call #(st/conform ::x-age 12)]
(assert (= (call) 12))
(cc/quick-bench
(call)))
425ns
(title "schema: s/Int")
(let [call #((coerce/coercer age (constantly nil)) 12)]
(assert (= (call) 12))
(cc/quick-bench
(call)))
; 25ns
(title "schema: s/Int (compiled)")
(let [coercer (coerce/coercer age (constantly nil))
call #(coercer 12)]
(assert (= (call) 12))
(cc/quick-bench
(call))))
(defn conform-test2 []
(suite "transformer set of keywords")
(let [sizes-spec (s/coll-of (s/and spec/keyword? #{:L :M :S}) :into #{})
sizes-schema #{(schema/enum :L :M :S)}]
4300ns
; 1440ns (alpha12)
; 1160ns (alpha14)
(title "spec: conform keyword enum")
(let [call #(st/conform sizes-spec ["L" "M"] st/string-transformer)]
(assert (= (call) #{:L :M}))
(cc/quick-bench
(call)))
; 3700ns
990ns ( alpha12 )
990ns ( alpha14 )
(title "spec: conform keyword enum - no-op")
(let [call #(st/conform sizes-spec #{:L :M} st/string-transformer)]
(assert (= (call) #{:L :M}))
(cc/quick-bench
(call)))
(title "schema: conform keyword enum")
(let [coercer (coerce/coercer sizes-schema coerce/string-coercion-matcher)
call #(coercer ["L" "M"])]
(assert (= (call) #{:L :M}))
(cc/quick-bench
(call)))
780ns
(title "schema: conform keyword enum - no-op")
(let [coercer (coerce/coercer sizes-schema coerce/string-coercion-matcher)
call #(coercer #{:L :M})]
(assert (= (call) #{:L :M}))
(cc/quick-bench
(call)))))
(s/def ::order-id spec/integer?)
(s/def ::product-id spec/integer?)
(s/def ::company-id spec/integer?)
(s/def ::client-id spec/integer?)
(s/def ::product-name spec/string?)
(s/def ::price spec/double?)
(s/def ::quantity spec/integer?)
(s/def ::name spec/string?)
(s/def ::preferred-name ::name)
(s/def ::zip spec/integer?)
(s/def ::street string?)
(s/def ::country (s/and spec/keyword? #{:fi :po}))
(s/def ::receiver (s/keys :req-un [::name ::street ::zip]
:opt-un [::country]))
(s/def ::orderline (s/keys :req-un [::product-id ::price]
:req.un [::product-name]))
(s/def ::orderlines (s/coll-of ::orderline))
(s/def ::customer-type (s/and spec/keyword? #{:client :corporate}))
(defmulti requester-type :customer-type)
(defmethod requester-type :client
[_]
(s/keys :req-un [::client-id ::preferred-name ::customer-type]))
(defmethod requester-type :corporate
[_]
(s/keys :req-un [::company-id ::customer-type]))
(s/def ::requester (s/multi-spec requester-type ::customer-type))
(s/def ::order (s/merge
::requester
(s/keys :req-un [::order-id ::orderlines ::receiver])))
(s/def ::order-with-line (s/and ::order #(> (::orderlines 1))))
(def sample-order-valid
{:order-id 12
:orderlines [{:product-id 1
:price 12.3}
{:product-id 2
:price 9.99
:product-name "token"}]
:receiver {:name "Tommi"
:street "Kotikatu 2"
:zip 33310
:country :fi}})
(def sample-order
{:order-id "12"
:orderlines [{:product-id "1"
:price "12.3"}
{:product-id "2"
:price "9.99"
:product-name "token"}]
:receiver {:name "Tommi"
:street "Kotikatu 2"
:zip "33310"
:country "fi"}})
(def sample-multi-order-corporate
(merge sample-order-valid
{:customer-type "corporate"
:company-id "12345"}))
(defn multi-spec-coercer-test []
(suite "transformer set of multi-specs")
87.668605 µs
(title "Multi coercer")
(let [coercer #(st/coerce ::order % st/string-transformer)
call #(coercer sample-multi-order-corporate)
expected (merge sample-order-valid
{:customer-type :corporate
:company-id 12345})]
(assert (= (call) expected))
(cc/quick-bench
(call))))
(s/form
(s/cat ::first keyword?
:integer-lists (s/+
(s/coll-of
(s/keys :req-un [::order-id
::orderlines
::receiver])))))
(schema/defschema Order
{:order-id Long
:orderlines [{:product-id Long
:price Double
(schema/optional-key :product-name) String}]
:receiver {:name String
:street String
:zip Long
(schema/optional-key :country) (schema/enum :fi :po)}})
(defn conform-test3 []
(suite "transformer a nested map")
4.5µs ( alpha12 )
; 3.9µs (alpha14)
(title "spec: conform")
(let [call #(st/conform ::order sample-order st/string-transformer)]
(assert (= (call) sample-order-valid))
(cc/quick-bench
(call)))
2.8µs ( alpha12 )
; 2.7µs (alpha14)
(title "spec: conform - no-op")
(let [call #(st/conform ::order sample-order-valid st/string-transformer)]
(assert (= (call) sample-order-valid))
(cc/quick-bench
(call)))
; 9.1µs
(title "schema: conform")
(let [coercer (coerce/coercer Order coerce/string-coercion-matcher)
call #(coercer sample-order)]
(assert (= (call) sample-order-valid))
(cc/quick-bench
(call)))
9.3µs
(title "schema: conform - no-op")
(let [coercer (coerce/coercer Order coerce/string-coercion-matcher)
call #(coercer sample-order-valid)]
(assert (= (call) sample-order-valid))
(cc/quick-bench
(call))))
(comment
(valid-test)
(conform-test)
(conform-test2)
(conform-test3)
(multi-spec-coercer-test))
| null | https://raw.githubusercontent.com/metosin/spec-tools/d05e6e3c76c3c6ff847aa3f8e66344df2705aeae/test/clj/spec_tools/perf_test.clj | clojure |
perf measured with the following setup:
Model Name: MacBook Pro
Model Identifier: MacBookPro11,3
81ns (alpha12)
77ns (alpha12)
430ns
1315ns
81ns (alpha14)
1430ns
95ns (alpha12)
83ns (alpha14)
25ns
1440ns (alpha12)
1160ns (alpha14)
3700ns
3.9µs (alpha14)
2.7µs (alpha14)
9.1µs | (ns spec-tools.perf-test
(:require [schema.core :as schema]
[schema.coerce :as coerce]
[spec-tools.core :as st]
[spec-tools.spec :as spec]
[criterium.core :as cc]
[clojure.spec.alpha :as s]))
start repl with ` perf repl `
Processor Name : Intel Core i7
Processor Speed : 2,5 GHz
Number of Processors : 1
Total Number of Cores : 4
L2 Cache ( per Core ): 256 KB
L3 Cache : 6 MB
Memory : 16 GB
(s/def ::age (s/and integer? #(> % 10)))
(s/def ::x-age (s/and spec/integer? #(> % 10)))
(def age (schema/constrained schema/Int #(> % 10)))
(defn raw-title [color s]
(println (str color (apply str (repeat (count s) "#")) "\u001B[0m"))
(println (str color s "\u001B[0m"))
(println (str color (apply str (repeat (count s) "#")) "\u001B[0m")))
(def title (partial raw-title "\u001B[35m"))
(def suite (partial raw-title "\u001B[32m"))
(schema/check age 12)
(defn valid-test []
(suite "valid?")
84ns ( alpha14 )
(title "spec: integer?")
(let [call #(s/valid? ::age 12)]
(assert (call))
(cc/quick-bench
(call)))
82ns ( alpha14 )
(title "spec: x-integer?")
(let [call #(s/valid? ::x-age 12)]
(assert (call))
(cc/quick-bench
(call)))
(title "schema: s/Int")
(let [call #(schema/check age 12)]
(assert (nil? (call)))
(cc/quick-bench
(call)))
31ns
(title "schema: s/Int (compiled)")
(let [checker (schema/checker age)
call #(checker 12)]
(assert (nil? (call)))
(cc/quick-bench
(call))))
(defn conform-test []
(suite "no-op conform")
100ns ( alpha12 )
(title "spec: integer?")
(let [call #(s/conform ::age 12)]
(assert (= (call) 12))
(cc/quick-bench
(call)))
(title "spec: x-integer?")
(let [call #(st/conform ::x-age 12)]
(assert (= (call) 12))
(cc/quick-bench
(call)))
425ns
(title "schema: s/Int")
(let [call #((coerce/coercer age (constantly nil)) 12)]
(assert (= (call) 12))
(cc/quick-bench
(call)))
(title "schema: s/Int (compiled)")
(let [coercer (coerce/coercer age (constantly nil))
call #(coercer 12)]
(assert (= (call) 12))
(cc/quick-bench
(call))))
(defn conform-test2 []
(suite "transformer set of keywords")
(let [sizes-spec (s/coll-of (s/and spec/keyword? #{:L :M :S}) :into #{})
sizes-schema #{(schema/enum :L :M :S)}]
4300ns
(title "spec: conform keyword enum")
(let [call #(st/conform sizes-spec ["L" "M"] st/string-transformer)]
(assert (= (call) #{:L :M}))
(cc/quick-bench
(call)))
990ns ( alpha12 )
990ns ( alpha14 )
(title "spec: conform keyword enum - no-op")
(let [call #(st/conform sizes-spec #{:L :M} st/string-transformer)]
(assert (= (call) #{:L :M}))
(cc/quick-bench
(call)))
(title "schema: conform keyword enum")
(let [coercer (coerce/coercer sizes-schema coerce/string-coercion-matcher)
call #(coercer ["L" "M"])]
(assert (= (call) #{:L :M}))
(cc/quick-bench
(call)))
780ns
(title "schema: conform keyword enum - no-op")
(let [coercer (coerce/coercer sizes-schema coerce/string-coercion-matcher)
call #(coercer #{:L :M})]
(assert (= (call) #{:L :M}))
(cc/quick-bench
(call)))))
(s/def ::order-id spec/integer?)
(s/def ::product-id spec/integer?)
(s/def ::company-id spec/integer?)
(s/def ::client-id spec/integer?)
(s/def ::product-name spec/string?)
(s/def ::price spec/double?)
(s/def ::quantity spec/integer?)
(s/def ::name spec/string?)
(s/def ::preferred-name ::name)
(s/def ::zip spec/integer?)
(s/def ::street string?)
(s/def ::country (s/and spec/keyword? #{:fi :po}))
(s/def ::receiver (s/keys :req-un [::name ::street ::zip]
:opt-un [::country]))
(s/def ::orderline (s/keys :req-un [::product-id ::price]
:req.un [::product-name]))
(s/def ::orderlines (s/coll-of ::orderline))
(s/def ::customer-type (s/and spec/keyword? #{:client :corporate}))
(defmulti requester-type :customer-type)
(defmethod requester-type :client
[_]
(s/keys :req-un [::client-id ::preferred-name ::customer-type]))
(defmethod requester-type :corporate
[_]
(s/keys :req-un [::company-id ::customer-type]))
(s/def ::requester (s/multi-spec requester-type ::customer-type))
(s/def ::order (s/merge
::requester
(s/keys :req-un [::order-id ::orderlines ::receiver])))
(s/def ::order-with-line (s/and ::order #(> (::orderlines 1))))
(def sample-order-valid
{:order-id 12
:orderlines [{:product-id 1
:price 12.3}
{:product-id 2
:price 9.99
:product-name "token"}]
:receiver {:name "Tommi"
:street "Kotikatu 2"
:zip 33310
:country :fi}})
(def sample-order
{:order-id "12"
:orderlines [{:product-id "1"
:price "12.3"}
{:product-id "2"
:price "9.99"
:product-name "token"}]
:receiver {:name "Tommi"
:street "Kotikatu 2"
:zip "33310"
:country "fi"}})
(def sample-multi-order-corporate
(merge sample-order-valid
{:customer-type "corporate"
:company-id "12345"}))
(defn multi-spec-coercer-test []
(suite "transformer set of multi-specs")
87.668605 µs
(title "Multi coercer")
(let [coercer #(st/coerce ::order % st/string-transformer)
call #(coercer sample-multi-order-corporate)
expected (merge sample-order-valid
{:customer-type :corporate
:company-id 12345})]
(assert (= (call) expected))
(cc/quick-bench
(call))))
(s/form
(s/cat ::first keyword?
:integer-lists (s/+
(s/coll-of
(s/keys :req-un [::order-id
::orderlines
::receiver])))))
(schema/defschema Order
{:order-id Long
:orderlines [{:product-id Long
:price Double
(schema/optional-key :product-name) String}]
:receiver {:name String
:street String
:zip Long
(schema/optional-key :country) (schema/enum :fi :po)}})
(defn conform-test3 []
(suite "transformer a nested map")
4.5µs ( alpha12 )
(title "spec: conform")
(let [call #(st/conform ::order sample-order st/string-transformer)]
(assert (= (call) sample-order-valid))
(cc/quick-bench
(call)))
2.8µs ( alpha12 )
(title "spec: conform - no-op")
(let [call #(st/conform ::order sample-order-valid st/string-transformer)]
(assert (= (call) sample-order-valid))
(cc/quick-bench
(call)))
(title "schema: conform")
(let [coercer (coerce/coercer Order coerce/string-coercion-matcher)
call #(coercer sample-order)]
(assert (= (call) sample-order-valid))
(cc/quick-bench
(call)))
9.3µs
(title "schema: conform - no-op")
(let [coercer (coerce/coercer Order coerce/string-coercion-matcher)
call #(coercer sample-order-valid)]
(assert (= (call) sample-order-valid))
(cc/quick-bench
(call))))
(comment
(valid-test)
(conform-test)
(conform-test2)
(conform-test3)
(multi-spec-coercer-test))
|
24ec902896532c124277cfaef13303dba8c48b3a8745355fbd8d6c1d6d35934a | ekmett/reducers | Combinators.hs | # LANGUAGE CPP #
# LANGUAGE UndecidableInstances , TypeOperators , FlexibleContexts , MultiParamTypeClasses , FlexibleInstances , TypeFamilies #
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 702 && __GLASGOW_HASKELL__ < 710
# LANGUAGE Trustworthy #
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Data.Generator.Combinators
Copyright : ( c ) 2009
-- License : BSD-style
-- Maintainer :
-- Stability : experimental
-- Portability : non-portable (type families, MPTCs)
--
Utilities for working with Monoids that conflict with names from the " Prelude " ,
-- "Data.Foldable", "Control.Monad" or elsewhere. Intended to be imported qualified.
--
-- > import Data.Generator.Combinators as Generator
--
-----------------------------------------------------------------------------
module Data.Generator.Combinators
(
-- * Monadic Reduction
mapM_
, forM_
, msum
-- * Applicative Reduction
, traverse_
, for_
, asum
-- * Logical Reduction
, and
, or
, any
, all
-- * Monoidal Reduction
, foldMap
, fold
, toList
-- * List-Like Reduction
, concatMap
, elem
, filter
, filterWith
--, find
, sum
, product
, notElem
) where
import Prelude hiding
( mapM_, any, all, elem, filter, concatMap, and, or
, sum, product, notElem, replicate, cycle, repeat
#if __GLASGOW_HASKELL__ >= 710
, foldMap
#endif
)
import Control.Applicative (Alternative)
import Control.Monad (MonadPlus)
import Data.Generator
import Data.Semigroup (Sum(..), Product(..), All(..), Any(..), WrappedMonoid(..))
import Data.Semigroup.Applicative (Traversal(..))
import Data.Semigroup.Alternative (Alternate(..))
import Data.Semigroup.Monad (Action(..))
import Data.Semigroup.MonadPlus (MonadSum(..))
import Data.Semigroup.Reducer (Reducer(..))
#if !(MIN_VERSION_base(4,8,0))
import Control.Applicative (Applicative)
import Data.Monoid (Monoid(..))
#endif
-- | Efficiently 'mapReduce' a 'Generator' using the 'Traversal' monoid. A specialized version of its namesake from "Data.Foldable"
--
-- @
-- 'mapReduce' 'getTraversal'
-- @
traverse_ :: (Generator c, Applicative f) => (Elem c -> f b) -> c -> f ()
traverse_ = mapReduceWith getTraversal
{-# INLINE traverse_ #-}
-- | Convenience function as found in "Data.Foldable"
--
-- @
-- 'flip' 'traverse_'
-- @
for_ :: (Generator c, Applicative f) => c -> (Elem c -> f b) -> f ()
for_ = flip traverse_
{-# INLINE for_ #-}
-- | The sum of a collection of actions, generalizing 'concat'
--
-- @
-- 'reduceWith' 'getAlt'
-- @
asum :: (Generator c, Alternative f, f a ~ Elem c) => c -> f a
asum = reduceWith getAlternate
# INLINE asum #
-- | Efficiently 'mapReduce' a 'Generator' using the 'Action' monoid. A specialized version of its namesake from "Data.Foldable" and "Control.Monad"
--
-- @
' ' ' getAction '
-- @
mapM_ :: (Generator c, Monad m) => (Elem c -> m b) -> c -> m ()
mapM_ = mapReduceWith getAction
{-# INLINE mapM_ #-}
-- | Convenience function as found in "Data.Foldable" and "Control.Monad"
--
-- @
-- 'flip' 'mapM_'
-- @
forM_ :: (Generator c, Monad m) => c -> (Elem c -> m b) -> m ()
forM_ = flip mapM_
{-# INLINE forM_ #-}
-- | The sum of a collection of actions, generalizing 'concat'
--
-- @
' reduceWith ' ' getMonadSum '
-- @
msum :: (Generator c, MonadPlus m, m a ~ Elem c) => c -> m a
msum = reduceWith getMonadSum
# INLINE msum #
-- | Efficiently 'mapReduce' a 'Generator' using the 'WrappedMonoid' monoid. A specialized version of its namesake from "Data.Foldable"
--
-- @
-- 'mapReduceWith' 'unwrapMonoid'
-- @
foldMap :: (Monoid m, Generator c) => (Elem c -> m) -> c -> m
foldMap = mapReduceWith unwrapMonoid
{-# INLINE foldMap #-}
-- | Type specialization of "foldMap" above
concatMap :: Generator c => (Elem c -> [b]) -> c -> [b]
concatMap = foldMap
# INLINE concatMap #
-- | Efficiently 'reduce' a 'Generator' using the 'WrappedMonoid' monoid. A specialized version of its namesake from "Data.Foldable"
--
-- @
-- 'reduceWith' 'unwrapMonoid'
-- @
fold :: (Monoid m, Generator c, Elem c ~ m) => c -> m
fold = reduceWith unwrapMonoid
# INLINE fold #
-- | Convert any 'Generator' to a list of its contents. Specialization of 'reduce'
toList :: Generator c => c -> [Elem c]
toList = reduce
# INLINE toList #
-- | Efficiently 'reduce' a 'Generator' that contains values of type 'Bool'
--
-- @
-- 'reduceWith' 'getAll'
-- @
and :: (Generator c, Elem c ~ Bool) => c -> Bool
and = reduceWith getAll
# INLINE and #
-- | Efficiently 'reduce' a 'Generator' that contains values of type 'Bool'
--
-- @
-- 'reduceWith' 'getAny'
-- @
or :: (Generator c, Elem c ~ Bool) => c -> Bool
or = reduceWith getAny
# INLINE or #
-- | Efficiently 'mapReduce' any 'Generator' checking to see if any of its values match the supplied predicate
--
-- @
-- 'mapReduceWith' 'getAny'
-- @
any :: Generator c => (Elem c -> Bool) -> c -> Bool
any = mapReduceWith getAny
# INLINE any #
-- | Efficiently 'mapReduce' any 'Generator' checking to see if all of its values match the supplied predicate
--
-- @
-- 'mapReduceWith' 'getAll'
-- @
all :: Generator c => (Elem c -> Bool) -> c -> Bool
all = mapReduceWith getAll
# INLINE all #
-- | Efficiently sum over the members of any 'Generator'
--
-- @
-- 'reduceWith' 'getSum'
-- @
sum :: (Generator c, Num (Elem c)) => c -> Elem c
sum = reduceWith getSum
# INLINE sum #
-- | Efficiently take the product of every member of a 'Generator'
--
-- @
' reduceWith ' ' '
-- @
product :: (Generator c, Num (Elem c)) => c -> Elem c
product = reduceWith getProduct
# INLINE product #
-- | Check to see if 'any' member of the 'Generator' matches the supplied value
elem :: (Generator c, Eq (Elem c)) => Elem c -> c -> Bool
elem = any . (==)
# INLINE elem #
-- | Check to make sure that the supplied value is not a member of the 'Generator'
notElem :: (Generator c, Eq (Elem c)) => Elem c -> c -> Bool
notElem x = not . elem x
# INLINE notElem #
-- | Efficiently 'mapReduce' a subset of the elements in a 'Generator'
filter :: (Generator c, Reducer (Elem c) m, Monoid m) => (Elem c -> Bool) -> c -> m
filter p = foldMap f where
f x | p x = unit x
| otherwise = mempty
# INLINE filter #
-- | Allows idiomatic specialization of filter by proving a function that will be used to transform the output
filterWith :: (Generator c, Reducer (Elem c) m, Monoid m) => (m -> n) -> (Elem c -> Bool) -> c -> n
filterWith f p = f . filter p
# INLINE filterWith #
-- | A specialization of ' filter ' using the ' First ' ' Monoid ' , analogous to ' Data.List.find '
--
-- @
-- ' filterWith ' ' getFirst '
-- @
find : : Generator c = > ( Bool ) - > c - > Maybe ( Elem c )
find = filterWith getFirst
{ - # INLINE find #
-- | A specialization of 'filter' using the 'First' 'Monoid', analogous to 'Data.List.find'
--
-- @
-- 'filterWith' 'getFirst'
-- @
find :: Generator c => (Elem c -> Bool) -> c -> Maybe (Elem c)
find = filterWith getFirst
{-# INLINE find #-}
-}
| null | https://raw.githubusercontent.com/ekmett/reducers/605d56e5824d35276df50ad0875f63c73cf4659d/src/Data/Generator/Combinators.hs | haskell | ---------------------------------------------------------------------------
|
Module : Data.Generator.Combinators
License : BSD-style
Maintainer :
Stability : experimental
Portability : non-portable (type families, MPTCs)
"Data.Foldable", "Control.Monad" or elsewhere. Intended to be imported qualified.
> import Data.Generator.Combinators as Generator
---------------------------------------------------------------------------
* Monadic Reduction
* Applicative Reduction
* Logical Reduction
* Monoidal Reduction
* List-Like Reduction
, find
| Efficiently 'mapReduce' a 'Generator' using the 'Traversal' monoid. A specialized version of its namesake from "Data.Foldable"
@
'mapReduce' 'getTraversal'
@
# INLINE traverse_ #
| Convenience function as found in "Data.Foldable"
@
'flip' 'traverse_'
@
# INLINE for_ #
| The sum of a collection of actions, generalizing 'concat'
@
'reduceWith' 'getAlt'
@
| Efficiently 'mapReduce' a 'Generator' using the 'Action' monoid. A specialized version of its namesake from "Data.Foldable" and "Control.Monad"
@
@
# INLINE mapM_ #
| Convenience function as found in "Data.Foldable" and "Control.Monad"
@
'flip' 'mapM_'
@
# INLINE forM_ #
| The sum of a collection of actions, generalizing 'concat'
@
@
| Efficiently 'mapReduce' a 'Generator' using the 'WrappedMonoid' monoid. A specialized version of its namesake from "Data.Foldable"
@
'mapReduceWith' 'unwrapMonoid'
@
# INLINE foldMap #
| Type specialization of "foldMap" above
| Efficiently 'reduce' a 'Generator' using the 'WrappedMonoid' monoid. A specialized version of its namesake from "Data.Foldable"
@
'reduceWith' 'unwrapMonoid'
@
| Convert any 'Generator' to a list of its contents. Specialization of 'reduce'
| Efficiently 'reduce' a 'Generator' that contains values of type 'Bool'
@
'reduceWith' 'getAll'
@
| Efficiently 'reduce' a 'Generator' that contains values of type 'Bool'
@
'reduceWith' 'getAny'
@
| Efficiently 'mapReduce' any 'Generator' checking to see if any of its values match the supplied predicate
@
'mapReduceWith' 'getAny'
@
| Efficiently 'mapReduce' any 'Generator' checking to see if all of its values match the supplied predicate
@
'mapReduceWith' 'getAll'
@
| Efficiently sum over the members of any 'Generator'
@
'reduceWith' 'getSum'
@
| Efficiently take the product of every member of a 'Generator'
@
@
| Check to see if 'any' member of the 'Generator' matches the supplied value
| Check to make sure that the supplied value is not a member of the 'Generator'
| Efficiently 'mapReduce' a subset of the elements in a 'Generator'
| Allows idiomatic specialization of filter by proving a function that will be used to transform the output
| A specialization of ' filter ' using the ' First ' ' Monoid ' , analogous to ' Data.List.find '
@
' filterWith ' ' getFirst '
@
| A specialization of 'filter' using the 'First' 'Monoid', analogous to 'Data.List.find'
@
'filterWith' 'getFirst'
@
# INLINE find # | # LANGUAGE CPP #
# LANGUAGE UndecidableInstances , TypeOperators , FlexibleContexts , MultiParamTypeClasses , FlexibleInstances , TypeFamilies #
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 702 && __GLASGOW_HASKELL__ < 710
# LANGUAGE Trustworthy #
#endif
Copyright : ( c ) 2009
Utilities for working with Monoids that conflict with names from the " Prelude " ,
module Data.Generator.Combinators
(
mapM_
, forM_
, msum
, traverse_
, for_
, asum
, and
, or
, any
, all
, foldMap
, fold
, toList
, concatMap
, elem
, filter
, filterWith
, sum
, product
, notElem
) where
import Prelude hiding
( mapM_, any, all, elem, filter, concatMap, and, or
, sum, product, notElem, replicate, cycle, repeat
#if __GLASGOW_HASKELL__ >= 710
, foldMap
#endif
)
import Control.Applicative (Alternative)
import Control.Monad (MonadPlus)
import Data.Generator
import Data.Semigroup (Sum(..), Product(..), All(..), Any(..), WrappedMonoid(..))
import Data.Semigroup.Applicative (Traversal(..))
import Data.Semigroup.Alternative (Alternate(..))
import Data.Semigroup.Monad (Action(..))
import Data.Semigroup.MonadPlus (MonadSum(..))
import Data.Semigroup.Reducer (Reducer(..))
#if !(MIN_VERSION_base(4,8,0))
import Control.Applicative (Applicative)
import Data.Monoid (Monoid(..))
#endif
traverse_ :: (Generator c, Applicative f) => (Elem c -> f b) -> c -> f ()
traverse_ = mapReduceWith getTraversal
for_ :: (Generator c, Applicative f) => c -> (Elem c -> f b) -> f ()
for_ = flip traverse_
asum :: (Generator c, Alternative f, f a ~ Elem c) => c -> f a
asum = reduceWith getAlternate
# INLINE asum #
' ' ' getAction '
mapM_ :: (Generator c, Monad m) => (Elem c -> m b) -> c -> m ()
mapM_ = mapReduceWith getAction
forM_ :: (Generator c, Monad m) => c -> (Elem c -> m b) -> m ()
forM_ = flip mapM_
' reduceWith ' ' getMonadSum '
msum :: (Generator c, MonadPlus m, m a ~ Elem c) => c -> m a
msum = reduceWith getMonadSum
# INLINE msum #
foldMap :: (Monoid m, Generator c) => (Elem c -> m) -> c -> m
foldMap = mapReduceWith unwrapMonoid
concatMap :: Generator c => (Elem c -> [b]) -> c -> [b]
concatMap = foldMap
# INLINE concatMap #
fold :: (Monoid m, Generator c, Elem c ~ m) => c -> m
fold = reduceWith unwrapMonoid
# INLINE fold #
toList :: Generator c => c -> [Elem c]
toList = reduce
# INLINE toList #
and :: (Generator c, Elem c ~ Bool) => c -> Bool
and = reduceWith getAll
# INLINE and #
or :: (Generator c, Elem c ~ Bool) => c -> Bool
or = reduceWith getAny
# INLINE or #
any :: Generator c => (Elem c -> Bool) -> c -> Bool
any = mapReduceWith getAny
# INLINE any #
all :: Generator c => (Elem c -> Bool) -> c -> Bool
all = mapReduceWith getAll
# INLINE all #
sum :: (Generator c, Num (Elem c)) => c -> Elem c
sum = reduceWith getSum
# INLINE sum #
' reduceWith ' ' '
product :: (Generator c, Num (Elem c)) => c -> Elem c
product = reduceWith getProduct
# INLINE product #
elem :: (Generator c, Eq (Elem c)) => Elem c -> c -> Bool
elem = any . (==)
# INLINE elem #
notElem :: (Generator c, Eq (Elem c)) => Elem c -> c -> Bool
notElem x = not . elem x
# INLINE notElem #
filter :: (Generator c, Reducer (Elem c) m, Monoid m) => (Elem c -> Bool) -> c -> m
filter p = foldMap f where
f x | p x = unit x
| otherwise = mempty
# INLINE filter #
filterWith :: (Generator c, Reducer (Elem c) m, Monoid m) => (m -> n) -> (Elem c -> Bool) -> c -> n
filterWith f p = f . filter p
# INLINE filterWith #
{-
-- | A specialization of 'filter' using the 'First' 'Monoid', analogous to 'Data.List.find'
--
-- @
-- 'filterWith' 'getFirst'
-- @
find :: Generator c => (Elem c -> Bool) -> c -> Maybe (Elem c)
find = filterWith getFirst
{-# INLINE find #-}
-}
|
8d87627db66130a33cd97325dc5660090810bfde81020c32086cec8d24cd2572 | jwiegley/notes | MCompose.hs | module Compose where
import Control.Applicative
import Control.Monad
import Data.Functor.Compose
import Data.Functor.Identity
import Data.Distributive
import Data.Monoid
import Data.Tuple (swap)
import Data.Traversable
-- instance (Monad f, Distributive f, Traversable g, Monad g)
-- => Monad (Compose f g) where
-- return x = Compose $ return (return x)
-- Compose m >>= f =
-- let x = fmap (fmap (getCompose . f)) m in
-- let y = fmap distribute x in
-- Compose $ fmap join (join y)
-- instance (Monad f, Traversable g, Monad g) => Monad (Compose f g) where
-- return x = Compose $ return (return x)
-- Compose m >>= f = Compose $ do
-- m' <- m
-- let x = fmap (getCompose . f) m'
-- m'' <- sequenceA x
-- return $ join m''
-- | Monad for the composition of two monads, valid when the inner
-- monad 'g' is 'Traversable' so its layer can be commuted past 'f'
-- with 'sequenceA'.
-- NOTE(review): this is only lawful for 'g' that distributes over 'f'
-- (e.g. Maybe, Either e); confirm for the instantiations used below.
instance (Monad f, Applicative f, Monad g, Traversable g)
=> Monad (Compose f g) where
return x = Compose $ return (return x)
Compose m >>= f = Compose $ do
-- unwrap the continuation's Compose, commute g outwards with
-- sequenceA, then collapse both monad layers with join.
let x = liftM (fmap (getCompose . f)) m
liftM join (join (liftM sequenceA x))
-- | Join for the composed monad: collapses @Compose f g (Compose f g a)@
-- into @Compose f g a@ using the same sequenceA-based distribution as
-- the Monad instance for 'Compose'.
compose_join :: (Monad f, Applicative f, Monad g, Traversable g)
=> Compose f g (Compose f g a) -> Compose f g a
compose_join (Compose m) = Compose $
-- strip the inner Compose wrappers, commute g past f, join twice
let x = fmap (fmap getCompose) m in
liftM join (join (liftM sequenceA x))
-- | Reader transformer as a composition: @ReaderT e m a ~ e -> m a@.
type ReaderT e = Compose ((->) e)
-- | Retrieve the environment.
ask :: Monad m => ReaderT e m e
ask = Compose return
-- | Run the reader computation with the given environment.
runReaderT :: ReaderT e m a -> e -> m a
runReaderT = getCompose
-- This only works when 'm' is Distributive.
-- | Writer transformer; the pair monoid @(,) w@ sits *inside* 'm'.
type WriterT w (m :: * -> *) = Compose m ((,) w)
-- | Emit an output value 'w'.
tell :: Monad m => w -> WriterT w m ()
tell w = Compose (return (w, ()))
-- | Run the writer, swapping to the conventional @(a, w)@ order.
runWriterT :: Monad m => WriterT w m a -> m (a, w)
runWriterT = liftM swap . getCompose
-- This would only work if 'm' is Distributive, and 's' is a Monoid so that
-- ((,) s) can be a Monad.
-- | State transformer built from two compositions.
type StateT s m = Compose ((->) s) (Compose m ((,) s))
-- Writer-style Monad for pairs; requires 's' to be a Monoid, and
-- state values are combined with '<>' rather than overwritten.
-- NOTE(review): orphan instance; overlaps with the instance that
-- base provides in newer GHC versions -- confirm the target compiler.
instance Monoid s => Monad ((,) s) where
return x = (mempty, x)
(s,x) >>= f = let (s',x') = f x in (s <> s', x')
-- | Read the current state.
get :: Monad m => StateT s m s
get = Compose $ \s -> Compose (return (s, s))
-- | Contribute a new state value (mappended by the pair monad).
put :: Monad m => s -> StateT s m ()
put s = Compose $ \_ -> Compose (return (s, ()))
-- | Run with an initial state, returning @(result, accumulated state)@.
runStateT :: Monad m => StateT s m a -> s -> m (a, s)
runStateT (Compose m) s = liftM swap (getCompose (m s))
-- | Identity transformer: wraps the result in 'Identity'.
type IdentityT m = Compose m Identity
runIdentityT :: Monad m => IdentityT m a -> m a
runIdentityT = liftM runIdentity . getCompose
-- | Failure via 'Maybe' layered inside 'm'.
type MaybeT m = Compose m Maybe
runMaybeT :: MaybeT m a -> m (Maybe a)
runMaybeT = getCompose
-- | Failure carrying an error value via 'Either'.
type EitherT e m = Compose m (Either e)
runEitherT :: EitherT e m a -> m (Either e a)
runEitherT = getCompose
-- | Nondeterminism via lists.
type ListT m = Compose m []
runListT :: ListT m a -> m [a]
runListT = getCompose
-- | A continuation: a function from 'a' to a final answer @m r@.
newtype Hom r m a = Hom { getHom :: a -> m r }
-- This can't work either, since `Hom r m` isn't even a Functor
-- (it is contravariant in 'a'); kept for exposition.
type ContT r m a = Compose (Hom r m) (Hom r m) a
-- | Run a continuation computation with a final continuation.
runContT :: ContT r m a -> (a -> m r) -> m r
runContT (Compose m) = getHom m . Hom
-- | Call with current continuation: invoking 'k' aborts to the
-- captured context 'c', discarding the local continuation.
callCC :: ((a -> ContT r m b) -> ContT r m a) -> ContT r m a
callCC f = Compose $ Hom $ \(Hom c) ->
runContT (f (\x -> Compose $ Hom $ \_ -> c x)) c
-- | Smoke test exercising each Compose-based transformer; returns
-- "done" on success and calls 'error' with the failing step's label
-- otherwise. The ListT and ContT checks are disabled in the original.
test :: Identity String
test = do
  x1 <- flip runReaderT (10 :: Int) $ do
    x <- ask
    return (x + 20)
  unless (x1 == 30) $ error "x1"
  x2 <- runWriterT $ do
    tell ("Hello," :: String)
    tell " World!"
  unless (x2 == ((), "Hello, World!")) $ error "x2"
  x3 <- flip runStateT (Sum (10 :: Int)) $ do
    x <- get
    put (x + 10)
  unless (x3 == ((), Sum 20)) $ error "x3"
  x4 <- runIdentityT $
    return (20 :: Int)
  unless (x4 == 20) $ error "x4"
  x5 <- runMaybeT $
    return (20 :: Int)
  unless (x5 == Just 20) $ error "x5"
  x6 <- runEitherT $ do
    return (Left (30 :: Int))
    return (20 :: Int)
  unless (x6 == Left 30) $ error "x6"
  -- x7 <- runListT $ do
  --   x <- return [1 :: Int, 2]
  --   y <- return [4 :: Int, 5]
  --   return (x, y)
  -- unless (x7 == [(1,4),(1,5),(2,4),(2,5)]) $ error "x7"
  -- x8 <- flip runContT return $ do
  --   x <- callCC $ \k -> do
  --     k (15 :: Int)
  --   return x
  -- unless (x8 == 15) $ error "83"
  return "done"
-- | Print the outcome of the smoke test.
main :: IO ()
main = print $ runIdentity test
| null | https://raw.githubusercontent.com/jwiegley/notes/24574b02bfd869845faa1521854f90e4e8bf5e9a/gists/f719a3d41696d48f6005/MCompose.hs | haskell | => Monad (Compose f g) where
return x = Compose $ return (return x)
Compose m >>= f =
let x = fmap (fmap (getCompose . f)) m in
let y = fmap distribute x in
Compose $ fmap join (join y)
return x = Compose $ return (return x)
Compose m >>= f = Compose $ do
m' <- m
let x = fmap (getCompose . f) m'
m'' <- sequenceA x
return $ join m''
This only works when 'm' is Distributive.
This would only work if 'm' is Distributive, and 's' is a Monoid so that
This can't work either, since `Hom r m` isn't even a Functor.
x7 <- runListT $ do
return (x, y)
x <- callCC $ \k -> do
return x | module Compose where
import Control.Applicative
import Control.Monad
import Data.Functor.Compose
import Data.Functor.Identity
import Data.Distributive
import Data.Monoid
import Data.Tuple (swap)
import Data.Traversable
instance ( Monad f , Distributive f , , )
instance ( Monad f , , ) = > Monad ( Compose f g ) where
instance (Monad f, Applicative f, Monad g, Traversable g)
=> Monad (Compose f g) where
return x = Compose $ return (return x)
Compose m >>= f = Compose $ do
let x = liftM (fmap (getCompose . f)) m
liftM join (join (liftM sequenceA x))
compose_join :: (Monad f, Applicative f, Monad g, Traversable g)
=> Compose f g (Compose f g a) -> Compose f g a
compose_join (Compose m) = Compose $
let x = fmap (fmap getCompose) m in
liftM join (join (liftM sequenceA x))
type ReaderT e = Compose ((->) e)
ask :: Monad m => ReaderT e m e
ask = Compose return
runReaderT :: ReaderT e m a -> e -> m a
runReaderT = getCompose
type WriterT w (m :: * -> *) = Compose m ((,) w)
tell :: Monad m => w -> WriterT w m ()
tell w = Compose (return (w, ()))
runWriterT :: Monad m => WriterT w m a -> m (a, w)
runWriterT = liftM swap . getCompose
( ( , ) s ) can be a Monad .
type StateT s m = Compose ((->) s) (Compose m ((,) s))
instance Monoid s => Monad ((,) s) where
return x = (mempty, x)
(s,x) >>= f = let (s',x') = f x in (s <> s', x')
get :: Monad m => StateT s m s
get = Compose $ \s -> Compose (return (s, s))
put :: Monad m => s -> StateT s m ()
put s = Compose $ \_ -> Compose (return (s, ()))
runStateT :: Monad m => StateT s m a -> s -> m (a, s)
runStateT (Compose m) s = liftM swap (getCompose (m s))
type IdentityT m = Compose m Identity
runIdentityT :: Monad m => IdentityT m a -> m a
runIdentityT = liftM runIdentity . getCompose
type MaybeT m = Compose m Maybe
runMaybeT :: MaybeT m a -> m (Maybe a)
runMaybeT = getCompose
type EitherT e m = Compose m (Either e)
runEitherT :: EitherT e m a -> m (Either e a)
runEitherT = getCompose
type ListT m = Compose m []
runListT :: ListT m a -> m [a]
runListT = getCompose
newtype Hom r m a = Hom { getHom :: a -> m r }
type ContT r m a = Compose (Hom r m) (Hom r m) a
runContT :: ContT r m a -> (a -> m r) -> m r
runContT (Compose m) = getHom m . Hom
callCC :: ((a -> ContT r m b) -> ContT r m a) -> ContT r m a
callCC f = Compose $ Hom $ \(Hom c) ->
runContT (f (\x -> Compose $ Hom $ \_ -> c x)) c
test :: Identity String
test = do
x1 <- flip runReaderT (10 :: Int) $ do
x <- ask
return (x + 20)
unless (x1 == 30) $ error "x1"
x2 <- runWriterT $ do
tell ("Hello," :: String)
tell " World!"
unless (x2 == ((), "Hello, World!")) $ error "x2"
x3 <- flip runStateT (Sum (10 :: Int)) $ do
x <- get
put (x + 10)
unless (x3 == ((), Sum 20)) $ error "x3"
x4 <- runIdentityT $
return (20 :: Int)
unless (x4 == 20) $ error "x4"
x5 <- runMaybeT $
return (20 :: Int)
unless (x5 == Just 20) $ error "x5"
x6 <- runEitherT $ do
return (Left (30 :: Int))
return (20 :: Int)
unless (x6 == Left 30) $ error "x6"
x < - return [ 1 : : Int , 2 ]
y < - return [ 4 : : Int , 5 ]
unless ( x7 = = [ ( 1,4 ) , ( 1,5 ) , ( 2,4 ) , ( 2,5 ) ] ) $ error " x7 "
x8 < - flip runContT return $ do
k ( 15 : : Int )
unless ( x8 = = 15 ) $ error " 83 "
return "done"
main :: IO ()
main = print (runIdentity test)
|
a469256de434720594b029aad844d1bff10ada43cdf3f5cc85cab081a9e8aa66 | basho/machi | machi_flu0_test.erl | %% -------------------------------------------------------------------
%%
%% Machi: a small village of replicated files
%%
Copyright ( c ) 2014 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(machi_flu0_test).
-include("machi.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-compile(export_all).
-endif.
-ifdef(TEST).
-ifndef(PULSE).
%% Exercises the fake-repairing-status flag on a single FLU: set it to
%% true and false and read each value back; the FLU is always stopped.
repair_status_test() ->
{ok, F} = machi_flu0:start_link(one),
try
ok = machi_flu0:set_fake_repairing_status(F, true),
true = machi_flu0:get_fake_repairing_status(F),
ok = machi_flu0:set_fake_repairing_status(F, false),
false = machi_flu0:get_fake_repairing_status(F)
after
ok = machi_flu0:stop(F)
end.
%% Trivial Concuerror model-checking scenario: no work at all.
concuerror1_test() ->
ok.
%% Concuerror scenario: start and immediately stop a single FLU.
concuerror2_test() ->
{ok, F} = machi_flu0:start_link(one),
ok = machi_flu0:stop(F),
ok.
%% Concuerror scenario: two processes each start and stop their own FLU
%% concurrently; the parent waits for both to signal completion.
concuerror3_test() ->
    Parent = self(),
    StartStop =
        fun(Name) ->
                {ok, Flu} = machi_flu0:start_link(Name),
                ok = machi_flu0:stop(Flu),
                Parent ! done
        end,
    Pids = [spawn(fun() -> StartStop(Name) end) || Name <- [one, two]],
    lists:foreach(fun(_Pid) -> receive done -> ok end end, Pids),
    ok.
%% Concuerror scenario: one writer, then three racing trimmers and one
%% reader against the same FLU page. Exactly one trim may return ok (the
%% others see error_trimmed) and a read must never observe 'unwritten'.
concuerror4_test() ->
event_setup(),
{ok, F1} = machi_flu0:start_link(one),
Epoch = 1,
ok = m_proj_write(F1, Epoch, dontcare),
Val = <<"val!">>,
ok = m_write(F1, Epoch, Val),
%% A write with an older epoch must be rejected as stale.
{error_stale_projection, Epoch} = m_write(F1, Epoch - 1, Val),
Me = self(),
TrimFun = fun() -> Res = m_trim(F1, Epoch),
Me ! {self(), Res}
end,
TrimPids = [spawn(TrimFun), spawn(TrimFun), spawn(TrimFun)],
TrimExpected = [error_trimmed,error_trimmed,ok],
GetFun = fun() -> Res = m_read(F1, Epoch),
Me ! {self(), Res}
end,
GetPids = [spawn(GetFun)],
%% No reader may ever see the page as unwritten.
GetExpected = fun(Results) ->
[] = [X || X <- Results, X == unwritten],
ok
end,
TrimResults = lists:sort([receive
{TrimPid, Res} -> Res
end || TrimPid <- TrimPids]),
TrimExpected = TrimResults,
GetResults = lists:sort([receive
{GetPid, Res} -> Res
end || GetPid <- GetPids]),
ok = GetExpected(GetResults),
ok = m_stop(F1),
ok.
%% Projection store lifecycle: reads fail while unwritten, the first
%% write wins, rewrites are rejected, and latest-epoch queries track it.
proj_store_test() ->
event_setup(),
{ok, F1} = machi_flu0:start_link(one),
error_unwritten = m_proj_get_latest_num(F1),
error_unwritten = m_proj_read_latest(F1),
Proj1 = whatever1,
ok = m_proj_write(F1, 1, Proj1),
error_written = m_proj_write(F1, 1, Proj1),
{ok, Proj1} = m_proj_read(F1, 1),
{ok, 1} = m_proj_get_latest_num(F1),
{ok, Proj1} = m_proj_read_latest(F1),
ok = m_stop(F1),
ok.
%% Verifies epoch wedging: after a write attempt with a too-new epoch
%% the FLU wedges and rejects all operations until a newer projection
%% is written; normal write/read/trim semantics then resume.
wedge_test() ->
    event_setup(),
    {ok, F1} = machi_flu0:start_link(one),
    Epoch1 = 1,
    ok = m_proj_write(F1, Epoch1, dontcare),
    Val = <<"val!">>,
    ok = m_write(F1, Epoch1, Val),
    {error_stale_projection, Epoch1} = m_write(F1, Epoch1 - 1, Val),
    error_wedged = m_write(F1, Epoch1 + 1, Val),
    %% Until we write a newer/bigger projection, all ops are error_wedged
    error_wedged = m_read(F1, Epoch1),
    error_wedged = m_write(F1, Epoch1, Val),
    error_wedged = m_trim(F1, Epoch1),
    Epoch2 = Epoch1 + 1,
    ok = m_proj_write(F1, Epoch2, dontcare),
    {ok, Val} = m_read(F1, Epoch2),
    error_written = m_write(F1, Epoch2, Val),
    ok = m_trim(F1, Epoch2),
    error_trimmed = m_trim(F1, Epoch2),
    ok = m_stop(F1),
    %% io:format(user, "XX ~p\n", [_XX]),
    event_shutdown(),
    ok.
%% Races a page append against a concurrent projection-epoch bump across
%% two FLUs; both racing processes must run to completion.
proj0_test() ->
    Me = self(),
    event_setup(),
    {ok, F1} = machi_flu0:start_link(one),
    {ok, F2} = machi_flu0:start_link(two),
    FLUs = [F1, F2],
    FirstProj = machi_flu0:make_proj(1, FLUs),
    Epoch1 = FirstProj#proj.epoch,
    [ok = m_proj_write(F, Epoch1, FirstProj) || F <- FLUs],
    %% Deliberately stale projection for the appender (epoch -42).
    Proj0 = machi_flu0:make_proj(-42, FLUs),
    Val = <<"val!">>,
    Pid1 = spawn(fun() ->
                     {ok, _Proj1} = m_append_page(Proj0, Val),
                     Me ! {self(), done}
                 end),
    %% Pids = [Pid1],
    SecondProj = machi_flu0:make_proj(2, FLUs),
    Epoch2 = SecondProj#proj.epoch,
    Pid2 = spawn(fun() ->
                     %% [ok = m_proj_write_with_check(F, Epoch2, SecondProj) ||
                     [case m_proj_write(F, Epoch2, SecondProj) of
                          ok ->
                              ok;
                          error_written ->
                              ok
                      end || F <- FLUs],
                     Me ! {self(), done}
                 end),
    Pids = [Pid1, Pid2],
    [receive {Pid, _} -> ok end || Pid <- Pids],
    [ok = m_stop(F) || F <- FLUs],
    %% io:format(user, "XX ~p\n", [_XX]),
    event_shutdown(),
    ok.
%%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%%
%% Instrumented wrappers around machi_flu0 data operations: each one
%% performs the call and records the result in the event trace
%% (see event_add/3) before returning it.
m_write(Pid, Epoch1, Val) ->
Res = machi_flu0:write(Pid, Epoch1, Val),
event_add(write, Pid, Res),
Res.
%% Read at the given epoch, recording the result.
m_read(Pid, Epoch) ->
Res = machi_flu0:read(Pid, Epoch),
event_add(get, Pid, Res),
Res.
%% Trim at the given epoch, recording the result.
m_trim(Pid, Epoch) ->
Res = machi_flu0:trim(Pid, Epoch),
event_add(trim, Pid, Res),
Res.
%% Stop the FLU, recording the result.
m_stop(Pid) ->
Res = machi_flu0:stop(Pid),
event_add(stop, Pid, Res),
Res.
%% Write a private projection at Epoch, recording the result.
m_proj_write(Pid, Epoch, Proj) ->
Res = machi_flu0:proj_write(Pid, Epoch, private, Proj),
event_add(proj_write, Pid, Res),
Res.
%% Write a projection, treating error_written as success iff the value
%% already stored at that epoch equals the one we tried to write.
m_proj_write_with_check(Pid, Epoch, Proj) ->
case m_proj_write(Pid, Epoch, Proj) of
ok ->
ok;
error_written ->
case m_proj_read(Pid, Epoch) of
{ok, Proj} ->
ok;
{ok, OtherProj} ->
{bummer, other_proj, OtherProj};
Else ->
Else
end
end.
%% Read the private projection at Epoch, recording the result.
m_proj_read(Pid, Epoch) ->
Res = machi_flu0:proj_read(Pid, Epoch, private),
event_add(proj_read, Pid, Res),
Res.
%% Fetch the newest private projection epoch number, recording it.
m_proj_get_latest_num(Pid) ->
Res = machi_flu0:proj_get_latest_num(Pid, private),
event_add(proj_get_latest_num, Pid, Res),
Res.
%% Fetch the newest private projection itself, recording the result.
m_proj_read_latest(Pid) ->
Res = machi_flu0:proj_read_latest(Pid, private),
event_add(proj_read_latest, Pid, Res),
Res.
%% Append Bytes under the given projection, retrying up to 5 times when
%% the projection is stale or a FLU is wedged; returns {Result, Proj}.
m_append_page(Proj, Bytes) ->
    m_append_page(Proj, Bytes, 5).

%% Out of retries: give up with an error tuple.
m_append_page(Proj, _Bytes, 0) ->
    {{error_failed, ?MODULE, ?LINE}, Proj};
m_append_page(Proj, Bytes, Retries) ->
    %% Fetch the latest projection from the FLUs and try again with it.
    Retry = fun() ->
                    case poll_for_new_epoch_projection(Proj) of
                        {ok, NewProj} ->
                            m_append_page(NewProj, Bytes, Retries - 1);
                        Else ->
                            {Else, Proj}
                    end
            end,
    case m_append_page2(Proj, Bytes) of
        %% lost_race ->
        %%     m_append_pageQQ(Proj, Bytes, Retries - 1);
        {error_stale_projection, _} ->
            Retry();
        error_wedged ->
            %% Our projection is behind: repair the stores, then retry.
            youbetcha = m_repair_projection_store(Proj),
            Retry();
        Else ->
            {Else, Proj}
    end.
%% Write Bytes at the projection's epoch to every FLU in its active list.
m_append_page2(#proj{epoch=Epoch, active=Active}, Bytes) ->
m_append_page3(Active, Epoch, Bytes).
%% Chain write: succeed only when every FLU accepts the page (or already
%% holds an identical copy); stop at the first real error.
m_append_page3([], _Epoch, _Bytes) ->
ok;
m_append_page3([H|T], Epoch, Bytes) ->
Res = (catch m_write(H, Epoch, Bytes)),
case Res of
ok ->
m_append_page3(T, Epoch, Bytes);
error_unwritten ->
%% Later FLU unwritten after an earlier one was written: a bug.
exit({gack, line, ?LINE});
error_written ->
%% Already written: acceptable iff the stored bytes match ours.
case m_read(H, Epoch) of
{ok, Present} when Present == Bytes ->
m_append_page3(T, Epoch, Bytes);
{error_stale_projection, _}=ESP ->
ESP;
Else ->
Else
end;
Else ->
Else
end.
%% L ->
%% case [x || {error_stale_projection, _} <- L] of
%% [] ->
%% UnwrittenP = lists:member(error_unwritten, L),
%% WrittenP = lists:member(error_written, L),
%% TrimmedP = lists:member(error_trimmed, L),
%% WedgedP = lists:member(error_wedged, L),
%% if UnwrittenP ->
%% error_unwritten;
%% WrittenP ->
%% error_written;
%% TrimmedP ->
%% error_trimmed;
%% WedgedP ->
%% error_wedged;
%% true ->
%% exit({gack, L})
%% end;
%% _ ->
%% {error_stale_projection, caller_not_looking_here}
%% end
%% end.
%% Number of times to poll for a newer projection before giving up.
get_poll_retries() ->
25.
%% Milliseconds to sleep between projection polls.
get_poll_sleep_time() ->
50.
%% Poll every FLU for its latest private projection until at least one
%% responds, sleeping between attempts; exits once the retry budget
%% (get_poll_retries/0) is exhausted.
poll_for_new_epoch_projection(P) ->
poll_for_new_epoch_projection(P, get_poll_retries()).
poll_for_new_epoch_projection(_P, 0) ->
exit({ouch, ?MODULE, ?LINE});
poll_for_new_epoch_projection(#proj{all=All} = P, Tries) ->
case multi_call(All, ?MODULE, m_proj_read_latest, []) of
[] ->
timer:sleep(get_poll_sleep_time()),
poll_for_new_epoch_projection(P, Tries - 1);
L ->
%% Pick the projection that sorts last, i.e. the highest epoch.
Answer = lists:last(lists:sort(lists:flatten(L))),
{ok, Answer}
end.
%% Apply Mod:Fun(Pid, ArgSuffix...) to each element, collecting (in
%% order) the X from every {ok, X} reply and dropping everything else.
multi_call(Pids, Mod, Fun, ArgSuffix) ->
    Collect =
        fun(Pid, Acc) ->
                case erlang:apply(Mod, Fun, [Pid | ArgSuffix]) of
                    {ok, Result} -> [Result | Acc];
                    _Other       -> Acc
                end
        end,
    lists:foldr(Collect, [], Pids).
%% Best-effort: push Proj to every FLU's projection store, swallowing
%% any per-FLU failure; always returns the atom 'youbetcha'.
m_repair_projection_store(#proj{epoch=Epoch, all=All} = Proj) ->
    lists:foreach(
      fun(FLU) -> catch m_proj_write(FLU, Epoch, Proj) end,
      All),
    youbetcha.
%%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%%
%% Reset the Lamport clock and (re)create the public ETS table used to
%% record the event trace; any previous table is deleted first.
event_setup() ->
lamport_clock:reset(),
Tab = ?MODULE,
ok = event_shutdown(),
ets:new(Tab, [named_table, ordered_set, public]).
%% Delete the event-trace table if it exists; always returns ok.
event_shutdown() ->
    catch ets:delete(?MODULE),
    ok.
%% Record one trace event, stamped with the current Lamport clock value.
event_add(Key, Who, Description) ->
    Tab = ?MODULE,
    E = {lamport_clock:get(), Key, Who, Description},
    %% io:format(user, "E = ~p\n", [E]),
    ets:insert(Tab, E).
%% Return the full event trace in key (Lamport-clock) order.
event_get_all() ->
    ets:tab2list(?MODULE).
-endif. % ! PULSE
-endif.
| null | https://raw.githubusercontent.com/basho/machi/e87bd59a9777d805b00f9e9981467eb28e28390c/prototype/chain-manager/test/machi_flu0_test.erl | erlang | -------------------------------------------------------------------
Machi: a small village of replicated files
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
Until we write a newer/bigger projection, all ops are error_wedged
Pids = [Pid1],
%%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%%
lost_race ->
L ->
case [x || {error_stale_projection, _} <- L] of
[] ->
UnwrittenP = lists:member(error_unwritten, L),
if UnwrittenP ->
error_unwritten;
WrittenP ->
error_written;
TrimmedP ->
error_trimmed;
WedgedP ->
error_wedged;
true ->
exit({gack, L})
end;
_ ->
{error_stale_projection, caller_not_looking_here}
end
end.
%%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%% %%%%
! PULSE | Copyright ( c ) 2014 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(machi_flu0_test).
-include("machi.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-compile(export_all).
-endif.
-ifdef(TEST).
-ifndef(PULSE).
repair_status_test() ->
{ok, F} = machi_flu0:start_link(one),
try
ok = machi_flu0:set_fake_repairing_status(F, true),
true = machi_flu0:get_fake_repairing_status(F),
ok = machi_flu0:set_fake_repairing_status(F, false),
false = machi_flu0:get_fake_repairing_status(F)
after
ok = machi_flu0:stop(F)
end.
concuerror1_test() ->
ok.
concuerror2_test() ->
{ok, F} = machi_flu0:start_link(one),
ok = machi_flu0:stop(F),
ok.
concuerror3_test() ->
Me = self(),
Inner = fun(Name) -> {ok, F1} = machi_flu0:start_link(Name),
ok = machi_flu0:stop(F1),
Me ! done
end,
P1 = spawn(fun() -> Inner(one) end),
P2 = spawn(fun() -> Inner(two) end),
[receive done -> ok end || _ <- [P1, P2]],
ok.
concuerror4_test() ->
event_setup(),
{ok, F1} = machi_flu0:start_link(one),
Epoch = 1,
ok = m_proj_write(F1, Epoch, dontcare),
Val = <<"val!">>,
ok = m_write(F1, Epoch, Val),
{error_stale_projection, Epoch} = m_write(F1, Epoch - 1, Val),
Me = self(),
TrimFun = fun() -> Res = m_trim(F1, Epoch),
Me ! {self(), Res}
end,
TrimPids = [spawn(TrimFun), spawn(TrimFun), spawn(TrimFun)],
TrimExpected = [error_trimmed,error_trimmed,ok],
GetFun = fun() -> Res = m_read(F1, Epoch),
Me ! {self(), Res}
end,
GetPids = [spawn(GetFun)],
GetExpected = fun(Results) ->
[] = [X || X <- Results, X == unwritten],
ok
end,
TrimResults = lists:sort([receive
{TrimPid, Res} -> Res
end || TrimPid <- TrimPids]),
TrimExpected = TrimResults,
GetResults = lists:sort([receive
{GetPid, Res} -> Res
end || GetPid <- GetPids]),
ok = GetExpected(GetResults),
ok = m_stop(F1),
ok.
proj_store_test() ->
event_setup(),
{ok, F1} = machi_flu0:start_link(one),
error_unwritten = m_proj_get_latest_num(F1),
error_unwritten = m_proj_read_latest(F1),
Proj1 = whatever1,
ok = m_proj_write(F1, 1, Proj1),
error_written = m_proj_write(F1, 1, Proj1),
{ok, Proj1} = m_proj_read(F1, 1),
{ok, 1} = m_proj_get_latest_num(F1),
{ok, Proj1} = m_proj_read_latest(F1),
ok = m_stop(F1),
ok.
%% Verifies epoch wedging: after a write attempt with a too-new epoch
%% the FLU wedges and rejects all operations until a newer projection
%% is written; normal write/read/trim semantics then resume.
wedge_test() ->
    event_setup(),
    {ok, F1} = machi_flu0:start_link(one),
    Epoch1 = 1,
    ok = m_proj_write(F1, Epoch1, dontcare),
    Val = <<"val!">>,
    ok = m_write(F1, Epoch1, Val),
    {error_stale_projection, Epoch1} = m_write(F1, Epoch1 - 1, Val),
    error_wedged = m_write(F1, Epoch1 + 1, Val),
    %% Until we write a newer/bigger projection, all ops are error_wedged
    error_wedged = m_read(F1, Epoch1),
    error_wedged = m_write(F1, Epoch1, Val),
    error_wedged = m_trim(F1, Epoch1),
    Epoch2 = Epoch1 + 1,
    ok = m_proj_write(F1, Epoch2, dontcare),
    {ok, Val} = m_read(F1, Epoch2),
    error_written = m_write(F1, Epoch2, Val),
    ok = m_trim(F1, Epoch2),
    error_trimmed = m_trim(F1, Epoch2),
    ok = m_stop(F1),
    %% io:format(user, "XX ~p\n", [_XX]),
    event_shutdown(),
    ok.
%% Races a page append against a concurrent projection-epoch bump across
%% two FLUs; both racing processes must run to completion.
proj0_test() ->
    Me = self(),
    event_setup(),
    {ok, F1} = machi_flu0:start_link(one),
    {ok, F2} = machi_flu0:start_link(two),
    FLUs = [F1, F2],
    FirstProj = machi_flu0:make_proj(1, FLUs),
    Epoch1 = FirstProj#proj.epoch,
    [ok = m_proj_write(F, Epoch1, FirstProj) || F <- FLUs],
    %% Deliberately stale projection for the appender (epoch -42).
    Proj0 = machi_flu0:make_proj(-42, FLUs),
    Val = <<"val!">>,
    Pid1 = spawn(fun() ->
                     {ok, _Proj1} = m_append_page(Proj0, Val),
                     Me ! {self(), done}
                 end),
    SecondProj = machi_flu0:make_proj(2, FLUs),
    Epoch2 = SecondProj#proj.epoch,
    Pid2 = spawn(fun() ->
                     %% [ok = m_proj_write_with_check(F, Epoch2, SecondProj) ||
                     [case m_proj_write(F, Epoch2, SecondProj) of
                          ok ->
                              ok;
                          error_written ->
                              ok
                      end || F <- FLUs],
                     Me ! {self(), done}
                 end),
    Pids = [Pid1, Pid2],
    [receive {Pid, _} -> ok end || Pid <- Pids],
    [ok = m_stop(F) || F <- FLUs],
    %% io:format(user, "XX ~p\n", [_XX]),
    event_shutdown(),
    ok.
m_write(Pid, Epoch1, Val) ->
Res = machi_flu0:write(Pid, Epoch1, Val),
event_add(write, Pid, Res),
Res.
m_read(Pid, Epoch) ->
Res = machi_flu0:read(Pid, Epoch),
event_add(get, Pid, Res),
Res.
m_trim(Pid, Epoch) ->
Res = machi_flu0:trim(Pid, Epoch),
event_add(trim, Pid, Res),
Res.
m_stop(Pid) ->
Res = machi_flu0:stop(Pid),
event_add(stop, Pid, Res),
Res.
m_proj_write(Pid, Epoch, Proj) ->
Res = machi_flu0:proj_write(Pid, Epoch, private, Proj),
event_add(proj_write, Pid, Res),
Res.
m_proj_write_with_check(Pid, Epoch, Proj) ->
case m_proj_write(Pid, Epoch, Proj) of
ok ->
ok;
error_written ->
case m_proj_read(Pid, Epoch) of
{ok, Proj} ->
ok;
{ok, OtherProj} ->
{bummer, other_proj, OtherProj};
Else ->
Else
end
end.
m_proj_read(Pid, Epoch) ->
Res = machi_flu0:proj_read(Pid, Epoch, private),
event_add(proj_read, Pid, Res),
Res.
m_proj_get_latest_num(Pid) ->
Res = machi_flu0:proj_get_latest_num(Pid, private),
event_add(proj_get_latest_num, Pid, Res),
Res.
m_proj_read_latest(Pid) ->
Res = machi_flu0:proj_read_latest(Pid, private),
event_add(proj_read_latest, Pid, Res),
Res.
m_append_page(Proj, Bytes) ->
m_append_page(Proj, Bytes, 5).
%% Out of retries: give up with an error tuple.
m_append_page(Proj, _Bytes, 0) ->
    {{error_failed, ?MODULE, ?LINE}, Proj};
m_append_page(Proj, Bytes, Retries) ->
    %% Fetch the latest projection from the FLUs and try again with it.
    Retry = fun() ->
                    case poll_for_new_epoch_projection(Proj) of
                        {ok, NewProj} ->
                            m_append_page(NewProj, Bytes, Retries - 1);
                        Else ->
                            {Else, Proj}
                    end
            end,
    case m_append_page2(Proj, Bytes) of
        %% lost_race ->
        %%     m_append_pageQQ(Proj, Bytes, Retries - 1);
        {error_stale_projection, _} ->
            Retry();
        error_wedged ->
            %% Our projection is behind: repair the stores, then retry.
            youbetcha = m_repair_projection_store(Proj),
            Retry();
        Else ->
            {Else, Proj}
    end.
m_append_page2(#proj{epoch=Epoch, active=Active}, Bytes) ->
m_append_page3(Active, Epoch, Bytes).
m_append_page3([], _Epoch, _Bytes) ->
ok;
m_append_page3([H|T], Epoch, Bytes) ->
Res = (catch m_write(H, Epoch, Bytes)),
case Res of
ok ->
m_append_page3(T, Epoch, Bytes);
error_unwritten ->
exit({gack, line, ?LINE});
error_written ->
case m_read(H, Epoch) of
{ok, Present} when Present == Bytes ->
m_append_page3(T, Epoch, Bytes);
{error_stale_projection, _}=ESP ->
ESP;
Else ->
Else
end;
Else ->
Else
end.
%% WrittenP = lists:member(error_written, L),
%% TrimmedP = lists:member(error_trimmed, L),
%% WedgedP = lists:member(error_wedged, L),
get_poll_retries() ->
25.
get_poll_sleep_time() ->
50.
poll_for_new_epoch_projection(P) ->
poll_for_new_epoch_projection(P, get_poll_retries()).
poll_for_new_epoch_projection(_P, 0) ->
exit({ouch, ?MODULE, ?LINE});
poll_for_new_epoch_projection(#proj{all=All} = P, Tries) ->
case multi_call(All, ?MODULE, m_proj_read_latest, []) of
[] ->
timer:sleep(get_poll_sleep_time()),
poll_for_new_epoch_projection(P, Tries - 1);
L ->
Answer = lists:last(lists:sort(lists:flatten(L))),
{ok, Answer}
end.
multi_call([], _Mod, _Fun, _ArgSuffix) ->
[];
multi_call([H|T], Mod, Fun, ArgSuffix) ->
case erlang:apply(Mod,Fun, [H|ArgSuffix]) of
{ok, X} ->
[X|multi_call(T, Mod, Fun, ArgSuffix)];
_ ->
multi_call(T, Mod, Fun, ArgSuffix)
end.
m_repair_projection_store(Proj) ->
[begin
catch m_proj_write(FLU, Proj#proj.epoch, Proj)
end || FLU <- Proj#proj.all],
youbetcha.
event_setup() ->
lamport_clock:reset(),
Tab = ?MODULE,
ok = event_shutdown(),
ets:new(Tab, [named_table, ordered_set, public]).
event_shutdown() ->
Tab = ?MODULE,
(catch ets:delete(Tab)),
ok.
%% Record one trace event, stamped with the current Lamport clock value.
event_add(Key, Who, Description) ->
    Tab = ?MODULE,
    E = {lamport_clock:get(), Key, Who, Description},
    %% io:format(user, "E = ~p\n", [E]),
    ets:insert(Tab, E).
event_get_all() ->
Tab = ?MODULE,
ets:tab2list(Tab).
-endif.
|
2d9a52c60551152340d0818ba92f5218992ecba2336a70022feca31ab60e3e37 | tenpureto/tenpureto | UI.hs | # LANGUAGE TemplateHaskell #
module Tenpureto.Effects.UI where
import Polysemy
import Polysemy.Error
import Data.Bool
import Data.Foldable
import Data.Maybe
import qualified Data.Set as Set
import Tenpureto.Data
import Tenpureto.Effects.FileSystem
import Tenpureto.Effects.Git ( BranchRef )
import Tenpureto.Effects.Terminal
import Tenpureto.Effects.UI.Internal
import Tenpureto.Messages
import qualified Tenpureto.OrderedMap as OrderedMap
import Tenpureto.TemplateLoader
data UIException = UnattendedNotPossible Text
| NoBaseBranchesException
| NoPreviousCommitException
instance Pretty UIException where
pretty (UnattendedNotPossible missing) =
"Input required when running in an unattended mode:" <+> pretty missing
pretty NoBaseBranchesException =
"Repository does not contain template branches"
pretty NoPreviousCommitException
= "Cannot find a previous template commit, either the repository was not created with tenpureto or the commit history has been amended in an incompatible way"
data ConflictResolutionStrategy = AlreadyResolved | MergeTool
data UI m a where
InputTemplateConfiguration ::PreliminaryProjectConfiguration -> UI m FinalTemplateConfiguration
InputUpdateConfiguration ::PreliminaryProjectConfiguration -> UI m FinalUpdateConfiguration
InputProjectConfiguration ::TemplateInformation -> PreliminaryProjectConfiguration -> UI m FinalProjectConfiguration
InputResolutionStrategy ::Path Abs Dir -> [Path Rel File] -> UI m ConflictResolutionStrategy
ConfirmShellToAmend ::UI m Bool
ConfirmPush ::[BranchRef] -> [BranchRef] -> [BranchRef] -> UI m Bool
ConfirmPullRequest ::[BranchRef] -> Int -> UI m Bool
makeSem ''UI
runUIInTerminal
:: Members '[FileSystem , Error UIException , Terminal , TerminalInput] r
=> Sem (UI ': r) a
-> Sem r a
runUIInTerminal = interpret $ \case
InputTemplateConfiguration PreliminaryProjectConfiguration { preSelectedTemplate = mbt, preTargetDirectory = mbtd }
-> FinalTemplateConfiguration
<$> maybe inputTemplate return mbt
<*> maybe inputTarget return mbtd
InputUpdateConfiguration PreliminaryProjectConfiguration { prePreviousTemplateCommit = mbc }
-> FinalUpdateConfiguration
<$> maybe (throw NoPreviousCommitException) return mbc
InputProjectConfiguration templateInformation providedConfiguration -> do
let psb = fromMaybe mempty (preSelectedBranches providedConfiguration)
let branches = filterTemplateBranches BranchFilterIsFeatureBranch
templateInformation
sbi <- inputBranches branches psb
let sbvars = foldr OrderedMap.union OrderedMap.empty
$ fmap branchVariables (Set.toList sbi)
let
cvars = fromMaybe OrderedMap.empty
(preVariableValues providedConfiguration)
let vars = withDefaults
sbvars
cvars
(preVariableDefaultReplacements providedConfiguration)
varVals <- inputVariables vars
return FinalProjectConfiguration { projectBranches = Set.toList sbi
, variableValues = varVals
}
InputResolutionStrategy repo mergeConflicts -> do
sayLn "The following files have merge conflicts:"
traverse_ (\c -> sayLn (" " <> pretty c)) mergeConflicts
sayLn $ "Repository path: " <> pretty repo
result <- confirm "Run \"git mergetool\"" (Just True)
return $ bool AlreadyResolved MergeTool result
ConfirmShellToAmend -> confirm confirmShellToAmendMessage (Just False)
ConfirmPush deletes creates updates ->
confirm (confirmPushMessage deletes creates updates) (Just False)
ConfirmPullRequest updates cleanups ->
confirm (confirmPullRequestMessage updates cleanups) (Just False)
runUIUnattended :: Member (Error UIException) r => Sem (UI ': r) a -> Sem r a
runUIUnattended = interpret $ \case
InputTemplateConfiguration PreliminaryProjectConfiguration { preSelectedTemplate = mbt, preTargetDirectory = mbtd }
-> FinalTemplateConfiguration
<$> maybe (notPossible "template name") return mbt
<*> maybe (notPossible "target directory") return mbtd
InputUpdateConfiguration PreliminaryProjectConfiguration { prePreviousTemplateCommit = mbc }
-> FinalUpdateConfiguration
<$> maybe (notPossible "previous template commit") return mbc
InputProjectConfiguration templateInformation providedConfiguration ->
FinalProjectConfiguration
<$> maybe (notPossible "selected branches")
(return . templateBranchesByNames templateInformation)
(preSelectedBranches providedConfiguration)
<*> maybe (notPossible "variable values")
return
(preVariableValues providedConfiguration)
InputResolutionStrategy _ _ -> notPossible "merge conflicts"
ConfirmShellToAmend -> return False
ConfirmPush{} -> return True
ConfirmPullRequest{} -> return True
where
notPossible :: Member (Error UIException) r => Text -> Sem r a
notPossible msg = throw $ UnattendedNotPossible msg
| null | https://raw.githubusercontent.com/tenpureto/tenpureto/886df860200e1a6f44ce07c24a5e7597009f71ef/src/Tenpureto/Effects/UI.hs | haskell | # LANGUAGE TemplateHaskell #
module Tenpureto.Effects.UI where
import Polysemy
import Polysemy.Error
import Data.Bool
import Data.Foldable
import Data.Maybe
import qualified Data.Set as Set
import Tenpureto.Data
import Tenpureto.Effects.FileSystem
import Tenpureto.Effects.Git ( BranchRef )
import Tenpureto.Effects.Terminal
import Tenpureto.Effects.UI.Internal
import Tenpureto.Messages
import qualified Tenpureto.OrderedMap as OrderedMap
import Tenpureto.TemplateLoader
data UIException = UnattendedNotPossible Text
| NoBaseBranchesException
| NoPreviousCommitException
instance Pretty UIException where
pretty (UnattendedNotPossible missing) =
"Input required when running in an unattended mode:" <+> pretty missing
pretty NoBaseBranchesException =
"Repository does not contain template branches"
pretty NoPreviousCommitException
= "Cannot find a previous template commit, either the repository was not created with tenpureto or the commit history has been amended in an incompatible way"
data ConflictResolutionStrategy = AlreadyResolved | MergeTool
data UI m a where
InputTemplateConfiguration ::PreliminaryProjectConfiguration -> UI m FinalTemplateConfiguration
InputUpdateConfiguration ::PreliminaryProjectConfiguration -> UI m FinalUpdateConfiguration
InputProjectConfiguration ::TemplateInformation -> PreliminaryProjectConfiguration -> UI m FinalProjectConfiguration
InputResolutionStrategy ::Path Abs Dir -> [Path Rel File] -> UI m ConflictResolutionStrategy
ConfirmShellToAmend ::UI m Bool
ConfirmPush ::[BranchRef] -> [BranchRef] -> [BranchRef] -> UI m Bool
ConfirmPullRequest ::[BranchRef] -> Int -> UI m Bool
makeSem ''UI
runUIInTerminal
:: Members '[FileSystem , Error UIException , Terminal , TerminalInput] r
=> Sem (UI ': r) a
-> Sem r a
runUIInTerminal = interpret $ \case
InputTemplateConfiguration PreliminaryProjectConfiguration { preSelectedTemplate = mbt, preTargetDirectory = mbtd }
-> FinalTemplateConfiguration
<$> maybe inputTemplate return mbt
<*> maybe inputTarget return mbtd
InputUpdateConfiguration PreliminaryProjectConfiguration { prePreviousTemplateCommit = mbc }
-> FinalUpdateConfiguration
<$> maybe (throw NoPreviousCommitException) return mbc
InputProjectConfiguration templateInformation providedConfiguration -> do
let psb = fromMaybe mempty (preSelectedBranches providedConfiguration)
let branches = filterTemplateBranches BranchFilterIsFeatureBranch
templateInformation
sbi <- inputBranches branches psb
let sbvars = foldr OrderedMap.union OrderedMap.empty
$ fmap branchVariables (Set.toList sbi)
let
cvars = fromMaybe OrderedMap.empty
(preVariableValues providedConfiguration)
let vars = withDefaults
sbvars
cvars
(preVariableDefaultReplacements providedConfiguration)
varVals <- inputVariables vars
return FinalProjectConfiguration { projectBranches = Set.toList sbi
, variableValues = varVals
}
InputResolutionStrategy repo mergeConflicts -> do
sayLn "The following files have merge conflicts:"
traverse_ (\c -> sayLn (" " <> pretty c)) mergeConflicts
sayLn $ "Repository path: " <> pretty repo
result <- confirm "Run \"git mergetool\"" (Just True)
return $ bool AlreadyResolved MergeTool result
ConfirmShellToAmend -> confirm confirmShellToAmendMessage (Just False)
ConfirmPush deletes creates updates ->
confirm (confirmPushMessage deletes creates updates) (Just False)
ConfirmPullRequest updates cleanups ->
confirm (confirmPullRequestMessage updates cleanups) (Just False)
runUIUnattended :: Member (Error UIException) r => Sem (UI ': r) a -> Sem r a
runUIUnattended = interpret $ \case
InputTemplateConfiguration PreliminaryProjectConfiguration { preSelectedTemplate = mbt, preTargetDirectory = mbtd }
-> FinalTemplateConfiguration
<$> maybe (notPossible "template name") return mbt
<*> maybe (notPossible "target directory") return mbtd
InputUpdateConfiguration PreliminaryProjectConfiguration { prePreviousTemplateCommit = mbc }
-> FinalUpdateConfiguration
<$> maybe (notPossible "previous template commit") return mbc
InputProjectConfiguration templateInformation providedConfiguration ->
FinalProjectConfiguration
<$> maybe (notPossible "selected branches")
(return . templateBranchesByNames templateInformation)
(preSelectedBranches providedConfiguration)
<*> maybe (notPossible "variable values")
return
(preVariableValues providedConfiguration)
InputResolutionStrategy _ _ -> notPossible "merge conflicts"
ConfirmShellToAmend -> return False
ConfirmPush{} -> return True
ConfirmPullRequest{} -> return True
where
notPossible :: Member (Error UIException) r => Text -> Sem r a
notPossible msg = throw $ UnattendedNotPossible msg
| |
516756237537b1a3ca187e36e4be0d13ae7ffab51c4319f544fd8c54563549dc | zeniuseducation/poly-euler | p84.clj | (ns alfa.special.p84
(:require
[clojure.set :refer [union difference intersection subset?]]
[clojure.core.reducers :as r]
[clojure.string :refer [split-lines]]
[alfa.common :refer :all]
[clojure.string :as cs]))
(def board [:GO :A1 :CC1 :A2 :T1 :R1 :B1 :CH1 :B2 :B3
:JAIL :C1 :U1 :C2 :C3 :R2 :D1 :CC2 :D2 :D3
:FP :E1 :CH2 :E2 :E3 :R3 :F1 :F2 :U2 :F3
:G2J :G1 :G2 :CC3 :G3 :R4 :CH3 :H1 :T2 :H2])
(def b2i (zipmap board (range)))
(def i2b (zipmap (range) board))
(def cc-cards (atom (shuffle (concat [:JAIL :GO] (repeat 14 0)))))
(def ch-cards (atom (shuffle (concat (repeat 6 0)
[:GO :JAIL :C1 :E3 :H2 :R1]
(repeat 2 :next-r)
[:next-u -3]))))
(defn dice-4
[]
(inc (rand-int 4)))
(defn land-on
[init dices]
(i2b (rem (+ dices (b2i init)) 40)))
(defn move
[init double]
(let [d1 (dice-4) d2 (dice-4) ds (+ d1 d2)]
(if (== d1 d2)
(if (<= 0 double 1)
[(land-on init ds) (inc double)]
[:JAIL 0])
[(land-on init ds) 0])))
(defn next-r
[land]
(first (filter #{:R1 :R2 :R3 :R4} (drop (inc (b2i land)) (cycle board)))))
(defn next-u
[land]
(first (filter #{:U1 :U2} (drop (inc (b2i land)) (cycle board)))))
(defn block
[land]
(cond
(some #{land} [:CC1 :CC2 :CC3])
(let [card (first @cc-cards)]
(do (reset! cc-cards (concat (rest @cc-cards) [(first @cc-cards)]))
(if (integer? card) (land-on land card) card)))
(some #{land} [:CH1 :CH2 :CH3])
(let [card (first @ch-cards)]
(do (reset! ch-cards (concat (rest @ch-cards) [(first @ch-cards)]))
(cond
(= -3 card) (block (i2b (- (b2i land) 3)))
(integer? card) (land-on land card)
(= card :next-r) (next-r land)
(= card :next-u) (next-u land)
:else card)))
(= land :G2J) :JAIL
:else land))
(defn play
[lim]
(loop [i (int 0) pos :GO double 0 moves {}]
(if (> i lim)
(map #(let [[a b] %]
[(b2i a) b]) (sort-by val > moves))
(let [[npos1 ndouble] (move pos double)
npos (block npos1)]
(recur (inc i) npos ndouble (merge-with + moves {npos 1}))))))
| null | https://raw.githubusercontent.com/zeniuseducation/poly-euler/734fdcf1ddd096a8730600b684bf7398d071d499/Alfa/src/alfa/special/p84.clj | clojure | (ns alfa.special.p84
(:require
[clojure.set :refer [union difference intersection subset?]]
[clojure.core.reducers :as r]
[clojure.string :refer [split-lines]]
[alfa.common :refer :all]
[clojure.string :as cs]))
(def board [:GO :A1 :CC1 :A2 :T1 :R1 :B1 :CH1 :B2 :B3
:JAIL :C1 :U1 :C2 :C3 :R2 :D1 :CC2 :D2 :D3
:FP :E1 :CH2 :E2 :E3 :R3 :F1 :F2 :U2 :F3
:G2J :G1 :G2 :CC3 :G3 :R4 :CH3 :H1 :T2 :H2])
(def b2i (zipmap board (range)))
(def i2b (zipmap (range) board))
(def cc-cards (atom (shuffle (concat [:JAIL :GO] (repeat 14 0)))))
(def ch-cards (atom (shuffle (concat (repeat 6 0)
[:GO :JAIL :C1 :E3 :H2 :R1]
(repeat 2 :next-r)
[:next-u -3]))))
(defn dice-4
[]
(inc (rand-int 4)))
(defn land-on
[init dices]
(i2b (rem (+ dices (b2i init)) 40)))
(defn move
[init double]
(let [d1 (dice-4) d2 (dice-4) ds (+ d1 d2)]
(if (== d1 d2)
(if (<= 0 double 1)
[(land-on init ds) (inc double)]
[:JAIL 0])
[(land-on init ds) 0])))
(defn next-r
[land]
(first (filter #{:R1 :R2 :R3 :R4} (drop (inc (b2i land)) (cycle board)))))
(defn next-u
[land]
(first (filter #{:U1 :U2} (drop (inc (b2i land)) (cycle board)))))
(defn block
[land]
(cond
(some #{land} [:CC1 :CC2 :CC3])
(let [card (first @cc-cards)]
(do (reset! cc-cards (concat (rest @cc-cards) [(first @cc-cards)]))
(if (integer? card) (land-on land card) card)))
(some #{land} [:CH1 :CH2 :CH3])
(let [card (first @ch-cards)]
(do (reset! ch-cards (concat (rest @ch-cards) [(first @ch-cards)]))
(cond
(= -3 card) (block (i2b (- (b2i land) 3)))
(integer? card) (land-on land card)
(= card :next-r) (next-r land)
(= card :next-u) (next-u land)
:else card)))
(= land :G2J) :JAIL
:else land))
(defn play
[lim]
(loop [i (int 0) pos :GO double 0 moves {}]
(if (> i lim)
(map #(let [[a b] %]
[(b2i a) b]) (sort-by val > moves))
(let [[npos1 ndouble] (move pos double)
npos (block npos1)]
(recur (inc i) npos ndouble (merge-with + moves {npos 1}))))))
| |
60654d7205d9cf74a0ce6ffce9868e24895b212352181d40a65783fe98352a3c | sheyll/newtype-zoo | NewtypeZoo.hs | # LANGUAGE KindSignatures #
# LANGUAGE TypeOperators #
-- | General Purpose Newtype Wrappers
--
-- A zoo of /newtype/ wrappers.
--
In everyday programming newtype wrappers occur again and again , to
-- convey some extra meaning for the maintainer of the code, and to prevent
-- mixing up parameters to functions with the same type.
--
-- For example:
--
-- @
applyConfiguration : : IO MyConfig
-- @
--
-- What the user actually wanted to express was:
--
-- @
-- applyConfiguration :: `Wanted` MyConfig -> `Current` MyConfig -> IO (`Active` MyConfig)
-- @
module NewtypeZoo
(
module X
) where
import NewtypeZoo.Wanted as X
import NewtypeZoo.Valid as X
import NewtypeZoo.Updated as X
import NewtypeZoo.Unwanted as X
import NewtypeZoo.Unchecked as X
import NewtypeZoo.Single as X
import NewtypeZoo.Responded as X
import NewtypeZoo.Required as X
import NewtypeZoo.Requested as X
import NewtypeZoo.Replied as X
import NewtypeZoo.Remaining as X
import NewtypeZoo.Rejected as X
import NewtypeZoo.Proposed as X
import NewtypeZoo.Previous as X
import NewtypeZoo.Pending as X
import NewtypeZoo.Partial as X
import NewtypeZoo.Old as X
import NewtypeZoo.Offered as X
import NewtypeZoo.Obsolete as X
import NewtypeZoo.Next as X
import NewtypeZoo.New as X
import NewtypeZoo.Needed as X
import NewtypeZoo.Multiple as X
import NewtypeZoo.Missing as X
import NewtypeZoo.Marked as X
import NewtypeZoo.Invalid as X
import NewtypeZoo.Inconsistent as X
import NewtypeZoo.Failed as X
import NewtypeZoo.Existing as X
import NewtypeZoo.Established as X
import NewtypeZoo.Enabled as X
import NewtypeZoo.Disabled as X
import NewtypeZoo.Current as X
import NewtypeZoo.Consistent as X
import NewtypeZoo.Completed as X
import NewtypeZoo.Complete as X
import NewtypeZoo.Cached as X
import NewtypeZoo.Broken as X
import NewtypeZoo.Available as X
import NewtypeZoo.Allocated as X
import NewtypeZoo.Active as X
| null | https://raw.githubusercontent.com/sheyll/newtype-zoo/0e67717cbcd9233d9c26b6aacb4c6f8bba6ef5f7/src/NewtypeZoo.hs | haskell | | General Purpose Newtype Wrappers
A zoo of /newtype/ wrappers.
convey some extra meaning for the maintainer of the code, and to prevent
mixing up parameters to functions with the same type.
For example:
@
@
What the user actually wanted to express was:
@
applyConfiguration :: `Wanted` MyConfig -> `Current` MyConfig -> IO (`Active` MyConfig)
@ | # LANGUAGE KindSignatures #
# LANGUAGE TypeOperators #
In everyday programming newtype wrappers occur again and again , to
applyConfiguration : : IO MyConfig
module NewtypeZoo
(
module X
) where
import NewtypeZoo.Wanted as X
import NewtypeZoo.Valid as X
import NewtypeZoo.Updated as X
import NewtypeZoo.Unwanted as X
import NewtypeZoo.Unchecked as X
import NewtypeZoo.Single as X
import NewtypeZoo.Responded as X
import NewtypeZoo.Required as X
import NewtypeZoo.Requested as X
import NewtypeZoo.Replied as X
import NewtypeZoo.Remaining as X
import NewtypeZoo.Rejected as X
import NewtypeZoo.Proposed as X
import NewtypeZoo.Previous as X
import NewtypeZoo.Pending as X
import NewtypeZoo.Partial as X
import NewtypeZoo.Old as X
import NewtypeZoo.Offered as X
import NewtypeZoo.Obsolete as X
import NewtypeZoo.Next as X
import NewtypeZoo.New as X
import NewtypeZoo.Needed as X
import NewtypeZoo.Multiple as X
import NewtypeZoo.Missing as X
import NewtypeZoo.Marked as X
import NewtypeZoo.Invalid as X
import NewtypeZoo.Inconsistent as X
import NewtypeZoo.Failed as X
import NewtypeZoo.Existing as X
import NewtypeZoo.Established as X
import NewtypeZoo.Enabled as X
import NewtypeZoo.Disabled as X
import NewtypeZoo.Current as X
import NewtypeZoo.Consistent as X
import NewtypeZoo.Completed as X
import NewtypeZoo.Complete as X
import NewtypeZoo.Cached as X
import NewtypeZoo.Broken as X
import NewtypeZoo.Available as X
import NewtypeZoo.Allocated as X
import NewtypeZoo.Active as X
|
4589cd7cca40651bbab7900f39581457d1b84ccc4f329d48e13ce3355dc471de | charlieg/Sparser | find.lisp | ;;; -*- Mode:LISP; Syntax:Common-Lisp; Package:SPARSER -*-
copyright ( c ) 1992,1993,1994 -- all rights reserved
;;;
;;; File: "find"
;;; Module: "objects;model:individuals:"
version : 0.4 May 1994
;; initiated 7/16/92 v2.3
0.1 ( 11/10 ) fixing the semantics of some cases of the find operation
0.2 ( 11/23 ) adjusted how it passed arguments to simplify things
( 6/12/93 ) added define - or - find - individual to vary arg . pattern
0.3 ( 10/22 ) allowed a category to be passed to Find - individual as well
;; as symbols naming them since find/individual needs its bindings
;; in a list
( 1/30/94 ) added find / individual / seq - keys / both by obvious c&s
0.4 ( 5/24 ) fixed long - standing bug in find / indiv of hashed keys
(in-package :sparser)
;;;--------------
;;; Find or Make
;;;--------------
(defun find-or-make/individual (category
bindings-instructions)
called from Ref / instantiate - individual as part of completing
;; a rule. All these individuals will be temporary.
(or (find/individual category bindings-instructions)
(make/individual category bindings-instructions)))
(defun define-or-find-individual (category &rest binding-plist)
;; same idea, different packaging of the arguments
;; to fit calls from category-specific code
(etypecase category
(referential-category category)
(symbol (let ((name category))
(setq category (category-named name))
(unless category
(error "There is no category named ~A" name)))))
(let ((binding-instructions
(decode-category-specific-binding-instr-exps
category binding-plist)))
(or (find/individual category binding-instructions)
(make/individual category binding-instructions))))
;;;------------------------------
;;; Find -- input as expressions
;;;------------------------------
(defun find-individual (symbol-for-category &rest binding-plist )
(let* ((category
(etypecase symbol-for-category
(symbol
(resolve-symbol-to-category/check symbol-for-category))
(referential-category symbol-for-category)))
(binding-instructions
(decode-category-specific-binding-instr-exps
category binding-plist)))
(find/individual category binding-instructions)))
;;;------
;;; Find
;;;------
(defun find/individual (category binding-instructions)
;; Looks up the appropriate Find function for this category
;; and calls it with these binding-instructions.
;; All the arguments must be objects.
(let ((fn-data
;; either just the name of a function, or a function plus
;; some data such as a variable to act as a key
(cat-ops-find (cat-operations category))))
(if (listp fn-data)
(funcall (car fn-data) (cadr fn-data) category binding-instructions)
(funcall fn-data category binding-instructions))))
;;;---------------
;;; List versions
;;;---------------
(defun find/simple-list (category binding-instructions)
(dolist (individual (cat-instances category))
(when (check-bindings individual binding-instructions)
(return-from find/simple-list individual)))
nil )
(defun find/simple-list/both (category binding-instructions)
the field looks like ( < list of > : permanent < list of perms > )
(let ((instance-field (cat-instances category)))
(dolist (individual (cdr instance-field)) ;; the temporaries
(when (check-bindings individual binding-instructions)
(return-from find/simple-list/both individual)))
(dolist (individual (car instance-field)) ;; the permanents
(when (check-bindings individual binding-instructions)
(return-from find/simple-list/both individual)))
nil ))
;;;---------------
;;; Hash versions
;;;---------------
(defun find/individual/key/hash (variable ;; the key
category
binding-instructions)
;; We are to find an individual from the table that fits these
;; binding instructions, where one of the instructions (i.e. variable
;; value pairs) supplied the value for the key into the table,
;; namely the instruction for the designated variable.
(let ((table (cat-instances category))
(value (cadr (assoc variable binding-instructions))))
(unless value
(break "Find was called with binding instructions that don't ~
include the designated key variable:~% ~A~%~A"
variable binding-instructions))
(let ((candidates (gethash value table)))
(when candidates
is there more than one ?
(if (null (cadr binding-instructions)) ;; only the key?
(first candidates)
(when (check-bindings
(first candidates) binding-instructions)
(first candidates)))
(break "Find code for multiple instances not yet written"))))))
(defun find/individual/key/hash/both (variable ;; the key
category
binding-instructions)
(let ((instance-field (cat-instances category))
(value (cadr (assoc variable binding-instructions))))
(unless value
(break "Find was called with binding instructions that don't ~
include the designated key variable:~% ~A~%~A"
variable binding-instructions))
(let ((candidates
(or (gethash value (car instance-field))
(gethash value (cdr instance-field)))))
(when candidates
i.e. there 's just one
(let ((individual (car candidates)))
(if (null (cadr binding-instructions))
;; don't bother checking the bindings, as we just indexed
;; off it.
individual
(when (check-bindings individual binding-instructions)
individual )))
(break "Find code for multiple instances with the same ~
index value not yet written"))))))
(defun find/individual/seq-keys (key-sequence
category binding-instructions)
;; it's an alist of alists
(let ((instances (cat-instances category)))
(when instances
(let ((unit?
(f/i/seq-keys instances
key-sequence category binding-instructions)))
unit? ))))
(defun find/individual/seq-keys/both (key-sequence
category binding-instructions)
(let* ((catalog (cat-instances category)))
;; hack to get around bad encoding
(when catalog
(let ((unit?
(f/i/seq-keys catalog
key-sequence category binding-instructions)))
unit? ))))
#|(if unit?
unit?
(when permanent-instances
(let ((unit?
(f/i/seq-keys permanent-instances
key-sequence category binding-instructions)))
unit? )))|#
(defun f/i/seq-keys (instances keys cat instr)
(let* ((variable (pop keys))
(value (cadr (assoc variable instr))))
(let ((entry (cdr (assoc value instances))))
(when entry
(if (null keys)
entry
(f/i/seq-keys entry keys cat instr))))))
;;;----------------------------------------
;;; general routines for listing all cases
;;;----------------------------------------
(defun list-instances/hash-table (category)
(let* ((table (cat-instances category))
instances )
(maphash #'(lambda (key value)
(declare (ignore key))
(push value instances))
table)
(pl instances)
(hash-table-count table)))
| null | https://raw.githubusercontent.com/charlieg/Sparser/b9bb7d01d2e40f783f3214fc104062db3d15e608/Sparser/code/s/objects/model/individuals/find.lisp | lisp | -*- Mode:LISP; Syntax:Common-Lisp; Package:SPARSER -*-
File: "find"
Module: "objects;model:individuals:"
initiated 7/16/92 v2.3
as symbols naming them since find/individual needs its bindings
in a list
--------------
Find or Make
--------------
a rule. All these individuals will be temporary.
same idea, different packaging of the arguments
to fit calls from category-specific code
------------------------------
Find -- input as expressions
------------------------------
------
Find
------
Looks up the appropriate Find function for this category
and calls it with these binding-instructions.
All the arguments must be objects.
either just the name of a function, or a function plus
some data such as a variable to act as a key
---------------
List versions
---------------
the temporaries
the permanents
---------------
Hash versions
---------------
the key
We are to find an individual from the table that fits these
binding instructions, where one of the instructions (i.e. variable
value pairs) supplied the value for the key into the table,
namely the instruction for the designated variable.
only the key?
the key
don't bother checking the bindings, as we just indexed
off it.
it's an alist of alists
hack to get around bad encoding
(if unit?
unit?
(when permanent-instances
(let ((unit?
(f/i/seq-keys permanent-instances
key-sequence category binding-instructions)))
unit? )))
----------------------------------------
general routines for listing all cases
---------------------------------------- | copyright ( c ) 1992,1993,1994 -- all rights reserved
version : 0.4 May 1994
0.1 ( 11/10 ) fixing the semantics of some cases of the find operation
0.2 ( 11/23 ) adjusted how it passed arguments to simplify things
( 6/12/93 ) added define - or - find - individual to vary arg . pattern
0.3 ( 10/22 ) allowed a category to be passed to Find - individual as well
( 1/30/94 ) added find / individual / seq - keys / both by obvious c&s
0.4 ( 5/24 ) fixed long - standing bug in find / indiv of hashed keys
(in-package :sparser)
(defun find-or-make/individual (category
bindings-instructions)
called from Ref / instantiate - individual as part of completing
(or (find/individual category bindings-instructions)
(make/individual category bindings-instructions)))
(defun define-or-find-individual (category &rest binding-plist)
(etypecase category
(referential-category category)
(symbol (let ((name category))
(setq category (category-named name))
(unless category
(error "There is no category named ~A" name)))))
(let ((binding-instructions
(decode-category-specific-binding-instr-exps
category binding-plist)))
(or (find/individual category binding-instructions)
(make/individual category binding-instructions))))
(defun find-individual (symbol-for-category &rest binding-plist )
(let* ((category
(etypecase symbol-for-category
(symbol
(resolve-symbol-to-category/check symbol-for-category))
(referential-category symbol-for-category)))
(binding-instructions
(decode-category-specific-binding-instr-exps
category binding-plist)))
(find/individual category binding-instructions)))
(defun find/individual (category binding-instructions)
(let ((fn-data
(cat-ops-find (cat-operations category))))
(if (listp fn-data)
(funcall (car fn-data) (cadr fn-data) category binding-instructions)
(funcall fn-data category binding-instructions))))
(defun find/simple-list (category binding-instructions)
(dolist (individual (cat-instances category))
(when (check-bindings individual binding-instructions)
(return-from find/simple-list individual)))
nil )
(defun find/simple-list/both (category binding-instructions)
the field looks like ( < list of > : permanent < list of perms > )
(let ((instance-field (cat-instances category)))
(when (check-bindings individual binding-instructions)
(return-from find/simple-list/both individual)))
(when (check-bindings individual binding-instructions)
(return-from find/simple-list/both individual)))
nil ))
category
binding-instructions)
(let ((table (cat-instances category))
(value (cadr (assoc variable binding-instructions))))
(unless value
(break "Find was called with binding instructions that don't ~
include the designated key variable:~% ~A~%~A"
variable binding-instructions))
(let ((candidates (gethash value table)))
(when candidates
is there more than one ?
(first candidates)
(when (check-bindings
(first candidates) binding-instructions)
(first candidates)))
(break "Find code for multiple instances not yet written"))))))
category
binding-instructions)
(let ((instance-field (cat-instances category))
(value (cadr (assoc variable binding-instructions))))
(unless value
(break "Find was called with binding instructions that don't ~
include the designated key variable:~% ~A~%~A"
variable binding-instructions))
(let ((candidates
(or (gethash value (car instance-field))
(gethash value (cdr instance-field)))))
(when candidates
i.e. there 's just one
(let ((individual (car candidates)))
(if (null (cadr binding-instructions))
individual
(when (check-bindings individual binding-instructions)
individual )))
(break "Find code for multiple instances with the same ~
index value not yet written"))))))
(defun find/individual/seq-keys (key-sequence
category binding-instructions)
(let ((instances (cat-instances category)))
(when instances
(let ((unit?
(f/i/seq-keys instances
key-sequence category binding-instructions)))
unit? ))))
(defun find/individual/seq-keys/both (key-sequence
category binding-instructions)
(let* ((catalog (cat-instances category)))
(when catalog
(let ((unit?
(f/i/seq-keys catalog
key-sequence category binding-instructions)))
unit? ))))
(defun f/i/seq-keys (instances keys cat instr)
(let* ((variable (pop keys))
(value (cadr (assoc variable instr))))
(let ((entry (cdr (assoc value instances))))
(when entry
(if (null keys)
entry
(f/i/seq-keys entry keys cat instr))))))
(defun list-instances/hash-table (category)
(let* ((table (cat-instances category))
instances )
(maphash #'(lambda (key value)
(declare (ignore key))
(push value instances))
table)
(pl instances)
(hash-table-count table)))
|
4dee857e8f9c60c0870de5b36e3b7197d54f36efb67c9e46d010be156964a6a7 | fhur/eaml | compiler_test.clj | (ns eaml.compiler-test
(:require [eaml.compiler :refer :all]
[eaml.test-helpers :refer :all]
[eaml.fixtures.simple-res :refer :all]
[presto.core :refer :all]
[clojure.test :refer :all]))
(expected-when "Transpiling simple resources" transpile-str
when [fix-simple-colors]
= {:default
(resources
(color "red" "#f00")
(color "green" "#0f0")
(color "blue" "#00f")
(color "main_color" "@color/red"))}
when [fix-simple-dimen]
= {:default
(resources
(dimen "small_margins" "8dp")
(dimen "medium_margins" "12dp")
(dimen "large_margins" "24dp")
(dimen "default_margins" "@dimen/medium_margins"))}
when [fix-simple-strings]
= {:default
(resources
(string "hello_world" "Hello World!")
(string "name" "Pizza 123"))}
when [fix-simple-bools]
= {:default
(resources
(bool "is_true" "true")
(bool "aint_true" "false")
(bool "a_boolean" "@bool/is_true"))})
(expected-when "Transpiling simple resources that support multiple configs" transpile-str
when [fix-simple-res-with-configs]
= {:default (resources
(dimen "padding" "12dp")
(string "supports_ripples" "nope")
(color "main_color" "#f00")
(color "button_color" "@color/main_color"))
:v21 (resources
(dimen "padding" "24dp")
(string "supports_ripples" "yes")
(color "button_color" "@drawable/btn_ripple"))
:land (resources
(dimen "padding" "30dp"))})
(expected-when "Transpiling styles" transpile-str
when ["color foo: #fff;
mixin redColored {
color: #f00;
bar: 12dp;
}
style Foo {
foo: foo;
redColored();
}"]
= {:default (resources
(color "foo" "#fff")
(style {:name "Foo"}
(item "foo" "@color/foo")
(item "color" "#f00")
(item "bar" "12dp")))}
when ["style AppTheme < Theme.AppCompat.Light.NoActionBar {
android:windowBackground: @null;
colorPrimary: @color/red_1;
colorPrimaryDark: @android:color/black;
foo123: @style/SpinnerItem;
bar123qwe: @style/Foo.Bar.Baz123;
}"]
= {:default (resources
(style {:name "AppTheme" :parent "Theme.AppCompat.Light.NoActionBar"}
(item "android:windowBackground" "@null")
(item "colorPrimary" "@color/red_1")
(item "colorPrimaryDark" "@android:color/black")
(item "foo123" "@style/SpinnerItem")
(item "bar123qwe" "@style/Foo.Bar.Baz123")))}
when ["style Button < BaseButton {
android:textSize: 12dp;
android:background: @drawable/btn_back;
&:v21,v22 {
android:background: @drawable/btn_ripple;
}
&:land {
android:textSize: 10dp;
}
}"]
= {:default (resources
(style {:name "Button" :parent "BaseButton"}
(item "android:textSize" "12dp")
(item "android:background" "@drawable/btn_back")))
:v21 (resources
(style {:name "Button" :parent "BaseButton"}
(item "android:textSize" "12dp")
(item "android:background" "@drawable/btn_ripple")))
:v22 (resources
(style {:name "Button" :parent "BaseButton"}
(item "android:textSize" "12dp")
(item "android:background" "@drawable/btn_ripple")))
:land (resources
(style {:name "Button" :parent "BaseButton"}
(item "android:background" "@drawable/btn_back")
(item "android:textSize" "10dp")))})
(expected-when "mixins override any style attrs set by the style" transpile-str
when ["mixin mixinA { attr: 12dp; }
mixin mixinB {
attr: 14dp;
&:v21 { attr: 16dp; }
}
style Foo {
attr: 10dp;
mixinA();
&:v21 {
attr: 20dp;
}
mixinB();
}"]
= {:default (resources
(style {:name "Foo"}
(item "attr" "14dp")))
:v21 (resources
(style {:name "Foo"}
(item "attr" "16dp")))})
(expected-when "mixin provide a form of including common style attributes"
transpile-str
when ["color main_color: #f0f0f0;
color main_color_lighter: #f2f2f2;
mixin mainBackgroundColored {
android:background: main_color;
&:v21 {
android:background: main_color_lighter;
}
}
style Foo {
mainBackgroundColored();
}"]
= {:default (resources
(color "main_color" "#f0f0f0")
(color "main_color_lighter" "#f2f2f2")
(style {:name "Foo"}
(item "android:background" "@color/main_color")))
:v21 (resources
(style {:name "Foo"}
(item "android:background" "@color/main_color_lighter")))})
| null | https://raw.githubusercontent.com/fhur/eaml/ee398417d4ec76966f3b88b61ffc9332741eeb28/test/eaml/compiler_test.clj | clojure |
}
}
| (ns eaml.compiler-test
(:require [eaml.compiler :refer :all]
[eaml.test-helpers :refer :all]
[eaml.fixtures.simple-res :refer :all]
[presto.core :refer :all]
[clojure.test :refer :all]))
(expected-when "Transpiling simple resources" transpile-str
when [fix-simple-colors]
= {:default
(resources
(color "red" "#f00")
(color "green" "#0f0")
(color "blue" "#00f")
(color "main_color" "@color/red"))}
when [fix-simple-dimen]
= {:default
(resources
(dimen "small_margins" "8dp")
(dimen "medium_margins" "12dp")
(dimen "large_margins" "24dp")
(dimen "default_margins" "@dimen/medium_margins"))}
when [fix-simple-strings]
= {:default
(resources
(string "hello_world" "Hello World!")
(string "name" "Pizza 123"))}
when [fix-simple-bools]
= {:default
(resources
(bool "is_true" "true")
(bool "aint_true" "false")
(bool "a_boolean" "@bool/is_true"))})
(expected-when "Transpiling simple resources that support multiple configs" transpile-str
when [fix-simple-res-with-configs]
= {:default (resources
(dimen "padding" "12dp")
(string "supports_ripples" "nope")
(color "main_color" "#f00")
(color "button_color" "@color/main_color"))
:v21 (resources
(dimen "padding" "24dp")
(string "supports_ripples" "yes")
(color "button_color" "@drawable/btn_ripple"))
:land (resources
(dimen "padding" "30dp"))})
(expected-when "Transpiling styles" transpile-str
mixin redColored {
}
style Foo {
}"]
= {:default (resources
(color "foo" "#fff")
(style {:name "Foo"}
(item "foo" "@color/foo")
(item "color" "#f00")
(item "bar" "12dp")))}
when ["style AppTheme < Theme.AppCompat.Light.NoActionBar {
}"]
= {:default (resources
(style {:name "AppTheme" :parent "Theme.AppCompat.Light.NoActionBar"}
(item "android:windowBackground" "@null")
(item "colorPrimary" "@color/red_1")
(item "colorPrimaryDark" "@android:color/black")
(item "foo123" "@style/SpinnerItem")
(item "bar123qwe" "@style/Foo.Bar.Baz123")))}
when ["style Button < BaseButton {
&:v21,v22 {
}
&:land {
}
}"]
= {:default (resources
(style {:name "Button" :parent "BaseButton"}
(item "android:textSize" "12dp")
(item "android:background" "@drawable/btn_back")))
:v21 (resources
(style {:name "Button" :parent "BaseButton"}
(item "android:textSize" "12dp")
(item "android:background" "@drawable/btn_ripple")))
:v22 (resources
(style {:name "Button" :parent "BaseButton"}
(item "android:textSize" "12dp")
(item "android:background" "@drawable/btn_ripple")))
:land (resources
(style {:name "Button" :parent "BaseButton"}
(item "android:background" "@drawable/btn_back")
(item "android:textSize" "10dp")))})
(expected-when "mixins override any style attrs set by the style" transpile-str
mixin mixinB {
}
style Foo {
&:v21 {
}
}"]
= {:default (resources
(style {:name "Foo"}
(item "attr" "14dp")))
:v21 (resources
(style {:name "Foo"}
(item "attr" "16dp")))})
(expected-when "mixin provide a form of including common style attributes"
transpile-str
mixin mainBackgroundColored {
&:v21 {
}
}
style Foo {
}"]
= {:default (resources
(color "main_color" "#f0f0f0")
(color "main_color_lighter" "#f2f2f2")
(style {:name "Foo"}
(item "android:background" "@color/main_color")))
:v21 (resources
(style {:name "Foo"}
(item "android:background" "@color/main_color_lighter")))})
|
b492e9a10389a275f094f7e7cddd88657d8cff224c2708b79e0797c1d464c580 | nkaretnikov/OOHaskell | Rectangle.hs |
( C ) 2004 - 2005 , Oleg Kiselyov &
's overlooked object system
module Rectangle where
import Shape
-- The delta of rectangles
data RectangleDelta w =
RectangleDelta { getWidth :: Int
, getHeight :: Int
, rectangleTail :: w }
-- An extension of Shape
type Rectangle w = Shape (RectangleDelta w)
-- A "closed" constructor
rectangle x y w h
= shape x y $ RectangleDelta {
getWidth = w
, getHeight = h
, rectangleTail = () }
-- Setters
setHeight :: Int -> Rectangle w -> Rectangle w
setHeight i s = s { shapeTail = (shapeTail s) { getHeight = i } }
setWidth :: Int -> Rectangle w -> Rectangle w
setWidth i s = s { shapeTail = (shapeTail s) { getWidth = i } }
-- Implement abstract draw method
instance Draw (RectangleDelta w) where
draw s
= putStrLn $ concat ["Drawing a Rectangle at:", show (getX s,getY s),
", width ", show (getWidth (shapeTail s)),
", height ", show (getHeight (shapeTail s))]
| null | https://raw.githubusercontent.com/nkaretnikov/OOHaskell/ddf42cfa62f8bd27643ff6db136dec6c14466232/repository/shapes/Haskell/Shapes2/Rectangle.hs | haskell | The delta of rectangles
An extension of Shape
A "closed" constructor
Setters
Implement abstract draw method |
( C ) 2004 - 2005 , Oleg Kiselyov &
's overlooked object system
module Rectangle where
import Shape
data RectangleDelta w =
RectangleDelta { getWidth :: Int
, getHeight :: Int
, rectangleTail :: w }
type Rectangle w = Shape (RectangleDelta w)
rectangle x y w h
= shape x y $ RectangleDelta {
getWidth = w
, getHeight = h
, rectangleTail = () }
setHeight :: Int -> Rectangle w -> Rectangle w
setHeight i s = s { shapeTail = (shapeTail s) { getHeight = i } }
setWidth :: Int -> Rectangle w -> Rectangle w
setWidth i s = s { shapeTail = (shapeTail s) { getWidth = i } }
instance Draw (RectangleDelta w) where
draw s
= putStrLn $ concat ["Drawing a Rectangle at:", show (getX s,getY s),
", width ", show (getWidth (shapeTail s)),
", height ", show (getHeight (shapeTail s))]
|
6aa2bd921937f86f26ce6a008cad8a698902f96c49030ce22e7e959b25b8bdae | zoedsoupe/estrutura_de_dados | CombinacaoSpec.hs | module LE1.Recursao.CombinacaoSpec where
import Test.Hspec
import LE1.Recursao.Combinacao
spec :: Spec
spec = do
describe "testa o algoritmo de combinação" $ do
it "deve retornar 1 se k == 0" $ do
combina 2 0 `shouldBe` 1
combina 10 3 `shouldBe` 120
it "deve retornar 1 se n == k" $ do
combina 2 2 `shouldBe` 1
combina 6 2 `shouldBe` 15
it "deve retornar 1 caso k < 0" $ do
combina 2 (-1) `shouldBe` 1
combina 2 1 `shouldBe` 2
it "deve retornar 0 se k > n" $ do
combina 2 3 `shouldBe` 0
combina 20 3 `shouldBe` 1140
it "combina corretamente n e k" $ do
combina 20 4 `shouldBe` 4845
combina 15 7 `shouldBe` 6435
combina 49 6 `shouldBe` 13983816
| null | https://raw.githubusercontent.com/zoedsoupe/estrutura_de_dados/01ffa048e373636769bb7ce1843a1e19aaad3a6c/test/LE1/Recursao/CombinacaoSpec.hs | haskell | module LE1.Recursao.CombinacaoSpec where
import Test.Hspec
import LE1.Recursao.Combinacao
spec :: Spec
spec = do
describe "testa o algoritmo de combinação" $ do
it "deve retornar 1 se k == 0" $ do
combina 2 0 `shouldBe` 1
combina 10 3 `shouldBe` 120
it "deve retornar 1 se n == k" $ do
combina 2 2 `shouldBe` 1
combina 6 2 `shouldBe` 15
it "deve retornar 1 caso k < 0" $ do
combina 2 (-1) `shouldBe` 1
combina 2 1 `shouldBe` 2
it "deve retornar 0 se k > n" $ do
combina 2 3 `shouldBe` 0
combina 20 3 `shouldBe` 1140
it "combina corretamente n e k" $ do
combina 20 4 `shouldBe` 4845
combina 15 7 `shouldBe` 6435
combina 49 6 `shouldBe` 13983816
| |
70a8b23417d395c1aa9ba72fd28dc4c588993a6fc8c1edd6ee69594486f766c3 | jamesdbrock/replace-attoparsec | TestText.hs | # LANGUAGE FlexibleContexts #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE CPP #
module TestText ( tests ) where
import Distribution.TestSuite as TestSuite
import Replace.Attoparsec.Text
import Data.Attoparsec.Text as A
import Data.Attoparsec.Combinator
import qualified Data.Text as T
import Text.Parser.Char (upper)
import Control.Applicative
import Data.Bifunctor
import qualified TestTextLazy
findAllCap' :: Parser a -> Parser [Either T.Text (T.Text, a)]
findAllCap' sep = sepCap (match sep)
findAll' :: Parser b -> Parser [Either T.Text T.Text]
findAll' sep = (fmap.fmap) (second fst) $ sepCap (match sep)
tests :: IO [Test]
tests = liftA2 (<>)
(pure
[ Test $ runParserTest "findAllCap upperChar"
(findAllCap' (upper :: Parser Char))
("aBcD" :: T.Text)
[Left "a", Right ("B", 'B'), Left "c", Right ("D", 'D')]
check that sepCap can progress even when parser consumes nothing
-- and succeeds.
, Test $ runParserTest "zero-consumption parser"
(sepCap (many (upper :: Parser Char)))
("aBcD" :: T.Text)
[Left "a", Right "B", Left "c", Right "D"]
, Test $ runParserTest "scinum"
(sepCap scinum)
("1E3")
([Right (1,3)])
, Test $ runParserTest "monad fail"
(sepCap (fail "" :: Parser ()))
("xxx")
([Left "xxx"])
#if MIN_VERSION_GLASGOW_HASKELL(8,6,0,0)
, Test $ runParserTest "read fail"
(sepCap (return (read "a" :: Int) :: Parser Int))
("a")
([Left "a"])
#endif
, Test $ runParserTest "findAll astral"
(findAll' ((A.takeWhile (=='𝅘𝅥𝅯') :: Parser T.Text)))
("𝄞𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥𝅯𝅘𝅥𝅯𝅘𝅥𝅯𝅘𝅥𝅯𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥" :: T.Text)
[Left "𝄞𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥", Right "𝅘𝅥𝅯𝅘𝅥𝅯𝅘𝅥𝅯𝅘𝅥𝅯", Left "𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥"]
, Test $ runParserFeed "const string"
(sepCap (string "aa"))
(" a") ("a ")
([Left " ",Right"aa",Left" "])
, Test $ runParserFeed "findAll astral"
(findAll' ((A.takeWhile (=='𝅘𝅥𝅯') :: Parser T.Text)))
("𝄞𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥𝅯𝅘𝅥𝅯") ("𝅘𝅥𝅯𝅘𝅥𝅯𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥" :: T.Text)
[Left "𝄞𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥", Right "𝅘𝅥𝅯𝅘𝅥𝅯𝅘𝅥𝅯𝅘𝅥𝅯", Left "𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥"]
, Test $ runParserTest "empty input" (sepCap (fail "" :: Parser ())) "" []
, Test $ streamEditTest "x to o" (string "x") (const "o") "x x x" "o o o"
, Test $ streamEditTest "x to o inner" (string "x") (const "o") " x x x " " o o o "
, Test $ streamEditTest "ordering" (string "456") (const "ABC") "123456789" "123ABC789"
, Test $ streamEditTest "empty input" (match (fail "")) (fst) "" ""
, Test $ breakCapTest "basic" upper "aAa" (Just ("a", 'A', "a"))
, Test $ breakCapTest "first" upper "Aa" (Just ("", 'A', "a"))
, Test $ breakCapTest "last" upper "aA" (Just ("a", 'A', ""))
, Test $ breakCapTest "fail" upper "aaa" Nothing
, Test $ breakCapTest "match" (match upper) "aAa" (Just ("a", ("A",'A'), "a"))
, Test $ breakCapTest "zero-width" (lookAhead upper) "aAa" (Just ("a",'A', "Aa"))
, Test $ breakCapTest "empty input" upper "" Nothing
, Test $ breakCapTest "empty input zero-width" (return () :: Parser ()) "" (Just ("", (), ""))
]) TestTextLazy.tests
where
runParserTest nam p input expected = TestInstance
{ run = do
case parseOnly p input of
Left e -> return (Finished $ TestSuite.Fail $ show e)
Right output ->
if (output == expected)
then return (Finished Pass)
else return (Finished $ TestSuite.Fail
$ "got " <> show output <> " expected " <> show expected)
, name = "parseOnly sepCap " <> nam
, tags = []
, options = []
, setOption = \_ _ -> Left "no options supported"
}
runParserFeed nam p input1 input2 expected = TestInstance
{ run = do
case parse p input1 of
A.Fail _i _ e -> return (Finished $ TestSuite.Fail $ show e)
A.Partial cont1 -> case cont1 input2 of
A.Fail _i _ e -> return (Finished $ TestSuite.Fail $ show e)
A.Partial cont2 -> case cont2 "" of
A.Fail _i _ e -> return (Finished $ TestSuite.Fail $ show e)
A.Partial _ -> return (Finished $ TestSuite.Fail $ "Should not ask for more input")
A.Done _i output ->
if (output == expected)
then return (Finished Pass)
else return (Finished $ TestSuite.Fail
$ "got " <> show output <> " expected " <> show expected)
A.Done _i _output -> return (Finished $ TestSuite.Fail $ "Should ask for more input")
A.Done _i _output -> return (Finished $ TestSuite.Fail $ "Should ask for more input")
, name = "parse Partial sepCap " <> nam
, tags = []
, options = []
, setOption = \_ _ -> Left "no options supported"
}
streamEditTest nam sep editor input expected = TestInstance
{ run = do
let output = streamEdit sep editor input
if (output == expected)
then return (Finished Pass)
else return (Finished $ TestSuite.Fail
$ "got " <> show output <> " expected " <> show expected)
, name = "streamEdit " ++ nam
, tags = []
, options = []
, setOption = \_ _ -> Left "no options supported"
}
breakCapTest nam sep input expected = TestInstance
{ run = do
let output = breakCap sep input
if (output == expected)
then return (Finished Pass)
else return (Finished $ TestSuite.Fail
$ "got " <> show output <> " expected " <> show expected)
, name = "breakCap " ++ nam
, tags = []
, options = []
, setOption = \_ _ -> Left "no options supported"
}
scinum :: Parser (Double, Integer)
scinum = do
m <- (fromIntegral :: Integer -> Double) <$> decimal
_ <- string "E"
e <- decimal
return (m, e)
| null | https://raw.githubusercontent.com/jamesdbrock/replace-attoparsec/188f43b751d1f55761ce2364e68f93e7e3eb12e5/tests/TestText.hs | haskell | # LANGUAGE OverloadedStrings #
and succeeds. | # LANGUAGE FlexibleContexts #
# LANGUAGE CPP #
module TestText ( tests ) where
import Distribution.TestSuite as TestSuite
import Replace.Attoparsec.Text
import Data.Attoparsec.Text as A
import Data.Attoparsec.Combinator
import qualified Data.Text as T
import Text.Parser.Char (upper)
import Control.Applicative
import Data.Bifunctor
import qualified TestTextLazy
findAllCap' :: Parser a -> Parser [Either T.Text (T.Text, a)]
findAllCap' sep = sepCap (match sep)
findAll' :: Parser b -> Parser [Either T.Text T.Text]
findAll' sep = (fmap.fmap) (second fst) $ sepCap (match sep)
tests :: IO [Test]
tests = liftA2 (<>)
(pure
[ Test $ runParserTest "findAllCap upperChar"
(findAllCap' (upper :: Parser Char))
("aBcD" :: T.Text)
[Left "a", Right ("B", 'B'), Left "c", Right ("D", 'D')]
check that sepCap can progress even when parser consumes nothing
, Test $ runParserTest "zero-consumption parser"
(sepCap (many (upper :: Parser Char)))
("aBcD" :: T.Text)
[Left "a", Right "B", Left "c", Right "D"]
, Test $ runParserTest "scinum"
(sepCap scinum)
("1E3")
([Right (1,3)])
, Test $ runParserTest "monad fail"
(sepCap (fail "" :: Parser ()))
("xxx")
([Left "xxx"])
#if MIN_VERSION_GLASGOW_HASKELL(8,6,0,0)
, Test $ runParserTest "read fail"
(sepCap (return (read "a" :: Int) :: Parser Int))
("a")
([Left "a"])
#endif
, Test $ runParserTest "findAll astral"
(findAll' ((A.takeWhile (=='𝅘𝅥𝅯') :: Parser T.Text)))
("𝄞𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥𝅯𝅘𝅥𝅯𝅘𝅥𝅯𝅘𝅥𝅯𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥" :: T.Text)
[Left "𝄞𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥", Right "𝅘𝅥𝅯𝅘𝅥𝅯𝅘𝅥𝅯𝅘𝅥𝅯", Left "𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥"]
, Test $ runParserFeed "const string"
(sepCap (string "aa"))
(" a") ("a ")
([Left " ",Right"aa",Left" "])
, Test $ runParserFeed "findAll astral"
(findAll' ((A.takeWhile (=='𝅘𝅥𝅯') :: Parser T.Text)))
("𝄞𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥𝅯𝅘𝅥𝅯") ("𝅘𝅥𝅯𝅘𝅥𝅯𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥" :: T.Text)
[Left "𝄞𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥", Right "𝅘𝅥𝅯𝅘𝅥𝅯𝅘𝅥𝅯𝅘𝅥𝅯", Left "𝅘𝅥𝅘𝅥𝅘𝅥𝅘𝅥"]
, Test $ runParserTest "empty input" (sepCap (fail "" :: Parser ())) "" []
, Test $ streamEditTest "x to o" (string "x") (const "o") "x x x" "o o o"
, Test $ streamEditTest "x to o inner" (string "x") (const "o") " x x x " " o o o "
, Test $ streamEditTest "ordering" (string "456") (const "ABC") "123456789" "123ABC789"
, Test $ streamEditTest "empty input" (match (fail "")) (fst) "" ""
, Test $ breakCapTest "basic" upper "aAa" (Just ("a", 'A', "a"))
, Test $ breakCapTest "first" upper "Aa" (Just ("", 'A', "a"))
, Test $ breakCapTest "last" upper "aA" (Just ("a", 'A', ""))
, Test $ breakCapTest "fail" upper "aaa" Nothing
, Test $ breakCapTest "match" (match upper) "aAa" (Just ("a", ("A",'A'), "a"))
, Test $ breakCapTest "zero-width" (lookAhead upper) "aAa" (Just ("a",'A', "Aa"))
, Test $ breakCapTest "empty input" upper "" Nothing
, Test $ breakCapTest "empty input zero-width" (return () :: Parser ()) "" (Just ("", (), ""))
]) TestTextLazy.tests
where
runParserTest nam p input expected = TestInstance
{ run = do
case parseOnly p input of
Left e -> return (Finished $ TestSuite.Fail $ show e)
Right output ->
if (output == expected)
then return (Finished Pass)
else return (Finished $ TestSuite.Fail
$ "got " <> show output <> " expected " <> show expected)
, name = "parseOnly sepCap " <> nam
, tags = []
, options = []
, setOption = \_ _ -> Left "no options supported"
}
runParserFeed nam p input1 input2 expected = TestInstance
{ run = do
case parse p input1 of
A.Fail _i _ e -> return (Finished $ TestSuite.Fail $ show e)
A.Partial cont1 -> case cont1 input2 of
A.Fail _i _ e -> return (Finished $ TestSuite.Fail $ show e)
A.Partial cont2 -> case cont2 "" of
A.Fail _i _ e -> return (Finished $ TestSuite.Fail $ show e)
A.Partial _ -> return (Finished $ TestSuite.Fail $ "Should not ask for more input")
A.Done _i output ->
if (output == expected)
then return (Finished Pass)
else return (Finished $ TestSuite.Fail
$ "got " <> show output <> " expected " <> show expected)
A.Done _i _output -> return (Finished $ TestSuite.Fail $ "Should ask for more input")
A.Done _i _output -> return (Finished $ TestSuite.Fail $ "Should ask for more input")
, name = "parse Partial sepCap " <> nam
, tags = []
, options = []
, setOption = \_ _ -> Left "no options supported"
}
streamEditTest nam sep editor input expected = TestInstance
{ run = do
let output = streamEdit sep editor input
if (output == expected)
then return (Finished Pass)
else return (Finished $ TestSuite.Fail
$ "got " <> show output <> " expected " <> show expected)
, name = "streamEdit " ++ nam
, tags = []
, options = []
, setOption = \_ _ -> Left "no options supported"
}
breakCapTest nam sep input expected = TestInstance
{ run = do
let output = breakCap sep input
if (output == expected)
then return (Finished Pass)
else return (Finished $ TestSuite.Fail
$ "got " <> show output <> " expected " <> show expected)
, name = "breakCap " ++ nam
, tags = []
, options = []
, setOption = \_ _ -> Left "no options supported"
}
scinum :: Parser (Double, Integer)
scinum = do
m <- (fromIntegral :: Integer -> Double) <$> decimal
_ <- string "E"
e <- decimal
return (m, e)
|
531145f53a5136e9cbe9e398824b7b35f4e99ad49bbe82fe6b2aaeb3be622f9c | kenbot/church | PeanoNat.hs | module PeanoNat where
data PNat = Zero | Succ PNat
deriving Show
p0, p1, p2, p3, p4 :: PNat
p0 = Zero
p1 = Succ p0
p2 = Succ p1
p3 = Succ p2
p4 = Succ p3
pPlus :: PNat -> PNat -> PNat
pPlus Zero b = b
pPlus (Succ x) b = pPlus x (Succ b)
pMult :: PNat -> PNat -> PNat
pMult Zero _ = Zero
pMult (Succ x) b = pPlus b (pMult x b)
pNatToInt :: PNat -> Int
pNatToInt Zero = 0
pNatToInt (Succ n) = 1 + pNatToInt n
intToPNat :: Int -> PNat
intToPNat 0 = Zero
intToPNat n = Succ (intToPNat (n - 1))
| null | https://raw.githubusercontent.com/kenbot/church/a3da46b584dde00b66da14943154f225f062eb86/PeanoNat.hs | haskell | module PeanoNat where
data PNat = Zero | Succ PNat
deriving Show
p0, p1, p2, p3, p4 :: PNat
p0 = Zero
p1 = Succ p0
p2 = Succ p1
p3 = Succ p2
p4 = Succ p3
pPlus :: PNat -> PNat -> PNat
pPlus Zero b = b
pPlus (Succ x) b = pPlus x (Succ b)
pMult :: PNat -> PNat -> PNat
pMult Zero _ = Zero
pMult (Succ x) b = pPlus b (pMult x b)
pNatToInt :: PNat -> Int
pNatToInt Zero = 0
pNatToInt (Succ n) = 1 + pNatToInt n
intToPNat :: Int -> PNat
intToPNat 0 = Zero
intToPNat n = Succ (intToPNat (n - 1))
| |
93292fbf001281aca169e5c70155054185bfba5f19cce260847b22f3d4c6aeec | htm-community/clortex | date.clj | (ns clortex.domain.sensors.date
(require [clj-time.core :as tc]
[clj-time.format :as tf]))
(def opf-timestamp-re #"(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):([0-9.]+)")
(defn strip-leading-zeros [s] (clojure.string/replace-first s #"^0+([1-9.])" "$1"))
(defn old-parse-opf-date
[s]
(let [m (re-matches opf-timestamp-re s)]
(if m (let [rev (reverse (map strip-leading-zeros (rest m)))
secs (java.lang.Double/parseDouble (first rev))
items (map #(. Integer parseInt %) (rest rev))
]
(apply tc/date-time (reverse (conj items secs)))))))
(def opf-format (tf/formatter "yyyy-MM-dd HH:mm:ss.SS"))
( tf / parse opf - format " 16:13:49:06 on 2013 - 04 - 06 " )
(defn parse-opf-date [s] (tf/parse opf-format s))
| null | https://raw.githubusercontent.com/htm-community/clortex/69003a352140510f47c6b8e18ad6a98a7b5a3bba/src/clortex/domain/sensors/date.clj | clojure | (ns clortex.domain.sensors.date
(require [clj-time.core :as tc]
[clj-time.format :as tf]))
(def opf-timestamp-re #"(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):([0-9.]+)")
(defn strip-leading-zeros [s] (clojure.string/replace-first s #"^0+([1-9.])" "$1"))
(defn old-parse-opf-date
[s]
(let [m (re-matches opf-timestamp-re s)]
(if m (let [rev (reverse (map strip-leading-zeros (rest m)))
secs (java.lang.Double/parseDouble (first rev))
items (map #(. Integer parseInt %) (rest rev))
]
(apply tc/date-time (reverse (conj items secs)))))))
(def opf-format (tf/formatter "yyyy-MM-dd HH:mm:ss.SS"))
( tf / parse opf - format " 16:13:49:06 on 2013 - 04 - 06 " )
(defn parse-opf-date [s] (tf/parse opf-format s))
| |
e96acab0a8e1a7ba003bad14337ea5f19f9a140a4a025356c8911f94a8797172 | helvm/helma | WrapTokenList.hs | module HelVM.HelMA.Automaton.WrapTokenList where
import qualified Text.Read
import qualified Text.Show
newtype WrapTokenList a = WrapTokenList { unWrapTokenList :: a }
deriving stock (Eq)
----
instance Show a => Show (WrapTokenList [a]) where
show (WrapTokenList tokens) = show =<< tokens
instance Read a => Read (WrapTokenList [a]) where
readsPrec _ source = [( WrapTokenList $ maybeToList . readMaybe . one =<< source , "")]
| null | https://raw.githubusercontent.com/helvm/helma/b4b5a0a4cc170329cae5d626239a24a6044d7c87/hs/src/HelVM/HelMA/Automaton/WrapTokenList.hs | haskell | -- | module HelVM.HelMA.Automaton.WrapTokenList where
import qualified Text.Read
import qualified Text.Show
newtype WrapTokenList a = WrapTokenList { unWrapTokenList :: a }
deriving stock (Eq)
instance Show a => Show (WrapTokenList [a]) where
show (WrapTokenList tokens) = show =<< tokens
instance Read a => Read (WrapTokenList [a]) where
readsPrec _ source = [( WrapTokenList $ maybeToList . readMaybe . one =<< source , "")]
|
091a9b5b31e03fbeee0ecd0a4f72eecd357aadab7efae5aa56755eefb63e10aa | archaelus/erlmail | imapd_util.erl | %%%---------------------------------------------------------------------------------------
@author < > [ ]
2006 - 2007 Simple Enigma , Inc. All Rights Reserved .
%%% @doc IMAP server utility functions
@reference See < a href=" / modules / erlmail " target="_top">Erlang Software Framework</a > for more information
%%% @reference See <a href="" target="_top">ErlMail Google Code Repository</a> for more information
%%% @version 0.0.6
@since 0.0.6
%%% @end
%%%
%%%
The MIT License
%%%
Copyright ( c ) 2007 , Simple Enigma , Inc. All Righs Reserved
%%%
%%% Permission is hereby granted, free of charge, to any person obtaining a copy
%%% of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
%%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
%%% furnished to do so, subject to the following conditions:
%%%
%%% The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
%%%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
%%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
%%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%%% THE SOFTWARE.
%%%
%%%
%%%---------------------------------------------------------------------------------------
-module(imapd_util).
-author('').
-include("../include/imap.hrl").
-include("../include/erlmail.hrl").
-include("../include/mime.hrl").
-export([clean/1,copy/3,expunge/1]).
-export([flags_resp/1,flags_resp/2]).
-export([greeting/0,greeting_capability/0]).
-export([heirachy_char/0,inbox/1]).
-export([mailbox_info/1,mailbox_info/2,mailbox_info/3]).
-export([out/2,out/3,send/2]).
-export([parse/2,parse_addresses/1]).
-export([quote/1,quote/2,unquote/1]).
-export([response/1,re_split/1,re_split/4]).
-export([split_at/1,split_at/2]).
-export([status_flags/1,status_resp/1,status_info/2]).
-export([seq_to_list/1,list_to_seq/1,seq_message_names/2,uidseq_message_names/2,uidseq_to_list/2]).
-export([store/4]).
-export([list_to_flags/1]).
%%-------------------------------------------------------------------------
( ) ) - > string ( )
@doc Removes whitespace and Doule Quotes from a string .
%% @end
%%-------------------------------------------------------------------------
clean({UserName,Password}) ->
{clean(UserName),clean(Password)};
clean(String) ->
S = string:strip(String,both,32),
S2 = string:strip(S,both,34),
string:strip(S2,both,32).
%%-------------------------------------------------------------------------
@spec ( Dest::mailbox_store(),Messages::list(),State::imapd_fsm ( ) ) - > NewDest::mailbox_store ( )
%% @doc copies existing messages to Dest mailbox
%% @end
%%-------------------------------------------------------------------------
copy(Dest,Messages,#imapd_fsm{user = User} = _State) ->
MessageStore = erlmail_util:get_app_env(store_type_message,mnesia_store),
MailBoxStore = erlmail_util:get_app_env(store_type_mailbox_store,mnesia_store),
{UserName,DomainName} = User#user.name,
NewDest = lists:foldl(fun(MessageName,MailBox) ->
M = MessageStore:select({MessageName,UserName,DomainName}),
NewName = MessageStore:message_name(now()),
NewM = M#message{
name = {NewName,UserName,DomainName},
uid = MailBox#mailbox_store.uidnext,
flags = lists:usort([recent|M#message.flags])},
MessageStore:insert(NewM),
MailBox#mailbox_store{messages=lists:usort([NewName|MailBox#mailbox_store.messages]),
uidnext = MailBox#mailbox_store.uidnext + 1}
end,Dest,Messages),
MailBoxStore:update(NewDest),
NewDest.
%%-------------------------------------------------------------------------
( ( ) ) - > { NewMailBox::mailbox_store(),Respones::list ( ) }
@doc Permanently removes all messages with DELETED flag from MailBox
%% @end
%%-------------------------------------------------------------------------
expunge(MailBox) when is_record(MailBox,mailbox_store) ->
Store = erlmail_util:get_app_env(store_type_message,mnesia_store),
{_MailBoxName,UserName,DomainName} = MailBox#mailbox_store.name,
{Messages,Responses,_Position} = lists:foldl(fun(MessageName,{M,R,Pos}) ->
Message = Store:select({MessageName,UserName,DomainName}),
case lists:member(deleted,Message#message.flags) of
true ->
Store:delete(Message),
Resp = #imap_resp{tag = '*', code = Pos, cmd = expunge},
{M,[Resp|R],Pos};
false -> {[MessageName|M],R,Pos + 1}
end
end,{[],[],1},MailBox#mailbox_store.messages),
{MailBox#mailbox_store{messages=lists:usort(Messages)},lists:reverse(Responses)}.
%%-------------------------------------------------------------------------
@spec flags_resp(list ( ) ) - > string ( )
%% @doc Takes a list of flags and returns a response string.
%% @end
%%-------------------------------------------------------------------------
flags_resp([]) -> "()";
flags_resp(List) -> flags_resp(List,[]).
%%-------------------------------------------------------------------------
@spec flags_resp(list(),list ( ) ) - > string ( )
%% @hidden
%% @end
%%-------------------------------------------------------------------------
flags_resp([H|T],Acc) when is_atom(H) ->
flags_resp(T,[http_util:to_upper(atom_to_list(H)),92,32|Acc]);
flags_resp([],Acc) -> "(" ++ string:strip(lists:flatten(lists:reverse(Acc))) ++ ")".
%%-------------------------------------------------------------------------
( ) - > string ( )
@doc Returns IMAP greeting string from config file or uses .
%% @end
%%-------------------------------------------------------------------------
greeting() ->
case erlmail_util:get_app_env(server_imap_greeting,"ErlMail IMAP4 Server ready") of
[] -> "ErlMail IMAP4 Server ready";
Greeting -> Greeting
end.
%%-------------------------------------------------------------------------
( ) - > bool ( )
%% @doc Check if capability data should be returned in greeting.
%% Default: false
%% @end
%%-------------------------------------------------------------------------
greeting_capability() ->
case erlmail_util:get_app_env(server_imap_greeting_capability,false) of
true -> true;
_ -> false
end.
%%-------------------------------------------------------------------------
( ) - > string ( )
%% @doc Gets Heirarchy chara from config file.
%% Default: "/"
%% @end
%%-------------------------------------------------------------------------
heirachy_char() -> erlmail_util:get_app_env(server_imap_hierarchy,"/").
%%-------------------------------------------------------------------------
%% @spec (MailBoxName::string()) -> string()
@doc Make sure that the string INBOX is always capitalized at the
%% begining of the mailbox name
@todo work with longer mailbox names that start with INBOX
%% @end
%%-------------------------------------------------------------------------
inbox(MailBoxName) ->
case to_lower_atom(MailBoxName) of
inbox -> "INBOX";
_ -> MailBoxName
end.
%%-------------------------------------------------------------------------
( ( ) ) - > string ( )
%% @doc Converts a list of integers into an IMAP sequence
%% @end
%%-------------------------------------------------------------------------
list_to_seq(List) -> list_to_seq(List,0,[]).
%%-------------------------------------------------------------------------
( list(),integer(),list ( ) ) - > string ( )
%% @hidden
%% @end
%%-------------------------------------------------------------------------
list_to_seq([],_,Acc) -> lists:flatten(lists:reverse(Acc));
list_to_seq([H],Start,Acc) when is_integer(H), Start > 0 ->
String = integer_to_list(Start) ++ ":" ++ integer_to_list(H),
list_to_seq([],0,[String|Acc]);
list_to_seq([H],_,Acc) when is_integer(H) ->
list_to_seq([],0,[integer_to_list(H)|Acc]);
list_to_seq([H|[I|_] = T],Start,Acc) when H == I - 1, Start == 0 ->
list_to_seq(T,H,Acc);
list_to_seq([H|[I] = _T],Start,Acc) when H == I - 1, is_integer(I) ->
String = integer_to_list(Start) ++ ":" ++ integer_to_list(I),
list_to_seq([],Start,[String|Acc]);
list_to_seq([H|[I|_J] = T],Start,Acc) when H == I - 1 ->
list_to_seq(T,Start,Acc);
list_to_seq([H|[I|_J] = T],Start,Acc) when H /= I - 1, Start > 0 ->
String = integer_to_list(Start) ++ ":" ++ integer_to_list(H),
list_to_seq(T,0,[44,String|Acc]);
list_to_seq([H|[_I|_] = T],_Start,Acc) ->
list_to_seq(T,0,[44,integer_to_list(H)|Acc]).
%%-------------------------------------------------------------------------
@spec ( String::string ( ) ) - > list ( )
%% @doc Converts a Flag string into a list of flags
%% @end
%%-------------------------------------------------------------------------
list_to_flags(String) ->
Tokens = string:tokens(String, [32,40,41,92]), % " ()\"
lists:map(fun(T) ->
to_lower_atom(T)
end,Tokens).
to_lower_atom(String) -> to_lower_atom(String,false).
to_lower_atom(String,false) when is_list(String) -> list_to_atom(http_util:to_lower(String));
to_lower_atom(String,true) when is_list(String) -> list_to_atom(http_util:to_lower(string:strip(String))).
%%-------------------------------------------------------------------------
%% @spec (tuple()) -> tuple()
%% @doc Takes a #mailbox_store{} record and returns all information in a
%% #mailbox{} record
%% @end
%%-------------------------------------------------------------------------
mailbox_info(MailBoxStore) -> mailbox_info(MailBoxStore,all).
%%-------------------------------------------------------------------------
%% @spec (MailBoxStore::tuple(),Flags::list()) -> tuple()
%% @doc Takes a #mailbox_store{} record and returns information from Flags
%%      in a #mailbox{} record
%% @end
%%-------------------------------------------------------------------------
%% @doc Build a #mailbox{} from either a #mailbox_store{} (first clause,
%% which unpacks the {Name,User,Domain} key) or an existing #mailbox{} plus
%% a {UserName,DomainName} owner tuple (second clause, which requests all
%% fields).  Both delegate to mailbox_info/3 to fill in the data.
mailbox_info(MailBoxStore,Args) when is_record(MailBoxStore,mailbox_store) ->
{MailBoxName,UserName,DomainName} = MailBoxStore#mailbox_store.name,
mailbox_info(#mailbox{name = MailBoxName},{UserName,DomainName},Args);
mailbox_info(MailBox,{UserName,DomainName}) -> mailbox_info(MailBox,{UserName,DomainName},all).
%%-------------------------------------------------------------------------
%% @spec (tuple(),tuple(),Flags::list()) -> tuple()
%% @hidden
%% @end
%%-------------------------------------------------------------------------
%% @doc Walk the requested attribute list and fill the matching fields of
%% the #mailbox{} record, consulting erlmail_store for counts.  'all'
%% expands to the full attribute set; attributes without a dedicated clause
%% are skipped by the catch-all clause near the end.
mailbox_info(MailBox,{UserName,DomainName},all) -> mailbox_info(MailBox,{UserName,DomainName},[exists,messages,unseen,recent,flags,permanentflags]);
%% exists: total message count ([] from select means mailbox not found).
mailbox_info(MailBox,{UserName,DomainName},[exists|T]) ->
case erlmail_store:select({MailBox#mailbox.name,{UserName,DomainName}}) of
[] -> mailbox_info(MailBox,{UserName,DomainName},T);
MailBoxStore -> mailbox_info(MailBox#mailbox{exists=length(MailBoxStore#mailbox_store.messages)},{UserName,DomainName},T)
end;
%% messages: same count as 'exists', stored in the messages field.
mailbox_info(MailBox,{UserName,DomainName},[messages|T]) ->
case erlmail_store:select({MailBox#mailbox.name,{UserName,DomainName}}) of
[] -> mailbox_info(MailBox,{UserName,DomainName},T);
MailBoxStore -> mailbox_info(MailBox#mailbox{messages=length(MailBoxStore#mailbox_store.messages)},{UserName,DomainName},T)
end;
%% unseen: count of unseen messages from the store's {Seen,Unseen} split.
mailbox_info(MailBox,{UserName,DomainName},[unseen|T]) ->
case erlmail_store:unseen({MailBox#mailbox.name,UserName,DomainName}) of
{_Seen,Unseen} -> mailbox_info(MailBox#mailbox{unseen=length(Unseen)},{UserName,DomainName},T);
_ -> mailbox_info(MailBox,{UserName,DomainName},T)
end;
%% recent: count of recent messages from the store's {Recent,NotRecent} split.
mailbox_info(MailBox,{UserName,DomainName},[recent|T]) ->
case erlmail_store:recent({MailBox#mailbox.name,UserName,DomainName}) of
{Recent,_NotRecent} when is_list(Recent) -> mailbox_info(MailBox#mailbox{recent=length(Recent)},{UserName,DomainName},T);
_ -> mailbox_info(MailBox,{UserName,DomainName},T)
end;
%% flags: fixed system flag list supported by this server.
mailbox_info(MailBox,{UserName,DomainName},[flags|T]) ->
mailbox_info(MailBox#mailbox{flags=[answered,flagged,draft,deleted,seen]},{UserName,DomainName},T);
% @todo UIDNEXT
% @todo UIDVALIDITY
%% Catch-all: silently skip attributes not handled above (e.g. permanentflags).
mailbox_info(MailBox,{UserName,DomainName},[_H|T]) ->
mailbox_info(MailBox,{UserName,DomainName},T);
mailbox_info(MailBox,{_UserName,_DomainName},[]) -> MailBox.
%%-------------------------------------------------------------------------
%% @hidden
%% @end
%%-------------------------------------------------------------------------
%% @doc Debug trace: print the client address from the FSM state and the
%% command being handled.
out(Command,State) -> io:format("~p ~p~n",[State#imapd_fsm.addr,Command]).
%%-------------------------------------------------------------------------
%% @hidden
%% @end
%%-------------------------------------------------------------------------
%% @doc Debug trace: like out/2 but also prints the command's parameter.
out(Command,Param,State) -> io:format("~p ~p ~p~n",[State#imapd_fsm.addr,Command,Param]).
%%-------------------------------------------------------------------------
%% @spec (Line::string(),State::imapd_fsm()) -> imap_cmd()
%% @type imap_cmd() = {imap_cmd,line(),tag(),comamnd(),cmd_data()}
%% @type line() = string()
%% @type command() = atom()
%% @type tag() = atom()
%% @type cmd_data() = term()
%% @doc Takes a command line from the connected IMAP client and parses the
%% it into an imap_cmd{} record
%% @todo parse DATA differently depending on command
%% @end
%%-------------------------------------------------------------------------
%% @doc Parse one client command line ("Tag Command [Data]") into an
%% #imap_cmd{} record.  Tag, Command and Data fall out of nested split_at/2
%% calls; both branches of each case bind Command/Data, so the variables are
%% safely in scope after the case ends.  The big case then shapes Data per
%% command; each branch uses the "Cmd = login ->" idiom to simultaneously
%% match the command atom and bind Cmd for the record built at the bottom.
parse(Line,State) ->
case split_at(Line,32) of
{Tag,[]} ->
Command = [],
Data = [];
{Tag,Rest} ->
case split_at(Rest,32) of
{Command,[]} ->
Data = [];
{Command,Data} -> ok
end
end,
NextData = case to_lower_atom(Command,true) of
%% LOGIN: split into {UserName,Password} and strip quotes/spaces.
Cmd = login -> clean(split_at(Data,32));
%% Mailbox-name commands: normalize "INBOX" case and clean quoting.
Cmd = select -> clean(inbox(Data));
Cmd = create -> clean(inbox(Data));
Cmd = delete -> clean(inbox(Data));
Cmd = rename ->
{Src,Dst} = re_split(Data),
{clean(Src),clean(Dst)};
Cmd = subscribe -> clean(inbox(Data));
Cmd = unsubscribe -> clean(inbox(Data));
Cmd = status ->
{MailBox,Flags} = re_split(Data),
{clean(inbox(MailBox)),Flags};
%% STORE: "Seq Action Flags" -> {SeqList, ActionAtom, FlagAtoms}.
Cmd = store ->
{Seq,FlagData} = imapd_util:split_at(Data),
{Action,Flags} = imapd_util:split_at(FlagData),
{seq_to_list(Seq), to_lower_atom(Action), list_to_flags(Flags)};
%% LIST/LSUB: "Reference MailBoxPattern".
Cmd = list ->
{Ref,MailBox} = re_split(Data),
{Ref,clean(MailBox)};
Cmd = lsub ->
{Ref,MailBox} = re_split(Data),
{Ref,clean(MailBox)};
%% FETCH: "Seq Items" -> {SeqList, FetchTokens}.
Cmd = fetch ->
{Seq,NameString} = clean(split_at(Data)),
{seq_to_list(Seq),imapd_fetch:tokens(NameString)};
Cmd = copy ->
{Seq,MailBoxName} = clean(re_split(Data)),
{seq_to_list(Seq),MailBoxName};
%% UID: wraps fetch/copy/store with UID-based sequence resolution.
Cmd = uid ->
{TypeString,Args} = clean(split_at(Data)),
Type = to_lower_atom(TypeString),
case Type of
fetch ->
{Seq,MessageData} = clean(split_at(Args)),
{fetch,uidseq_to_list(Seq,State),imapd_fetch:tokens(MessageData)};
copy -> {copy,Args};
store ->
{Seq,ItemFlags} = clean(split_at(Args)),
{ItemName,Flags} = clean(split_at(ItemFlags)),
{store,uidseq_to_list(Seq,State),to_lower_atom(ItemName),list_to_flags(Flags)};
_ -> []
end;
%% Any other command: pass the raw data through unparsed.
Cmd -> Data
end,
#imap_cmd{
line = Line,
tag = list_to_atom(string:strip(Tag)),
cmd = Cmd,
data = NextData}.
%% @doc Normalize input for parse_addresses/2: accept a single #addr{}
%% record, a list of #addr{} records, or a comma-separated (44 = $,)
%% address string.
parse_addresses(#addr{} = Addr) -> parse_addresses([Addr],[]);
parse_addresses([#addr{} = _H |_Rest] = List) -> parse_addresses(List,[]);
parse_addresses(String) -> parse_addresses(string:tokens(String,[44]),[]).
%% @doc Convert each address (an #addr{} record or an RFC822-style string
%% such as "Name <user@host>") into an #address{} record.  The regexp split
%% on [<>@"] yields different segment counts depending on whether a display
%% name and/or quoting is present; each {ok,...} clause handles one shape.
%% NOTE(review): uses the legacy 'regexp' module, which was removed from
%% modern OTP -- migration to 're' is required on current releases.
parse_addresses([],Acc) -> lists:reverse(Acc);
parse_addresses([#addr{} = H|T],Acc) ->
parse_addresses(T,[#address{addr_name=H#addr.description, addr_mailbox = H#addr.username, addr_host = H#addr.domainname} | Acc]);
parse_addresses([H|T],Acc) ->
case regexp:split(H,"[<>@\"]") of
{ok,[_,PersonalName,MailBoxName,HostName,_]} ->
parse_addresses(T,[#address{addr_name=PersonalName, addr_mailbox = MailBoxName, addr_host = HostName}|Acc]);
{ok,[_,PersonalName,_,MailBoxName,HostName,_]} ->
parse_addresses(T,[#address{addr_name=PersonalName, addr_mailbox = MailBoxName, addr_host = HostName}|Acc]);
{ok,[_,MailBoxName,HostName,_]} ->
parse_addresses(T,[#address{addr_mailbox = MailBoxName, addr_host = HostName}|Acc]);
{ok,[MailBoxName,HostName]} ->
parse_addresses(T,[#address{addr_mailbox = MailBoxName, addr_host = HostName}|Acc]);
{error,Reason} -> {error,Reason};
Other -> Other
end.
%%-------------------------------------------------------------------------
%% @spec (String::string()) -> string()
%% @doc Determines if the given string needs to have Double Quotes
%% around it or not
%% @end
%%-------------------------------------------------------------------------
%% @doc Quote a string for an IMAP response.  The empty string becomes ""
%% (two double-quote characters, 34 = $"); any other string is quoted only
%% if it contains a space (see quote/2 with the 'optional' policy).
quote([]) -> [34,34];
quote(String) -> quote(String,optional).
%%-------------------------------------------------------------------------
%% @spec (String::string(),Options::quoteoptions()) -> string()
%% @type quoteoptions() = true | false | optional
%% @doc Determines if the given string needs to have Double Quotes
%% around it or not based on the given options. Default = false
%% @end
%%-------------------------------------------------------------------------
%% @doc Quote a string for an IMAP response according to the given policy.
%% Atoms are converted to their string form first; the literal "NIL" is
%% never quoted.  Policy 'true' always wraps in double quotes; 'optional'
%% wraps only when the string contains a space; anything else (including
%% 'false') returns the string unchanged.
quote(Atom, Policy) when is_atom(Atom) ->
    quote(atom_to_list(Atom), Policy);
quote("NIL", _Policy) ->
    "NIL";
quote(String, true) ->
    lists:append(["\"", String, "\""]);
quote(String, optional) ->
    case lists:member($\s, String) of
        true  -> lists:append(["\"", String, "\""]);
        false -> String
    end;
quote(String, false) ->
    String;
quote(String, _Other) ->
    String.
%%-------------------------------------------------------------------------
%% @spec response(imap_resp()) -> string()
%% @type imap_resp() = {imap_resp,record}
%% @doc Take an #imap_resp{} record and returns a response string
%% @end
%%-------------------------------------------------------------------------
%% @doc Render an #imap_resp{} record into a response string (see response/2).
response(Resp) when is_record(Resp,imap_resp) -> response(Resp,[]).
%%-------------------------------------------------------------------------
%% @spec response(imap_resp(),list()) -> string()
%% @hidden
%% @end
%%-------------------------------------------------------------------------
%% TAG
%% @doc Incrementally render an #imap_resp{} record: each clause consumes
%% one populated field, pushes its text onto Acc, and recurses with that
%% field cleared; the first clause fires when every field is [] and returns
%% the flattened, reversed, space-trimmed result.  Because Acc is reversed
%% at the end, the pieces inside each push are listed in reverse output
%% order (e.g. 93 = $] is pushed before the content and 91 = $[ after it,
%% producing "[content]" in the final string).
response(#imap_resp{_ = []} = _Resp,Acc) -> string:strip(lists:flatten(lists:reverse(Acc)));
response(#imap_resp{tag = Tag} = Resp,Acc) when is_atom(Tag), Tag /= [] ->
response(Resp#imap_resp{tag = []},[32,atom_to_list(Tag)|Acc]);
response(#imap_resp{tag = Tag} = Resp,Acc) when is_list(Tag), Tag /= [] ->
response(Resp#imap_resp{tag = []},[32,Tag|Acc]);
%% STATUS
response(#imap_resp{status = Status} = Resp,Acc) when is_atom(Status), Status /= [] ->
response(Resp#imap_resp{status = []},[32,http_util:to_upper(atom_to_list(Status))|Acc]);
response(#imap_resp{status = Status} = Resp,Acc) when is_list(Status), Status /= [] ->
response(Resp#imap_resp{status = []},[32,http_util:to_upper(Status)|Acc]);
response(#imap_resp{status = Status} = Resp,Acc) when is_integer(Status), Status /= [] ->
response(Resp#imap_resp{status = []},[32,integer_to_list(Status)|Acc]);
%% CODE
response(#imap_resp{code = Integer} = Resp,Acc) when is_integer(Integer) ->
response(Resp#imap_resp{code = []},[32,integer_to_list(Integer)|Acc]);
response(#imap_resp{code = trycreate} = Resp,Acc) ->
response(Resp#imap_resp{code = []},[32,93,"TRYCREATE",91|Acc]);
response(#imap_resp{code = {capability,Capability}} = Resp,Acc) ->
response(Resp#imap_resp{code = []},[32,93,Capability,91|Acc]);
response(#imap_resp{code = {unseen,UnSeen}} = Resp,Acc) ->
response(Resp#imap_resp{code = []},[32,93,integer_to_list(UnSeen),32,"UNSEEN",91|Acc]);
response(#imap_resp{code = {uidvalidity,UIDValidity}} = Resp,Acc) ->
response(Resp#imap_resp{code = []},[32,93,integer_to_list(UIDValidity),32,"UIDVALIDITY",91|Acc]);
response(#imap_resp{code = {uidnext,UIDNext}} = Resp,Acc) ->
response(Resp#imap_resp{code = []},[32,93,integer_to_list(UIDNext),32,"UIDNEXT",91|Acc]);
response(#imap_resp{code = {permanentflags,PermanentFlags}} = Resp,Acc) ->
response(Resp#imap_resp{code = []},[32,93,flags_resp(PermanentFlags),32,"PERMANENTFLAGS",91|Acc]);
response(#imap_resp{code = 'read-write'} = Resp,Acc) ->
response(Resp#imap_resp{code = []},[32,93,"READ-WRITE",91|Acc]);
response(#imap_resp{code = 'read-only'} = Resp,Acc) ->
response(Resp#imap_resp{code = []},[32,93,"READ-ONLY",91|Acc]);
%% NOTE(review): Code is matched but an empty list is emitted between the
%% brackets, so the code text is dropped -- confirm whether [] was meant to
%% be Code here.
response(#imap_resp{code = Code} = Resp,Acc) when is_list(Code), Code /= [] ->
response(Resp#imap_resp{code = []},[32,93,[],91|Acc]);
%% CMD
response(#imap_resp{cmd = Cmd} = Resp,Acc) when is_atom(Cmd), Cmd /= [] ->
response(Resp#imap_resp{cmd = []},[32,http_util:to_upper(atom_to_list(Cmd))|Acc]);
response(#imap_resp{cmd = Cmd} = Resp,Acc) when is_list(Cmd), Cmd /= [] ->
response(Resp#imap_resp{cmd = []},[32,http_util:to_upper(Cmd)|Acc]);
%% DATA
response(#imap_resp{data = {flags,Flags}} = Resp,Acc) ->
response(Resp#imap_resp{data = []},[32,flags_resp(Flags)|Acc]);
response(#imap_resp{data = {status,MailBoxName,Info}} = Resp,Acc) ->
response(Resp#imap_resp{data = []},[32,status_resp(Info),32,MailBoxName|Acc]);
response(#imap_resp{data = {list,Flags}} = Resp,Acc) ->
Data = flags_resp(Flags),
response(Resp#imap_resp{data = []},[32,Data|Acc]);
response(#imap_resp{data = {lsub,Flags}} = Resp,Acc) ->
Data = flags_resp(Flags),
response(Resp#imap_resp{data = []},[32,Data|Acc]);
%% NOTE(review): as with the Code clause above, Data is matched but an
%% empty list is emitted between the parentheses (41 = $), 40 = $() --
%% confirm whether [] should be Data.
response(#imap_resp{data = Data} = Resp,Acc) when is_list(Data), Data /= [] ->
response(Resp#imap_resp{data = []},[32,41,[],40|Acc]);
%% INFO
response(#imap_resp{info = {list,Heirachy,Name}} = Resp,Acc) ->
Info = [quote(Heirachy,true),32,quote(Name,true)],
response(Resp#imap_resp{info = []},[32,Info|Acc]);
response(#imap_resp{info = {lsub,Heirachy,Name}} = Resp,Acc) ->
Info = [quote(Heirachy,true),32,quote(Name,true)],
response(Resp#imap_resp{info = []},[32,Info|Acc]);
response(#imap_resp{info = Info} = Resp,Acc) when is_list(Info), Info /= [] ->
response(Resp#imap_resp{info = []},[32,Info|Acc]);
%% Anything left unmatched is a malformed response record: log and fail.
response(Resp,Acc) ->
?D({Resp,Acc}),
{error,unkown_response}.
%%-------------------------------------------------------------------------
%% @spec (String::string()) -> {string(),string()}
%% @doc Finds space to break string when double quotes strings are found
%% @end
%%-------------------------------------------------------------------------
%% @doc Split String at a space (32 = $\s), but keep a leading double-quoted
%% (34 = $") segment intact; see re_split/4.
re_split(String) -> re_split(String,"^(\"[^\"]*\")",32,34).
%%-------------------------------------------------------------------------
%% @spec (String::string(),RegExp::string(),Space::integer(),Quote::integer()) -> {string(),string()}
%% @hidden
%% @end
%%-------------------------------------------------------------------------
%% @doc Split String into {First,Rest} at the character Space, unless a
%% double-quoted prefix (matched by RegExp) must be kept as a single unit.
%% The first part is cleaned of quotes and spaces before returning.
%% NOTE(review): relies on the legacy 'regexp' module (removed from modern
%% OTP); 're' is the current replacement.
re_split(String,RegExp,Space,Quote) ->
{One,Two} = case string:chr(String, Space) of
%% No space at all: the whole string is the first token.
0 -> {String,[]};
Pos ->
case string:chr(String, Quote) of
%% No quote anywhere: plain split at the first space.
0 ->
case lists:split(Pos,String) of
{O,T} -> {O,T};
Other -> Other
end;
_ ->
case regexp:match(String,RegExp) of
%% Quoted prefix runs to the end: split at the space anyway.
{match,Start,Length} when Start + Length >= length(String) ->
lists:split(Pos,String);
%% Quoted prefix starts before the space: keep it intact.
{match,Start,Length} when Start < Pos ->
case lists:prefix([34,34],String) of
%% Leading empty quotes (""): drop them, keep the rest.
true ->
{_O,T} = lists:split(3,String),
{[],T};
false -> lists:split(Start + Length,String)
end;
%% No quoted prefix: split at the space; if the remainder
%% begins with a quoted section, clean it.
nomatch ->
case lists:split(Pos,String) of
{O,T} ->
case regexp:match(T,RegExp) of
{match,_,_} -> {O,imapd_util:clean(T)};
nomatch -> {O,T}
end;
Other -> Other
end
end
end
end,
{imapd_util:clean(One),Two}.
%%-------------------------------------------------------------------------
%% @spec (Message::string(),Socket::port()) -> ok | {error,string()}
%% @doc Sends a Message to Socket adds CRLF if needed.
%% @end
%%-------------------------------------------------------------------------
%% @doc Send a message to the connected client.
%% - [] sends nothing;
%% - a single #imap_resp{} is rendered via response/1 first;
%% - a list of #imap_resp{} records is rendered into one blob, each entry
%%   CRLF-terminated (pid/timestamp are blanked so the renderer's all-empty
%%   base clause can terminate and they never leak into the output);
%% - with a #imapd_fsm{} state, the state's socket is used;
%% - otherwise the second argument is the socket itself, and CRLF is
%%   appended unless the message already ends with it.
send([],_State) -> ok;
send(Resp,State) when is_record(Resp,imap_resp) -> send(response(Resp),State);
send([#imap_resp{}|_Rest] = RespList,State) when is_list(RespList) ->
Msg = lists:flatten(lists:map(fun(R) ->
M = imapd_util:response(R#imap_resp{pid= [], timestamp = []}),
[M,13,10]
end,RespList)),
send(Msg,State);
send(Msg,State) when is_record(State,imapd_fsm) -> send(Msg,State#imapd_fsm.socket);
send(Message,Socket) ->
Msg = case string:right(Message,2) of
?CRLF -> [Message];
_ -> [Message,?CRLF]
end,
gen_tcp:send(Socket,Msg).
%%-------------------------------------------------------------------------
%% @spec (Sequence::string()) -> list()
%% @doc Converts an IMAP sequence string into a lsit of intgers
%% @end
%%-------------------------------------------------------------------------
%% @doc Convert an IMAP sequence string such as "1,3:5" into a sorted list
%% of integers.  A list whose head is an integer is a raw string (chars),
%% so it is tokenized on commas first; otherwise it is assumed to be an
%% already-tokenized list of strings.
seq_to_list([I|_] = Seq) when is_integer(I) -> seq_to_list(string:tokens(Seq,","));
seq_to_list(Seq) -> seq_to_list(Seq,[]).
%%-------------------------------------------------------------------------
%% @spec (Sequence::string(),list()) -> list()
%% @hidden
%% @end
%%-------------------------------------------------------------------------
%% @doc Accumulate sequence tokens into a sorted, duplicate-free integer
%% list.  Each token is either a plain number ("7") or an inclusive range
%% ("3:6") expanded with lists:seq/2; the final usort/flatten collapses
%% everything.  (A "*" endpoint is not handled here -- see uidseq_to_list/3.)
seq_to_list([], Acc) ->
    lists:usort(lists:flatten(lists:reverse(Acc)));
seq_to_list([Token | Rest], Acc) ->
    case catch list_to_integer(Token) of
        N when is_integer(N) ->
            seq_to_list(Rest, [N | Acc]);
        _NotAnInteger ->
            [From, To] = string:tokens(Token, ":"),
            Range = lists:seq(list_to_integer(From), list_to_integer(To)),
            seq_to_list(Rest, [Range | Acc])
    end.
%%-------------------------------------------------------------------------
%% @spec (Seq::list(),MailBox::mailbox_store()) -> MessageNameList::list()
%% @doc Retrieves the message name from #mailbox_store for each number in
%% the given sequence.
%% @end
%%-------------------------------------------------------------------------
%% @doc Look up the stored message name for each (1-based) position N of Seq
%% in the mailbox's message list.
seq_message_names(Seq,MailBox) ->
lists:map(fun(N) ->
lists:nth(N,MailBox#mailbox_store.messages)
end,Seq).
%%-------------------------------------------------------------------------
%% @spec (String::string()) -> {string(),string()}
%% @doc Splits the given string into two strings at the first SPACE (chr(32))
%% @end
%%-------------------------------------------------------------------------
%% @doc Split String at the first space character (32 = $\s); see split_at/2.
split_at(String) -> split_at(String,32).
%%-------------------------------------------------------------------------
%% @spec split_at(String::string(),Chr::char()) -> {string(),string()}
%% @doc Splits the given string into two strings at the first instance of Chr
%% @end
%%-------------------------------------------------------------------------
%% @doc Split String into {Head,Tail} at the first occurrence of Chr.
%% When Chr is absent the whole string becomes Head and Tail is [].
%% The 1-based split position means Chr itself ends up at the end of Head;
%% string:strip/1 then removes surrounding spaces from Head (so the
%% separator only disappears when it is the space character itself).
split_at(String, Chr) ->
    case string:chr(String, Chr) of
        0 ->
            {String, []};
        Pos ->
            {Head, Tail} = lists:split(Pos, String),
            {string:strip(Head), Tail}
    end.
%%-------------------------------------------------------------------------
%% @spec status_flags(string()) -> list()
%% @doc Takes a string os status requests and returns a list of
%% status requests
%% @end
%%-------------------------------------------------------------------------
%% @doc Split a STATUS request string such as "(MESSAGES UNSEEN)" into a
%% list of lower-case atoms, discarding spaces and parentheses.
status_flags(String) ->
    [to_lower_atom(Token) || Token <- string:tokens(String, " ()")].
%%-------------------------------------------------------------------------
%% @spec status_info(MailBoxInfo::tuple(),List::list()) -> list()
%% @doc Takes a list of status flags or a status string and returns
%% information for each requested flag
%% @end
%%-------------------------------------------------------------------------
%% @doc Accept either a raw STATUS string (first clause: a list whose head
%% is an integer, i.e. a character) or an already-parsed list of atoms, and
%% return {Attribute,Value} pairs via status_info/3.
status_info(MailBoxInfo,[H|_] = List) when is_integer(H) -> status_info(MailBoxInfo,status_flags(List));
status_info(MailBoxInfo,[H|_] = List) when is_atom(H) -> status_info(MailBoxInfo,List,[]).
%%-------------------------------------------------------------------------
%% @spec status_info(MailBoxInfo::tuple(),List::list(),Acc::list()) -> list()
%% @hidden
%% @end
%%-------------------------------------------------------------------------
%% @doc For each requested attribute, extract the matching field from the
%% #mailbox{} record and accumulate a {Attribute,Value} pair, preserving
%% request order.  Note: there is no catch-all clause, so an attribute
%% outside the five handled here raises function_clause.
status_info(#mailbox{messages = Messages} = MailBoxInfo,[messages|T],Acc) ->
status_info(MailBoxInfo,T,[{messages, Messages}|Acc]);
status_info(#mailbox{recent = Recent} = MailBoxInfo,[recent|T],Acc) ->
status_info(MailBoxInfo,T,[{recent, Recent}|Acc]);
status_info(#mailbox{uidnext = UIDNext} = MailBoxInfo,[uidnext|T],Acc) ->
status_info(MailBoxInfo,T,[{uidnext, UIDNext}|Acc]);
status_info(#mailbox{uidvalidity = UIDValidity} = MailBoxInfo,[uidvalidity|T],Acc) ->
status_info(MailBoxInfo,T,[{uidvalidity, UIDValidity}|Acc]);
status_info(#mailbox{unseen = UnSeen} = MailBoxInfo,[unseen|T],Acc) ->
status_info(MailBoxInfo,T,[{unseen, UnSeen}|Acc]);
status_info(_MailBoxInfo,[],Acc) -> lists:reverse(Acc).
%%-------------------------------------------------------------------------
%% @spec status_resp(list()) -> string()
%% @doc Takes a list of status information and returns a response string
%% @end
%%-------------------------------------------------------------------------
%% @doc Render a list of {Attribute,Count} pairs as a parenthesised STATUS
%% response string; the empty list renders as "()".
status_resp([]) -> "()";
status_resp(List) -> status_resp(List,[]).
%%-------------------------------------------------------------------------
%% @spec status_resp(list(),list()) -> string()
%% @hidden
%% @end
%%-------------------------------------------------------------------------
%% @doc Accumulate "TYPE Count" segments (one per {Type,Count} pair), then
%% wrap the space-trimmed result in parentheses.  Each pushed segment is
%% stored in final output order inside its own sublist; the outer reverse
%% restores pair order and flatten joins everything.
status_resp([{Type, Count} | Rest], Acc) ->
    Segment = [http_util:to_upper(atom_to_list(Type)), 32,
               integer_to_list(Count), 32],
    status_resp(Rest, [Segment | Acc]);
status_resp([], Acc) ->
    "(" ++ string:strip(lists:flatten(lists:reverse(Acc))) ++ ")".
%% @doc Dispatch an IMAP STORE action to store_flags/4: FLAGS replaces the
%% flag list, +FLAGS adds to it and -FLAGS removes from it.  The .silent
%% variants perform the same update but return [] so no untagged FETCH
%% responses are sent back to the client.  Unknown actions are logged and
%% ignored.
store(Messages,State,'flags',Flags) ->
store_flags(Messages,State,replace,Flags);
store(Messages,State,'flags.silent',Flags) ->
store_flags(Messages,State,replace,Flags),
[];
store(Messages,State,'+flags',Flags) ->
store_flags(Messages,State,add,Flags);
store(Messages,State,'+flags.silent',Flags) ->
store_flags(Messages,State,add,Flags),
[];
store(Messages,State,'-flags',Flags) ->
store_flags(Messages,State,delete,Flags);
store(Messages,State,'-flags.silent',Flags) ->
store_flags(Messages,State,delete,Flags),
[];
store(_Messages,_State,Action,Flags) ->
?D({"Unknown Store Action: ",Action,Flags}),
[].
%% @doc Apply Action (add | replace | delete) with Flags to every message in
%% Messages, persist each updated message, and collect the FLAGS FETCH
%% responses (used by the non-silent STORE variants).
store_flags(Messages,State,Action,Flags) ->
Resp = lists:map(fun(MessageName) ->
%% MessageName is either {Name,UID} (UID STORE) or a bare name; the
%% second case pattern re-uses the bound variable, acting as an
%% equality catch-all for the bare-name form.
Message = case MessageName of
{Name,_UID} -> gen_store:select(message,Name,State);
MessageName -> gen_store:select(message,MessageName,State)
end,
%% Fold every requested flag through flags/3 to build the new record.
NewMessage = lists:foldl(fun(Flag,Acc) ->
flags(Action,Flag,Acc)
end,Message,Flags),
gen_store:update(NewMessage,State),
imapd_fetch:fetch([MessageName],[#imap_fetch_cmd{name=flags,string="FLAGS"}],State)
end,Messages),
lists:flatten(Resp).
%% @doc Apply one flag operation to a #message{} record's flag list.
%% add:     insert Flag, keeping the list sorted and duplicate-free.
%% replace: substitute the entire flag list with Flags, preserving the
%%          'recent' flag if the message already carried it.
%% delete:  remove Flag from the list.
flags(add,Flag,Message) ->
	Flags = Message#message.flags,
	Message#message{flags = lists:usort([Flag|Flags])};
flags(replace,Flags,Message) ->
	%% BUG FIX: the original re-matched the already-bound variable Flags
	%% against Message#message.flags, which raised badmatch whenever the
	%% replacement list differed from the stored one -- i.e. on every real
	%% replace.  Read the old flags into a fresh variable instead.
	OldFlags = Message#message.flags,
	NewFlags = case lists:member(recent,OldFlags) of
		true -> [recent|Flags];
		false -> Flags
	end,
	Message#message{flags = lists:usort(NewFlags)};
flags(delete,Flag,Message) ->
	Flags = Message#message.flags,
	NewFlags = lists:delete(Flag,Flags),
	Message#message{flags = lists:usort(NewFlags)}.
%%-------------------------------------------------------------------------
%% @spec (Sequence::string(),State::imapd_fsm()) -> list()
%% @doc Converts an IMAP sequence string into a lsit of intgers
%% @end
%%-------------------------------------------------------------------------
%% @doc Convert a UID sequence string such as "1,3:*" into a sorted integer
%% list; State supplies the selected mailbox so "*" can resolve to the
%% highest existing UID.  A list whose head is an integer is a raw string
%% (chars) and is tokenized on commas first.
uidseq_to_list([I|_] = Seq,State) when is_integer(I) -> uidseq_to_list(string:tokens(Seq,","),State);
uidseq_to_list(Seq,State) -> uidseq_to_list(Seq,State,[]).
%%-------------------------------------------------------------------------
%% @spec (Sequence::string(),State::imapd_fsm(),list()) -> list()
%% @hidden
%% @end
%%-------------------------------------------------------------------------
%% @doc Expand tokenized UID sequence items into a sorted, duplicate-free
%% integer list.  A token is either a plain UID or a "Start:End" range,
%% where End may be "*", meaning the highest UID in the currently selected
%% mailbox (uidnext - 1).  Empty ranges (Start > End) are dropped.
uidseq_to_list([],_State,Acc) -> lists:usort(lists:flatten(lists:reverse(Acc)));
uidseq_to_list([H|T],State,Acc) ->
	case catch list_to_integer(H) of
		Int when is_integer(Int) ->
			%% BUG FIX: the original recursed into seq_to_list/2 here,
			%% dropping State -- any later "N:*" token then crashed and the
			%% Start =< End filtering was lost for the rest of the sequence.
			uidseq_to_list(T,State,[Int|Acc]);
		_ ->
			[S,E] = string:tokens(H,":"),
			Start = list_to_integer(S),
			%% (The original also had an unreachable clause matching the
			%% atom '*'; string:tokens/2 only yields strings.)
			End = case E of
				"*" ->
					MailBox = State#imapd_fsm.mailbox,
					MailBox#mailbox_store.uidnext - 1;
				E when is_list(E) -> list_to_integer(E)
			end,
			if
				Start =< End -> uidseq_to_list(T,State,[lists:seq(Start,End)|Acc]);
				true -> uidseq_to_list(T,State,Acc)
			end
	end.
%% @todo Important to complete UID command
%% @doc Return {MessageName,UID} pairs, sorted by UID (keysort on position
%% 2), for every message in the mailbox whose UID appears in UIDSeq.  Each
%% message is loaded from the configured message store to read its UID.
uidseq_message_names(UIDSeq,MailBox) ->
{_MailBoxName,UserName,DomainName} = MailBox#mailbox_store.name,
Store = erlmail_util:get_app_env(store_type_message,mnesia_store),
Messages = lists:foldl(fun(MessageName,Acc) ->
Message = Store:select({MessageName,UserName,DomainName}),
case lists:member(Message#message.uid,UIDSeq) of
true -> [{MessageName,Message#message.uid}|Acc];
false -> Acc
end
end,[],MailBox#mailbox_store.messages),
lists:keysort(2,Messages).
%%-------------------------------------------------------------------------
%% @spec (String::string()) -> string()
%% @doc Removes Double Quotes and white space from both sides of a string
%% @end
%%-------------------------------------------------------------------------
%% @doc Strip surrounding spaces, then surrounding double quotes, from both
%% ends of String.  Interior whitespace and quotes are left untouched.
unquote(String) ->
    string:strip(string:strip(String, both, $\s), both, $").
@doc IMAP server utility functions
@reference See <a href="" target="_top">ErlMail Google Code Repository</a> for more information
@version 0.0.6
@end
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
---------------------------------------------------------------------------------------
-------------------------------------------------------------------------
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@doc copies existing messages to Dest mailbox
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@doc Takes a list of flags and returns a response string.
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@hidden
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@doc Check if capability data should be returned in greeting.
Default: false
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@doc Gets Heirarchy chara from config file.
Default: "/"
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@spec (MailBoxName::string()) -> string()
begining of the mailbox name
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@doc Converts a list of integers into an IMAP sequence
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@hidden
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@doc Converts a Flag string into a list of flags
@end
-------------------------------------------------------------------------
" ()\"
-------------------------------------------------------------------------
@spec (tuple()) -> tuple()
@doc Takes a #mailbox_store{} record and returns all information in a
#mailbox{} record
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@hidden
@end
-------------------------------------------------------------------------
@todo UIDNEXT
@todo UIDVALIDITY
-------------------------------------------------------------------------
@hidden
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@hidden
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@type imap_cmd() = {imap_cmd,line(),tag(),comamnd(),cmd_data()}
@type line() = string()
@type command() = atom()
@type tag() = atom()
@type cmd_data() = term()
@doc Takes a command line from the connected IMAP client and parses the
it into an imap_cmd{} record
@todo parse DATA differently depending on command
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@doc Determines if the given string needs to have Double Quotes
around it or not
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@spec (String::string(),Options::quoteoptions()) -> string()
@type quoteoptions() = true | false | optional
@doc Determines if the given string needs to have Double Quotes
around it or not based on the given options. Default = false
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@spec response(imap_resp()) -> string()
@type imap_resp() = {imap_resp,record}
@doc Take an #imap_resp{} record and returns a response string
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@hidden
@end
-------------------------------------------------------------------------
STATUS
CODE
CMD
DATA
INFO
-------------------------------------------------------------------------
@doc Finds space to break string when double quotes strings are found
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@hidden
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@spec (Message::string(),Socket::port()) -> ok | {error,string()}
@doc Sends a Message to Socket adds CRLF if needed.
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@spec (Sequence::string()) -> list()
@doc Converts an IMAP sequence string into a lsit of intgers
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@hidden
@end
-------------------------------------------------------------------------
use * as a 'get rest' command
-------------------------------------------------------------------------
@doc Retrieves the message name from #mailbox_store for each number in
the given sequence.
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@spec split_at(String::string(),Chr::char()) -> {string(),string()}
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@spec status_flags(string()) -> list()
@doc Takes a string os status requests and returns a list of
status requests
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@doc Takes a list of status flags or a status string and returns
information for each requested flag
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@hidden
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@spec status_resp(list()) -> string()
@doc Takes a list of status information and returns a response string
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@hidden
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@doc Converts an IMAP sequence string into a lsit of intgers
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@hidden
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@doc Removes Double Quotes and white space from both sides of a string
@end
-------------------------------------------------------------------------
| @author < > [ ]
2006 - 2007 Simple Enigma , Inc. All Rights Reserved .
@reference See < a href=" / modules / erlmail " target="_top">Erlang Software Framework</a > for more information
@since 0.0.6
The MIT License
Copyright (c) 2007, Simple Enigma, Inc. All Rights Reserved
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
-module(imapd_util).
-author('').
-include("../include/imap.hrl").
-include("../include/erlmail.hrl").
-include("../include/mime.hrl").
-export([clean/1,copy/3,expunge/1]).
-export([flags_resp/1,flags_resp/2]).
-export([greeting/0,greeting_capability/0]).
-export([heirachy_char/0,inbox/1]).
-export([mailbox_info/1,mailbox_info/2,mailbox_info/3]).
-export([out/2,out/3,send/2]).
-export([parse/2,parse_addresses/1]).
-export([quote/1,quote/2,unquote/1]).
-export([response/1,re_split/1,re_split/4]).
-export([split_at/1,split_at/2]).
-export([status_flags/1,status_resp/1,status_info/2]).
-export([seq_to_list/1,list_to_seq/1,seq_message_names/2,uidseq_message_names/2,uidseq_to_list/2]).
-export([store/4]).
-export([list_to_flags/1]).
%% @spec clean(string() | {string(),string()}) -> string() | {string(),string()}
%% @doc Removes whitespace and double quotes from a string.
%% @spec clean(string() | {string(),string()}) -> string() | {string(),string()}
%% @doc Strips surrounding spaces (32) and double quotes (34) from a string,
%%      or from both members of a {UserName,Password} pair.
clean({UserName,Password}) ->
	{clean(UserName),clean(Password)};
clean(String) ->
	%% strip spaces, then quotes, then any spaces that were inside the quotes
	lists:foldl(fun(Chr,Acc) -> string:strip(Acc,both,Chr) end,
	            String, [32,34,32]).
@spec ( Dest::mailbox_store(),Messages::list(),State::imapd_fsm ( ) ) - > NewDest::mailbox_store ( )
%% Copies the named messages into mailbox Dest for the current user.
%% Each copy gets a fresh store-generated name, Dest's next UID and the
%% \Recent flag; Dest#mailbox_store.uidnext advances once per message.
%% Returns the updated destination mailbox (also persisted via the store).
copy(Dest,Messages,#imapd_fsm{user = User} = _State) ->
	MessageStore = erlmail_util:get_app_env(store_type_message,mnesia_store),
	MailBoxStore = erlmail_util:get_app_env(store_type_mailbox_store,mnesia_store),
	{UserName,DomainName} = User#user.name,
	%% Fold over the message names, threading the destination mailbox so
	%% each iteration sees the UID counter advanced by the previous one.
	NewDest = lists:foldl(fun(MessageName,MailBox) ->
		M = MessageStore:select({MessageName,UserName,DomainName}),
		NewName = MessageStore:message_name(now()),
		NewM = M#message{
			name = {NewName,UserName,DomainName},
			uid = MailBox#mailbox_store.uidnext,
			flags = lists:usort([recent|M#message.flags])},
		MessageStore:insert(NewM),
		MailBox#mailbox_store{messages=lists:usort([NewName|MailBox#mailbox_store.messages]),
			uidnext = MailBox#mailbox_store.uidnext + 1}
		end,Dest,Messages),
	MailBoxStore:update(NewDest),
	NewDest.
( ( ) ) - > { NewMailBox::mailbox_store(),Respones::list ( ) }
@doc Permanently removes all messages with DELETED flag from MailBox
%% Permanently removes every message carrying the `deleted` flag from
%% MailBox; returns {UpdatedMailBox, [#imap_resp{} untagged EXPUNGE responses]}.
expunge(MailBox) when is_record(MailBox,mailbox_store) ->
	Store = erlmail_util:get_app_env(store_type_message,mnesia_store),
	{_MailBoxName,UserName,DomainName} = MailBox#mailbox_store.name,
	{Messages,Responses,_Position} = lists:foldl(fun(MessageName,{M,R,Pos}) ->
		Message = Store:select({MessageName,UserName,DomainName}),
		case lists:member(deleted,Message#message.flags) of
			true ->
				Store:delete(Message),
				Resp = #imap_resp{tag = '*', code = Pos, cmd = expunge},
				%% Pos is deliberately NOT incremented here: per IMAP
				%% semantics, sequence numbers of later messages shift
				%% down after each expunge.
				{M,[Resp|R],Pos};
			false -> {[MessageName|M],R,Pos + 1}
		end
		end,{[],[],1},MailBox#mailbox_store.messages),
	{MailBox#mailbox_store{messages=lists:usort(Messages)},lists:reverse(Responses)}.
%% @spec flags_resp(list()) -> string()
%% Renders a list of flag atoms as an IMAP flag-list string,
%% e.g. [seen,deleted] -> "(\\SEEN \\DELETED)".
flags_resp([]) -> "()";
flags_resp(List) -> flags_resp(List,[]).
@spec flags_resp(list(),list ( ) ) - > string ( )
%% Accumulator variant: prepends [NAME,$\\,$ ] per flag, so the final
%% reverse+flatten restores the original flag order.
flags_resp([H|T],Acc) when is_atom(H) ->
	flags_resp(T,[http_util:to_upper(atom_to_list(H)),92,32|Acc]);
flags_resp([],Acc) -> "(" ++ string:strip(lists:flatten(lists:reverse(Acc))) ++ ")".
( ) - > string ( )
@doc Returns IMAP greeting string from config file or uses .
%% @spec greeting() -> string()
%% @doc IMAP greeting line from app config; falls back to the built-in
%%      default when the config entry is missing or empty.
greeting() ->
	Default = "ErlMail IMAP4 Server ready",
	case erlmail_util:get_app_env(server_imap_greeting,Default) of
		[] -> Default;
		Greeting -> Greeting
	end.
( ) - > bool ( )
%% @spec greeting_capability() -> bool()
%% @doc Whether the greeting should advertise capabilities; true only when
%%      the config value is exactly the atom `true`.
greeting_capability() ->
	erlmail_util:get_app_env(server_imap_greeting_capability,false) =:= true.
( ) - > string ( )
%% Mailbox hierarchy separator from app config, defaulting to "/".
%% (Name keeps the historic misspelling; it is part of the exported API.)
heirachy_char() -> erlmail_util:get_app_env(server_imap_hierarchy,"/").
@doc Make sure that the string INBOX is always capitalized at the
@todo work with longer mailbox names that start with INBOX
%% Case-insensitively normalizes the special mailbox name to canonical
%% "INBOX"; any other mailbox name is returned unchanged.
inbox(MailBoxName) ->
	case to_lower_atom(MailBoxName) of
		inbox -> "INBOX";
		_ -> MailBoxName
	end.
( ( ) ) - > string ( )
%% Converts a sorted list of integers into an IMAP sequence string,
%% collapsing consecutive runs, e.g. [1,2,3,5] -> "1:3,5".
list_to_seq(List) -> list_to_seq(List,0,[]).
( list(),integer(),list ( ) ) - > string ( )
%% Start tracks the beginning of the current consecutive run (0 = no run).
list_to_seq([],_,Acc) -> lists:flatten(lists:reverse(Acc));
list_to_seq([H],Start,Acc) when is_integer(H), Start > 0 ->
	%% last element closes an open run
	String = integer_to_list(Start) ++ ":" ++ integer_to_list(H),
	list_to_seq([],0,[String|Acc]);
list_to_seq([H],_,Acc) when is_integer(H) ->
	list_to_seq([],0,[integer_to_list(H)|Acc]);
list_to_seq([H|[I|_] = T],Start,Acc) when H == I - 1, Start == 0 ->
	%% start of a new run
	list_to_seq(T,H,Acc);
list_to_seq([H|[I] = _T],Start,Acc) when H == I - 1, is_integer(I) ->
	String = integer_to_list(Start) ++ ":" ++ integer_to_list(I),
	list_to_seq([],Start,[String|Acc]);
list_to_seq([H|[I|_J] = T],Start,Acc) when H == I - 1 ->
	%% run continues
	list_to_seq(T,Start,Acc);
list_to_seq([H|[I|_J] = T],Start,Acc) when H /= I - 1, Start > 0 ->
	%% run broken: emit "Start:H" (44 is $,)
	String = integer_to_list(Start) ++ ":" ++ integer_to_list(H),
	list_to_seq(T,0,[44,String|Acc]);
list_to_seq([H|[_I|_] = T],_Start,Acc) ->
	list_to_seq(T,0,[44,integer_to_list(H)|Acc]).
%% @spec list_to_flags(String::string()) -> list()
%% @doc Converts a flag string such as "(\Seen \Deleted)" into a list of
%%      lower-case flag atoms.
list_to_flags(String) ->
	%% BUG FIX: the previous version referenced `Tokens` without ever
	%% binding it (the tokenizing line was missing), so it could not compile.
	Tokens = string:tokens(String, "() "),
	lists:map(fun(T) ->
		to_lower_atom(T)
		end,Tokens).
%% Lower-cases a string and converts it to an atom; the `true` variant
%% strips surrounding whitespace first.
%% NOTE(review): list_to_atom/1 on untrusted client input can exhaust the
%% atom table -- consider list_to_existing_atom with a catch; verify callers.
to_lower_atom(String) -> to_lower_atom(String,false).
to_lower_atom(String,false) when is_list(String) -> list_to_atom(http_util:to_lower(String));
to_lower_atom(String,true) when is_list(String) -> list_to_atom(http_util:to_lower(string:strip(String))).
%% Builds a #mailbox{} record populated with the statistics requested in
%% the flag list (exists, messages, unseen, recent, flags, ...), looking
%% each one up in the store and recursing on the remaining flags.
mailbox_info(MailBoxStore) -> mailbox_info(MailBoxStore,all).
( tuple(),Flags::list ( ) ) - > tuple ( )
@doc Takes a # mailbox_store { } record and returns information from Flags
in a # mailbox { } record
mailbox_info(MailBoxStore,Args) when is_record(MailBoxStore,mailbox_store) ->
	{MailBoxName,UserName,DomainName} = MailBoxStore#mailbox_store.name,
	mailbox_info(#mailbox{name = MailBoxName},{UserName,DomainName},Args);
mailbox_info(MailBox,{UserName,DomainName}) -> mailbox_info(MailBox,{UserName,DomainName},all).
( tuple(),tuple(),Flags::list ( ) ) - > tuple ( )
%% `all` expands to the full flag set; each clause below fills one field.
mailbox_info(MailBox,{UserName,DomainName},all) -> mailbox_info(MailBox,{UserName,DomainName},[exists,messages,unseen,recent,flags,permanentflags]);
mailbox_info(MailBox,{UserName,DomainName},[exists|T]) ->
	case erlmail_store:select({MailBox#mailbox.name,{UserName,DomainName}}) of
		[] -> mailbox_info(MailBox,{UserName,DomainName},T);
		MailBoxStore -> mailbox_info(MailBox#mailbox{exists=length(MailBoxStore#mailbox_store.messages)},{UserName,DomainName},T)
	end;
mailbox_info(MailBox,{UserName,DomainName},[messages|T]) ->
	case erlmail_store:select({MailBox#mailbox.name,{UserName,DomainName}}) of
		[] -> mailbox_info(MailBox,{UserName,DomainName},T);
		MailBoxStore -> mailbox_info(MailBox#mailbox{messages=length(MailBoxStore#mailbox_store.messages)},{UserName,DomainName},T)
	end;
mailbox_info(MailBox,{UserName,DomainName},[unseen|T]) ->
	case erlmail_store:unseen({MailBox#mailbox.name,UserName,DomainName}) of
		{_Seen,Unseen} -> mailbox_info(MailBox#mailbox{unseen=length(Unseen)},{UserName,DomainName},T);
		_ -> mailbox_info(MailBox,{UserName,DomainName},T)
	end;
mailbox_info(MailBox,{UserName,DomainName},[recent|T]) ->
	case erlmail_store:recent({MailBox#mailbox.name,UserName,DomainName}) of
		{Recent,_NotRecent} when is_list(Recent) -> mailbox_info(MailBox#mailbox{recent=length(Recent)},{UserName,DomainName},T);
		_ -> mailbox_info(MailBox,{UserName,DomainName},T)
	end;
mailbox_info(MailBox,{UserName,DomainName},[flags|T]) ->
	mailbox_info(MailBox#mailbox{flags=[answered,flagged,draft,deleted,seen]},{UserName,DomainName},T);
%% unrecognized flags are skipped silently
mailbox_info(MailBox,{UserName,DomainName},[_H|T]) ->
	mailbox_info(MailBox,{UserName,DomainName},T);
mailbox_info(MailBox,{_UserName,_DomainName},[]) -> MailBox.
%% Debug logging of client commands to stdout, tagged with the peer address.
out(Command,State) -> io:format("~p ~p~n",[State#imapd_fsm.addr,Command]).
out(Command,Param,State) -> io:format("~p ~p ~p~n",[State#imapd_fsm.addr,Command,Param]).
( Line::string(),State::imapd_fsm ( ) ) - > imap_cmd ( )
%% Parses one raw IMAP command line into an #imap_cmd{} record.
%% The line is split as "TAG COMMAND DATA"; DATA is then post-processed
%% per command (mailbox-name cleaning, sequence parsing, flag parsing).
parse(Line,State) ->
	case split_at(Line,32) of
		{Tag,[]} ->
			Command = [],
			Data = [];
		{Tag,Rest} ->
			case split_at(Rest,32) of
				{Command,[]} ->
					Data = [];
				{Command,Data} -> ok
			end
	end,
	%% Cmd is bound inside every branch of this case expression and read
	%% after it -- legal Erlang, but every branch MUST keep binding it.
	NextData = case to_lower_atom(Command,true) of
		Cmd = login -> clean(split_at(Data,32));
		Cmd = select -> clean(inbox(Data));
		Cmd = create -> clean(inbox(Data));
		Cmd = delete -> clean(inbox(Data));
		Cmd = rename ->
			{Src,Dst} = re_split(Data),
			{clean(Src),clean(Dst)};
		Cmd = subscribe -> clean(inbox(Data));
		Cmd = unsubscribe -> clean(inbox(Data));
		Cmd = status ->
			{MailBox,Flags} = re_split(Data),
			{clean(inbox(MailBox)),Flags};
		Cmd = store ->
			{Seq,FlagData} = imapd_util:split_at(Data),
			{Action,Flags} = imapd_util:split_at(FlagData),
			{seq_to_list(Seq), to_lower_atom(Action), list_to_flags(Flags)};
		Cmd = list ->
			{Ref,MailBox} = re_split(Data),
			{Ref,clean(MailBox)};
		Cmd = lsub ->
			{Ref,MailBox} = re_split(Data),
			{Ref,clean(MailBox)};
		Cmd = fetch ->
			{Seq,NameString} = clean(split_at(Data)),
			{seq_to_list(Seq),imapd_fetch:tokens(NameString)};
		Cmd = copy ->
			{Seq,MailBoxName} = clean(re_split(Data)),
			{seq_to_list(Seq),MailBoxName};
		Cmd = uid ->
			%% UID sub-commands are dispatched a second time below
			{TypeString,Args} = clean(split_at(Data)),
			Type = to_lower_atom(TypeString),
			case Type of
				fetch ->
					{Seq,MessageData} = clean(split_at(Args)),
					{fetch,uidseq_to_list(Seq,State),imapd_fetch:tokens(MessageData)};
				copy -> {copy,Args};
				store ->
					{Seq,ItemFlags} = clean(split_at(Args)),
					{ItemName,Flags} = clean(split_at(ItemFlags)),
					{store,uidseq_to_list(Seq,State),to_lower_atom(ItemName),list_to_flags(Flags)};
				_ -> []
			end;
		%% any other command: pass raw DATA through unchanged
		Cmd -> Data
	end,
	#imap_cmd{
		line = Line,
		tag = list_to_atom(string:strip(Tag)),
		cmd = Cmd,
		data = NextData}.
%% Converts #addr{} records, lists of them, or a comma-separated RFC 2822
%% address string into a list of #address{} records. String parsing relies
%% on splitting at the characters < > @ " and pattern-matching the pieces.
parse_addresses(#addr{} = Addr) -> parse_addresses([Addr],[]);
parse_addresses([#addr{} = _H |_Rest] = List) -> parse_addresses(List,[]);
parse_addresses(String) -> parse_addresses(string:tokens(String,[44]),[]).
parse_addresses([],Acc) -> lists:reverse(Acc);
parse_addresses([#addr{} = H|T],Acc) ->
	parse_addresses(T,[#address{addr_name=H#addr.description, addr_mailbox = H#addr.username, addr_host = H#addr.domainname} | Acc]);
parse_addresses([H|T],Acc) ->
	%% Each clause below matches one shape of the split result:
	%% "Name <box@host>", quoted-name variant, "<box@host>", "box@host".
	case regexp:split(H,"[<>@\"]") of
		{ok,[_,PersonalName,MailBoxName,HostName,_]} ->
			parse_addresses(T,[#address{addr_name=PersonalName, addr_mailbox = MailBoxName, addr_host = HostName}|Acc]);
		{ok,[_,PersonalName,_,MailBoxName,HostName,_]} ->
			parse_addresses(T,[#address{addr_name=PersonalName, addr_mailbox = MailBoxName, addr_host = HostName}|Acc]);
		{ok,[_,MailBoxName,HostName,_]} ->
			parse_addresses(T,[#address{addr_mailbox = MailBoxName, addr_host = HostName}|Acc]);
		{ok,[MailBoxName,HostName]} ->
			parse_addresses(T,[#address{addr_mailbox = MailBoxName, addr_host = HostName}|Acc]);
		{error,Reason} -> {error,Reason};
		Other -> Other
	end.
@spec ( String::string ( ) ) - > string ( )
%% Wraps a string in double quotes (34). With `optional` (the default),
%% quoting happens only when the string contains a space; `true` always
%% quotes; `false`/anything else returns the string unchanged.
quote([]) -> [34,34];
quote(String) -> quote(String,optional).
quote(Atom,Boolean) when is_atom(Atom) -> quote(atom_to_list(Atom),Boolean);
quote("NIL",_) -> "NIL";
quote(String,true) -> [34] ++ String ++ [34];
quote(String,optional) ->
	case string:chr(String,32) of
		0 -> String;
		_ -> [34] ++ String ++ [34]
	end;
%% NOTE(review): the final catch-all makes the `false` clause redundant;
%% both return the string unchanged.
quote(String,false) -> String;
quote(String,_UnKnown) -> String.
%% Serializes an #imap_resp{} record to its wire string. Each clause
%% consumes one non-empty field (tag, status, code, cmd, data, info),
%% blanks it in the record, prepends the rendered text (in reverse --
%% 91/93 are [ and ]) and recurses until every field is empty.
response(Resp) when is_record(Resp,imap_resp) -> response(Resp,[]).
@spec response(imap_resp(),list ( ) ) - > string ( )
TAG
response(#imap_resp{_ = []} = _Resp,Acc) -> string:strip(lists:flatten(lists:reverse(Acc)));
response(#imap_resp{tag = Tag} = Resp,Acc) when is_atom(Tag), Tag /= [] ->
	response(Resp#imap_resp{tag = []},[32,atom_to_list(Tag)|Acc]);
response(#imap_resp{tag = Tag} = Resp,Acc) when is_list(Tag), Tag /= [] ->
	response(Resp#imap_resp{tag = []},[32,Tag|Acc]);
response(#imap_resp{status = Status} = Resp,Acc) when is_atom(Status), Status /= [] ->
	response(Resp#imap_resp{status = []},[32,http_util:to_upper(atom_to_list(Status))|Acc]);
response(#imap_resp{status = Status} = Resp,Acc) when is_list(Status), Status /= [] ->
	response(Resp#imap_resp{status = []},[32,http_util:to_upper(Status)|Acc]);
response(#imap_resp{status = Status} = Resp,Acc) when is_integer(Status), Status /= [] ->
	response(Resp#imap_resp{status = []},[32,integer_to_list(Status)|Acc]);
response(#imap_resp{code = Integer} = Resp,Acc) when is_integer(Integer) ->
	response(Resp#imap_resp{code = []},[32,integer_to_list(Integer)|Acc]);
response(#imap_resp{code = trycreate} = Resp,Acc) ->
	response(Resp#imap_resp{code = []},[32,93,"TRYCREATE",91|Acc]);
response(#imap_resp{code = {capability,Capability}} = Resp,Acc) ->
	response(Resp#imap_resp{code = []},[32,93,Capability,91|Acc]);
response(#imap_resp{code = {unseen,UnSeen}} = Resp,Acc) ->
	response(Resp#imap_resp{code = []},[32,93,integer_to_list(UnSeen),32,"UNSEEN",91|Acc]);
response(#imap_resp{code = {uidvalidity,UIDValidity}} = Resp,Acc) ->
	response(Resp#imap_resp{code = []},[32,93,integer_to_list(UIDValidity),32,"UIDVALIDITY",91|Acc]);
response(#imap_resp{code = {uidnext,UIDNext}} = Resp,Acc) ->
	response(Resp#imap_resp{code = []},[32,93,integer_to_list(UIDNext),32,"UIDNEXT",91|Acc]);
response(#imap_resp{code = {permanentflags,PermanentFlags}} = Resp,Acc) ->
	response(Resp#imap_resp{code = []},[32,93,flags_resp(PermanentFlags),32,"PERMANENTFLAGS",91|Acc]);
response(#imap_resp{code = 'read-write'} = Resp,Acc) ->
	response(Resp#imap_resp{code = []},[32,93,"READ-WRITE",91|Acc]);
response(#imap_resp{code = 'read-only'} = Resp,Acc) ->
	response(Resp#imap_resp{code = []},[32,93,"READ-ONLY",91|Acc]);
%% NOTE(review): the next clause emits "[]" but drops the actual Code
%% contents -- looks like an unfinished branch; confirm before relying on it.
response(#imap_resp{code = Code} = Resp,Acc) when is_list(Code), Code /= [] ->
	response(Resp#imap_resp{code = []},[32,93,[],91|Acc]);
response(#imap_resp{cmd = Cmd} = Resp,Acc) when is_atom(Cmd), Cmd /= [] ->
	response(Resp#imap_resp{cmd = []},[32,http_util:to_upper(atom_to_list(Cmd))|Acc]);
response(#imap_resp{cmd = Cmd} = Resp,Acc) when is_list(Cmd), Cmd /= [] ->
	response(Resp#imap_resp{cmd = []},[32,http_util:to_upper(Cmd)|Acc]);
response(#imap_resp{data = {flags,Flags}} = Resp,Acc) ->
	response(Resp#imap_resp{data = []},[32,flags_resp(Flags)|Acc]);
response(#imap_resp{data = {status,MailBoxName,Info}} = Resp,Acc) ->
	response(Resp#imap_resp{data = []},[32,status_resp(Info),32,MailBoxName|Acc]);
response(#imap_resp{data = {list,Flags}} = Resp,Acc) ->
	Data = flags_resp(Flags),
	response(Resp#imap_resp{data = []},[32,Data|Acc]);
response(#imap_resp{data = {lsub,Flags}} = Resp,Acc) ->
	Data = flags_resp(Flags),
	response(Resp#imap_resp{data = []},[32,Data|Acc]);
%% NOTE(review): generic data clause emits "()" and drops Data -- same
%% unfinished-branch pattern as the Code clause above.
response(#imap_resp{data = Data} = Resp,Acc) when is_list(Data), Data /= [] ->
	response(Resp#imap_resp{data = []},[32,41,[],40|Acc]);
response(#imap_resp{info = {list,Heirachy,Name}} = Resp,Acc) ->
	Info = [quote(Heirachy,true),32,quote(Name,true)],
	response(Resp#imap_resp{info = []},[32,Info|Acc]);
response(#imap_resp{info = {lsub,Heirachy,Name}} = Resp,Acc) ->
	Info = [quote(Heirachy,true),32,quote(Name,true)],
	response(Resp#imap_resp{info = []},[32,Info|Acc]);
response(#imap_resp{info = Info} = Resp,Acc) when is_list(Info), Info /= [] ->
	response(Resp#imap_resp{info = []},[32,Info|Acc]);
response(Resp,Acc) ->
	?D({Resp,Acc}),
	{error,unkown_response}.
@spec ( String::string ( ) ) - > { string(),string ( ) }
%% Splits an IMAP argument string into two parts, treating a leading
%% quoted section ("...") as a single token. The default regexp matches a
%% double-quoted prefix; Space (32) and Quote (34) are the split/quote chars.
re_split(String) -> re_split(String,"^(\"[^\"]*\")",32,34).
( String::string(),RegExp::string(),Space::integer(),Quote::integer ( ) ) - > { string(),string ( ) }
re_split(String,RegExp,Space,Quote) ->
	{One,Two} = case string:chr(String, Space) of
		0 -> {String,[]};
		Pos ->
			case string:chr(String, Quote) of
				0 ->
					%% no quotes anywhere: plain split at first space
					case lists:split(Pos,String) of
						{O,T} -> {O,T};
						Other -> Other
					end;
				_ ->
					case regexp:match(String,RegExp) of
						{match,Start,Length} when Start + Length >= length(String) ->
							%% quoted section spans to end: split at first space
							lists:split(Pos,String);
						{match,Start,Length} when Start < Pos ->
							case lists:prefix([34,34],String) of
								true ->
									%% empty quoted string "" -> empty first part
									{_O,T} = lists:split(3,String),
									{[],T};
								false -> lists:split(Start + Length,String)
							end;
						nomatch ->
							case lists:split(Pos,String) of
								{O,T} ->
									%% quote appears later: clean the tail if it is quoted
									case regexp:match(T,RegExp) of
										{match,_,_} -> {O,imapd_util:clean(T)};
										nomatch -> {O,T}
									end;
								Other -> Other
							end
					end
			end
	end,
	{imapd_util:clean(One),Two}.
%% Sends a response (single #imap_resp{}, list of them, or a raw string)
%% to the client socket, appending CRLF when the message lacks it.
send([],_State) -> ok;
send(Resp,State) when is_record(Resp,imap_resp) -> send(response(Resp),State);
send([#imap_resp{}|_Rest] = RespList,State) when is_list(RespList) ->
	%% serialize each response (pid/timestamp blanked) and join with CRLF
	Msg = lists:flatten(lists:map(fun(R) ->
		M = imapd_util:response(R#imap_resp{pid= [], timestamp = []}),
		[M,13,10]
		end,RespList)),
	send(Msg,State);
send(Msg,State) when is_record(State,imapd_fsm) -> send(Msg,State#imapd_fsm.socket);
send(Message,Socket) ->
	Msg = case string:right(Message,2) of
		?CRLF -> [Message];
		_ -> [Message,?CRLF]
	end,
	gen_tcp:send(Socket,Msg).
%% Entry point: a raw sequence string (first element is a char/integer)
%% is tokenized on commas before the accumulator variant runs.
seq_to_list([I|_] = Seq) when is_integer(I) -> seq_to_list(string:tokens(Seq,","));
seq_to_list(Seq) -> seq_to_list(Seq,[]).
@spec ( Sequence::string(),list ( ) ) - > list ( )
%% @spec seq_to_list(Tokens::[string()], list()) -> [integer()]
%% @doc Expands sequence tokens ("2", "4:6", ...) into a sorted, de-duplicated
%%      list of integers.
seq_to_list([],Acc) -> lists:usort(lists:flatten(lists:reverse(Acc)));
seq_to_list([H|T],Acc) ->
	case catch list_to_integer(H) of
		Int when is_integer(Int) -> seq_to_list(T,[Int|Acc]);
		_ ->
			%% range token "Start:End"
			[S,E] = string:tokens(H,":"),
			Start = list_to_integer(S),
			%% BUG FIX: `End` was previously never bound (the line below was
			%% missing), so any range token crashed with an unbound variable.
			End = list_to_integer(E),
			seq_to_list(T,[lists:seq(Start,End)|Acc])
	end.
( Seq::list(),MailBox::mailbox_store ( ) ) - > MessageNameList::list ( )
%% Maps 1-based sequence numbers to message names in MailBox.
%% NOTE(review): lists:nth/2 crashes on out-of-range sequence numbers;
%% verify callers validate Seq first.
seq_message_names(Seq,MailBox) ->
	lists:map(fun(N) ->
		lists:nth(N,MailBox#mailbox_store.messages)
		end,Seq).
( ) ) - > { string(),string ( ) }
@doc Splits the given string into two strings at the first SPACE ( chr(32 ) )
%% Splits String at the first space character (32).
split_at(String) -> split_at(String,32).
@doc Splits the given string into two strings at the first instace of Chr
%% @spec split_at(String::string(), Chr::char()) -> {string(),string()}
%% @doc Splits String at the first occurrence of Chr. The head is
%%      whitespace-stripped and the separator stays on the head side before
%%      stripping; when Chr is absent the whole string is the head.
split_at(String,Chr) ->
	case string:chr(String, Chr) of
		0 -> {String,[]};
		Pos ->
			%% lists:split/2 always yields a pair for 0 =< Pos =< length
			{Head,Tail} = lists:split(Pos,String),
			{string:strip(Head),Tail}
	end.
%% Tokenizes a STATUS request string such as "(MESSAGES UNSEEN)" into a
%% list of lower-case flag atoms.
status_flags(String) ->
	Tokens = string:tokens(String," ()"),
	lists:map(fun(S) -> to_lower_atom(S) end,Tokens).
@spec status_info(MailBoxInfo::tuple(),List::list ( ) ) - > list ( )
%% Extracts the requested counters from a #mailbox{} record as
%% [{Flag,Value}] pairs, preserving the requested order. A string request
%% (first element is a char) is first converted via status_flags/1.
status_info(MailBoxInfo,[H|_] = List) when is_integer(H) -> status_info(MailBoxInfo,status_flags(List));
status_info(MailBoxInfo,[H|_] = List) when is_atom(H) -> status_info(MailBoxInfo,List,[]).
( ) ) - > list ( )
status_info(#mailbox{messages = Messages} = MailBoxInfo,[messages|T],Acc) ->
	status_info(MailBoxInfo,T,[{messages, Messages}|Acc]);
status_info(#mailbox{recent = Recent} = MailBoxInfo,[recent|T],Acc) ->
	status_info(MailBoxInfo,T,[{recent, Recent}|Acc]);
status_info(#mailbox{uidnext = UIDNext} = MailBoxInfo,[uidnext|T],Acc) ->
	status_info(MailBoxInfo,T,[{uidnext, UIDNext}|Acc]);
status_info(#mailbox{uidvalidity = UIDValidity} = MailBoxInfo,[uidvalidity|T],Acc) ->
	status_info(MailBoxInfo,T,[{uidvalidity, UIDValidity}|Acc]);
status_info(#mailbox{unseen = UnSeen} = MailBoxInfo,[unseen|T],Acc) ->
	status_info(MailBoxInfo,T,[{unseen, UnSeen}|Acc]);
status_info(_MailBoxInfo,[],Acc) -> lists:reverse(Acc).
%% Renders [{Type,Count}] status pairs as a parenthesized response string,
%% e.g. [{messages,5}] -> "(MESSAGES 5)".
status_resp([]) -> "()";
status_resp(List) -> status_resp(List,[]).
@spec status_resp(list(),list ( ) ) - > string ( )
%% Elements are pushed reversed ("Count NAME"), so reverse+flatten at the
%% end restores "NAME Count" pairs in the requested order.
status_resp([{Type,Info}|T],Acc) ->
	status_resp(T,[32,integer_to_list(Info),32,http_util:to_upper(atom_to_list(Type))|Acc]);
status_resp([],Acc) -> "(" ++ string:strip(lists:flatten(lists:reverse(Acc))) ++ ")".
%% Dispatches an IMAP STORE item name to the matching flag operation.
%% The ".silent" variants perform the same update but return [] so the
%% caller suppresses the untagged FETCH responses.
store(Messages,State,'flags',Flags) ->
	store_flags(Messages,State,replace,Flags);
store(Messages,State,'flags.silent',Flags) ->
	store_flags(Messages,State,replace,Flags),
	[];
store(Messages,State,'+flags',Flags) ->
	store_flags(Messages,State,add,Flags);
store(Messages,State,'+flags.silent',Flags) ->
	store_flags(Messages,State,add,Flags),
	[];
store(Messages,State,'-flags',Flags) ->
	store_flags(Messages,State,delete,Flags);
store(Messages,State,'-flags.silent',Flags) ->
	store_flags(Messages,State,delete,Flags),
	[];
store(_Messages,_State,Action,Flags) ->
	?D({"Unknown Store Action: ",Action,Flags}),
	[].
%% Applies Action (add | replace | delete) with Flags to every message,
%% persists the result, and returns the flattened list of untagged FETCH
%% FLAGS responses for the updated messages.
store_flags(Messages,State,Action,Flags) ->
	Resp = lists:map(fun(MessageName) ->
		%% UID STORE passes {Name,UID} pairs; plain STORE passes names
		Message = case MessageName of
			{Name,_UID} -> gen_store:select(message,Name,State);
			MessageName -> gen_store:select(message,MessageName,State)
		end,
		NewMessage = lists:foldl(fun(Flag,Acc) ->
			flags(Action,Flag,Acc)
			end,Message,Flags),
		gen_store:update(NewMessage,State),
		imapd_fetch:fetch([MessageName],[#imap_fetch_cmd{name=flags,string="FLAGS"}],State)
		end,Messages),
	lists:flatten(Resp).
%% Applies a single flag operation to a #message{} record:
%%   add     -- union the flag into the message's flags
%%   replace -- replace all flags (preserving `recent` if it was set)
%%   delete  -- remove the flag
flags(add,Flag,Message) ->
	Flags = Message#message.flags,
	Message#message{flags = lists:usort([Flag|Flags])};
flags(replace,Flags,Message) ->
	%% BUG FIX: the old code re-used the name `Flags` for the message's
	%% current flags, turning `Flags = Message#message.flags` into a pattern
	%% match that crashed with badmatch whenever the replacement list
	%% differed from the stored one (i.e. in every meaningful call), and the
	%% replacement flags were never applied.
	OldFlags = Message#message.flags,
	NewFlags = case lists:member(recent,OldFlags) of
		true -> [recent|Flags];
		false -> Flags
	end,
	Message#message{flags = lists:usort(NewFlags)};
flags(delete,Flag,Message) ->
	Flags = Message#message.flags,
	NewFlags = lists:delete(Flag,Flags),
	Message#message{flags = lists:usort(NewFlags)}.
( ( ) ) - > list ( )
%% Entry point for UID sequence sets: tokenizes a raw comma-separated
%% string before expanding; State supplies uidnext for "*" ranges.
uidseq_to_list([I|_] = Seq,State) when is_integer(I) -> uidseq_to_list(string:tokens(Seq,","),State);
uidseq_to_list(Seq,State) -> uidseq_to_list(Seq,State,[]).
( Sequence::string(),State::imapd_fsm(),list ( ) ) - > list ( )
%% @spec uidseq_to_list(Tokens::[string()], State::imapd_fsm(), list()) -> [integer()]
%% @doc Expands UID sequence tokens ("7", "2:9", "5:*") into a sorted list
%%      of UIDs; "*" resolves to the selected mailbox's uidnext - 1.
uidseq_to_list([],_State,Acc) -> lists:usort(lists:flatten(lists:reverse(Acc)));
uidseq_to_list([H|T],State,Acc) ->
	case catch list_to_integer(H) of
		%% BUG FIX: this branch previously called seq_to_list(T,[Int|Acc]),
		%% dropping State -- any later "*" range in the same sequence set
		%% then crashed, since seq_to_list/2 cannot resolve "*".
		Int when is_integer(Int) -> uidseq_to_list(T,State,[Int|Acc]);
		_ ->
			[S,E] = string:tokens(H,":"),
			Start = list_to_integer(S),
			End = case E of
				"*" ->
					%% highest assigned UID in the selected mailbox
					MailBox = State#imapd_fsm.mailbox,
					MailBox#mailbox_store.uidnext - 1;
				'*' ->
					%% defensive: same resolution if "*" arrives as an atom
					MailBox = State#imapd_fsm.mailbox,
					MailBox#mailbox_store.uidnext - 1;
				E when is_list(E) -> list_to_integer(E)
			end,
			if
				Start =< End -> uidseq_to_list(T,State,[lists:seq(Start,End)|Acc]);
				true -> uidseq_to_list(T,State,Acc)
			end
	end.
Important to complete UID command
%% Resolves a list of UIDs to {MessageName,UID} pairs by scanning every
%% message in MailBox; the result is sorted by UID.
uidseq_message_names(UIDSeq,MailBox) ->
	{_MailBoxName,UserName,DomainName} = MailBox#mailbox_store.name,
	Store = erlmail_util:get_app_env(store_type_message,mnesia_store),
	Messages = lists:foldl(fun(MessageName,Acc) ->
		Message = Store:select({MessageName,UserName,DomainName}),
		case lists:member(Message#message.uid,UIDSeq) of
			true -> [{MessageName,Message#message.uid}|Acc];
			false -> Acc
		end
		end,[],MailBox#mailbox_store.messages),
	lists:keysort(2,Messages).
@spec ( String::string ( ) ) - > string ( )
%% @spec unquote(String::string()) -> string()
%% @doc Removes surrounding whitespace, then surrounding double quotes.
unquote(String) ->
	string:strip(string:strip(String, both, 32), both, 34).
e55d6e25e786991c9d0a087b78cb26f1a3f71c56f7df1a13196fee0b08735985 | bakul/s9fes | inet-server.scm | Scheme 9 from Empty Space , Unix Function Library
By , 2010
; Placed in the Public Domain
;
( inet - server procedure^5 ) = = > undefined
( inet - server procedure^5 integer ) = = > undefined
;
Create a server socket , bind it to host STRING1 , port STRING2 and
; listen for requests on that socket. When a request comes in, spawn
; a child process to handle the request. PROCEDURE^5 will be called
; by the child process. When the procedure returns, the child will
; close the connection and terminate.
;
; The following arguments are passed to PROCEDURE^5:
;
; IN an input-port for reading data from the connection
; OUT an output-port for sending data over the connection
PEER a list of the form ( IP PORT ) where IP is the remote
; IP address and PORT is the remote-side port of the
; connection
TIME the time of establishing the connection ( Unix timestamp )
DELTA the time span in seconds in which the most recent
; requests of *this* client were received; #F if the client
did not yet send NREQ resuests ; see below for details
;
When an INTEGER argument ( ) is passed to INET - SERVER , it will keep
track of the most recent requests received from each IP address .
When a client sends more than NREQ-1 requests , INET - SERVER will send
; the difference between the time of the most recent request and the
time of the oldest tracked request to PROCEDURE^5 in the DELTA argument .
; The request handler can use this value, for instance, to limit the
; number of requests per unit of time that it accepts from each client.
;
; (Example): (load-from-library "read-line.scm")
;
; (define (handle-echo-request in out . ignore)
; (display (read-line in) out)
; (newline out))
;
; (inet-server "localhost" "12345" handle-echo-request)
(require-extension sys-unix net-unix)
(load-from-library "hash-table.scm")
(load-from-library "remove.scm")
; Forking inet server: bind HOST:PORT, accept connections, and run
; REQUEST-HANDLER in a child process per connection. NREQ (default 5) is
; how many recent request timestamps to track per client IP, so DELTA can
; be handed to the handler for rate limiting.
(define (inet-server host port request-handler . nreq)
  ; HOSTS maps client IP -> list of recent timestamps (newest first)
  (let ((hosts (make-hash-table))
        (n-track (if (null? nreq)
                     5
                     (car nreq))))
    (letrec
      ((cut-last!
         ; destructively drop the last element of a list of length >= 2
         (lambda (x)
           (cond ((null? x))
                 ((null? (cdr x)))
                 ((null? (cddr x))
                   (set-cdr! x '()))
                 (else
                   (cut-last! (cdr x))))))
       (collect-zombies
         ; reap finished children, keeping only still-running pids
         (lambda (procs)
           (remp sys:waitpid procs))))
      (let ((s (sys:inet-listen host port 5)))
        (let connect-loop ((procs '()))
          (let* ((conn (sys:inet-accept s))
                 (peer (sys:inet-getpeername conn))
                 (time (sys:time))
                 (hist (hash-table-ref hosts (and peer (car peer))))
                 (hist (if hist
                           (car hist)
                           '())))
            ; push the new timestamp, evicting the oldest once N-TRACK
            ; entries exist; DELTA spans newest..oldest tracked request
            (let* ((hist (if (< (length hist) n-track)
                             (cons time hist)
                             (begin (cut-last! hist)
                                    (cons time hist))))
                   (delta (if (>= (length hist) n-track)
                              (- (car hist) (car (reverse hist)))
                              #f)))
              (if peer
                  (hash-table-set! hosts (car peer) hist))
              ; crude memory cap: forget all histories past 1000 clients
              (if (> (hash-table-length hosts) 1000)
                  (set! hosts (make-hash-table)))
              (set! procs (collect-zombies procs))
              (let ((pid (sys:fork)))
                (if (zero? pid)
                    ; child: serve the request, then close and exit
                    (let ((in (sys:make-input-port conn))
                          (out (sys:make-output-port conn)))
                      (request-handler in out peer time delta)
                      (close-output-port out)
                      (close-input-port in)
                      (sys:exit 0))
                    ; parent: drop its copy of the socket and loop
                    (begin (sys:close conn)
                           (connect-loop (cons pid procs))))))))))))
| null | https://raw.githubusercontent.com/bakul/s9fes/74c14c0db5f07f5bc6d94131e9e4ee15a29275aa/ext/sys-unix/inet-server.scm | scheme | Placed in the Public Domain
listen for requests on that socket. When a request comes in, spawn
a child process to handle the request. PROCEDURE^5 will be called
by the child process. When the procedure returns, the child will
close the connection and terminate.
The following arguments are passed to PROCEDURE^5:
IN an input-port for reading data from the connection
OUT an output-port for sending data over the connection
IP address and PORT is the remote-side port of the
connection
requests of *this* client were received; #F if the client
see below for details
the difference between the time of the most recent request and the
The request handler can use this value, for instance, to limit the
number of requests per unit of time that it accepts from each client.
(Example): (load-from-library "read-line.scm")
(define (handle-echo-request in out . ignore)
(display (read-line in) out)
(newline out))
(inet-server "localhost" "12345" handle-echo-request) | Scheme 9 from Empty Space , Unix Function Library
By , 2010
( inet - server procedure^5 ) = = > undefined
( inet - server procedure^5 integer ) = = > undefined
Create a server socket , bind it to host STRING1 , port STRING2 and
PEER a list of the form ( IP PORT ) where IP is the remote
TIME the time of establishing the connection ( Unix timestamp )
DELTA the time span in seconds in which the most recent
When an INTEGER argument ( ) is passed to INET - SERVER , it will keep
track of the most recent requests received from each IP address .
When a client sends more than NREQ-1 requests , INET - SERVER will send
time of the oldest tracked request to PROCEDURE^5 in the DELTA argument .
(require-extension sys-unix net-unix)
(load-from-library "hash-table.scm")
(load-from-library "remove.scm")
(define (inet-server host port request-handler . nreq)
(let ((hosts (make-hash-table))
(n-track (if (null? nreq)
5
(car nreq))))
(letrec
((cut-last!
(lambda (x)
(cond ((null? x))
((null? (cdr x)))
((null? (cddr x))
(set-cdr! x '()))
(else
(cut-last! (cdr x))))))
(collect-zombies
(lambda (procs)
(remp sys:waitpid procs))))
(let ((s (sys:inet-listen host port 5)))
(let connect-loop ((procs '()))
(let* ((conn (sys:inet-accept s))
(peer (sys:inet-getpeername conn))
(time (sys:time))
(hist (hash-table-ref hosts (and peer (car peer))))
(hist (if hist
(car hist)
'())))
(let* ((hist (if (< (length hist) n-track)
(cons time hist)
(begin (cut-last! hist)
(cons time hist))))
(delta (if (>= (length hist) n-track)
(- (car hist) (car (reverse hist)))
#f)))
(if peer
(hash-table-set! hosts (car peer) hist))
(if (> (hash-table-length hosts) 1000)
(set! hosts (make-hash-table)))
(set! procs (collect-zombies procs))
(let ((pid (sys:fork)))
(if (zero? pid)
(let ((in (sys:make-input-port conn))
(out (sys:make-output-port conn)))
(request-handler in out peer time delta)
(close-output-port out)
(close-input-port in)
(sys:exit 0))
(begin (sys:close conn)
(connect-loop (cons pid procs))))))))))))
|
7bc09fe9578b6ec77bbd754fc9a7cb4379a8aef5d44f7dfef65e38016e42907a | mransan/raft-udp | raft_utl_logger.mli | (** Utilities around Lwt default logger *)
val start : basename:string -> interval:int -> unit -> unit Lwt.t
(** [start ~basename ~interval ()] returns a perpetual thread which
    implements a rotating Lwt logger every [interval] seconds. Only the 3
    latest log files are kept. The log file name format is
    <basename>_<timestamp>.log *)
| null | https://raw.githubusercontent.com/mransan/raft-udp/ffa307fa6d8bdaa3133f3cc66149ac7dfda5fc7c/src/utl/raft_utl_logger.mli | ocaml | * Utilities around Lwt default logger |
val start : basename:string -> interval:int -> unit -> unit Lwt.t
* [ start ~basename ~interval ( ) ] returns a perpetual thread which
implements a rotating Lwt logger every [ interval ] seconds . Only the 3
latest log file are kept . The log file name format is
< basename>_<timestamp>.log
implements a rotating Lwt logger every [interval] seconds. Only the 3
latest log file are kept. The log file name format is
<basename>_<timestamp>.log *)
|
69addf4f103ad8d5a922f0bda286e3895011db7dee8a4a99b1414d1c510dc19a | haskell/hackage-server | State.hs | # LANGUAGE DeriveDataTypeable , ,
TypeFamilies , TemplateHaskell #
TypeFamilies, TemplateHaskell #-}
module Distribution.Server.Features.AnalyticsPixels.State
( AnalyticsPixel(..)
, AnalyticsPixelsState(..)
, initialAnalyticsPixelsState
-- * State queries and updates
, AnalyticsPixelsForPackage(..)
, AddPackageAnalyticsPixel(..)
, RemovePackageAnalyticsPixel(..)
, GetAnalyticsPixelsState(..)
, ReplaceAnalyticsPixelsState(..)
) where
import Distribution.Package (PackageName)
import Distribution.Server.Framework.MemSize (MemSize)
import Distribution.Server.Users.State ()
import Data.Text (Text)
import Data.Typeable (Typeable)
import Data.Map (Map)
import qualified Data.Map.Strict as Map
import Data.Acid (Query, Update, makeAcidic)
import Data.SafeCopy (base, deriveSafeCopy)
import Data.Set (Set)
import qualified Data.Set as Set
import Control.DeepSeq (NFData)
import qualified Control.Monad.State as State
import Control.Monad.Reader.Class (ask, asks)
-- | A single analytics (tracking) pixel: an arbitrary URL to be embedded
-- in a package's page.
newtype AnalyticsPixel = AnalyticsPixel
    {
        analyticsPixelUrl :: Text
    }
    deriving (Show, Eq, Ord, NFData, Typeable, MemSize)
-- | Feature state: the set of pixels registered for each package name.
newtype AnalyticsPixelsState = AnalyticsPixelsState
    {
        analyticsPixels :: Map PackageName (Set AnalyticsPixel)
    }
    deriving (Show, Eq, NFData, Typeable, MemSize)
SafeCopy instances
$(deriveSafeCopy 0 'base ''AnalyticsPixel)
$(deriveSafeCopy 0 'base ''AnalyticsPixelsState)
--
-- | The empty feature state: no package has any analytics pixel registered.
initialAnalyticsPixelsState :: AnalyticsPixelsState
initialAnalyticsPixelsState =
    AnalyticsPixelsState { analyticsPixels = Map.empty }
-- | All pixels registered for the given package; the empty set if none.
analyticsPixelsForPackage :: PackageName -> Query AnalyticsPixelsState (Set AnalyticsPixel)
analyticsPixelsForPackage name =
    asks (maybe Set.empty id . Map.lookup name . analyticsPixels)
-- | Adds a 'AnalyticsPixel' to a 'Package'. Returns 'True' if the pixel was inserted, and 'False' if
-- the 'AnalyticsPixel' was already present.
addPackageAnalyticsPixel :: PackageName -> AnalyticsPixel -> Update AnalyticsPixelsState Bool
addPackageAnalyticsPixel name analyticsPixel = do
    state <- State.get
    -- alterF with the ((,) Bool) functor inserts and reports prior
    -- membership in a single traversal of the map.
    let (successfullyInserted, pixels) = Map.alterF insertAnalyticsPixel name (analyticsPixels state)
    State.put (state { analyticsPixels = pixels })
    pure successfullyInserted
  where
    insertAnalyticsPixel :: Maybe (Set AnalyticsPixel) -> (Bool, Maybe (Set AnalyticsPixel))
    -- no entry for this package yet: create a singleton set
    insertAnalyticsPixel Nothing =
        (True, Just (Set.singleton analyticsPixel))
    -- entry exists: report False when the pixel is already present
    insertAnalyticsPixel existingPixels@(Just pixels)
        | analyticsPixel `Set.member` pixels =
            (False, existingPixels)
        | otherwise =
            (True, Just (Set.insert analyticsPixel pixels))
-- | Removes a 'AnalyticsPixel' from a 'Package'. If the pixel was the
-- last one registered for the package, the package's map entry is dropped
-- entirely; removing from an unknown package is a no-op.
removePackageAnalyticsPixel :: PackageName -> AnalyticsPixel -> Update AnalyticsPixelsState ()
removePackageAnalyticsPixel name analyticsPixel =
    State.modify $ \st ->
        st { analyticsPixels = Map.update dropPixel name (analyticsPixels st) }
  where
    -- Delete the pixel; collapse to Nothing when the set becomes empty.
    dropPixel pixels =
        let remaining = Set.delete analyticsPixel pixels
        in if Set.null remaining then Nothing else Just remaining
-- Get and replace the entire state, e.g. for backups and restores.

-- | Return the complete 'AnalyticsPixelsState'.
getAnalyticsPixelsState :: Query AnalyticsPixelsState AnalyticsPixelsState
getAnalyticsPixelsState = ask

-- | Overwrite the complete 'AnalyticsPixelsState'.
replaceAnalyticsPixelsState :: AnalyticsPixelsState -> Update AnalyticsPixelsState ()
replaceAnalyticsPixelsState = State.put
-- Generate the acid-state event types (e.g. 'GetAnalyticsPixelsState')
-- for the listed queries and updates.
makeAcidic
  ''AnalyticsPixelsState
  [ 'getAnalyticsPixelsState
  , 'analyticsPixelsForPackage
  , 'replaceAnalyticsPixelsState
  , 'addPackageAnalyticsPixel
  , 'removePackageAnalyticsPixel
  ]
| null | https://raw.githubusercontent.com/haskell/hackage-server/0ba623b610e3afc514a7a3dffa1bcc0994ea2ba1/src/Distribution/Server/Features/AnalyticsPixels/State.hs | haskell | * State queries and updates
| Adds a 'AnalyticsPixel' to a 'Package'. Returns 'True' if the pixel was inserted, and 'False' if
the 'AnalyticsPixel' was already present.
| Removes a 'AnalyticsPixel' from a 'Package'.
get and replace the entire state, for backups | # LANGUAGE DeriveDataTypeable , ,
TypeFamilies , TemplateHaskell #
TypeFamilies, TemplateHaskell #-}
module Distribution.Server.Features.AnalyticsPixels.State
( AnalyticsPixel(..)
, AnalyticsPixelsState(..)
, initialAnalyticsPixelsState
, AnalyticsPixelsForPackage(..)
, AddPackageAnalyticsPixel(..)
, RemovePackageAnalyticsPixel(..)
, GetAnalyticsPixelsState(..)
, ReplaceAnalyticsPixelsState(..)
) where
import Distribution.Package (PackageName)
import Distribution.Server.Framework.MemSize (MemSize)
import Distribution.Server.Users.State ()
import Data.Text (Text)
import Data.Typeable (Typeable)
import Data.Map (Map)
import qualified Data.Map.Strict as Map
import Data.Acid (Query, Update, makeAcidic)
import Data.SafeCopy (base, deriveSafeCopy)
import Data.Set (Set)
import qualified Data.Set as Set
import Control.DeepSeq (NFData)
import qualified Control.Monad.State as State
import Control.Monad.Reader.Class (ask, asks)
newtype AnalyticsPixel = AnalyticsPixel
{
analyticsPixelUrl :: Text
}
deriving (Show, Eq, Ord, NFData, Typeable, MemSize)
newtype AnalyticsPixelsState = AnalyticsPixelsState
{
analyticsPixels :: Map PackageName (Set AnalyticsPixel)
}
deriving (Show, Eq, NFData, Typeable, MemSize)
SafeCopy instances
$(deriveSafeCopy 0 'base ''AnalyticsPixel)
$(deriveSafeCopy 0 'base ''AnalyticsPixelsState)
initialAnalyticsPixelsState :: AnalyticsPixelsState
initialAnalyticsPixelsState = AnalyticsPixelsState
{
analyticsPixels = Map.empty
}
analyticsPixelsForPackage :: PackageName -> Query AnalyticsPixelsState (Set AnalyticsPixel)
analyticsPixelsForPackage name = asks $ Map.findWithDefault Set.empty name . analyticsPixels
addPackageAnalyticsPixel :: PackageName -> AnalyticsPixel -> Update AnalyticsPixelsState Bool
addPackageAnalyticsPixel name analyticsPixel = do
state <- State.get
let (successfullyInserted, pixels) = Map.alterF insertAnalyticsPixel name (analyticsPixels state)
State.put (state { analyticsPixels = pixels })
pure successfullyInserted
where
insertAnalyticsPixel :: Maybe (Set AnalyticsPixel) -> (Bool, Maybe (Set AnalyticsPixel))
insertAnalyticsPixel Nothing =
(True, Just (Set.singleton analyticsPixel))
insertAnalyticsPixel existingPixels@(Just pixels)
| analyticsPixel `Set.member` pixels =
(False, existingPixels)
| otherwise =
(True, Just (Set.insert analyticsPixel pixels))
removePackageAnalyticsPixel :: PackageName -> AnalyticsPixel -> Update AnalyticsPixelsState ()
removePackageAnalyticsPixel name analyticsPixel = do
state <- State.get
let pixels = Map.alter removeAnalyticsPixel name (analyticsPixels state)
State.put (state { analyticsPixels = pixels })
pure ()
where
removeAnalyticsPixel Nothing =
Nothing
removeAnalyticsPixel (Just pixels) =
let pixels' = analyticsPixel `Set.delete` pixels in
if Set.null pixels' then Nothing else Just pixels'
getAnalyticsPixelsState :: Query AnalyticsPixelsState AnalyticsPixelsState
getAnalyticsPixelsState = ask
replaceAnalyticsPixelsState :: AnalyticsPixelsState -> Update AnalyticsPixelsState ()
replaceAnalyticsPixelsState = State.put
makeAcidic
''AnalyticsPixelsState
[ 'getAnalyticsPixelsState
, 'analyticsPixelsForPackage
, 'replaceAnalyticsPixelsState
, 'addPackageAnalyticsPixel
, 'removePackageAnalyticsPixel
]
|
023ff5057dddc31d5349775c1f86c0fa2ddb98a1193611ea8be722d1453310fc | dschrempf/elynx | Options.hs | # LANGUAGE DeriveGeneric #
-- |
-- Module      : SLynx.Options
-- Description : SLynx general options
-- Copyright   : 2021
-- License     : GPL-3.0-or-later
--
-- Maintainer  :
-- Stability   : unstable
-- Portability : portable
--
-- Creation date: Sat Sep 7 18:55:03 2019.
module SLynx.Options (CommandArguments (..)) where
import Data.Aeson
import ELynx.Alphabet.Alphabet
import ELynx.Tools.Options
import ELynx.Tools.Reproduction
import GHC.Generics
import Options.Applicative
import SLynx.Concatenate.Options
import SLynx.Examine.Options
import SLynx.Filter.Options
import SLynx.Simulate.Options
import SLynx.SubSample.Options
import SLynx.Translate.Options
-- | The different SLynx commands and their arguments.
-- One constructor per SLynx subcommand, each wrapping that command's
-- own argument record.
data CommandArguments
  = Concatenate ConcatenateArguments
  | Examine ExamineArguments
  | FilterCols FilterColsArguments
  | FilterRows FilterRowsArguments
  | Simulate SimulateArguments
  | SubSample SubSampleArguments
  | Translate TranslateArguments
  deriving (Eq, Show, Generic)
-- Each method unwraps the subcommand and delegates to the wrapped
-- arguments' own 'Reproducible' instance; 'setSeed' re-wraps the result
-- with the original constructor.
instance Reproducible CommandArguments where
  inFiles (Concatenate a) = inFiles a
  inFiles (Examine a) = inFiles a
  inFiles (FilterCols a) = inFiles a
  inFiles (FilterRows a) = inFiles a
  inFiles (Simulate a) = inFiles a
  inFiles (SubSample a) = inFiles a
  inFiles (Translate a) = inFiles a
  outSuffixes (Concatenate a) = outSuffixes a
  outSuffixes (Examine a) = outSuffixes a
  outSuffixes (FilterCols a) = outSuffixes a
  outSuffixes (FilterRows a) = outSuffixes a
  outSuffixes (Simulate a) = outSuffixes a
  outSuffixes (SubSample a) = outSuffixes a
  outSuffixes (Translate a) = outSuffixes a
  getSeed (Concatenate a) = getSeed a
  getSeed (Examine a) = getSeed a
  getSeed (FilterCols a) = getSeed a
  getSeed (FilterRows a) = getSeed a
  getSeed (Simulate a) = getSeed a
  getSeed (SubSample a) = getSeed a
  getSeed (Translate a) = getSeed a
  setSeed (Concatenate a) = Concatenate . setSeed a
  setSeed (Examine a) = Examine . setSeed a
  setSeed (FilterCols a) = FilterCols . setSeed a
  setSeed (FilterRows a) = FilterRows . setSeed a
  setSeed (Simulate a) = Simulate . setSeed a
  setSeed (SubSample a) = SubSample . setSeed a
  setSeed (Translate a) = Translate . setSeed a
  parser = commandArguments
  cmdName = "slynx"
  cmdDsc = ["Analyze, and simulate multi sequence alignments."]
  -- Help-text footer: list supported file formats and alphabets.
  cmdFtr =
    [ "",
      "Available sequence file formats:"
    ]
      ++ fs
      ++ ["", "Available alphabets:"]
      ++ as
    where
      toListItem = (" - " ++)
      fs = map toListItem ["FASTA"]
      as = map (toListItem . alphabetDescription) ([minBound ..] :: [Alphabet])
-- JSON (de)serialisation via aeson's Generic-based defaults (empty
-- instance bodies).
instance FromJSON CommandArguments

instance ToJSON CommandArguments
-- optparse-applicative command definitions, one per subcommand; each is
-- built by 'createCommandReproducible' from the respective constructor.
concatenateCommand :: Mod CommandFields CommandArguments
concatenateCommand = createCommandReproducible Concatenate

examineCommand :: Mod CommandFields CommandArguments
examineCommand = createCommandReproducible Examine

filterColumnsCommand :: Mod CommandFields CommandArguments
filterColumnsCommand = createCommandReproducible FilterCols

filterRowsCommand :: Mod CommandFields CommandArguments
filterRowsCommand = createCommandReproducible FilterRows

simulateCommand :: Mod CommandFields CommandArguments
simulateCommand = createCommandReproducible Simulate

subSampleCommand :: Mod CommandFields CommandArguments
subSampleCommand = createCommandReproducible SubSample

translateCommand :: Mod CommandFields CommandArguments
translateCommand = createCommandReproducible Translate
-- | Parser dispatching to one of the SLynx subcommands.
commandArguments :: Parser CommandArguments
commandArguments =
  hsubparser $
    concatenateCommand
      <> examineCommand
      <> filterColumnsCommand
      <> filterRowsCommand
      <> simulateCommand
      <> subSampleCommand
      <> translateCommand
| null | https://raw.githubusercontent.com/dschrempf/elynx/f73f4474c61c22c6a9e54c56bdc34b37eff09687/slynx/src/SLynx/Options.hs | haskell | |
Maintainer :
Stability : unstable
Portability : portable
| # LANGUAGE DeriveGeneric #
Module : SLynx . Options
Description : SLynx general options
Copyright : 2021
License : GPL-3.0 - or - later
Creation date : Sat Sep 7 18:55:03 2019 .
module SLynx.Options (CommandArguments (..)) where
import Data.Aeson
import ELynx.Alphabet.Alphabet
import ELynx.Tools.Options
import ELynx.Tools.Reproduction
import GHC.Generics
import Options.Applicative
import SLynx.Concatenate.Options
import SLynx.Examine.Options
import SLynx.Filter.Options
import SLynx.Simulate.Options
import SLynx.SubSample.Options
import SLynx.Translate.Options
| The different SLynx commands and their arguments .
data CommandArguments
= Concatenate ConcatenateArguments
| Examine ExamineArguments
| FilterCols FilterColsArguments
| FilterRows FilterRowsArguments
| Simulate SimulateArguments
| SubSample SubSampleArguments
| Translate TranslateArguments
deriving (Eq, Show, Generic)
instance Reproducible CommandArguments where
inFiles (Concatenate a) = inFiles a
inFiles (Examine a) = inFiles a
inFiles (FilterCols a) = inFiles a
inFiles (FilterRows a) = inFiles a
inFiles (Simulate a) = inFiles a
inFiles (SubSample a) = inFiles a
inFiles (Translate a) = inFiles a
outSuffixes (Concatenate a) = outSuffixes a
outSuffixes (Examine a) = outSuffixes a
outSuffixes (FilterCols a) = outSuffixes a
outSuffixes (FilterRows a) = outSuffixes a
outSuffixes (Simulate a) = outSuffixes a
outSuffixes (SubSample a) = outSuffixes a
outSuffixes (Translate a) = outSuffixes a
getSeed (Concatenate a) = getSeed a
getSeed (Examine a) = getSeed a
getSeed (FilterCols a) = getSeed a
getSeed (FilterRows a) = getSeed a
getSeed (Simulate a) = getSeed a
getSeed (SubSample a) = getSeed a
getSeed (Translate a) = getSeed a
setSeed (Concatenate a) = Concatenate . setSeed a
setSeed (Examine a) = Examine . setSeed a
setSeed (FilterCols a) = FilterCols . setSeed a
setSeed (FilterRows a) = FilterRows . setSeed a
setSeed (Simulate a) = Simulate . setSeed a
setSeed (SubSample a) = SubSample . setSeed a
setSeed (Translate a) = Translate . setSeed a
parser = commandArguments
cmdName = "slynx"
cmdDsc = ["Analyze, and simulate multi sequence alignments."]
cmdFtr =
[ "",
"Available sequence file formats:"
]
++ fs
++ ["", "Available alphabets:"]
++ as
where
toListItem = (" - " ++)
fs = map toListItem ["FASTA"]
as = map (toListItem . alphabetDescription) ([minBound ..] :: [Alphabet])
instance FromJSON CommandArguments
instance ToJSON CommandArguments
concatenateCommand :: Mod CommandFields CommandArguments
concatenateCommand = createCommandReproducible Concatenate
examineCommand :: Mod CommandFields CommandArguments
examineCommand = createCommandReproducible Examine
filterColumnsCommand :: Mod CommandFields CommandArguments
filterColumnsCommand = createCommandReproducible FilterCols
filterRowsCommand :: Mod CommandFields CommandArguments
filterRowsCommand = createCommandReproducible FilterRows
simulateCommand :: Mod CommandFields CommandArguments
simulateCommand = createCommandReproducible Simulate
subSampleCommand :: Mod CommandFields CommandArguments
subSampleCommand = createCommandReproducible SubSample
translateCommand :: Mod CommandFields CommandArguments
translateCommand = createCommandReproducible Translate
commandArguments :: Parser CommandArguments
commandArguments =
hsubparser $
concatenateCommand
<> examineCommand
<> filterColumnsCommand
<> filterRowsCommand
<> simulateCommand
<> subSampleCommand
<> translateCommand
|
ff1289590dfca76cfd5a44aa9499a3f13f99ff3d2e5f65ca0b4c4fc5588051f6 | debug-ito/net-spider | Internal.hs | -- |
-- Module: NetSpider.Query.Internal
-- Description:
Maintainer : < >
--
--
module NetSpider.Query.Internal
( FoundNodePolicy(..)
) where
-- | Policy to treat 'FoundNode's (local findings) when the spider
-- creates the snapshot graph.
--
-- 'Eq' instance was added in version 0.3.2.0
--
-- @since 0.2.0.0
data FoundNodePolicy n na =
    PolicyOverwrite -- ^ NOTE(review): presumably a newer finding replaces older ones; confirm
  | PolicyAppend -- ^ NOTE(review): presumably findings accumulate; confirm
  deriving (Show, Eq)
| null | https://raw.githubusercontent.com/debug-ito/net-spider/82dfbdca1add1edfd54ef36cb1ca5129d528b814/net-spider/src/NetSpider/Query/Internal.hs | haskell | |
Module: NetSpider.Query.Internal
Description:
creates the snapshot graph.
| Maintainer : < >
module NetSpider.Query.Internal
( FoundNodePolicy(..)
) where
| Policy to treat ' FoundNode 's ( local findings ) when the spider
' ' instance was added in version 0.3.2.0
@since 0.2.0.0
data FoundNodePolicy n na=
PolicyOverwrite
| PolicyAppend
deriving (Show,Eq)
|
30774b0b9ca9c4c8ef634527fb300189632cfbf0519d02a585ef9bc80c6dd691 | monadfix/ormolu-live | Lit.hs |
( c ) The University of Glasgow 2006
( c ) The GRASP / AQUA Project , Glasgow University , 1992 - 1998
\section[HsLit]{Abstract syntax : source - language literals }
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[HsLit]{Abstract syntax: source-language literals}
-}
# LANGUAGE CPP , DeriveDataTypeable #
{-# LANGUAGE TypeSynonymInstances #-}
# LANGUAGE StandaloneDeriving #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE UndecidableInstances #
in module GHC.Hs . PlaceHolder
{-# LANGUAGE ConstraintKinds #-}
# LANGUAGE TypeFamilies #
module GHC.Hs.Lit where
#include "HsVersions2.h"
import GhcPrelude
import {-# SOURCE #-} GHC.Hs.Expr( HsExpr, pprExpr )
import BasicTypes ( IntegralLit(..),FractionalLit(..),negateIntegralLit,
negateFractionalLit,SourceText(..),pprWithSourceText,
PprPrec(..), topPrec )
import Type
import Outputable
import FastString
import GHC.Hs.Extension
import Data.ByteString (ByteString)
import Data.Data hiding ( Fixity )
{-
************************************************************************
* *
\subsection[HsLit]{Literals}
* *
************************************************************************
-}
Note [ Literal source text ] in for SourceText fields in
-- the following
Note [ Trees that grow ] in GHC.Hs . Extension for the Xxxxx fields in the following
-- | Haskell Literal
data HsLit x
= HsChar (XHsChar x) {- SourceText -} Char
-- ^ Character
| HsCharPrim (XHsCharPrim x) {- SourceText -} Char
^ character
| HsString (XHsString x) {- SourceText -} FastString
-- ^ String
| HsStringPrim (XHsStringPrim x) {- SourceText -} ByteString
-- ^ Packed bytes
| HsInt (XHsInt x) IntegralLit
-- ^ Genuinely an Int; arises from
-- @TcGenDeriv@, and from TRANSLATION
| HsIntPrim (XHsIntPrim x) {- SourceText -} Integer
-- ^ literal @Int#@
| HsWordPrim (XHsWordPrim x) {- SourceText -} Integer
^ literal @Word#@
| HsInt64Prim (XHsInt64Prim x) {- SourceText -} Integer
-- ^ literal @Int64#@
| HsWord64Prim (XHsWord64Prim x) {- SourceText -} Integer
-- ^ literal @Word64#@
| HsInteger (XHsInteger x) {- SourceText -} Integer Type
-- ^ Genuinely an integer; arises only
-- from TRANSLATION (overloaded
-- literals are done with HsOverLit)
| HsRat (XHsRat x) FractionalLit Type
-- ^ Genuinely a rational; arises only from
-- TRANSLATION (overloaded literals are
-- done with HsOverLit)
| HsFloatPrim (XHsFloatPrim x) FractionalLit
^ Unboxed Float
| HsDoublePrim (XHsDoublePrim x) FractionalLit
-- ^ Unboxed Double
| XLit (XXLit x)
type instance XHsChar (GhcPass _) = SourceText
type instance XHsCharPrim (GhcPass _) = SourceText
type instance XHsString (GhcPass _) = SourceText
type instance XHsStringPrim (GhcPass _) = SourceText
type instance XHsInt (GhcPass _) = NoExtField
type instance XHsIntPrim (GhcPass _) = SourceText
type instance XHsWordPrim (GhcPass _) = SourceText
type instance XHsInt64Prim (GhcPass _) = SourceText
type instance XHsWord64Prim (GhcPass _) = SourceText
type instance XHsInteger (GhcPass _) = SourceText
type instance XHsRat (GhcPass _) = NoExtField
type instance XHsFloatPrim (GhcPass _) = NoExtField
type instance XHsDoublePrim (GhcPass _) = NoExtField
type instance XXLit (GhcPass _) = NoExtCon
instance Eq (HsLit x) where
(HsChar _ x1) == (HsChar _ x2) = x1==x2
(HsCharPrim _ x1) == (HsCharPrim _ x2) = x1==x2
(HsString _ x1) == (HsString _ x2) = x1==x2
(HsStringPrim _ x1) == (HsStringPrim _ x2) = x1==x2
(HsInt _ x1) == (HsInt _ x2) = x1==x2
(HsIntPrim _ x1) == (HsIntPrim _ x2) = x1==x2
(HsWordPrim _ x1) == (HsWordPrim _ x2) = x1==x2
(HsInt64Prim _ x1) == (HsInt64Prim _ x2) = x1==x2
(HsWord64Prim _ x1) == (HsWord64Prim _ x2) = x1==x2
(HsInteger _ x1 _) == (HsInteger _ x2 _) = x1==x2
(HsRat _ x1 _) == (HsRat _ x2 _) = x1==x2
(HsFloatPrim _ x1) == (HsFloatPrim _ x2) = x1==x2
(HsDoublePrim _ x1) == (HsDoublePrim _ x2) = x1==x2
_ == _ = False
-- | Haskell Overloaded Literal
data HsOverLit p
= OverLit {
ol_ext :: (XOverLit p),
ol_val :: OverLitVal,
ol_witness :: HsExpr p} -- Note [Overloaded literal witnesses]
| XOverLit
(XXOverLit p)
data OverLitTc
= OverLitTc {
ol_rebindable :: Bool, -- Note [ol_rebindable]
ol_type :: Type }
deriving Data
type instance XOverLit GhcPs = NoExtField
type instance XOverLit GhcRn = Bool -- Note [ol_rebindable]
type instance XOverLit GhcTc = OverLitTc
type instance XXOverLit (GhcPass _) = NoExtCon
Note [ Literal source text ] in for SourceText fields in
-- the following
-- | Overloaded Literal Value
data OverLitVal
= HsIntegral !IntegralLit -- ^ Integer-looking literals;
| HsFractional !FractionalLit -- ^ Frac-looking literals
| HsIsString !SourceText !FastString -- ^ String-looking literals
deriving Data
negateOverLitVal :: OverLitVal -> OverLitVal
negateOverLitVal (HsIntegral i) = HsIntegral (negateIntegralLit i)
negateOverLitVal (HsFractional f) = HsFractional (negateFractionalLit f)
negateOverLitVal _ = panic "negateOverLitVal: argument is not a number"
overLitType :: HsOverLit GhcTc -> Type
overLitType (OverLit (OverLitTc _ ty) _ _) = ty
overLitType (XOverLit nec) = noExtCon nec
| Convert a literal from one index type to another , updating the annotations
according to the relevant ' ' instance
convertLit :: (ConvertIdX a b) => HsLit a -> HsLit b
convertLit (HsChar a x) = (HsChar (convert a) x)
convertLit (HsCharPrim a x) = (HsCharPrim (convert a) x)
convertLit (HsString a x) = (HsString (convert a) x)
convertLit (HsStringPrim a x) = (HsStringPrim (convert a) x)
convertLit (HsInt a x) = (HsInt (convert a) x)
convertLit (HsIntPrim a x) = (HsIntPrim (convert a) x)
convertLit (HsWordPrim a x) = (HsWordPrim (convert a) x)
convertLit (HsInt64Prim a x) = (HsInt64Prim (convert a) x)
convertLit (HsWord64Prim a x) = (HsWord64Prim (convert a) x)
convertLit (HsInteger a x b) = (HsInteger (convert a) x b)
convertLit (HsRat a x b) = (HsRat (convert a) x b)
convertLit (HsFloatPrim a x) = (HsFloatPrim (convert a) x)
convertLit (HsDoublePrim a x) = (HsDoublePrim (convert a) x)
convertLit (XLit a) = (XLit (convert a))
Note [ ol_rebindable ]
~~~~~~~~~~~~~~~~~~~~
The ol_rebindable field is True if this literal is actually
using rebindable syntax . Specifically :
False iff ol_witness is the standard one
True iff ol_witness is non - standard
Equivalently it 's True if
a ) RebindableSyntax is on
b ) the witness for fromInteger / fromRational / fromString
that happens to be in scope is n't the standard one
Note [ Overloaded literal witnesses ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Before * type checking , the HsExpr in an HsOverLit is the
name of the coercion function , ' fromInteger ' or ' fromRational ' .
* After * type checking , it is a witness for the literal , such as
( fromInteger 3 ) or lit_78
This witness should replace the literal .
This dual role is unusual , because we 're replacing ' fromInteger ' with
a call to fromInteger . Reason : it allows commoning up of the fromInteger
calls , which would n't be possible if the desugarer made the application .
The PostTcType in each branch records the type the overload literal is
found to have .
Note [ol_rebindable]
~~~~~~~~~~~~~~~~~~~~
The ol_rebindable field is True if this literal is actually
using rebindable syntax. Specifically:
False iff ol_witness is the standard one
True iff ol_witness is non-standard
Equivalently it's True if
a) RebindableSyntax is on
b) the witness for fromInteger/fromRational/fromString
that happens to be in scope isn't the standard one
Note [Overloaded literal witnesses]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*Before* type checking, the HsExpr in an HsOverLit is the
name of the coercion function, 'fromInteger' or 'fromRational'.
*After* type checking, it is a witness for the literal, such as
(fromInteger 3) or lit_78
This witness should replace the literal.
This dual role is unusual, because we're replacing 'fromInteger' with
a call to fromInteger. Reason: it allows commoning up of the fromInteger
calls, which wouldn't be possible if the desugarer made the application.
The PostTcType in each branch records the type the overload literal is
found to have.
-}
Comparison operations are needed when grouping literals
-- for compiling pattern-matching (module MatchLit)
instance (Eq (XXOverLit p)) => Eq (HsOverLit p) where
(OverLit _ val1 _) == (OverLit _ val2 _) = val1 == val2
(XOverLit val1) == (XOverLit val2) = val1 == val2
_ == _ = panic "Eq HsOverLit"
instance Eq OverLitVal where
(HsIntegral i1) == (HsIntegral i2) = i1 == i2
(HsFractional f1) == (HsFractional f2) = f1 == f2
(HsIsString _ s1) == (HsIsString _ s2) = s1 == s2
_ == _ = False
instance (Ord (XXOverLit p)) => Ord (HsOverLit p) where
compare (OverLit _ val1 _) (OverLit _ val2 _) = val1 `compare` val2
compare (XOverLit val1) (XOverLit val2) = val1 `compare` val2
compare _ _ = panic "Ord HsOverLit"
instance Ord OverLitVal where
compare (HsIntegral i1) (HsIntegral i2) = i1 `compare` i2
compare (HsIntegral _) (HsFractional _) = LT
compare (HsIntegral _) (HsIsString _ _) = LT
compare (HsFractional f1) (HsFractional f2) = f1 `compare` f2
compare (HsFractional _) (HsIntegral _) = GT
compare (HsFractional _) (HsIsString _ _) = LT
compare (HsIsString _ s1) (HsIsString _ s2) = s1 `compare` s2
compare (HsIsString _ _) (HsIntegral _) = GT
compare (HsIsString _ _) (HsFractional _) = GT
Instance specific to GhcPs , need the SourceText
instance Outputable (HsLit (GhcPass p)) where
ppr (HsChar st c) = pprWithSourceText st (pprHsChar c)
ppr (HsCharPrim st c) = pp_st_suffix st primCharSuffix (pprPrimChar c)
ppr (HsString st s) = pprWithSourceText st (pprHsString s)
ppr (HsStringPrim st s) = pprWithSourceText st (pprHsBytes s)
ppr (HsInt _ i)
= pprWithSourceText (il_text i) (integer (il_value i))
ppr (HsInteger st i _) = pprWithSourceText st (integer i)
ppr (HsRat _ f _) = ppr f
ppr (HsFloatPrim _ f) = ppr f <> primFloatSuffix
ppr (HsDoublePrim _ d) = ppr d <> primDoubleSuffix
ppr (HsIntPrim st i) = pprWithSourceText st (pprPrimInt i)
ppr (HsWordPrim st w) = pprWithSourceText st (pprPrimWord w)
ppr (HsInt64Prim st i) = pp_st_suffix st primInt64Suffix (pprPrimInt64 i)
ppr (HsWord64Prim st w) = pp_st_suffix st primWord64Suffix (pprPrimWord64 w)
ppr (XLit x) = ppr x
pp_st_suffix :: SourceText -> SDoc -> SDoc -> SDoc
pp_st_suffix NoSourceText _ doc = doc
pp_st_suffix (SourceText st) suffix _ = text st <> suffix
-- in debug mode, print the expression that it's resolved to, too
instance OutputableBndrId p
=> Outputable (HsOverLit (GhcPass p)) where
ppr (OverLit {ol_val=val, ol_witness=witness})
= ppr val <+> (whenPprDebug (parens (pprExpr witness)))
ppr (XOverLit x) = ppr x
instance Outputable OverLitVal where
ppr (HsIntegral i) = pprWithSourceText (il_text i) (integer (il_value i))
ppr (HsFractional f) = ppr f
ppr (HsIsString st s) = pprWithSourceText st (pprHsString s)
-- | pmPprHsLit pretty prints literals and is used when pretty printing pattern
-- match warnings. All are printed the same (i.e., without hashes if they are
-- primitive and not wrapped in constructors if they are boxed). This happens
-- mainly for too reasons:
-- * We do not want to expose their internal representation
-- * The warnings become too messy
pmPprHsLit :: HsLit (GhcPass x) -> SDoc
pmPprHsLit (HsChar _ c) = pprHsChar c
pmPprHsLit (HsCharPrim _ c) = pprHsChar c
pmPprHsLit (HsString st s) = pprWithSourceText st (pprHsString s)
pmPprHsLit (HsStringPrim _ s) = pprHsBytes s
pmPprHsLit (HsInt _ i) = integer (il_value i)
pmPprHsLit (HsIntPrim _ i) = integer i
pmPprHsLit (HsWordPrim _ w) = integer w
pmPprHsLit (HsInt64Prim _ i) = integer i
pmPprHsLit (HsWord64Prim _ w) = integer w
pmPprHsLit (HsInteger _ i _) = integer i
pmPprHsLit (HsRat _ f _) = ppr f
pmPprHsLit (HsFloatPrim _ f) = ppr f
pmPprHsLit (HsDoublePrim _ d) = ppr d
pmPprHsLit (XLit x) = ppr x
| @'hsLitNeedsParens ' p l@ returns ' True ' if a literal @l@ needs
-- to be parenthesized under precedence @p@.
hsLitNeedsParens :: PprPrec -> HsLit x -> Bool
hsLitNeedsParens p = go
where
go (HsChar {}) = False
go (HsCharPrim {}) = False
go (HsString {}) = False
go (HsStringPrim {}) = False
go (HsInt _ x) = p > topPrec && il_neg x
go (HsIntPrim _ x) = p > topPrec && x < 0
go (HsWordPrim {}) = False
go (HsInt64Prim _ x) = p > topPrec && x < 0
go (HsWord64Prim {}) = False
go (HsInteger _ x _) = p > topPrec && x < 0
go (HsRat _ x _) = p > topPrec && fl_neg x
go (HsFloatPrim _ x) = p > topPrec && fl_neg x
go (HsDoublePrim _ x) = p > topPrec && fl_neg x
go (XLit _) = False
| @'hsOverLitNeedsParens ' p returns ' True ' if an overloaded literal
-- @ol@ needs to be parenthesized under precedence @p@.
hsOverLitNeedsParens :: PprPrec -> HsOverLit x -> Bool
hsOverLitNeedsParens p (OverLit { ol_val = olv }) = go olv
where
go :: OverLitVal -> Bool
go (HsIntegral x) = p > topPrec && il_neg x
go (HsFractional x) = p > topPrec && fl_neg x
go (HsIsString {}) = False
hsOverLitNeedsParens _ (XOverLit { }) = False
| null | https://raw.githubusercontent.com/monadfix/ormolu-live/d8ae72ef168b98a8d179d642f70352c88b3ac226/ghc-lib-parser-8.10.1.20200412/compiler/GHC/Hs/Lit.hs | haskell | # LANGUAGE TypeSynonymInstances #
# LANGUAGE ConstraintKinds #
# SOURCE #
************************************************************************
* *
\subsection[HsLit]{Literals}
* *
************************************************************************
the following
| Haskell Literal
SourceText
^ Character
SourceText
SourceText
^ String
SourceText
^ Packed bytes
^ Genuinely an Int; arises from
@TcGenDeriv@, and from TRANSLATION
SourceText
^ literal @Int#@
SourceText
SourceText
^ literal @Int64#@
SourceText
^ literal @Word64#@
SourceText
^ Genuinely an integer; arises only
from TRANSLATION (overloaded
literals are done with HsOverLit)
^ Genuinely a rational; arises only from
TRANSLATION (overloaded literals are
done with HsOverLit)
^ Unboxed Double
| Haskell Overloaded Literal
Note [Overloaded literal witnesses]
Note [ol_rebindable]
Note [ol_rebindable]
the following
| Overloaded Literal Value
^ Integer-looking literals;
^ Frac-looking literals
^ String-looking literals
for compiling pattern-matching (module MatchLit)
in debug mode, print the expression that it's resolved to, too
| pmPprHsLit pretty prints literals and is used when pretty printing pattern
match warnings. All are printed the same (i.e., without hashes if they are
primitive and not wrapped in constructors if they are boxed). This happens
mainly for too reasons:
* We do not want to expose their internal representation
* The warnings become too messy
to be parenthesized under precedence @p@.
@ol@ needs to be parenthesized under precedence @p@. |
( c ) The University of Glasgow 2006
( c ) The GRASP / AQUA Project , Glasgow University , 1992 - 1998
\section[HsLit]{Abstract syntax : source - language literals }
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[HsLit]{Abstract syntax: source-language literals}
-}
# LANGUAGE CPP , DeriveDataTypeable #
# LANGUAGE StandaloneDeriving #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE UndecidableInstances #
in module GHC.Hs . PlaceHolder
# LANGUAGE TypeFamilies #
module GHC.Hs.Lit where
#include "HsVersions2.h"
import GhcPrelude
import BasicTypes ( IntegralLit(..),FractionalLit(..),negateIntegralLit,
negateFractionalLit,SourceText(..),pprWithSourceText,
PprPrec(..), topPrec )
import Type
import Outputable
import FastString
import GHC.Hs.Extension
import Data.ByteString (ByteString)
import Data.Data hiding ( Fixity )
Note [ Literal source text ] in for SourceText fields in
Note [ Trees that grow ] in GHC.Hs . Extension for the Xxxxx fields in the following
data HsLit x
^ character
| HsInt (XHsInt x) IntegralLit
^ literal @Word#@
| HsRat (XHsRat x) FractionalLit Type
| HsFloatPrim (XHsFloatPrim x) FractionalLit
^ Unboxed Float
| HsDoublePrim (XHsDoublePrim x) FractionalLit
| XLit (XXLit x)
type instance XHsChar (GhcPass _) = SourceText
type instance XHsCharPrim (GhcPass _) = SourceText
type instance XHsString (GhcPass _) = SourceText
type instance XHsStringPrim (GhcPass _) = SourceText
type instance XHsInt (GhcPass _) = NoExtField
type instance XHsIntPrim (GhcPass _) = SourceText
type instance XHsWordPrim (GhcPass _) = SourceText
type instance XHsInt64Prim (GhcPass _) = SourceText
type instance XHsWord64Prim (GhcPass _) = SourceText
type instance XHsInteger (GhcPass _) = SourceText
type instance XHsRat (GhcPass _) = NoExtField
type instance XHsFloatPrim (GhcPass _) = NoExtField
type instance XHsDoublePrim (GhcPass _) = NoExtField
type instance XXLit (GhcPass _) = NoExtCon
-- Literal equality compares only the payload, ignoring the extension
-- field (and, for 'HsInteger'/'HsRat', the Type field too); literals of
-- different constructors are never equal (final catch-all).
instance Eq (HsLit x) where
  (HsChar _ x1) == (HsChar _ x2) = x1 == x2
  (HsCharPrim _ x1) == (HsCharPrim _ x2) = x1 == x2
  (HsString _ x1) == (HsString _ x2) = x1 == x2
  (HsStringPrim _ x1) == (HsStringPrim _ x2) = x1 == x2
  (HsInt _ x1) == (HsInt _ x2) = x1 == x2
  (HsIntPrim _ x1) == (HsIntPrim _ x2) = x1 == x2
  (HsWordPrim _ x1) == (HsWordPrim _ x2) = x1 == x2
  (HsInt64Prim _ x1) == (HsInt64Prim _ x2) = x1 == x2
  (HsWord64Prim _ x1) == (HsWord64Prim _ x2) = x1 == x2
  (HsInteger _ x1 _) == (HsInteger _ x2 _) = x1 == x2
  (HsRat _ x1 _) == (HsRat _ x2 _) = x1 == x2
  (HsFloatPrim _ x1) == (HsFloatPrim _ x2) = x1 == x2
  (HsDoublePrim _ x1) == (HsDoublePrim _ x2) = x1 == x2
  _ == _ = False
data HsOverLit p
= OverLit {
ol_ext :: (XOverLit p),
ol_val :: OverLitVal,
| XOverLit
(XXOverLit p)
data OverLitTc
= OverLitTc {
ol_type :: Type }
deriving Data
type instance XOverLit GhcPs = NoExtField
type instance XOverLit GhcTc = OverLitTc
type instance XXOverLit (GhcPass _) = NoExtCon
Note [ Literal source text ] in for SourceText fields in
data OverLitVal
deriving Data
-- | Negate a numeric overloaded-literal value. Non-numeric literals
-- (anything other than 'HsIntegral'/'HsFractional') hit the catch-all
-- and panic.
negateOverLitVal :: OverLitVal -> OverLitVal
negateOverLitVal (HsIntegral i) = HsIntegral (negateIntegralLit i)
negateOverLitVal (HsFractional f) = HsFractional (negateFractionalLit f)
negateOverLitVal _ = panic "negateOverLitVal: argument is not a number"
-- | The type a type-checked overloaded literal was found to have,
-- as recorded in its 'OverLitTc' extension field.
overLitType :: HsOverLit GhcTc -> Type
overLitType (OverLit (OverLitTc _ ty) _ _) = ty
overLitType (XOverLit nec) = noExtCon nec
-- | Convert a literal from one index type to another, updating the annotations
-- according to the relevant 'ConvertIdX' instance
-- Only the extension/annotation field is converted (via 'convert');
-- the literal payload is carried over unchanged.
convertLit :: (ConvertIdX a b) => HsLit a -> HsLit b
convertLit (HsChar a x) = (HsChar (convert a) x)
convertLit (HsCharPrim a x) = (HsCharPrim (convert a) x)
convertLit (HsString a x) = (HsString (convert a) x)
convertLit (HsStringPrim a x) = (HsStringPrim (convert a) x)
convertLit (HsInt a x) = (HsInt (convert a) x)
convertLit (HsIntPrim a x) = (HsIntPrim (convert a) x)
convertLit (HsWordPrim a x) = (HsWordPrim (convert a) x)
convertLit (HsInt64Prim a x) = (HsInt64Prim (convert a) x)
convertLit (HsWord64Prim a x) = (HsWord64Prim (convert a) x)
convertLit (HsInteger a x b) = (HsInteger (convert a) x b)
convertLit (HsRat a x b) = (HsRat (convert a) x b)
convertLit (HsFloatPrim a x) = (HsFloatPrim (convert a) x)
convertLit (HsDoublePrim a x) = (HsDoublePrim (convert a) x)
convertLit (XLit a) = (XLit (convert a))
Note [ ol_rebindable ]
~~~~~~~~~~~~~~~~~~~~
The ol_rebindable field is True if this literal is actually
using rebindable syntax . Specifically :
False iff ol_witness is the standard one
True iff ol_witness is non - standard
Equivalently it 's True if
a ) RebindableSyntax is on
b ) the witness for fromInteger / fromRational / fromString
that happens to be in scope is n't the standard one
Note [ Overloaded literal witnesses ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Before * type checking , the HsExpr in an HsOverLit is the
name of the coercion function , ' fromInteger ' or ' fromRational ' .
* After * type checking , it is a witness for the literal , such as
( fromInteger 3 ) or lit_78
This witness should replace the literal .
This dual role is unusual , because we 're replacing ' fromInteger ' with
a call to fromInteger . Reason : it allows commoning up of the fromInteger
calls , which would n't be possible if the desugarer made the application .
The PostTcType in each branch records the type the overload literal is
found to have .
Note [ol_rebindable]
~~~~~~~~~~~~~~~~~~~~
The ol_rebindable field is True if this literal is actually
using rebindable syntax. Specifically:
False iff ol_witness is the standard one
True iff ol_witness is non-standard
Equivalently it's True if
a) RebindableSyntax is on
b) the witness for fromInteger/fromRational/fromString
that happens to be in scope isn't the standard one
Note [Overloaded literal witnesses]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*Before* type checking, the HsExpr in an HsOverLit is the
name of the coercion function, 'fromInteger' or 'fromRational'.
*After* type checking, it is a witness for the literal, such as
(fromInteger 3) or lit_78
This witness should replace the literal.
This dual role is unusual, because we're replacing 'fromInteger' with
a call to fromInteger. Reason: it allows commoning up of the fromInteger
calls, which wouldn't be possible if the desugarer made the application.
The PostTcType in each branch records the type the overload literal is
found to have.
-}
Comparison operations are needed when grouping literals
instance (Eq (XXOverLit p)) => Eq (HsOverLit p) where
(OverLit _ val1 _) == (OverLit _ val2 _) = val1 == val2
(XOverLit val1) == (XOverLit val2) = val1 == val2
_ == _ = panic "Eq HsOverLit"
instance Eq OverLitVal where
(HsIntegral i1) == (HsIntegral i2) = i1 == i2
(HsFractional f1) == (HsFractional f2) = f1 == f2
(HsIsString _ s1) == (HsIsString _ s2) = s1 == s2
_ == _ = False
instance (Ord (XXOverLit p)) => Ord (HsOverLit p) where
compare (OverLit _ val1 _) (OverLit _ val2 _) = val1 `compare` val2
compare (XOverLit val1) (XOverLit val2) = val1 `compare` val2
compare _ _ = panic "Ord HsOverLit"
instance Ord OverLitVal where
compare (HsIntegral i1) (HsIntegral i2) = i1 `compare` i2
compare (HsIntegral _) (HsFractional _) = LT
compare (HsIntegral _) (HsIsString _ _) = LT
compare (HsFractional f1) (HsFractional f2) = f1 `compare` f2
compare (HsFractional _) (HsIntegral _) = GT
compare (HsFractional _) (HsIsString _ _) = LT
compare (HsIsString _ s1) (HsIsString _ s2) = s1 `compare` s2
compare (HsIsString _ _) (HsIntegral _) = GT
compare (HsIsString _ _) (HsFractional _) = GT
Instance specific to GhcPs , need the SourceText
instance Outputable (HsLit (GhcPass p)) where
ppr (HsChar st c) = pprWithSourceText st (pprHsChar c)
ppr (HsCharPrim st c) = pp_st_suffix st primCharSuffix (pprPrimChar c)
ppr (HsString st s) = pprWithSourceText st (pprHsString s)
ppr (HsStringPrim st s) = pprWithSourceText st (pprHsBytes s)
ppr (HsInt _ i)
= pprWithSourceText (il_text i) (integer (il_value i))
ppr (HsInteger st i _) = pprWithSourceText st (integer i)
ppr (HsRat _ f _) = ppr f
ppr (HsFloatPrim _ f) = ppr f <> primFloatSuffix
ppr (HsDoublePrim _ d) = ppr d <> primDoubleSuffix
ppr (HsIntPrim st i) = pprWithSourceText st (pprPrimInt i)
ppr (HsWordPrim st w) = pprWithSourceText st (pprPrimWord w)
ppr (HsInt64Prim st i) = pp_st_suffix st primInt64Suffix (pprPrimInt64 i)
ppr (HsWord64Prim st w) = pp_st_suffix st primWord64Suffix (pprPrimWord64 w)
ppr (XLit x) = ppr x
pp_st_suffix :: SourceText -> SDoc -> SDoc -> SDoc
pp_st_suffix NoSourceText _ doc = doc
pp_st_suffix (SourceText st) suffix _ = text st <> suffix
instance OutputableBndrId p
=> Outputable (HsOverLit (GhcPass p)) where
ppr (OverLit {ol_val=val, ol_witness=witness})
= ppr val <+> (whenPprDebug (parens (pprExpr witness)))
ppr (XOverLit x) = ppr x
instance Outputable OverLitVal where
ppr (HsIntegral i) = pprWithSourceText (il_text i) (integer (il_value i))
ppr (HsFractional f) = ppr f
ppr (HsIsString st s) = pprWithSourceText st (pprHsString s)
pmPprHsLit :: HsLit (GhcPass x) -> SDoc
pmPprHsLit (HsChar _ c) = pprHsChar c
pmPprHsLit (HsCharPrim _ c) = pprHsChar c
pmPprHsLit (HsString st s) = pprWithSourceText st (pprHsString s)
pmPprHsLit (HsStringPrim _ s) = pprHsBytes s
pmPprHsLit (HsInt _ i) = integer (il_value i)
pmPprHsLit (HsIntPrim _ i) = integer i
pmPprHsLit (HsWordPrim _ w) = integer w
pmPprHsLit (HsInt64Prim _ i) = integer i
pmPprHsLit (HsWord64Prim _ w) = integer w
pmPprHsLit (HsInteger _ i _) = integer i
pmPprHsLit (HsRat _ f _) = ppr f
pmPprHsLit (HsFloatPrim _ f) = ppr f
pmPprHsLit (HsDoublePrim _ d) = ppr d
pmPprHsLit (XLit x) = ppr x
| @'hsLitNeedsParens ' p l@ returns ' True ' if a literal @l@ needs
hsLitNeedsParens :: PprPrec -> HsLit x -> Bool
hsLitNeedsParens p = go
where
go (HsChar {}) = False
go (HsCharPrim {}) = False
go (HsString {}) = False
go (HsStringPrim {}) = False
go (HsInt _ x) = p > topPrec && il_neg x
go (HsIntPrim _ x) = p > topPrec && x < 0
go (HsWordPrim {}) = False
go (HsInt64Prim _ x) = p > topPrec && x < 0
go (HsWord64Prim {}) = False
go (HsInteger _ x _) = p > topPrec && x < 0
go (HsRat _ x _) = p > topPrec && fl_neg x
go (HsFloatPrim _ x) = p > topPrec && fl_neg x
go (HsDoublePrim _ x) = p > topPrec && fl_neg x
go (XLit _) = False
| @'hsOverLitNeedsParens ' p returns ' True ' if an overloaded literal
hsOverLitNeedsParens :: PprPrec -> HsOverLit x -> Bool
hsOverLitNeedsParens p (OverLit { ol_val = olv }) = go olv
where
go :: OverLitVal -> Bool
go (HsIntegral x) = p > topPrec && il_neg x
go (HsFractional x) = p > topPrec && fl_neg x
go (HsIsString {}) = False
hsOverLitNeedsParens _ (XOverLit { }) = False
|
b403194047e2b69f8ace2c932c524bc0ea158007c07f82ec6f2032e8d5bb50e1 | ndmitchell/shake | Basic.hs |
module Test.Basic(main) where
import Development.Shake
import System.FilePath
import Test.Type
import System.Directory as IO
import Data.List
import Control.Monad
import General.Extra
main = testBuild test $ do
"AB.txt" %> \out -> do
need ["A.txt", "B.txt"]
text1 <- readFile' "A.txt"
text2 <- readFile' "B.txt"
writeFile' out $ text1 ++ text2
"twice.txt" %> \out -> do
let src = "once.txt"
need [src, src]
copyFile' src out
"once.txt" %> \out -> do
src <- readFile' "zero.txt"
writeFile' out src
phonys $ \x -> if x /= "halfclean" then Nothing else Just $
removeFilesAfter "dir" ["//*e.txt"]
phony "cleaner" $
removeFilesAfter "dir" ["//*"]
phony "cleandb" $
removeFilesAfter "." [".shake.database"]
phony "configure" $
liftIO $ appendFile "configure" "1"
phony "install" $ do
need ["configure","once.txt"]
liftIO $ appendFile "install" "1"
phony "duplicate1" $ need ["duplicate2","duplicate3"]
phony "duplicate2" $ need ["duplicate3"]
phony "duplicate3" $ liftIO $ appendFile "duplicate" "1"
phony "dummy" $
liftIO $ appendFile "dummy" "1"
phony "threads" $ do
x <- getShakeOptions
writeFile' "threads.txt" $ show $ shakeThreads x
phony ("slash" </> "platform") $ pure ()
phony "slash/forward" $ pure ()
phony "options" $ do
opts <- getShakeOptions
putInfo $ show opts
"dummer.txt" %> \out -> do
need ["dummy","dummy"]
need ["dummy"]
liftIO $ appendFile out "1"
r <- newResource ".log file" 1
let trace x = withResource r 1 $ liftIO $ appendFile ".log" x
"*.par" %> \out -> do
trace "["
(if "unsafe" `isInfixOf` out then unsafeExtraThread else id) $ liftIO $ sleep 0.1
trace "]"
writeFile' out out
"sep" </> "1.txt" %> \out -> writeFile' out ""
"sep/2.txt" %> \out -> writeFile' out ""
["sep" </> "3.txt", "sep" </> "4.txt", "sep" </> "5.*", "sep/6.txt"] |%> \out -> writeFile' out ""
["sep" </> "7.txt"] |%> \out -> writeFile' out ""
"ids/source" %> \_ -> pure ()
"ids/out" %> \out -> do need =<< readFileLines "ids/source"; writeFile' out ""
"ids/*" %> \out -> do alwaysRerun; trace (takeFileName out); writeFile' out $ takeFileName out
"rerun" %> \out -> do alwaysRerun; liftIO $ appendFile out "."
phony "foo" $
liftIO $ createDirectoryRecursive "foo"
phony "ordering2" $
liftIO $ appendFile "order.log" "X"
phony "ordering" $ do
liftIO $ appendFile "order.log" "Y"
need ["ordering2"]
test build = do
build ["clean"]
writeFile "A.txt" "AAA"
writeFile "B.txt" "BBB"
build ["AB.txt","--sleep"]
assertContents "AB.txt" "AAABBB"
appendFile "A.txt" "aaa"
build ["AB.txt"]
assertContents "AB.txt" "AAAaaaBBB"
removeFile "AB.txt"
build ["AB.txt"]
assertContents "AB.txt" "AAAaaaBBB"
writeFile "zero.txt" "xxx"
build ["twice.txt","--sleep"]
assertContents "twice.txt" "xxx"
writeFile "zero.txt" "yyy"
build ["once.txt","--sleep"]
assertContents "twice.txt" "xxx"
assertContents "once.txt" "yyy"
writeFile "zero.txt" "zzz"
build ["once.txt","twice.txt","--sleep"]
assertContents "twice.txt" "zzz"
assertContents "once.txt" "zzz"
removeFile "twice.txt"
build ["twice.txt"]
assertContents "twice.txt" "zzz"
show shakeOptions === show shakeOptions
build ["options"]
createDirectoryRecursive "dir"
writeFile "dir/ae.txt" ""
writeFile "dir/ea.txt" ""
build ["halfclean"]
assertBoolIO (IO.doesDirectoryExist "dir") "Directory should exist, cleaner should not have removed it"
build ["cleaner"]
sleep 1 -- sometimes takes a while for the file system to notice
assertBoolIO (not <$> IO.doesDirectoryExist "dir") "Directory should not exist, cleaner should have removed it"
assertBoolIO (IO.doesFileExist ".shake.database") "Precondition not met"
build ["cleandb"]
assertBoolIO (not <$> IO.doesFileExist ".shake.database") "Postcondition not met"
writeFile "zero.txt" ""
writeFile "configure" ""
writeFile "install" ""
build ["configure"]
build ["install"]
build ["install"]
assertContents "configure" "111"
assertContents "install" "11"
build ["dummy"]
assertContents "dummy" "1"
build ["dummy"]
assertContents "dummy" "11"
build ["dummy","dummy"]
assertContents "dummy" "111"
writeFile "dummer.txt" ""
build ["dummer.txt"]
assertContents "dummer.txt" "1"
build ["dummer.txt"]
assertContents "dummer.txt" "11"
build ["1.par","2.par","-j1"]
assertContents ".log" "[][]"
writeFile ".log" ""
build ["3.par","4.par","-j2"]
assertContents ".log" "[[]]"
writeFile ".log" ""
processors <- getProcessorCount
putStrLn $ "getProcessorCount returned " ++ show processors
when (processors > 1) $ do
build ["5.par","6.par","-j0"]
assertContents ".log" "[[]]"
writeFile ".log" ""
build ["unsafe1.par","unsafe2.par","-j2"]
assertContents ".log" "[[]]"
build ["threads","-j3"]
assertContents "threads.txt" "3"
build ["threads","-j0"]
assertContents "threads.txt" (show processors)
writeFile "duplicate" ""
build ["duplicate1","duplicate3"]
assertContents "duplicate" "1"
build $ concat [["sep/" ++ show i ++ ".txt", "sep" </> show i ++ ".txt"] | i <- [1..7]]
build ["slash" </> "platform","slash" </> "forward"]
build ["slash/platform","slash/forward"]
createDirectoryRecursive "ids"
writeFile "ids/source" "ids/a"
build ["ids/out","--sleep"]
writeFile ".log" ""
writeFile "ids/source" "ids/b"
build ["ids/out","-j4"]
-- if you collapse depends to [Id] then this ends up asking for the stale 'a'
assertContents ".log" "b"
writeFile "rerun" ""
build ["rerun"]
assertContents "rerun" "."
build ["rerun","rerun"]
assertContents "rerun" ".."
build ["foo"]
build ["foo"]
build [] -- should say "no want/action statements, nothing to do" (checked manually)
# 523 , # 524 - phony children should not run first
writeFile "order.log" ""
build ["ordering"]
assertContents "order.log" "YX"
build ["ordering"]
assertContents "order.log" "YXYX"
| null | https://raw.githubusercontent.com/ndmitchell/shake/99c5a7a4dc1d5a069b13ed5c1bc8e4bc7f13f4a6/src/Test/Basic.hs | haskell | sometimes takes a while for the file system to notice
if you collapse depends to [Id] then this ends up asking for the stale 'a'
should say "no want/action statements, nothing to do" (checked manually) |
module Test.Basic(main) where
import Development.Shake
import System.FilePath
import Test.Type
import System.Directory as IO
import Data.List
import Control.Monad
import General.Extra
main = testBuild test $ do
"AB.txt" %> \out -> do
need ["A.txt", "B.txt"]
text1 <- readFile' "A.txt"
text2 <- readFile' "B.txt"
writeFile' out $ text1 ++ text2
"twice.txt" %> \out -> do
let src = "once.txt"
need [src, src]
copyFile' src out
"once.txt" %> \out -> do
src <- readFile' "zero.txt"
writeFile' out src
phonys $ \x -> if x /= "halfclean" then Nothing else Just $
removeFilesAfter "dir" ["//*e.txt"]
phony "cleaner" $
removeFilesAfter "dir" ["//*"]
phony "cleandb" $
removeFilesAfter "." [".shake.database"]
phony "configure" $
liftIO $ appendFile "configure" "1"
phony "install" $ do
need ["configure","once.txt"]
liftIO $ appendFile "install" "1"
phony "duplicate1" $ need ["duplicate2","duplicate3"]
phony "duplicate2" $ need ["duplicate3"]
phony "duplicate3" $ liftIO $ appendFile "duplicate" "1"
phony "dummy" $
liftIO $ appendFile "dummy" "1"
phony "threads" $ do
x <- getShakeOptions
writeFile' "threads.txt" $ show $ shakeThreads x
phony ("slash" </> "platform") $ pure ()
phony "slash/forward" $ pure ()
phony "options" $ do
opts <- getShakeOptions
putInfo $ show opts
"dummer.txt" %> \out -> do
need ["dummy","dummy"]
need ["dummy"]
liftIO $ appendFile out "1"
r <- newResource ".log file" 1
let trace x = withResource r 1 $ liftIO $ appendFile ".log" x
"*.par" %> \out -> do
trace "["
(if "unsafe" `isInfixOf` out then unsafeExtraThread else id) $ liftIO $ sleep 0.1
trace "]"
writeFile' out out
"sep" </> "1.txt" %> \out -> writeFile' out ""
"sep/2.txt" %> \out -> writeFile' out ""
["sep" </> "3.txt", "sep" </> "4.txt", "sep" </> "5.*", "sep/6.txt"] |%> \out -> writeFile' out ""
["sep" </> "7.txt"] |%> \out -> writeFile' out ""
"ids/source" %> \_ -> pure ()
"ids/out" %> \out -> do need =<< readFileLines "ids/source"; writeFile' out ""
"ids/*" %> \out -> do alwaysRerun; trace (takeFileName out); writeFile' out $ takeFileName out
"rerun" %> \out -> do alwaysRerun; liftIO $ appendFile out "."
phony "foo" $
liftIO $ createDirectoryRecursive "foo"
phony "ordering2" $
liftIO $ appendFile "order.log" "X"
phony "ordering" $ do
liftIO $ appendFile "order.log" "Y"
need ["ordering2"]
test build = do
build ["clean"]
writeFile "A.txt" "AAA"
writeFile "B.txt" "BBB"
build ["AB.txt","--sleep"]
assertContents "AB.txt" "AAABBB"
appendFile "A.txt" "aaa"
build ["AB.txt"]
assertContents "AB.txt" "AAAaaaBBB"
removeFile "AB.txt"
build ["AB.txt"]
assertContents "AB.txt" "AAAaaaBBB"
writeFile "zero.txt" "xxx"
build ["twice.txt","--sleep"]
assertContents "twice.txt" "xxx"
writeFile "zero.txt" "yyy"
build ["once.txt","--sleep"]
assertContents "twice.txt" "xxx"
assertContents "once.txt" "yyy"
writeFile "zero.txt" "zzz"
build ["once.txt","twice.txt","--sleep"]
assertContents "twice.txt" "zzz"
assertContents "once.txt" "zzz"
removeFile "twice.txt"
build ["twice.txt"]
assertContents "twice.txt" "zzz"
show shakeOptions === show shakeOptions
build ["options"]
createDirectoryRecursive "dir"
writeFile "dir/ae.txt" ""
writeFile "dir/ea.txt" ""
build ["halfclean"]
assertBoolIO (IO.doesDirectoryExist "dir") "Directory should exist, cleaner should not have removed it"
build ["cleaner"]
assertBoolIO (not <$> IO.doesDirectoryExist "dir") "Directory should not exist, cleaner should have removed it"
assertBoolIO (IO.doesFileExist ".shake.database") "Precondition not met"
build ["cleandb"]
assertBoolIO (not <$> IO.doesFileExist ".shake.database") "Postcondition not met"
writeFile "zero.txt" ""
writeFile "configure" ""
writeFile "install" ""
build ["configure"]
build ["install"]
build ["install"]
assertContents "configure" "111"
assertContents "install" "11"
build ["dummy"]
assertContents "dummy" "1"
build ["dummy"]
assertContents "dummy" "11"
build ["dummy","dummy"]
assertContents "dummy" "111"
writeFile "dummer.txt" ""
build ["dummer.txt"]
assertContents "dummer.txt" "1"
build ["dummer.txt"]
assertContents "dummer.txt" "11"
build ["1.par","2.par","-j1"]
assertContents ".log" "[][]"
writeFile ".log" ""
build ["3.par","4.par","-j2"]
assertContents ".log" "[[]]"
writeFile ".log" ""
processors <- getProcessorCount
putStrLn $ "getProcessorCount returned " ++ show processors
when (processors > 1) $ do
build ["5.par","6.par","-j0"]
assertContents ".log" "[[]]"
writeFile ".log" ""
build ["unsafe1.par","unsafe2.par","-j2"]
assertContents ".log" "[[]]"
build ["threads","-j3"]
assertContents "threads.txt" "3"
build ["threads","-j0"]
assertContents "threads.txt" (show processors)
writeFile "duplicate" ""
build ["duplicate1","duplicate3"]
assertContents "duplicate" "1"
build $ concat [["sep/" ++ show i ++ ".txt", "sep" </> show i ++ ".txt"] | i <- [1..7]]
build ["slash" </> "platform","slash" </> "forward"]
build ["slash/platform","slash/forward"]
createDirectoryRecursive "ids"
writeFile "ids/source" "ids/a"
build ["ids/out","--sleep"]
writeFile ".log" ""
writeFile "ids/source" "ids/b"
build ["ids/out","-j4"]
assertContents ".log" "b"
writeFile "rerun" ""
build ["rerun"]
assertContents "rerun" "."
build ["rerun","rerun"]
assertContents "rerun" ".."
build ["foo"]
build ["foo"]
# 523 , # 524 - phony children should not run first
writeFile "order.log" ""
build ["ordering"]
assertContents "order.log" "YX"
build ["ordering"]
assertContents "order.log" "YXYX"
|
9beecf3fe922db6e90c7b5f17fddfc5db03aebed584e82b3141145517e71fdf6 | xh4/web-toolkit | function.lisp | (in-package :utility)
(defun function-lambda-list (function)
(macrolet ((with-unknown-on-error (&body body)
`(handler-case (progn ,@body)
(error () :unknown))))
(let ((cont-function-p))
(let ((function (etypecase function
(cl-cont::funcallable/cc
(setf cont-function-p t)
(cl-cont::f/cc-function function))
((or list symbol function) function))))
(let ((lambda-list
#+:lispworks
(let ((list (lw:function-lambda-list function)))
(if (eq list :dont-know) :unknown list))
#+:sbcl
(sb-introspect:function-lambda-list function)
#+:ccl
(multiple-value-bind (list provided) (ccl:arglist function)
(if provided list :unknown))
#+:allegro
(with-unknown-on-error
(excl:arglist function))
#+:abcl
(multiple-value-bind (list provided) (sys::arglist function)
(if provided list :unknown))
#+:ecl
(multiple-value-bind (list provided) (ext:function-lambda-list function)
(if provided list :unknown))))
(if cont-function-p (rest lambda-list) lambda-list))))))
| null | https://raw.githubusercontent.com/xh4/web-toolkit/e510d44a25b36ca8acd66734ed1ee9f5fe6ecd09/utility/function.lisp | lisp | (in-package :utility)
(defun function-lambda-list (function)
(macrolet ((with-unknown-on-error (&body body)
`(handler-case (progn ,@body)
(error () :unknown))))
(let ((cont-function-p))
(let ((function (etypecase function
(cl-cont::funcallable/cc
(setf cont-function-p t)
(cl-cont::f/cc-function function))
((or list symbol function) function))))
(let ((lambda-list
#+:lispworks
(let ((list (lw:function-lambda-list function)))
(if (eq list :dont-know) :unknown list))
#+:sbcl
(sb-introspect:function-lambda-list function)
#+:ccl
(multiple-value-bind (list provided) (ccl:arglist function)
(if provided list :unknown))
#+:allegro
(with-unknown-on-error
(excl:arglist function))
#+:abcl
(multiple-value-bind (list provided) (sys::arglist function)
(if provided list :unknown))
#+:ecl
(multiple-value-bind (list provided) (ext:function-lambda-list function)
(if provided list :unknown))))
(if cont-function-p (rest lambda-list) lambda-list))))))
| |
c41dc6ba5a8f22f208d43ccf6106905598d0f6c34cd01e3352fcc44928b061d3 | mhwombat/creatur | BenchAll.hs | ------------------------------------------------------------------------
-- |
-- Module : Main
Copyright : ( c ) 2014 - 2022
-- License : BSD-style
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
-- Runs the benchmarks.
--
------------------------------------------------------------------------
module Main where
import ALife.Creatur.Genetics.BRGCBoolBench (benchmark)
import ALife.Creatur.Genetics.BRGCWord8Bench (benchmark)
import Criterion.Main (Benchmark, defaultMain)
benches :: [Benchmark]
benches =
[
ALife.Creatur.Genetics.BRGCBoolBench.benchmark,
ALife.Creatur.Genetics.BRGCWord8Bench.benchmark
]
main :: IO ()
main = defaultMain benches
| null | https://raw.githubusercontent.com/mhwombat/creatur/c715f4de16d29b646b27f3185d1d073df0fc9cb6/unused/BenchAll.hs | haskell | ----------------------------------------------------------------------
|
Module : Main
License : BSD-style
Maintainer :
Stability : experimental
Portability : portable
Runs the benchmarks.
---------------------------------------------------------------------- | Copyright : ( c ) 2014 - 2022
module Main where
import ALife.Creatur.Genetics.BRGCBoolBench (benchmark)
import ALife.Creatur.Genetics.BRGCWord8Bench (benchmark)
import Criterion.Main (Benchmark, defaultMain)
benches :: [Benchmark]
benches =
[
ALife.Creatur.Genetics.BRGCBoolBench.benchmark,
ALife.Creatur.Genetics.BRGCWord8Bench.benchmark
]
main :: IO ()
main = defaultMain benches
|
dde58e3a08f19d5ec08837fcacae1ff547ab2b93f8d740348bcba36717705368 | nilenso/time-tracker | project.clj | (defproject time-tracker "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url "-tracker"
:scm {:name "git" :url "-tracker"}
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.9.0-alpha13"]
[org.clojure/tools.cli "1.0.194"]
[org.clojure/data.csv "0.1.3"]
[com.taoensso/timbre "4.10.0"]
[aero "1.1.6"]
[org.clojure/java.jdbc "0.6.2-alpha3"]
[mount "0.1.16"]
[tolitius/mount-up "0.1.3"]
[http-kit "2.3.0"]
[bidi "2.0.11"]
[ring "1.5.0"]
[cheshire "5.6.3"]
[yesql "0.5.3"]
[ragtime "0.6.3"]
[org.postgresql/postgresql "42.1.4.jre7"]
[ring/ring-json "0.4.0"]
[com.mchange/c3p0 "0.9.5.2"]
[ring/ring-defaults "0.2.1"]
[org.clojure/algo.generic "0.1.2"]
[clj-time "0.12.0"]
[clj-pdf "2.2.29"]
[nilenso/mailgun "0.2.3"]]
:source-paths ["src/clj" "src/cljc"]
:test-paths ["test/clj" "test/cljc"]
:main ^:skip-aot time-tracker.core
:target-path "target/%s"
:plugins []
:profiles {:dev {:dependencies [[org.clojure/test.check "0.9.0"]
[org.clojure/core.async "1.3.610"]
[stylefruits/gniazdo "1.0.0"]]}
:test {:jvm-opts ["-Xms512m" "-Xmx2g"]}
:default [:base :system :user :provided :dev]
:uberjar {:aot [#"time-tracker.*"]}
:cljs {:source-paths ["src/cljs" "test/cljs"]
:dependencies [[re-frame "1.0.0"]
[day8.re-frame/http-fx "0.2.1"]
;; dev dependencies
shadow - cljs requires clojure 1.10.1
[thheller/shadow-cljs "2.10.21"]
[day8.re-frame/re-frame-10x "0.7.0"]
[binaryage/devtools "1.0.2"]]}}
:aliases {"test" ["test"]
"migrate" ["run" "-m" "time-tracker.migration/lein-migrate-db"]
"rollback" ["run" "-m" "time-tracker.migration/lein-rollback-db"]}
:monkeypatch-clojure-test false
:uberjar-exclusions [#"dev.*"])
| null | https://raw.githubusercontent.com/nilenso/time-tracker/054d0dc6d6b89a4ed234d8f0b0a260b6deeef9e3/project.clj | clojure | dev dependencies | (defproject time-tracker "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url "-tracker"
:scm {:name "git" :url "-tracker"}
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.9.0-alpha13"]
[org.clojure/tools.cli "1.0.194"]
[org.clojure/data.csv "0.1.3"]
[com.taoensso/timbre "4.10.0"]
[aero "1.1.6"]
[org.clojure/java.jdbc "0.6.2-alpha3"]
[mount "0.1.16"]
[tolitius/mount-up "0.1.3"]
[http-kit "2.3.0"]
[bidi "2.0.11"]
[ring "1.5.0"]
[cheshire "5.6.3"]
[yesql "0.5.3"]
[ragtime "0.6.3"]
[org.postgresql/postgresql "42.1.4.jre7"]
[ring/ring-json "0.4.0"]
[com.mchange/c3p0 "0.9.5.2"]
[ring/ring-defaults "0.2.1"]
[org.clojure/algo.generic "0.1.2"]
[clj-time "0.12.0"]
[clj-pdf "2.2.29"]
[nilenso/mailgun "0.2.3"]]
:source-paths ["src/clj" "src/cljc"]
:test-paths ["test/clj" "test/cljc"]
:main ^:skip-aot time-tracker.core
:target-path "target/%s"
:plugins []
:profiles {:dev {:dependencies [[org.clojure/test.check "0.9.0"]
[org.clojure/core.async "1.3.610"]
[stylefruits/gniazdo "1.0.0"]]}
:test {:jvm-opts ["-Xms512m" "-Xmx2g"]}
:default [:base :system :user :provided :dev]
:uberjar {:aot [#"time-tracker.*"]}
:cljs {:source-paths ["src/cljs" "test/cljs"]
:dependencies [[re-frame "1.0.0"]
[day8.re-frame/http-fx "0.2.1"]
shadow - cljs requires clojure 1.10.1
[thheller/shadow-cljs "2.10.21"]
[day8.re-frame/re-frame-10x "0.7.0"]
[binaryage/devtools "1.0.2"]]}}
:aliases {"test" ["test"]
"migrate" ["run" "-m" "time-tracker.migration/lein-migrate-db"]
"rollback" ["run" "-m" "time-tracker.migration/lein-rollback-db"]}
:monkeypatch-clojure-test false
:uberjar-exclusions [#"dev.*"])
|
79268ff946a3e14d3327b00773410247395994831d9e19184a547441a71cd71f | rollacaster/sketches | sierpinski.cljs | (ns sketches.nature-of-code.fractals.sierpinski
(:require [quil.core :as q :include-macros true]
[sketches.vector :as v]))
(defn compute-triangles [v1 v2 v3]
(let [[x1 y1] (v/mult (v/add v1 v2) 0.5)
[x2 y2] (v/mult (v/add v1 v3) 0.5)
[x3 y3] (v/mult (v/add v3 v2) 0.5)]
[[v1 [x1 y1] [x2 y2]]
[[x1 y1] v2 [x3 y3]]
[[x2 y2] [x3 y3] v3]]))
(defn do-sierpinski [init-triangle step]
(if (= step 0)
init-triangle
(mapcat
#(do-sierpinski % (- step 1))
(apply compute-triangles init-triangle))))
(defn sierpinski [init-triangle step]
(partition 3 (do-sierpinski init-triangle step)))
(defn draw []
(q/background 200)
(q/fill 0)
(q/translate -50 -50)
(let [step (q/round (q/map-range (q/mouse-x) 0 (q/width) 0 5))
[x1 y1] [100 280]
[x2 y2] [200 120]
[x3 y3] [300 280]]
(doall
(map
(fn [[[x1 y1] [x2 y2] [x3 y3]]]
(q/triangle x1 y1 x2 y2 x3 y3))
(sierpinski [[x1 y1] [x2 y2] [x3 y3]] step)))))
(defn setup []
(q/frame-rate 5))
(defn run [host]
(q/defsketch sierpinski-sketch
:host host
:draw draw
:setup setup
:size [300 300]))
| null | https://raw.githubusercontent.com/rollacaster/sketches/ba79fccf2a37139de9193ed2ea7a6cc04b63fad0/src/sketches/nature_of_code/fractals/sierpinski.cljs | clojure | (ns sketches.nature-of-code.fractals.sierpinski
(:require [quil.core :as q :include-macros true]
[sketches.vector :as v]))
(defn compute-triangles [v1 v2 v3]
(let [[x1 y1] (v/mult (v/add v1 v2) 0.5)
[x2 y2] (v/mult (v/add v1 v3) 0.5)
[x3 y3] (v/mult (v/add v3 v2) 0.5)]
[[v1 [x1 y1] [x2 y2]]
[[x1 y1] v2 [x3 y3]]
[[x2 y2] [x3 y3] v3]]))
(defn do-sierpinski [init-triangle step]
(if (= step 0)
init-triangle
(mapcat
#(do-sierpinski % (- step 1))
(apply compute-triangles init-triangle))))
(defn sierpinski [init-triangle step]
(partition 3 (do-sierpinski init-triangle step)))
(defn draw []
(q/background 200)
(q/fill 0)
(q/translate -50 -50)
(let [step (q/round (q/map-range (q/mouse-x) 0 (q/width) 0 5))
[x1 y1] [100 280]
[x2 y2] [200 120]
[x3 y3] [300 280]]
(doall
(map
(fn [[[x1 y1] [x2 y2] [x3 y3]]]
(q/triangle x1 y1 x2 y2 x3 y3))
(sierpinski [[x1 y1] [x2 y2] [x3 y3]] step)))))
(defn setup []
(q/frame-rate 5))
(defn run [host]
(q/defsketch sierpinski-sketch
:host host
:draw draw
:setup setup
:size [300 300]))
| |
d127946c83fed085106706697423750e04ccd036de022192caed23daf63c4888 | johnlawrenceaspden/hobby-code | map-destructuring.clj | Maps
;; I can never ever remember how this works, so here is a note to self:
1
;; And by let-lambda isomorphism
1
Why on earth is the syntax the wrong way round ? Why ca n't { : a a } match { : a 1 } ?
;; Similarly
[ 1 2 ]
; [ 1 2 ]
;; And with the common pattern where the variables are like the keys:
[ 1 2 ]
[ 1 2 ]
;; We can destructure recursively (although we may not be wise to if we keep forgetting how it works!)
((fn [{a :a {c :c d :d} :b}] [a c d]) {:a 1 :b {:c 2 :d 3}}) ; [1 2 3]
(let [{a :a {c :c d :d} :b} {:a 1 :b {:c 2 :d 3}}] [a c d]) ; [1 2 3]
;; And we can remember the keys entire on which we have recursed, so:
(let [{a :a {c :c d :d :as b} :b}
{:a 1 :b {:c 2 :d 3}}]
- > [ 1 { : c 2 , : d 3 } 2 3 ]
;; Finally a 'real' example, a ring request map containing parameters and a session, both of
;; which have substructure
(def ring-request
{:params {:action "a" :key "k" :spurious "sp"}
:session {:data "d" :state "s"}
:irrelevant "irr"})
;; So the parameters we're interested in look like
{:params {:action :key} :session {:data :state}}
;; And we can extract all the pieces, naming each part, like so:
(defn process-request [{{action :action key :key :as params } :params
{data :data state :state :as session} :session :as request}]
(println action)
(println key)
(println data)
(println state)
(println params)
(println session)
(println request))
(process-request ring-request)
;; a
;; k
;; d
;; s
;; {:key k, :action a, :spurious sp}
;; {:state s, :data d}
;; {:irrelevant irr, :params {:key k, :action a, :spurious sp}, :session {:state s, :data d}}
| null | https://raw.githubusercontent.com/johnlawrenceaspden/hobby-code/48e2a89d28557994c72299962cd8e3ace6a75b2d/map-destructuring.clj | clojure | I can never ever remember how this works, so here is a note to self:
And by let-lambda isomorphism
Similarly
[ 1 2 ]
And with the common pattern where the variables are like the keys:
We can destructure recursively (although we may not be wise to if we keep forgetting how it works!)
[1 2 3]
[1 2 3]
And we can remember the keys entire on which we have recursed, so:
Finally a 'real' example, a ring request map containing parameters and a session, both of
which have substructure
So the parameters we're interested in look like
And we can extract all the pieces, naming each part, like so:
a
k
d
s
{:key k, :action a, :spurious sp}
{:state s, :data d}
{:irrelevant irr, :params {:key k, :action a, :spurious sp}, :session {:state s, :data d}} | Maps
1
1
Why on earth is the syntax the wrong way round ? Why ca n't { : a a } match { : a 1 } ?
[ 1 2 ]
[ 1 2 ]
[ 1 2 ]
(let [{a :a {c :c d :d :as b} :b}
{:a 1 :b {:c 2 :d 3}}]
- > [ 1 { : c 2 , : d 3 } 2 3 ]
(def ring-request
{:params {:action "a" :key "k" :spurious "sp"}
:session {:data "d" :state "s"}
:irrelevant "irr"})
{:params {:action :key} :session {:data :state}}
(defn process-request [{{action :action key :key :as params } :params
{data :data state :state :as session} :session :as request}]
(println action)
(println key)
(println data)
(println state)
(println params)
(println session)
(println request))
(process-request ring-request)
|
cf5c89b1e0412aa91d6856fd195b74c019e0b42a7c60514394902143f6159166 | soranoba/bbmustache | bbmustache_tests.erl | 2015 All Rights Reserved .
-module(bbmustache_tests).
-include_lib("eunit/include/eunit.hrl").
%%----------------------------------------------------------------------------------------------------------------------
%% Unit Tests
%%----------------------------------------------------------------------------------------------------------------------
-define(PARSE_ERROR, incorrect_format).
-define(FILE_ERROR, file_not_found).
-define(NT_S(X, Y), ?_assertMatch({_, X, _, _, _, _}, bbmustache:parse_binary(Y))).
%% parse_binary_test generator (success case)
-define(NT_F(X, Y), ?_assertError(X, bbmustache:parse_binary(Y))).
%% parse_binary_test generator (failure case)
parse_file_test_() ->
[
{"file_not_exist (without extension)", ?_assertError(?FILE_ERROR, bbmustache:parse_file(<<"not_exist">>))},
{"file_not_exist (with extension)", ?_assertError(?FILE_ERROR, bbmustache:parse_file(<<"not_exist.mustache">>))}
].
custom_serializer_test_() ->
[
{"simple function replacement",
fun() ->
?assertEqual(<<"test, test">>,
bbmustache:render(<<"{{i}}, {{f}}">>,
[{"i", 1}, {"f", 1.5}, {"b", <<"hoge">>}, {"s", "fugo"}, {"a", atom}], [{value_serializer, fun(_X) -> <<"test">> end}]))
end},
{"argument modifier",
fun() ->
?assertEqual(<<"<A&B> , <A&B>">>,
bbmustache:render(<<"{{s}} , {{{s}}}">>,
[{"s", "A&B"}], [{value_serializer, fun(X) -> "<" ++ X ++ ">" end}]))
end}
].
parse_binary_test_() ->
[
{"bbmustache:template/0 format check", ?NT_S([], <<>>)},
{"{{tag}}", ?NT_S([<<"a">>, {n, [<<"t">>]}, <<"b">>], <<"a{{t}}b">>)},
{"{{ tag }}", ?NT_S([{n, [<<"t">>]}], <<"{{ t }}">>)},
{"{{ ta g }}", ?NT_S([{n, [<<"tag">>]}], <<"{{ ta g }}">>)},
{"{{}}", ?NT_S([{n, [<<>>]}], <<"{{}}">>)},
{"{{ }}", ?NT_S([{n, [<<>>]}], <<"{{ }}">>)},
{"{{tag", ?NT_F({?PARSE_ERROR, unclosed_tag}, <<"{{tag">>)},
{"{{{tag}}}", ?NT_S([<<"a">>, {'&', [<<"t">>]}, <<"b">>], <<"a{{{t}}}b">>)},
{"{{{ tag }}}", ?NT_S([{'&', [<<"t">>]}], <<"{{{ t }}}">>)},
{"{{{ ta g }}}",?NT_S([{'&', [<<"tag">>]}], <<"{{{ ta g }}}">>)},
{"{{{tag", ?NT_F({?PARSE_ERROR, unclosed_tag}, <<"{{{tag">>)},
{"{{{tag}} other}",
?NT_S([<<"{">>, {n, [<<"tag">>]}, <<" other}">>], <<"{{{tag}} other}">>)},
{"{{& tag}}", ?NT_S([<<"a">>, {'&', [<<"t">>]}, <<"b">>], <<"a{{& t}}b">>)},
{"{{ & tag }}", ?NT_S([{'&', [<<"t">>]}], <<"{{ & t }}">>)},
{"{{ & ta g }}",?NT_S([{'&', [<<"tag">>]}], <<"{{ & ta g }}">>)},
{"{{&ta g }}", ?NT_S([{'&', [<<"tag">>]}], <<"{{&ta g}}">>)},
{"{{&tag}}", ?NT_S([{'&', [<<"t">>]}], <<"{{&t}}">>)},
{"{{/tag}}", ?NT_F({?PARSE_ERROR, {section_is_incorrect, <<"tag">>}}, <<"{{/tag}}">>)},
{"{{#tag}}", ?NT_F({?PARSE_ERROR, {section_end_tag_not_found, <<"/tag">>}}, <<"{{#tag}}">>)},
{"{{#tag1}}{{#tag2}}{{name}}{{/tag1}}{{/tag2}}",
?NT_S([<<"a">>, {'#', [<<"t1">>], [<<"b">>,
{'#', [<<"t2">>], [<<"c">>, {n, [<<"t3">>]}, <<"d">>], <<"c{{t3}}d">>},
<<"e">>], <<"b{{#t2}}c{{t3}}d{{/t2}}e">>}, <<"f">>],
<<"a{{#t1}}b{{#t2}}c{{t3}}d{{/t2}}e{{/t1}}f">>)},
{"{{#tag1}}{{#tag2}}{{/tag1}}{{/tag2}}",
?NT_F({?PARSE_ERROR, {section_is_incorrect, <<"t1">>}}, <<"{{#t1}}{{#t2}}{{/t1}}{{/t2}}">>)},
{"{{# tag}}{{/ tag}}", ?NT_S([{'#', [<<"tag">>], [], <<>>}], <<"{{# tag}}{{/ tag}}">>)},
{"{{ #tag }}{{ / tag }}", ?NT_S([{'#', [<<"tag">>], [], <<>>}], <<"{{ #tag }}{{ / tag }}">>)},
{"{{ # tag }}{{ /tag }}", ?NT_S([{'#', [<<"tag">>], [], <<>>}], <<"{{ # tag }}{{ /tag }}">>)},
{"{{ # ta g}}{{ / ta g}}", ?NT_S([{'#', [<<"tag">>], [], <<>>}], <<"{{ # ta g}}{{ / ta g}}">>)},
{"{{!comment}}", ?NT_S([<<"a">>, <<"c">>], <<"a{{!comment}}c">>)},
{"{{! comment }}", ?NT_S([], <<"{{! comment }}">>)},
{"{{! co mmen t }}", ?NT_S([], <<"{{! co mmen t }}">>)},
{"{{ !comment }}", ?NT_S([], <<"{{ !comment }}">>)},
{" {{ !comment }} \r\n", ?NT_S([], <<" {{ !comment }} \r\n">>)},
{"{{^tag}}", ?NT_F({?PARSE_ERROR, {section_end_tag_not_found, <<"/tag">>}}, <<"a{{^tag}}b">>)},
{"{{^tag1}}{{^tag2}}{{name}}{{/tag2}}{{/tag1}}",
?NT_S([<<"a">>, {'^', [<<"t1">>], [<<"b">>, {'^', [<<"t2">>], [<<"c">>, {n, [<<"t3">>]}, <<"d">>]}, <<"e">>]}, <<"f">>],
<<"a{{^t1}}b{{^t2}}c{{t3}}d{{/t2}}e{{/t1}}f">>)},
{"{{^tag1}}{{^tag2}}{{/tag1}}{{tag2}}",
?NT_F({?PARSE_ERROR, {section_is_incorrect, <<"t1">>}}, <<"{{^t1}}{{^t2}}{{/t1}}{{/t2}}">>)},
{"{{^ tag}}{{/ tag}}", ?NT_S([{'^', [<<"tag">>], []}], <<"{{^ tag}}{{/ tag}}">>)},
{"{{ ^tag }}{{ / tag }}", ?NT_S([{'^', [<<"tag">>], []}], <<"{{ ^tag }}{{ / tag }}">>)},
{"{{ ^ tag }}{{ /tag }}", ?NT_S([{'^', [<<"tag">>], []}], <<"{{ ^ tag }}{{ /tag }}">>)},
{"{{ ^ ta g}}{{ / t ag}}", ?NT_S([{'^', [<<"tag">>], []}], <<"{{ ^ ta g}}{{ / t ag}}">>)},
{"{{=<< >>=}}{{n}}<<n>><<={{ }}=>>{{n}}<<n>>",
?NT_S([<<"a">>, <<"b{{n}}c">>, {n, [<<"n">>]}, <<"d">>, <<"e">>, {n, [<<"m">>]}, <<"f<<m>>g">>],
<<"a{{=<< >>=}}b{{n}}c<<n>>d<<={{ }}=>>e{{m}}f<<m>>g">>)},
{"{{=<< >>=}}<<#tag>><<{n}>><</tag>>",
?NT_S([{'#', [<<"tag">>], [{'&', [<<"n">>]}], <<"<<{n}>>">>}],
<<"{{=<< >>=}}<<#tag>><<{n}>><</tag>>">>)},
{"{{=<< >>=}}<<n>>", ?NT_S([{n, [<<"n">>]}], <<"{{=<< >>=}}<<n>>">>)},
{"{{ = << >> = }}<<n>>", ?NT_S([{n, [<<"n">>]}], <<"{{ = << >> = }}<<n>>">>)},
{"{{=<= =>=}}<=n=>", ?NT_F({?PARSE_ERROR, delimiters_may_not_contain_equals}, <<"{{=<= =>=}}<=n=>">>)},
{"{{ = < < >> = }}< <n>>", ?NT_F({?PARSE_ERROR, delimiters_may_not_contain_whitespaces}, <<"{{ = < < >> = }}< <n>>">>)},
{"{{=<< >>}}", ?NT_F({?PARSE_ERROR, {unsupported_tag, <<"=<< >>">>}}, <<"{{=<< >>}}">>)},
{"{{={ }=}}{{n}}", ?NT_S([{'&', [<<"n">>]}], <<"{{={ }=}}{{n}}">>)},
{"{{#tag}}text\n{{/tag}}\n",
?NT_S([{'#',[<<"tag">>],[<<"text\n">>],<<"text\n">>}], <<"{{#tag}}text\n{{/tag}}\n">>)}
].
assoc_list_render_test_() ->
[
{"integer, float, binary, string",
fun() ->
?assertEqual(<<"1, 1.5, hoge, fugo, atom">>,
bbmustache:render(<<"{{i}}, {{f}}, {{b}}, {{s}}, {{a}}">>,
[{"i", 1}, {"f", 1.5}, {"b", <<"hoge">>}, {"s", "fugo"}, {"a", atom}]))
end}
].
top_level_context_render_test_() ->
[
{"top-level binary",
?_assertEqual(<<"hello world">>, bbmustache:render(<<"hello {{.}}">>, <<"world">>))},
{"top-level string",
?_assertEqual(<<"hello world">>, bbmustache:render(<<"hello {{.}}">>, "world"))},
{"top-level integer",
?_assertEqual(<<"1">>, bbmustache:render(<<"{{.}}">>, 1))},
{"top-level float",
?_assertEqual(<<"1.5">>, bbmustache:render(<<"{{.}}">>, 1.5))},
{"top-level atom",
?_assertEqual(<<"atom">>, bbmustache:render(<<"{{.}}">>, atom))},
{"top-level array",
?_assertEqual(<<"1, 2, 3, ">>, bbmustache:render(<<"{{#.}}{{.}}, {{/.}}">>, [1, 2, 3]))},
{"top-level map",
?_assertEqual(<<"yes">>, bbmustache:render(<<"{{.}}">>, #{"a" => "1"}, [{value_serializer, fun(#{"a" := "1"}) -> <<"yes">> end}]))}
].
atom_and_binary_key_test_() ->
[
{"atom key",
fun() ->
F = fun(Text, Render) -> ["<b>", Render(Text), "</b>"] end,
?assertEqual(<<"<b>Willy is awesome.</b>">>,
bbmustache:render(<<"{{#wrapped}}{{name}} is awesome.{{dummy_atom}}{{/wrapped}}">>,
[{name, "Willy"}, {wrapped, F}], [{key_type, atom}])),
?assertError(_, binary_to_existing_atom(<<"dummy_atom">>, utf8))
end},
{"binary key",
fun() ->
F = fun(Text, Render) -> ["<b>", Render(Text), "</b>"] end,
?assertEqual(<<"<b>Willy is awesome.</b>">>,
bbmustache:render(<<"{{#wrapped}}{{name}} is awesome.{{dummy}}{{/wrapped}}">>,
[{<<"name">>, "Willy"}, {<<"wrapped">>, F}], [{key_type, binary}]))
end}
].
raise_on_context_miss_test_() ->
[
{"It raise an exception, if the key of escape tag does not exist",
?_assertError({context_missing, {key, <<"child">>}},
bbmustache:render(<<"{{ child }}">>, [], [raise_on_context_miss]))},
{"It raise an exception, if the key of unescape tag does not exist",
?_assertError({context_missing, {key, <<"child">>}},
bbmustache:render(<<"{{{child}}}">>, [], [raise_on_context_miss]))},
{"It raise an exception, if the key of & tag does not exist",
?_assertError({context_missing, {key, <<"child">>}},
bbmustache:render(<<"{{&child}}">>, [], [raise_on_context_miss]))},
{"It raise an exception, if the child does not exist (parent is a # tag)",
?_assertError({context_missing, {key, <<"child">>}},
bbmustache:render(<<"{{#parent}}{{child}}{{/parent}}">>,
[{"parent", true}],
[raise_on_context_miss]))},
{"It raise an exception, if the child does not exist (parent is a ^ tag)",
?_assertError({context_missing, {key, <<"child">>}},
bbmustache:render(<<"{{^parent}}{{child}}{{/parent}}">>,
[{"parent", false}],
[raise_on_context_miss]))},
{"It raise an exception, if the key of # tag does not exist",
?_assertError({context_missing, {key, <<"parent">>}},
bbmustache:render(<<"{{#parent}}{{/parent}}">>, [], [raise_on_context_miss]))},
{"It raise an exception, if the key of ^ tag does not exist",
?_assertError({context_missing, {key, <<"parent">>}},
bbmustache:render(<<"{{^parent}}{{/parent}}">>, [], [raise_on_context_miss]))},
{"It does not raise an exception, if the child of the hidden parent does not exist (parent is a ^ tag)",
?_assertEqual(<<"">>, bbmustache:render(<<"{{^parent}}{{child}}{{/parent}}">>,
[{"parent", true}],
[raise_on_context_miss]))},
{"It does not raise an exception, if the child of the hidden parent does not exist (parent is a # tag)",
?_assertEqual(<<"">>, bbmustache:render(<<"{{#parent}}{{child}}{{/parent}}">>,
[{"parent", false}],
[raise_on_context_miss]))},
{"It raise an exception, if specified file does not exist",
?_assertError({context_missing, {file_not_found, <<"not_found_filename">>}},
bbmustache:render(<<"{{> not_found_filename}}">>, [], [raise_on_context_miss]))},
{"The exceptions thrown include information on the specified key",
?_assertError({context_missing, {key, <<"parent.child">>}},
bbmustache:render(<<"{{#parent}}{{ parent . child }}{{/parent}}">>,
[{"parent", [{"dummy", true}]}, {"child", []}],
[raise_on_context_miss]))}
].
falsy_value_test_() ->
[
{"It prints false when value is false",
?_assertEqual(<<"false">>, bbmustache:render(<<"{{a}}">>, [{"a", false}]))},
{"It prints an empty string when value is null",
?_assertEqual(<<"">>, bbmustache:render(<<"{{a}}">>, [{"a", null}]))},
{"It prints an empty string when value is nil",
?_assertEqual(<<"">>, bbmustache:render(<<"{{a}}">>, [{"a", nil}]))}
].
context_stack_test_() ->
[
{"It can use the key which parent is not a dictionary (resolve #22)",
?_assertEqual(<<"aaabbb">>,
bbmustache:render(<<"{{#parent}}aaa{{parent.child}}bbb{{/parent}}">>,
[{"parent", true}]))},
{"It hide all tags in # tag that is specified empty list",
?_assertEqual(<<"">>,
bbmustache:render(<<"{{#parent}}aaa{{parent.child}}bbb{{/parent}}">>,
[{"parent", []}],
[raise_on_context_miss]))}
].
shows_or_hides_content_test_() ->
[
{"It hides content in # tag that is specified as empty list, empty binary, nil or false",
fun() ->
lists:foreach(fun(X) ->
?assertEqual(<<"">>, bbmustache:render(<<"{{#content}}hello world{{/content}}">>, [{"content", X}]))
end, ["", <<"">>, nil, false])
end},
{"It show content in ^ tag that is specified as empty list, empty binary, nil or false",
fun() ->
lists:foreach(fun(X) ->
?assertEqual(<<"hello world">>, bbmustache:render(<<"{{^content}}hello world{{/content}}">>, [{"content", X}]))
end, ["", <<"">>, nil, false])
end}
].
escape_fun_test_() ->
[
{"It is able to specified own escape function",
?_assertEqual(<<"==>value<==">>,
bbmustache:render(<<"{{tag}}">>,
[{"tag", "value"}],
[{escape_fun, fun(X) -> <<"==>", X/binary, "<==">> end}]))}
].
| null | https://raw.githubusercontent.com/soranoba/bbmustache/dba8c7a50c9e6cba08c89319a7cf53221be4b0f0/test/bbmustache_tests.erl | erlang | ----------------------------------------------------------------------------------------------------------------------
Unit Tests
----------------------------------------------------------------------------------------------------------------------
parse_binary_test generator (success case)
parse_binary_test generator (failure case) | 2015 All Rights Reserved .
-module(bbmustache_tests).
-include_lib("eunit/include/eunit.hrl").
-define(PARSE_ERROR, incorrect_format).
-define(FILE_ERROR, file_not_found).
-define(NT_S(X, Y), ?_assertMatch({_, X, _, _, _, _}, bbmustache:parse_binary(Y))).
-define(NT_F(X, Y), ?_assertError(X, bbmustache:parse_binary(Y))).
parse_file_test_() ->
[
{"file_not_exist (without extension)", ?_assertError(?FILE_ERROR, bbmustache:parse_file(<<"not_exist">>))},
{"file_not_exist (with extension)", ?_assertError(?FILE_ERROR, bbmustache:parse_file(<<"not_exist.mustache">>))}
].
custom_serializer_test_() ->
[
{"simple function replacement",
fun() ->
?assertEqual(<<"test, test">>,
bbmustache:render(<<"{{i}}, {{f}}">>,
[{"i", 1}, {"f", 1.5}, {"b", <<"hoge">>}, {"s", "fugo"}, {"a", atom}], [{value_serializer, fun(_X) -> <<"test">> end}]))
end},
{"argument modifier",
fun() ->
?assertEqual(<<"<A&B> , <A&B>">>,
bbmustache:render(<<"{{s}} , {{{s}}}">>,
[{"s", "A&B"}], [{value_serializer, fun(X) -> "<" ++ X ++ ">" end}]))
end}
].
parse_binary_test_() ->
[
{"bbmustache:template/0 format check", ?NT_S([], <<>>)},
{"{{tag}}", ?NT_S([<<"a">>, {n, [<<"t">>]}, <<"b">>], <<"a{{t}}b">>)},
{"{{ tag }}", ?NT_S([{n, [<<"t">>]}], <<"{{ t }}">>)},
{"{{ ta g }}", ?NT_S([{n, [<<"tag">>]}], <<"{{ ta g }}">>)},
{"{{}}", ?NT_S([{n, [<<>>]}], <<"{{}}">>)},
{"{{ }}", ?NT_S([{n, [<<>>]}], <<"{{ }}">>)},
{"{{tag", ?NT_F({?PARSE_ERROR, unclosed_tag}, <<"{{tag">>)},
{"{{{tag}}}", ?NT_S([<<"a">>, {'&', [<<"t">>]}, <<"b">>], <<"a{{{t}}}b">>)},
{"{{{ tag }}}", ?NT_S([{'&', [<<"t">>]}], <<"{{{ t }}}">>)},
{"{{{ ta g }}}",?NT_S([{'&', [<<"tag">>]}], <<"{{{ ta g }}}">>)},
{"{{{tag", ?NT_F({?PARSE_ERROR, unclosed_tag}, <<"{{{tag">>)},
{"{{{tag}} other}",
?NT_S([<<"{">>, {n, [<<"tag">>]}, <<" other}">>], <<"{{{tag}} other}">>)},
{"{{& tag}}", ?NT_S([<<"a">>, {'&', [<<"t">>]}, <<"b">>], <<"a{{& t}}b">>)},
{"{{ & tag }}", ?NT_S([{'&', [<<"t">>]}], <<"{{ & t }}">>)},
{"{{ & ta g }}",?NT_S([{'&', [<<"tag">>]}], <<"{{ & ta g }}">>)},
{"{{&ta g }}", ?NT_S([{'&', [<<"tag">>]}], <<"{{&ta g}}">>)},
{"{{&tag}}", ?NT_S([{'&', [<<"t">>]}], <<"{{&t}}">>)},
{"{{/tag}}", ?NT_F({?PARSE_ERROR, {section_is_incorrect, <<"tag">>}}, <<"{{/tag}}">>)},
{"{{#tag}}", ?NT_F({?PARSE_ERROR, {section_end_tag_not_found, <<"/tag">>}}, <<"{{#tag}}">>)},
{"{{#tag1}}{{#tag2}}{{name}}{{/tag1}}{{/tag2}}",
?NT_S([<<"a">>, {'#', [<<"t1">>], [<<"b">>,
{'#', [<<"t2">>], [<<"c">>, {n, [<<"t3">>]}, <<"d">>], <<"c{{t3}}d">>},
<<"e">>], <<"b{{#t2}}c{{t3}}d{{/t2}}e">>}, <<"f">>],
<<"a{{#t1}}b{{#t2}}c{{t3}}d{{/t2}}e{{/t1}}f">>)},
{"{{#tag1}}{{#tag2}}{{/tag1}}{{/tag2}}",
?NT_F({?PARSE_ERROR, {section_is_incorrect, <<"t1">>}}, <<"{{#t1}}{{#t2}}{{/t1}}{{/t2}}">>)},
{"{{# tag}}{{/ tag}}", ?NT_S([{'#', [<<"tag">>], [], <<>>}], <<"{{# tag}}{{/ tag}}">>)},
{"{{ #tag }}{{ / tag }}", ?NT_S([{'#', [<<"tag">>], [], <<>>}], <<"{{ #tag }}{{ / tag }}">>)},
{"{{ # tag }}{{ /tag }}", ?NT_S([{'#', [<<"tag">>], [], <<>>}], <<"{{ # tag }}{{ /tag }}">>)},
{"{{ # ta g}}{{ / ta g}}", ?NT_S([{'#', [<<"tag">>], [], <<>>}], <<"{{ # ta g}}{{ / ta g}}">>)},
{"{{!comment}}", ?NT_S([<<"a">>, <<"c">>], <<"a{{!comment}}c">>)},
{"{{! comment }}", ?NT_S([], <<"{{! comment }}">>)},
{"{{! co mmen t }}", ?NT_S([], <<"{{! co mmen t }}">>)},
{"{{ !comment }}", ?NT_S([], <<"{{ !comment }}">>)},
{" {{ !comment }} \r\n", ?NT_S([], <<" {{ !comment }} \r\n">>)},
{"{{^tag}}", ?NT_F({?PARSE_ERROR, {section_end_tag_not_found, <<"/tag">>}}, <<"a{{^tag}}b">>)},
{"{{^tag1}}{{^tag2}}{{name}}{{/tag2}}{{/tag1}}",
?NT_S([<<"a">>, {'^', [<<"t1">>], [<<"b">>, {'^', [<<"t2">>], [<<"c">>, {n, [<<"t3">>]}, <<"d">>]}, <<"e">>]}, <<"f">>],
<<"a{{^t1}}b{{^t2}}c{{t3}}d{{/t2}}e{{/t1}}f">>)},
{"{{^tag1}}{{^tag2}}{{/tag1}}{{tag2}}",
?NT_F({?PARSE_ERROR, {section_is_incorrect, <<"t1">>}}, <<"{{^t1}}{{^t2}}{{/t1}}{{/t2}}">>)},
{"{{^ tag}}{{/ tag}}", ?NT_S([{'^', [<<"tag">>], []}], <<"{{^ tag}}{{/ tag}}">>)},
{"{{ ^tag }}{{ / tag }}", ?NT_S([{'^', [<<"tag">>], []}], <<"{{ ^tag }}{{ / tag }}">>)},
{"{{ ^ tag }}{{ /tag }}", ?NT_S([{'^', [<<"tag">>], []}], <<"{{ ^ tag }}{{ /tag }}">>)},
{"{{ ^ ta g}}{{ / t ag}}", ?NT_S([{'^', [<<"tag">>], []}], <<"{{ ^ ta g}}{{ / t ag}}">>)},
{"{{=<< >>=}}{{n}}<<n>><<={{ }}=>>{{n}}<<n>>",
?NT_S([<<"a">>, <<"b{{n}}c">>, {n, [<<"n">>]}, <<"d">>, <<"e">>, {n, [<<"m">>]}, <<"f<<m>>g">>],
<<"a{{=<< >>=}}b{{n}}c<<n>>d<<={{ }}=>>e{{m}}f<<m>>g">>)},
{"{{=<< >>=}}<<#tag>><<{n}>><</tag>>",
?NT_S([{'#', [<<"tag">>], [{'&', [<<"n">>]}], <<"<<{n}>>">>}],
<<"{{=<< >>=}}<<#tag>><<{n}>><</tag>>">>)},
{"{{=<< >>=}}<<n>>", ?NT_S([{n, [<<"n">>]}], <<"{{=<< >>=}}<<n>>">>)},
{"{{ = << >> = }}<<n>>", ?NT_S([{n, [<<"n">>]}], <<"{{ = << >> = }}<<n>>">>)},
{"{{=<= =>=}}<=n=>", ?NT_F({?PARSE_ERROR, delimiters_may_not_contain_equals}, <<"{{=<= =>=}}<=n=>">>)},
{"{{ = < < >> = }}< <n>>", ?NT_F({?PARSE_ERROR, delimiters_may_not_contain_whitespaces}, <<"{{ = < < >> = }}< <n>>">>)},
{"{{=<< >>}}", ?NT_F({?PARSE_ERROR, {unsupported_tag, <<"=<< >>">>}}, <<"{{=<< >>}}">>)},
{"{{={ }=}}{{n}}", ?NT_S([{'&', [<<"n">>]}], <<"{{={ }=}}{{n}}">>)},
{"{{#tag}}text\n{{/tag}}\n",
?NT_S([{'#',[<<"tag">>],[<<"text\n">>],<<"text\n">>}], <<"{{#tag}}text\n{{/tag}}\n">>)}
].
assoc_list_render_test_() ->
[
{"integer, float, binary, string",
fun() ->
?assertEqual(<<"1, 1.5, hoge, fugo, atom">>,
bbmustache:render(<<"{{i}}, {{f}}, {{b}}, {{s}}, {{a}}">>,
[{"i", 1}, {"f", 1.5}, {"b", <<"hoge">>}, {"s", "fugo"}, {"a", atom}]))
end}
].
top_level_context_render_test_() ->
[
{"top-level binary",
?_assertEqual(<<"hello world">>, bbmustache:render(<<"hello {{.}}">>, <<"world">>))},
{"top-level string",
?_assertEqual(<<"hello world">>, bbmustache:render(<<"hello {{.}}">>, "world"))},
{"top-level integer",
?_assertEqual(<<"1">>, bbmustache:render(<<"{{.}}">>, 1))},
{"top-level float",
?_assertEqual(<<"1.5">>, bbmustache:render(<<"{{.}}">>, 1.5))},
{"top-level atom",
?_assertEqual(<<"atom">>, bbmustache:render(<<"{{.}}">>, atom))},
{"top-level array",
?_assertEqual(<<"1, 2, 3, ">>, bbmustache:render(<<"{{#.}}{{.}}, {{/.}}">>, [1, 2, 3]))},
{"top-level map",
?_assertEqual(<<"yes">>, bbmustache:render(<<"{{.}}">>, #{"a" => "1"}, [{value_serializer, fun(#{"a" := "1"}) -> <<"yes">> end}]))}
].
atom_and_binary_key_test_() ->
[
{"atom key",
fun() ->
F = fun(Text, Render) -> ["<b>", Render(Text), "</b>"] end,
?assertEqual(<<"<b>Willy is awesome.</b>">>,
bbmustache:render(<<"{{#wrapped}}{{name}} is awesome.{{dummy_atom}}{{/wrapped}}">>,
[{name, "Willy"}, {wrapped, F}], [{key_type, atom}])),
?assertError(_, binary_to_existing_atom(<<"dummy_atom">>, utf8))
end},
{"binary key",
fun() ->
F = fun(Text, Render) -> ["<b>", Render(Text), "</b>"] end,
?assertEqual(<<"<b>Willy is awesome.</b>">>,
bbmustache:render(<<"{{#wrapped}}{{name}} is awesome.{{dummy}}{{/wrapped}}">>,
[{<<"name">>, "Willy"}, {<<"wrapped">>, F}], [{key_type, binary}]))
end}
].
raise_on_context_miss_test_() ->
[
{"It raise an exception, if the key of escape tag does not exist",
?_assertError({context_missing, {key, <<"child">>}},
bbmustache:render(<<"{{ child }}">>, [], [raise_on_context_miss]))},
{"It raise an exception, if the key of unescape tag does not exist",
?_assertError({context_missing, {key, <<"child">>}},
bbmustache:render(<<"{{{child}}}">>, [], [raise_on_context_miss]))},
{"It raise an exception, if the key of & tag does not exist",
?_assertError({context_missing, {key, <<"child">>}},
bbmustache:render(<<"{{&child}}">>, [], [raise_on_context_miss]))},
{"It raise an exception, if the child does not exist (parent is a # tag)",
?_assertError({context_missing, {key, <<"child">>}},
bbmustache:render(<<"{{#parent}}{{child}}{{/parent}}">>,
[{"parent", true}],
[raise_on_context_miss]))},
{"It raise an exception, if the child does not exist (parent is a ^ tag)",
?_assertError({context_missing, {key, <<"child">>}},
bbmustache:render(<<"{{^parent}}{{child}}{{/parent}}">>,
[{"parent", false}],
[raise_on_context_miss]))},
{"It raise an exception, if the key of # tag does not exist",
?_assertError({context_missing, {key, <<"parent">>}},
bbmustache:render(<<"{{#parent}}{{/parent}}">>, [], [raise_on_context_miss]))},
{"It raise an exception, if the key of ^ tag does not exist",
?_assertError({context_missing, {key, <<"parent">>}},
bbmustache:render(<<"{{^parent}}{{/parent}}">>, [], [raise_on_context_miss]))},
{"It does not raise an exception, if the child of the hidden parent does not exist (parent is a ^ tag)",
?_assertEqual(<<"">>, bbmustache:render(<<"{{^parent}}{{child}}{{/parent}}">>,
[{"parent", true}],
[raise_on_context_miss]))},
{"It does not raise an exception, if the child of the hidden parent does not exist (parent is a # tag)",
?_assertEqual(<<"">>, bbmustache:render(<<"{{#parent}}{{child}}{{/parent}}">>,
[{"parent", false}],
[raise_on_context_miss]))},
{"It raise an exception, if specified file does not exist",
?_assertError({context_missing, {file_not_found, <<"not_found_filename">>}},
bbmustache:render(<<"{{> not_found_filename}}">>, [], [raise_on_context_miss]))},
{"The exceptions thrown include information on the specified key",
?_assertError({context_missing, {key, <<"parent.child">>}},
bbmustache:render(<<"{{#parent}}{{ parent . child }}{{/parent}}">>,
[{"parent", [{"dummy", true}]}, {"child", []}],
[raise_on_context_miss]))}
].
falsy_value_test_() ->
[
{"It prints false when value is false",
?_assertEqual(<<"false">>, bbmustache:render(<<"{{a}}">>, [{"a", false}]))},
{"It prints an empty string when value is null",
?_assertEqual(<<"">>, bbmustache:render(<<"{{a}}">>, [{"a", null}]))},
{"It prints an empty string when value is nil",
?_assertEqual(<<"">>, bbmustache:render(<<"{{a}}">>, [{"a", nil}]))}
].
context_stack_test_() ->
[
{"It can use the key which parent is not a dictionary (resolve #22)",
?_assertEqual(<<"aaabbb">>,
bbmustache:render(<<"{{#parent}}aaa{{parent.child}}bbb{{/parent}}">>,
[{"parent", true}]))},
{"It hide all tags in # tag that is specified empty list",
?_assertEqual(<<"">>,
bbmustache:render(<<"{{#parent}}aaa{{parent.child}}bbb{{/parent}}">>,
[{"parent", []}],
[raise_on_context_miss]))}
].
shows_or_hides_content_test_() ->
[
{"It hides content in # tag that is specified as empty list, empty binary, nil or false",
fun() ->
lists:foreach(fun(X) ->
?assertEqual(<<"">>, bbmustache:render(<<"{{#content}}hello world{{/content}}">>, [{"content", X}]))
end, ["", <<"">>, nil, false])
end},
{"It show content in ^ tag that is specified as empty list, empty binary, nil or false",
fun() ->
lists:foreach(fun(X) ->
?assertEqual(<<"hello world">>, bbmustache:render(<<"{{^content}}hello world{{/content}}">>, [{"content", X}]))
end, ["", <<"">>, nil, false])
end}
].
escape_fun_test_() ->
[
{"It is able to specified own escape function",
?_assertEqual(<<"==>value<==">>,
bbmustache:render(<<"{{tag}}">>,
[{"tag", "value"}],
[{escape_fun, fun(X) -> <<"==>", X/binary, "<==">> end}]))}
].
|
13235c36d610062f509bca8014ab3f7426c83a352396bf410aa60fa11bc0a88d | openbadgefactory/salava | edit_profile.cljs | (ns salava.user.ui.edit-profile
(:require [reagent.core :refer [atom cursor]]
[reagent.session :as session]
[reagent-modals.modals :as m]
[salava.core.ui.ajax-utils :as ajax]
[salava.core.ui.layout :as layout]
[salava.core.ui.field :as f]
[salava.core.i18n :refer [t]]
[salava.core.ui.helper :refer [js-navigate-to path-for private? plugin-fun]]
[salava.file.ui.my :as file]
[salava.user.schemas :refer [contact-fields]]
[salava.user.ui.helper :refer [profile-picture]]))
(defn save-profile [state]
(let [{:keys [profile_visibility about profile_picture]} (:user @state)
profile-fields (->> (:profile @state)
(filter #(not-empty (:field %)))
(map #(select-keys % [:field :value])))]
(ajax/POST
(path-for "/obpv1/user/profile")
{:params {:profile_visibility profile_visibility
:about about
:profile_picture profile_picture
:fields profile-fields}
:handler (fn [] (js-navigate-to (str "/user/profile/" (:user_id @state))))})))
(defn send-file [files-atom profile-picture-atom]
(let [file (-> (.querySelector js/document "#profile-picture-upload")
.-files
(.item 0))
form-data (doto
(js/FormData.)
(.append "file" file (.-name file)))]
(m/modal! (file/upload-modal nil (t :file/Uploadingfile) (t :file/Uploadinprogress)))
(ajax/POST
(path-for "/obpv1/file/upload_image")
{:body form-data
:handler (fn [{:keys [status message reason data]} response]
(when (= status "success")
(reset! files-atom (conj @files-atom data))
(reset! profile-picture-atom (:path data)))
(m/modal! (file/upload-modal status message reason)))})))
(defn gallery-element [picture-data profile-picture-atom pictures-atom]
(let [{:keys [path id]} picture-data
current-profile-picture (session/get-in [:user :profile_picture])]
[:div {:key path
:class (str "profile-picture-gallery-element " (if (= @profile-picture-atom path) "element-selected"))
:on-click #(reset! profile-picture-atom path)}
[:img {:src (profile-picture path)}]
(if (and (not (nil? id)) (not (= path current-profile-picture)))
[:a {:class "delete-icon"
:title (t :file/Delete)
:on-click (fn []
(m/modal! [file/delete-file-modal id pictures-atom]
{:size :lg}))}
[:i {:class "fa fa-trash"}]])]))
(defn profile-picture-gallery [pictures-atom profile-picture-atom]
[:div {:id "profile-picture-gallery" :class "row"}
[:label.col-xs-12 (t :user/Selectprofilepicture)]
[:div.col-xs-12
[gallery-element {:path nil} profile-picture-atom]
(into [:div]
(for [picture-elem (map first (vals (group-by :path @pictures-atom)))]
[gallery-element picture-elem profile-picture-atom pictures-atom]))]
[:div.col-xs-12 {:id "profile-picture-upload-button"}
[:button {:class "btn btn-primary"
:on-click #(.preventDefault %)}
(t :file/Upload)]
[:input {:id "profile-picture-upload"
:type "file"
:name "file"
:on-change #(send-file pictures-atom profile-picture-atom)
:accept "image/*"}]]])
(def empty-field {:field "" :value ""})
(defn select-field-type [profile-field-atom]
[:select {:class "form-control"
:on-change #(swap! profile-field-atom assoc :field (.-target.value %))
:value (:field @profile-field-atom)}
[:option {:value ""} (str "- " (t :core/None) " -")]
(doall
(for [{:keys [type key]} contact-fields]
[:option {:value type :key type} (t key)]))])
(defn profile-field [index profile-fields-atom]
(let [profile-field-atom (cursor profile-fields-atom [index])
last? (= index (dec (count @profile-fields-atom)))
first? (= index 0)]
[:div {:key index}
[:div.add-field-after
[:button {:class "btn btn-success"
:on-click #(do
(.preventDefault %)
(f/add-field profile-fields-atom empty-field index))}
(t :user/Addfield)]]
[:div.field
[:div.field-move
[:div.move-arrows
(if-not first?
[:div.move-up {:on-click #(f/move-field :up profile-fields-atom index)}
[:i {:class "fa fa-chevron-up"}]])
(if-not last?
[:div.move-down {:on-click #(f/move-field :down profile-fields-atom index)}
[:i {:class "fa fa-chevron-down"}]])]]
[:div.field-content
[:div.form-group
[:div.col-xs-8
(select-field-type profile-field-atom)]
[:div {:class "col-xs-4 field-remove"
:on-click #(f/remove-field profile-fields-atom index)}
[:span {:class "remove-button"}
[:i {:class "fa fa-close"}]]]]
[:div.form-group
[:div.col-xs-12
[:input {:type "text"
:class "form-control"
:value (:value @profile-field-atom)
:on-change #(swap! profile-field-atom assoc :value (.-target.value %))}]]]]]]))
(defn profile-fields [profile-fields-atom]
[:div {:id "field-editor"}
(into [:div]
(for [index (range (count @profile-fields-atom))]
(profile-field index profile-fields-atom)))
[:div.add-field-after
[:button {:class "btn btn-success"
:on-click #(do
(.preventDefault %)
(f/add-field profile-fields-atom empty-field))}
(t :user/Addfield)]]])
(defn content [state]
(let [visibility-atom (cursor state [:user :profile_visibility])
profile-picture-atom (cursor state [:user :profile_picture])
about-me-atom (cursor state [:user :about])
pictures-atom (cursor state [:picture_files])
profile-fields-atom (cursor state [:profile])]
[:div.panel {:id "edit-profile"}
[m/modal-window]
[:div.panel-body
[:div.row [:div.col-xs-12 [:a {:href (path-for (str "/user/profile/" (:user_id @state)))} (t :user/Viewprofile)]]]
[:form.form-horizontal
(if-not (private?)
[:div
[:div.row [:label.col-xs-12 (t :user/Profilevisibility)]]
[:div.radio {:id "visibility-radio-internal"}
[:label [:input {:name "visibility"
:value "internal"
:type "radio"
:checked (= "internal" @visibility-atom)
:on-change #(reset! visibility-atom (.-target.value %))}]
(t :user/Visibleonlytoregistered)]]
[:div.radio
[:label [:input {:name "visibility"
:value "public"
:type "radio"
:checked (= "public" @visibility-atom)
:on-change #(reset! visibility-atom (.-target.value %))} ]
(t :core/Public)]]])
[profile-picture-gallery pictures-atom profile-picture-atom]
[:div {:id "about-me" :class "form-group"}
[:label.col-xs-12 (t :user/Aboutme)]
[:div.col-xs-12
[:textarea {:class "form-control" :rows 5 :cols 60 :value @about-me-atom :on-change #(reset! about-me-atom (.-target.value %))}]]]
[:div.row
[:label.col-xs-12 (t :user/Contactinfo)]
[:div.col-xs-12
(profile-fields profile-fields-atom)]]
(into [:div]
(for [f (plugin-fun (session/get :plugins) "block" "user_edit_profile")]
[f]))
[:div.row {:id "save-profile-buttons"}
[:div.col-xs-12
[:button {:id "save-profile-button"
:class "btn btn-primary"
:on-click #(do
(.preventDefault %)
(save-profile state))}
(t :core/Save)]]]]]]))
(defn init-data [state]
(ajax/GET
(path-for "/obpv1/user/edit/profile" true)
{:handler (fn [data]
(reset! state data))}))
(defn handler [site-navi]
(let [state (atom {:profile-fields []})]
(init-data state)
(fn []
(layout/default site-navi (content state)))))
| null | https://raw.githubusercontent.com/openbadgefactory/salava/97f05992406e4dcbe3c4bff75c04378d19606b61/src/cljs/salava/user/ui/edit_profile.cljs | clojure | (ns salava.user.ui.edit-profile
(:require [reagent.core :refer [atom cursor]]
[reagent.session :as session]
[reagent-modals.modals :as m]
[salava.core.ui.ajax-utils :as ajax]
[salava.core.ui.layout :as layout]
[salava.core.ui.field :as f]
[salava.core.i18n :refer [t]]
[salava.core.ui.helper :refer [js-navigate-to path-for private? plugin-fun]]
[salava.file.ui.my :as file]
[salava.user.schemas :refer [contact-fields]]
[salava.user.ui.helper :refer [profile-picture]]))
(defn save-profile [state]
(let [{:keys [profile_visibility about profile_picture]} (:user @state)
profile-fields (->> (:profile @state)
(filter #(not-empty (:field %)))
(map #(select-keys % [:field :value])))]
(ajax/POST
(path-for "/obpv1/user/profile")
{:params {:profile_visibility profile_visibility
:about about
:profile_picture profile_picture
:fields profile-fields}
:handler (fn [] (js-navigate-to (str "/user/profile/" (:user_id @state))))})))
(defn send-file [files-atom profile-picture-atom]
(let [file (-> (.querySelector js/document "#profile-picture-upload")
.-files
(.item 0))
form-data (doto
(js/FormData.)
(.append "file" file (.-name file)))]
(m/modal! (file/upload-modal nil (t :file/Uploadingfile) (t :file/Uploadinprogress)))
(ajax/POST
(path-for "/obpv1/file/upload_image")
{:body form-data
:handler (fn [{:keys [status message reason data]} response]
(when (= status "success")
(reset! files-atom (conj @files-atom data))
(reset! profile-picture-atom (:path data)))
(m/modal! (file/upload-modal status message reason)))})))
(defn gallery-element [picture-data profile-picture-atom pictures-atom]
(let [{:keys [path id]} picture-data
current-profile-picture (session/get-in [:user :profile_picture])]
[:div {:key path
:class (str "profile-picture-gallery-element " (if (= @profile-picture-atom path) "element-selected"))
:on-click #(reset! profile-picture-atom path)}
[:img {:src (profile-picture path)}]
(if (and (not (nil? id)) (not (= path current-profile-picture)))
[:a {:class "delete-icon"
:title (t :file/Delete)
:on-click (fn []
(m/modal! [file/delete-file-modal id pictures-atom]
{:size :lg}))}
[:i {:class "fa fa-trash"}]])]))
(defn profile-picture-gallery [pictures-atom profile-picture-atom]
[:div {:id "profile-picture-gallery" :class "row"}
[:label.col-xs-12 (t :user/Selectprofilepicture)]
[:div.col-xs-12
[gallery-element {:path nil} profile-picture-atom]
(into [:div]
(for [picture-elem (map first (vals (group-by :path @pictures-atom)))]
[gallery-element picture-elem profile-picture-atom pictures-atom]))]
[:div.col-xs-12 {:id "profile-picture-upload-button"}
[:button {:class "btn btn-primary"
:on-click #(.preventDefault %)}
(t :file/Upload)]
[:input {:id "profile-picture-upload"
:type "file"
:name "file"
:on-change #(send-file pictures-atom profile-picture-atom)
:accept "image/*"}]]])
(def empty-field {:field "" :value ""})
(defn select-field-type [profile-field-atom]
[:select {:class "form-control"
:on-change #(swap! profile-field-atom assoc :field (.-target.value %))
:value (:field @profile-field-atom)}
[:option {:value ""} (str "- " (t :core/None) " -")]
(doall
(for [{:keys [type key]} contact-fields]
[:option {:value type :key type} (t key)]))])
;; Editor row for a single contact-info field at position `index` inside
;; `profile-fields-atom` (a sequence of {:field ... :value ...} maps).
;; Provides: an "add field after" button, move up/down arrows, a type
;; selector, a remove button, and a free-text value input.
(defn profile-field [index profile-fields-atom]
  (let [profile-field-atom (cursor profile-fields-atom [index])
        last? (= index (dec (count @profile-fields-atom)))
        first? (= index 0)]
    [:div {:key index}
     [:div.add-field-after
      ;; Inserts a new empty field immediately after this row.
      [:button {:class "btn btn-success"
                :on-click #(do
                             (.preventDefault %)
                             (f/add-field profile-fields-atom empty-field index))}
       (t :user/Addfield)]]
     [:div.field
      [:div.field-move
       [:div.move-arrows
        ;; The up arrow is hidden on the first row and the down arrow on the
        ;; last, so fields cannot be moved out of range.
        (if-not first?
          [:div.move-up {:on-click #(f/move-field :up profile-fields-atom index)}
           [:i {:class "fa fa-chevron-up"}]])
        (if-not last?
          [:div.move-down {:on-click #(f/move-field :down profile-fields-atom index)}
           [:i {:class "fa fa-chevron-down"}]])]]
      [:div.field-content
       [:div.form-group
        [:div.col-xs-8
         (select-field-type profile-field-atom)]
        [:div {:class "col-xs-4 field-remove"
               :on-click #(f/remove-field profile-fields-atom index)}
         [:span {:class "remove-button"}
          [:i {:class "fa fa-close"}]]]]
       [:div.form-group
        [:div.col-xs-12
         ;; Free-text value for the selected field type.
         [:input {:type "text"
                  :class "form-control"
                  :value (:value @profile-field-atom)
                  :on-change #(swap! profile-field-atom assoc :value (.-target.value %))}]]]]]]))
;; Editor for the whole list of contact-info fields: one profile-field row
;; per entry, plus a trailing "add field" button (calling f/add-field with no
;; index — presumably appends at the end; confirm in f/add-field).
(defn profile-fields [profile-fields-atom]
  [:div {:id "field-editor"}
   (into [:div]
         (for [index (range (count @profile-fields-atom))]
           (profile-field index profile-fields-atom)))
   [:div.add-field-after
    [:button {:class "btn btn-success"
              :on-click #(do
                           (.preventDefault %)
                           (f/add-field profile-fields-atom empty-field))}
     (t :user/Addfield)]]])
;; Top-level edit-profile page.  `state` holds the data fetched by init-data;
;; the cursors below give each sub-editor direct read/write access to its
;; slice of that state.
(defn content [state]
  (let [visibility-atom (cursor state [:user :profile_visibility])
        profile-picture-atom (cursor state [:user :profile_picture])
        about-me-atom (cursor state [:user :about])
        pictures-atom (cursor state [:picture_files])
        profile-fields-atom (cursor state [:profile])]
    [:div.panel {:id "edit-profile"}
     [m/modal-window]
     [:div.panel-body
      ;; Link back to the public view of the profile being edited.
      [:div.row [:div.col-xs-12 [:a {:href (path-for (str "/user/profile/" (:user_id @state)))} (t :user/Viewprofile)]]]
      [:form.form-horizontal
       ;; Visibility radio buttons are only offered on non-private installations.
       (if-not (private?)
         [:div
          [:div.row [:label.col-xs-12 (t :user/Profilevisibility)]]
          [:div.radio {:id "visibility-radio-internal"}
           [:label [:input {:name "visibility"
                            :value "internal"
                            :type "radio"
                            :checked (= "internal" @visibility-atom)
                            :on-change #(reset! visibility-atom (.-target.value %))}]
            (t :user/Visibleonlytoregistered)]]
          [:div.radio
           [:label [:input {:name "visibility"
                            :value "public"
                            :type "radio"
                            :checked (= "public" @visibility-atom)
                            :on-change #(reset! visibility-atom (.-target.value %))} ]
            (t :core/Public)]]])
       [profile-picture-gallery pictures-atom profile-picture-atom]
       [:div {:id "about-me" :class "form-group"}
        [:label.col-xs-12 (t :user/Aboutme)]
        [:div.col-xs-12
         [:textarea {:class "form-control" :rows 5 :cols 60 :value @about-me-atom :on-change #(reset! about-me-atom (.-target.value %))}]]]
       [:div.row
        [:label.col-xs-12 (t :user/Contactinfo)]
        [:div.col-xs-12
         (profile-fields profile-fields-atom)]]
       ;; Plugin extension point: every registered "user_edit_profile" block is
       ;; rendered as its own component.
       (into [:div]
             (for [f (plugin-fun (session/get :plugins) "block" "user_edit_profile")]
               [f]))
       [:div.row {:id "save-profile-buttons"}
        [:div.col-xs-12
         [:button {:id "save-profile-button"
                   :class "btn btn-primary"
                   :on-click #(do
                                (.preventDefault %)
                                (save-profile state))}
          (t :core/Save)]]]]]]))
;; Fetches the current user's editable profile data and stores the response
;; verbatim in `state`.
(defn init-data [state]
  (ajax/GET
   (path-for "/obpv1/user/edit/profile" true)
   {:handler (fn [data]
               (reset! state data))}))
;; Page entry point: creates the page state atom, kicks off the initial data
;; load, and returns a render fn wrapping `content` in the default layout.
(defn handler [site-navi]
  (let [state (atom {:profile-fields []})]
    (init-data state)
    (fn []
      (layout/default site-navi (content state)))))
| |
43eaca438ccd0e9c671dd65c3524212952ad8eaa545bbd21b274fc382b75eeee | jordwalke/rehp | generate_closure.ml | Js_of_ocaml compiler
* /
* Copyright ( C ) 2010
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , with linking exception ;
* either version 2.1 of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* /
* Copyright (C) 2010 Jérôme Vouillon
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
open! Stdlib
open Code
(* Debug flag: enable with the "gen_tc" debug key to trace tail-call
   rewriting. *)
let debug_tc = Debug.find "gen_tc"

(* Per-closure information gathered before rewriting.
   [tc] maps a variable [g] to the set of block addresses, inside this
   closure's body, that end with a tail call to [g] (see [tailcall]). *)
type closure_info =
  { f_name : Code.Var.t
  ; args : Code.Var.t list
  ; cont : Code.cont
  ; tc : Code.Addr.Set.t Code.Var.Map.t
  }

(* Pair of instruction lists produced by the rewriting strategies:
   [int] stays internal to a generated wrapper block, [ext] is spliced
   back into the enclosing body (see [rewrite_mutable]). *)
type 'a int_ext =
  { int : 'a
  ; ext : 'a
  }

(* Strongly-connected components over the closure tail-call graph. *)
module SCC = Strongly_connected_components.Make (Var)
(* [add_multi k v map] adds address [v] to the set bound to [k] in [map],
   creating the binding (from an empty set) when [k] is absent. *)
let add_multi k v map =
  let existing =
    match Var.Map.find k map with
    | s -> s
    | exception Not_found -> Addr.Set.empty
  in
  Var.Map.add k (Addr.Set.add v existing) map
(* [tailcall pc blocks visited tc] walks the CFG reachable from [pc] and
   accumulates in [tc] (a [Var.Map] of [Addr.Set]) the addresses of blocks
   whose last instruction is a full application whose result is immediately
   returned — i.e. tail calls — keyed by the called variable.  [visited]
   guards against revisiting blocks in cyclic CFGs.
   Note that once a block is recognised as ending in a tail call, its
   children are not explored further (the [Some] branch returns). *)
let rec tailcall pc blocks visited tc =
  if Addr.Set.mem pc visited
  then visited, tc
  else
    let visited = Addr.Set.add pc visited in
    let block = Addr.Map.find pc blocks in
    let tc_opt =
      match block.branch with
      | Return x -> (
          match List.last block.body with
          | Some (Let (y, Apply (z, _, true))) when Code.Var.compare x y = 0 ->
              (* [Apply (_, _, true)] is a full application; returning its
                 result unmodified makes this a tail call to [z]. *)
              Some (add_multi z pc tc)
          | None -> None
          | Some _ -> None)
      | _ -> None
    in
    match tc_opt with
    | Some tc -> visited, tc
    | None ->
        (* Not a tail-call block: recurse into the CFG successors. *)
        Code.fold_children
          blocks
          pc
          (fun pc (visited, tc) -> tailcall pc blocks visited tc)
          (visited, tc)
(* Splits [instrs] into the maximal leading run of adjacent closure
   definitions — each paired with its tail-call map — and the remaining
   instructions. *)
let rec collect_closures blocks instrs =
  match instrs with
  | Let (f_name, Closure (args, ((pc, _) as cont))) :: rest ->
      let _, tc = tailcall pc blocks Addr.Set.empty Var.Map.empty in
      let closures, rest = collect_closures blocks rest in
      { f_name; args; cont; tc } :: closures, rest
  | rest -> [], rest
(* [group_closures closures] builds (1) a map from function name to its
   [closure_info] and (2) the strongly connected components of the
   tail-call graph restricted to this batch of adjacent closures, sorted
   from roots to leaves.  Mutually tail-recursive closures thus end up in
   the same [SCC.Has_loop] component. *)
let group_closures closures =
  (* Names bound by the closures of this batch. *)
  let names =
    List.fold_left closures ~init:Var.Set.empty ~f:(fun names x ->
        Var.Set.add x.f_name names)
  in
  let closures_map =
    List.fold_left closures ~init:Var.Map.empty ~f:(fun closures_map x ->
        Var.Map.add x.f_name x closures_map)
  in
  (* Edges: f -> g when f contains a tail call to g; only targets defined
     in this batch are kept. *)
  let graph =
    List.fold_left closures ~init:Var.Map.empty ~f:(fun graph x ->
        let tc = Var.Map.fold (fun x _ tc -> Var.Set.add x tc) x.tc Var.Set.empty in
        let tc = Var.Set.inter names tc in
        Var.Map.add x.f_name tc graph)
  in
  closures_map, SCC.connected_components_sorted_from_roots_to_leaf graph
(* Trampoline-based compilation of mutual tail recursion.

   Each function of a tail-recursive cycle is rewritten to take an extra
   depth [counter] as its first argument.  A rewritten tail call either:
   - calls the rewritten callee directly with [counter + 1] while the
     counter is below [Config.Param.tailcall_max_depth ()], or
   - packs the callee and its arguments and returns them through
     "caml_trampoline_return", letting the "caml_trampoline" driver in
     the wrapper closure re-invoke it, unwinding the stack.
   The original function name is rebound to a wrapper closure that starts
   the counter at 0 and runs the trampoline driver on the result. *)
module Trampoline = struct
  (* Replacement for [block] that performs the tail call directly:
     [return = f (counter + 1) args]. *)
  let direct_call_block block ~counter ~x ~f ~args =
    let counter_plus_1 = Code.Var.fork counter in
    let return = Code.Var.fork x in
    { block with
      params = []
    ; body =
        [ Let (counter_plus_1, Prim (Extern "%int_add", [ Pv counter; Pc (Int 1l) ]))
        ; Let (return, Apply (f, counter_plus_1 :: args, true))
        ]
    ; branch = Return return
    }

  (* Replacement for [block] that bounces instead of calling: it returns
     [caml_trampoline_return f [|0; args...|]] so the driver performs the
     call itself (the leading 0 restarts the depth counter). *)
  let bounce_call_block block ~x ~f ~args =
    let return = Code.Var.fork x in
    let new_args = Code.Var.fresh () in
    { block with
      params = []
    ; body =
        [ Let
            ( new_args
            , Prim (Extern "%js_array", Pc (Int 0l) :: List.map args ~f:(fun x -> Pv x))
            )
        ; Let (return, Prim (Extern "caml_trampoline_return", [ Pv f; Pv new_args ]))
        ]
    ; branch = Return return
    }

  (* Body of the wrapper closure: start the counter at 0, call the
     rewritten function [f], and run the trampoline driver on its result. *)
  let wrapper_block f ~args ~counter =
    let result1 = Code.Var.fresh () in
    let result2 = Code.Var.fresh () in
    let block =
      { params = []
      ; handler = None
      ; body =
          [ Let (counter, Constant (Int 0l))
          ; Let (result1, Apply (f, counter :: args, true))
          ; Let (result2, Prim (Extern "caml_trampoline", [ Pv result1 ]))
          ]
      ; branch = Return result2
      }
    in
    block

  let wrapper_closure pc args = Closure (args, (pc, []))

  (* Rewrite one strongly connected component of the tail-call graph. *)
  let f free_pc blocks closures_map component =
    match component with
    | SCC.No_loop id ->
        (* No self/mutual tail recursion: re-emit the closure unchanged. *)
        let ci = Var.Map.find id closures_map in
        let instr = Let (ci.f_name, Closure (ci.args, ci.cont)) in
        free_pc, blocks, { int = []; ext = [ instr ] }
    | SCC.Has_loop all ->
        if debug_tc ()
        then (
          Format.eprintf "Detect cycles of size (%d).\n%!" (List.length all);
          Format.eprintf
            "%s\n%!"
            (String.concat ~sep:", " (List.map all ~f:(fun x -> Var.to_string x))));
        (* Allocate one depth counter per function of the cycle. *)
        let all =
          List.map all ~f:(fun id ->
              Code.Var.fresh_n "counter", Var.Map.find id closures_map)
        in
        let blocks, free_pc, instrs, instrs_wrapper =
          List.fold_left
            all
            ~init:(blocks, free_pc, [], [])
            ~f:(fun (blocks, free_pc, instrs, instrs_wrapper) (counter, ci) ->
              if debug_tc ()
              then Format.eprintf "Rewriting for %s\n%!" (Var.to_string ci.f_name);
              let new_f = Code.Var.fork ci.f_name in
              let new_args = List.map ci.args ~f:Code.Var.fork in
              (* Fresh block for the wrapper closure that keeps the
                 original name and calling convention. *)
              let wrapper_pc = free_pc in
              let free_pc = free_pc + 1 in
              let new_counter = Code.Var.fork counter in
              let wrapper_block =
                wrapper_block new_f ~args:new_args ~counter:new_counter
              in
              let blocks = Addr.Map.add wrapper_pc wrapper_block blocks in
              let instr_wrapper = Let (ci.f_name, wrapper_closure wrapper_pc new_args) in
              (* The rewritten function takes the counter as its first
                 parameter. *)
              let instr_real = Let (new_f, Closure (counter :: ci.args, ci.cont)) in
              (* All blocks, in any function of the cycle, that tail-call
                 [ci.f_name] — each paired with that caller's counter. *)
              let counter_and_pc =
                List.fold_left all ~init:[] ~f:(fun acc (counter, ci2) ->
                    try
                      let pcs = Addr.Set.elements (Var.Map.find ci.f_name ci2.tc) in
                      List.map pcs ~f:(fun x -> counter, x) @ acc
                    with Not_found -> acc)
              in
              let blocks, free_pc =
                List.fold_left
                  counter_and_pc
                  ~init:(blocks, free_pc)
                  ~f:(fun (blocks, free_pc) (counter, pc) ->
                    if debug_tc () then Format.eprintf "Rewriting tc in %d\n%!" pc;
                    let block = Addr.Map.find pc blocks in
                    let direct_call_pc = free_pc in
                    let bounce_call_pc = free_pc + 1 in
                    let free_pc = free_pc + 2 in
                    match List.rev block.body with
                    | Let (x, Apply (f, args, true)) :: rem_rev ->
                        assert (Var.equal f ci.f_name);
                        let blocks =
                          Addr.Map.add
                            direct_call_pc
                            (direct_call_block block ~counter ~x ~f:new_f ~args)
                            blocks
                        in
                        let blocks =
                          Addr.Map.add
                            bounce_call_pc
                            (bounce_call_block block ~x ~f:new_f ~args)
                            blocks
                        in
                        (* Replace the tail call by a depth test choosing
                           between the direct-call and bounce blocks. *)
                        let direct = Code.Var.fresh () in
                        let branch =
                          Cond (direct, (direct_call_pc, []), (bounce_call_pc, []))
                        in
                        let last =
                          Let
                            ( direct
                            , Prim
                                ( Lt
                                , [ Pv counter
                                  ; Pc
                                      (Int
                                         (Int32.of_int
                                            (Config.Param.tailcall_max_depth ())))
                                  ] ) )
                        in
                        let block =
                          { block with body = List.rev (last :: rem_rev); branch }
                        in
                        let blocks = Addr.Map.remove pc blocks in
                        Addr.Map.add pc block blocks, free_pc
                    | _ -> assert false)
              in
              blocks, free_pc, instr_real :: instrs, instr_wrapper :: instrs_wrapper)
        in
        free_pc, blocks, { int = instrs; ext = instrs_wrapper }
end
(* Identity strategy: emit every closure of the component unchanged.
   Used when tail-call optimisation is disabled. *)
module Ident = struct
  let f free_pc blocks closures_map component =
    (* Rebuild the original [Let]-binding for one closure of the
       component. *)
    let rebuild id =
      let ci = Var.Map.find id closures_map in
      Let (ci.f_name, Closure (ci.args, ci.cont))
    in
    let ext =
      match component with
      | SCC.No_loop id -> [ rebuild id ]
      | SCC.Has_loop ids -> List.map ids ~f:rebuild
    in
    free_pc, blocks, { int = []; ext }
end
(* Dispatch to the configured tail-call optimisation strategy. *)
let rewrite_tc free_pc blocks closures_map component =
  let strategy =
    match Config.Param.tailcall_optim () with
    | Config.Param.TcNone -> Ident.f
    | Config.Param.TcTrampoline -> Trampoline.f
  in
  strategy free_pc blocks closures_map component
(* Lift closures that capture mutated free variables.

   [mutated_vars] maps a closure body's address to the set of variables
   that are mutated while free in it.  Closures capturing such variables
   are rebuilt inside a fresh outer closure taking those variables as
   parameters, so that each application snapshots its own copies; the
   outer closure is applied immediately to the current values.  The
   renamings to apply inside the moved bodies are accumulated in
   [rewrite_list] and performed later (see [f]).  [int]/[ext]: see
   [int_ext]. *)
let rewrite_mutable
    free_pc
    blocks
    mutated_vars
    rewrite_list
    { int = closures_intern; ext = closures_extern } =
  let internal_and_external = closures_intern @ closures_extern in
  assert (not (List.is_empty closures_extern));
  (* [all_mut]: union of mutated free variables over every closure;
     [names]: the variables bound by the closures themselves. *)
  let all_mut, names =
    List.fold_left
      internal_and_external
      ~init:(Var.Set.empty, Var.Set.empty)
      ~f:(fun (all_mut, names) i ->
        match i with
        | Let (x, Closure (_, (pc, _))) ->
            let all_mut =
              try Var.Set.union all_mut (Addr.Map.find pc mutated_vars)
              with Not_found -> all_mut
            in
            let names = Var.Set.add x names in
            all_mut, names
        | _ -> assert false)
  in
  (* Variables to capture: mutated free variables not defined by the
     closure group itself. *)
  let vars = Var.Set.elements (Var.Set.diff all_mut names) in
  if List.is_empty vars
  then free_pc, blocks, internal_and_external
  else
    match internal_and_external with
    | [ Let (x, Closure (params, (pc, pc_args))) ] ->
        (* Single closure: wrap it alone.  Generated shape:
           [x = (fun args -> let new_x = <closure> in new_x) vars]. *)
        let new_pc = free_pc in
        let free_pc = free_pc + 1 in
        let closure = Code.Var.fork x in
        let args = List.map vars ~f:Code.Var.fork in
        let new_x = Code.Var.fork x in
        let mapping = Subst.from_map (Subst.build_mapping (x :: vars) (new_x :: args)) in
        (* The body at [pc] must be renamed with [mapping] — deferred. *)
        rewrite_list := (mapping, pc) :: !rewrite_list;
        let new_block =
          { params = []
          ; handler = None
          ; body = [ Let (new_x, Closure (params, (pc, List.map pc_args ~f:mapping))) ]
          ; branch = Return new_x
          }
        in
        let blocks = Addr.Map.add new_pc new_block blocks in
        let body =
          [ Let (closure, Closure (args, (new_pc, [])))
          ; Let (x, Apply (closure, vars, true))
          ]
        in
        free_pc, blocks, body
    | _ ->
        (* Several closures: wrap them together.  The outer closure
           returns a block holding the external closures, which is then
           destructured with [Field] projections at the call site. *)
        let new_pc = free_pc in
        let free_pc = free_pc + 1 in
        let closure = Code.Var.fresh_n "closures" in
        let closure' = Code.Var.fresh_n "closures" in
        let b = Code.Var.fresh_n "block" in
        let args = List.map vars ~f:Code.Var.fork in
        let pcs =
          List.map internal_and_external ~f:(function
            | Let (_, Closure (_, (pc, _))) -> pc
            | _ -> assert false)
        in
        (* Only the external closures are re-exported; internal ones stay
           inside the wrapper body. *)
        let old_xs =
          List.map closures_extern ~f:(function
            | Let (x, Closure _) -> x
            | _ -> assert false)
        in
        let new_xs = List.map old_xs ~f:Code.Var.fork in
        let mapping =
          Subst.from_map (Subst.build_mapping (old_xs @ vars) (new_xs @ args))
        in
        rewrite_list := List.map pcs ~f:(fun pc -> mapping, pc) @ !rewrite_list;
        let new_block =
          let proj =
            List.map2 closures_extern new_xs ~f:(fun cl new_x ->
                match cl with
                | Let (_, Closure (params, (pc, pc_args))) ->
                    Let (new_x, Closure (params, (pc, List.map pc_args ~f:mapping)))
                | _ -> assert false)
          in
          { params = []
          ; handler = None
          ; body =
              closures_intern
              @ proj
              @ [ Let (b, Block (0, Array.of_list new_xs, NotArray)) ]
          ; branch = Return b
          }
        in
        let blocks = Addr.Map.add new_pc new_block blocks in
        let body =
          [ Let (closure, Closure (args, (new_pc, [])))
          ; Let (closure', Apply (closure, vars, true))
          ]
          @ List.mapi closures_extern ~f:(fun i x ->
                match x with
                | Let (x, Closure _) -> Let (x, Field (closure', i))
                | _ -> assert false)
        in
        free_pc, blocks, body
(* Walk a block body, rewriting every maximal run of adjacent closure
   definitions: group them by tail-call SCC, apply the tail-call strategy
   ([rewrite_tc]), then the mutable-capture wrapping ([rewrite_mutable]).
   Returns the next free address, the updated block map and the new body. *)
let rec rewrite_closures mutated_vars rewrite_list free_pc blocks body : int * _ * _ list
    =
  match body with
  | Let (_, Closure _) :: _ ->
      let closures, rem = collect_closures blocks body in
      let closures_map, components = group_closures closures in
      let free_pc, blocks, closures =
        List.fold_left
          (Array.to_list components)
          ~init:(free_pc, blocks, [])
          ~f:(fun (free_pc, blocks, acc) component ->
            let free_pc, blocks, closures =
              rewrite_tc free_pc blocks closures_map component
            in
            let free_pc, blocks, intrs =
              rewrite_mutable free_pc blocks mutated_vars rewrite_list closures
            in
            (* Accumulated in reverse of the roots-to-leaves order
               produced by [group_closures]. *)
            free_pc, blocks, intrs :: acc)
      in
      let free_pc, blocks, rem =
        rewrite_closures mutated_vars rewrite_list free_pc blocks rem
      in
      free_pc, blocks, List.flatten closures @ rem
  | i :: rem ->
      (* Non-closure instruction: keep it and continue with the rest. *)
      let free_pc, blocks, rem =
        rewrite_closures mutated_vars rewrite_list free_pc blocks rem
      in
      free_pc, blocks, i :: rem
  | [] -> free_pc, blocks, []
(* Entry point: rewrite the body of every block of program [p], then apply
   the renamings accumulated in [rewrite_list] to the moved closure
   bodies. *)
let f p : Code.program =
  Code.invariant p;
  let mutated_vars = Freevars.f p in
  let rewrite_list = ref [] in
  let blocks, free_pc =
    (* Fold over the original map while threading the updated one. *)
    Addr.Map.fold
      (fun pc _ (blocks, free_pc) ->
        (* make sure we have the latest version *)
        let block = Addr.Map.find pc blocks in
        let free_pc, blocks, body =
          rewrite_closures mutated_vars rewrite_list free_pc blocks block.body
        in
        Addr.Map.add pc { block with body } blocks, free_pc)
      p.blocks
      (p.blocks, p.free_pc)
  in
  (* Code.invariant (pc, blocks, free_pc); *)
  let p = { p with blocks; free_pc } in
  (* Apply the substitutions registered by [rewrite_mutable]. *)
  let p =
    List.fold_left !rewrite_list ~init:p ~f:(fun program (mapping, pc) ->
        Subst.cont mapping pc program)
  in
  Code.invariant p;
  p
| null | https://raw.githubusercontent.com/jordwalke/rehp/f122b94f0a3f06410ddba59e3c9c603b33aadabf/compiler/lib/generate_closure.ml | ocaml | Collects adjacent closures.
make sure we have the latest version
Code.invariant (pc, blocks, free_pc); | Js_of_ocaml compiler
* /
* Copyright ( C ) 2010
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , with linking exception ;
* either version 2.1 of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* /
* Copyright (C) 2010 Jérôme Vouillon
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
open! Stdlib
open Code
let debug_tc = Debug.find "gen_tc"
type closure_info =
{ f_name : Code.Var.t
; args : Code.Var.t list
; cont : Code.cont
; tc : Code.Addr.Set.t Code.Var.Map.t
}
type 'a int_ext =
{ int : 'a
; ext : 'a
}
module SCC = Strongly_connected_components.Make (Var)
let add_multi k v map =
let set = try Var.Map.find k map with Not_found -> Addr.Set.empty in
Var.Map.add k (Addr.Set.add v set) map
let rec tailcall pc blocks visited tc =
if Addr.Set.mem pc visited
then visited, tc
else
let visited = Addr.Set.add pc visited in
let block = Addr.Map.find pc blocks in
let tc_opt =
match block.branch with
| Return x -> (
match List.last block.body with
| Some (Let (y, Apply (z, _, true))) when Code.Var.compare x y = 0 ->
Some (add_multi z pc tc)
| None -> None
| Some _ -> None)
| _ -> None
in
match tc_opt with
| Some tc -> visited, tc
| None ->
Code.fold_children
blocks
pc
(fun pc (visited, tc) -> tailcall pc blocks visited tc)
(visited, tc)
let rec collect_closures blocks l =
match l with
| Let (f_name, Closure (args, ((pc, _) as cont))) :: rem ->
let tc = snd (tailcall pc blocks Addr.Set.empty Var.Map.empty) in
let l, rem = collect_closures blocks rem in
{ f_name; args; cont; tc } :: l, rem
| rem -> [], rem
let group_closures closures =
let names =
List.fold_left closures ~init:Var.Set.empty ~f:(fun names x ->
Var.Set.add x.f_name names)
in
let closures_map =
List.fold_left closures ~init:Var.Map.empty ~f:(fun closures_map x ->
Var.Map.add x.f_name x closures_map)
in
let graph =
List.fold_left closures ~init:Var.Map.empty ~f:(fun graph x ->
let tc = Var.Map.fold (fun x _ tc -> Var.Set.add x tc) x.tc Var.Set.empty in
let tc = Var.Set.inter names tc in
Var.Map.add x.f_name tc graph)
in
closures_map, SCC.connected_components_sorted_from_roots_to_leaf graph
module Trampoline = struct
let direct_call_block block ~counter ~x ~f ~args =
let counter_plus_1 = Code.Var.fork counter in
let return = Code.Var.fork x in
{ block with
params = []
; body =
[ Let (counter_plus_1, Prim (Extern "%int_add", [ Pv counter; Pc (Int 1l) ]))
; Let (return, Apply (f, counter_plus_1 :: args, true))
]
; branch = Return return
}
let bounce_call_block block ~x ~f ~args =
let return = Code.Var.fork x in
let new_args = Code.Var.fresh () in
{ block with
params = []
; body =
[ Let
( new_args
, Prim (Extern "%js_array", Pc (Int 0l) :: List.map args ~f:(fun x -> Pv x))
)
; Let (return, Prim (Extern "caml_trampoline_return", [ Pv f; Pv new_args ]))
]
; branch = Return return
}
let wrapper_block f ~args ~counter =
let result1 = Code.Var.fresh () in
let result2 = Code.Var.fresh () in
let block =
{ params = []
; handler = None
; body =
[ Let (counter, Constant (Int 0l))
; Let (result1, Apply (f, counter :: args, true))
; Let (result2, Prim (Extern "caml_trampoline", [ Pv result1 ]))
]
; branch = Return result2
}
in
block
let wrapper_closure pc args = Closure (args, (pc, []))
let f free_pc blocks closures_map component =
match component with
| SCC.No_loop id ->
let ci = Var.Map.find id closures_map in
let instr = Let (ci.f_name, Closure (ci.args, ci.cont)) in
free_pc, blocks, { int = []; ext = [ instr ] }
| SCC.Has_loop all ->
if debug_tc ()
then (
Format.eprintf "Detect cycles of size (%d).\n%!" (List.length all);
Format.eprintf
"%s\n%!"
(String.concat ~sep:", " (List.map all ~f:(fun x -> Var.to_string x))));
let all =
List.map all ~f:(fun id ->
Code.Var.fresh_n "counter", Var.Map.find id closures_map)
in
let blocks, free_pc, instrs, instrs_wrapper =
List.fold_left
all
~init:(blocks, free_pc, [], [])
~f:(fun (blocks, free_pc, instrs, instrs_wrapper) (counter, ci) ->
if debug_tc ()
then Format.eprintf "Rewriting for %s\n%!" (Var.to_string ci.f_name);
let new_f = Code.Var.fork ci.f_name in
let new_args = List.map ci.args ~f:Code.Var.fork in
let wrapper_pc = free_pc in
let free_pc = free_pc + 1 in
let new_counter = Code.Var.fork counter in
let wrapper_block =
wrapper_block new_f ~args:new_args ~counter:new_counter
in
let blocks = Addr.Map.add wrapper_pc wrapper_block blocks in
let instr_wrapper = Let (ci.f_name, wrapper_closure wrapper_pc new_args) in
let instr_real = Let (new_f, Closure (counter :: ci.args, ci.cont)) in
let counter_and_pc =
List.fold_left all ~init:[] ~f:(fun acc (counter, ci2) ->
try
let pcs = Addr.Set.elements (Var.Map.find ci.f_name ci2.tc) in
List.map pcs ~f:(fun x -> counter, x) @ acc
with Not_found -> acc)
in
let blocks, free_pc =
List.fold_left
counter_and_pc
~init:(blocks, free_pc)
~f:(fun (blocks, free_pc) (counter, pc) ->
if debug_tc () then Format.eprintf "Rewriting tc in %d\n%!" pc;
let block = Addr.Map.find pc blocks in
let direct_call_pc = free_pc in
let bounce_call_pc = free_pc + 1 in
let free_pc = free_pc + 2 in
match List.rev block.body with
| Let (x, Apply (f, args, true)) :: rem_rev ->
assert (Var.equal f ci.f_name);
let blocks =
Addr.Map.add
direct_call_pc
(direct_call_block block ~counter ~x ~f:new_f ~args)
blocks
in
let blocks =
Addr.Map.add
bounce_call_pc
(bounce_call_block block ~x ~f:new_f ~args)
blocks
in
let direct = Code.Var.fresh () in
let branch =
Cond (direct, (direct_call_pc, []), (bounce_call_pc, []))
in
let last =
Let
( direct
, Prim
( Lt
, [ Pv counter
; Pc
(Int
(Int32.of_int
(Config.Param.tailcall_max_depth ())))
] ) )
in
let block =
{ block with body = List.rev (last :: rem_rev); branch }
in
let blocks = Addr.Map.remove pc blocks in
Addr.Map.add pc block blocks, free_pc
| _ -> assert false)
in
blocks, free_pc, instr_real :: instrs, instr_wrapper :: instrs_wrapper)
in
free_pc, blocks, { int = instrs; ext = instrs_wrapper }
end
module Ident = struct
let f free_pc blocks closures_map component =
match component with
| SCC.No_loop id ->
let ci = Var.Map.find id closures_map in
let instr = Let (ci.f_name, Closure (ci.args, ci.cont)) in
free_pc, blocks, { int = []; ext = [ instr ] }
| SCC.Has_loop ids ->
let instrs =
List.map ids ~f:(fun id ->
let ci = Var.Map.find id closures_map in
let instr = Let (ci.f_name, Closure (ci.args, ci.cont)) in
instr)
in
free_pc, blocks, { int = []; ext = instrs }
end
let rewrite_tc free_pc blocks closures_map component =
let open Config.Param in
match tailcall_optim () with
| TcNone -> Ident.f free_pc blocks closures_map component
| TcTrampoline -> Trampoline.f free_pc blocks closures_map component
let rewrite_mutable
free_pc
blocks
mutated_vars
rewrite_list
{ int = closures_intern; ext = closures_extern } =
let internal_and_external = closures_intern @ closures_extern in
assert (not (List.is_empty closures_extern));
let all_mut, names =
List.fold_left
internal_and_external
~init:(Var.Set.empty, Var.Set.empty)
~f:(fun (all_mut, names) i ->
match i with
| Let (x, Closure (_, (pc, _))) ->
let all_mut =
try Var.Set.union all_mut (Addr.Map.find pc mutated_vars)
with Not_found -> all_mut
in
let names = Var.Set.add x names in
all_mut, names
| _ -> assert false)
in
let vars = Var.Set.elements (Var.Set.diff all_mut names) in
if List.is_empty vars
then free_pc, blocks, internal_and_external
else
match internal_and_external with
| [ Let (x, Closure (params, (pc, pc_args))) ] ->
let new_pc = free_pc in
let free_pc = free_pc + 1 in
let closure = Code.Var.fork x in
let args = List.map vars ~f:Code.Var.fork in
let new_x = Code.Var.fork x in
let mapping = Subst.from_map (Subst.build_mapping (x :: vars) (new_x :: args)) in
rewrite_list := (mapping, pc) :: !rewrite_list;
let new_block =
{ params = []
; handler = None
; body = [ Let (new_x, Closure (params, (pc, List.map pc_args ~f:mapping))) ]
; branch = Return new_x
}
in
let blocks = Addr.Map.add new_pc new_block blocks in
let body =
[ Let (closure, Closure (args, (new_pc, [])))
; Let (x, Apply (closure, vars, true))
]
in
free_pc, blocks, body
| _ ->
let new_pc = free_pc in
let free_pc = free_pc + 1 in
let closure = Code.Var.fresh_n "closures" in
let closure' = Code.Var.fresh_n "closures" in
let b = Code.Var.fresh_n "block" in
let args = List.map vars ~f:Code.Var.fork in
let pcs =
List.map internal_and_external ~f:(function
| Let (_, Closure (_, (pc, _))) -> pc
| _ -> assert false)
in
let old_xs =
List.map closures_extern ~f:(function
| Let (x, Closure _) -> x
| _ -> assert false)
in
let new_xs = List.map old_xs ~f:Code.Var.fork in
let mapping =
Subst.from_map (Subst.build_mapping (old_xs @ vars) (new_xs @ args))
in
rewrite_list := List.map pcs ~f:(fun pc -> mapping, pc) @ !rewrite_list;
let new_block =
let proj =
List.map2 closures_extern new_xs ~f:(fun cl new_x ->
match cl with
| Let (_, Closure (params, (pc, pc_args))) ->
Let (new_x, Closure (params, (pc, List.map pc_args ~f:mapping)))
| _ -> assert false)
in
{ params = []
; handler = None
; body =
closures_intern
@ proj
@ [ Let (b, Block (0, Array.of_list new_xs, NotArray)) ]
; branch = Return b
}
in
let blocks = Addr.Map.add new_pc new_block blocks in
let body =
[ Let (closure, Closure (args, (new_pc, [])))
; Let (closure', Apply (closure, vars, true))
]
@ List.mapi closures_extern ~f:(fun i x ->
match x with
| Let (x, Closure _) -> Let (x, Field (closure', i))
| _ -> assert false)
in
free_pc, blocks, body
let rec rewrite_closures mutated_vars rewrite_list free_pc blocks body : int * _ * _ list
=
match body with
| Let (_, Closure _) :: _ ->
let closures, rem = collect_closures blocks body in
let closures_map, components = group_closures closures in
let free_pc, blocks, closures =
List.fold_left
(Array.to_list components)
~init:(free_pc, blocks, [])
~f:(fun (free_pc, blocks, acc) component ->
let free_pc, blocks, closures =
rewrite_tc free_pc blocks closures_map component
in
let free_pc, blocks, intrs =
rewrite_mutable free_pc blocks mutated_vars rewrite_list closures
in
free_pc, blocks, intrs :: acc)
in
let free_pc, blocks, rem =
rewrite_closures mutated_vars rewrite_list free_pc blocks rem
in
free_pc, blocks, List.flatten closures @ rem
| i :: rem ->
let free_pc, blocks, rem =
rewrite_closures mutated_vars rewrite_list free_pc blocks rem
in
free_pc, blocks, i :: rem
| [] -> free_pc, blocks, []
let f p : Code.program =
Code.invariant p;
let mutated_vars = Freevars.f p in
let rewrite_list = ref [] in
let blocks, free_pc =
Addr.Map.fold
(fun pc _ (blocks, free_pc) ->
let block = Addr.Map.find pc blocks in
let free_pc, blocks, body =
rewrite_closures mutated_vars rewrite_list free_pc blocks block.body
in
Addr.Map.add pc { block with body } blocks, free_pc)
p.blocks
(p.blocks, p.free_pc)
in
let p = { p with blocks; free_pc } in
let p =
List.fold_left !rewrite_list ~init:p ~f:(fun program (mapping, pc) ->
Subst.cont mapping pc program)
in
Code.invariant p;
p
|
59522b16da4c5a6e136096ee8656fa182335dd436dd879e34b913f1e0e7b5d93 | korya/efuns | dPrintf.ml | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
Automatique . Distributed only by permission .
(* *)
(***********************************************************************)
$ I d : dPrintf.ml , v 1.1 1999/10/25 07:39:40 lefessan Exp $
(* C stubs used by the real [Printf]; declared here apparently for
   interface compatibility (not referenced in the visible code). *)
external format_int: string -> int -> string = "format_int"
external format_float: string -> float -> string = "format_float"

(* Dummy [fprintf]: scans the format string and consumes one well-typed
   argument per conversion directive, but never writes anything.
   NOTE(review): no output is produced and [outchan] is unused — this
   looks like a deliberate printf stand-in that keeps callers type-correct
   while discarding output; confirm against the rest of the project.
   The [Obj.magic] tricks mirror the pre-format6 stdlib implementation:
   [doprn] pretends to have whatever arity the format string requires. *)
let fprintf outchan format =
  let format = (Obj.magic format : string) in
  let rec doprn i =
    if i >= String.length format then
      (* End of format: masquerade as the caller's expected result type. *)
      Obj.magic ()
    else begin
      let c = String.unsafe_get format i in
      if c <> '%' then begin
        (* Ordinary character: a real printf would echo it. *)
        doprn (succ i)
      end else begin
        (* Skip width/precision/flag characters, then dispatch on the
           conversion letter, consuming one argument of matching type. *)
        let j = skip_args (succ i) in
        match String.unsafe_get format j with
          '%' ->
            doprn (succ j)
        | 's' ->
            Obj.magic(fun (s: string) ->
              doprn (succ j))
        | 'c' ->
            Obj.magic(fun (c: char) ->
              doprn (succ j))
        | 'd' | 'i' | 'o' | 'x' | 'X' | 'u' ->
            Obj.magic(fun (n: int) ->
              doprn (succ j))
        | 'f' | 'e' | 'E' | 'g' | 'G' ->
            Obj.magic(fun (f: float) ->
              doprn (succ j))
        | 'b' ->
            Obj.magic(fun (b: bool) ->
              doprn (succ j))
        | 'a' ->
            (* %a: consumes a user printer and its argument. *)
            Obj.magic(fun (printer: out_channel -> 'a -> unit) (arg: 'a) ->
              doprn(succ j))
        | 't' ->
            Obj.magic(fun (printer: out_channel -> unit) ->
              doprn(succ j))
        | c ->
            invalid_arg ("fprintf: unknown format")
      end
    end
  and skip_args j =
    (* Advance past digit/flag characters of a directive. *)
    match String.unsafe_get format j with
      '0' .. '9' | ' ' | '.' | '-' -> skip_args (succ j)
    | c -> j
  in doprn 0
(* Specialisations of [fprintf] for the two standard channels. *)
let printf fmt = fprintf stdout fmt

let eprintf fmt = fprintf stderr fmt
| null | https://raw.githubusercontent.com/korya/efuns/78b21d9dff45b7eec764c63132c7a564f5367c30/inliner/dPrintf.ml | ocaml | *********************************************************************
Objective Caml
********************************************************************* | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
Automatique . Distributed only by permission .
$ I d : dPrintf.ml , v 1.1 1999/10/25 07:39:40 lefessan Exp $
external format_int: string -> int -> string = "format_int"
external format_float: string -> float -> string = "format_float"
let fprintf outchan format =
let format = (Obj.magic format : string) in
let rec doprn i =
if i >= String.length format then
Obj.magic ()
else begin
let c = String.unsafe_get format i in
if c <> '%' then begin
doprn (succ i)
end else begin
let j = skip_args (succ i) in
match String.unsafe_get format j with
'%' ->
doprn (succ j)
| 's' ->
Obj.magic(fun (s: string) ->
doprn (succ j))
| 'c' ->
Obj.magic(fun (c: char) ->
doprn (succ j))
| 'd' | 'i' | 'o' | 'x' | 'X' | 'u' ->
Obj.magic(fun (n: int) ->
doprn (succ j))
| 'f' | 'e' | 'E' | 'g' | 'G' ->
Obj.magic(fun (f: float) ->
doprn (succ j))
| 'b' ->
Obj.magic(fun (b: bool) ->
doprn (succ j))
| 'a' ->
Obj.magic(fun (printer: out_channel -> 'a -> unit) (arg: 'a) ->
doprn(succ j))
| 't' ->
Obj.magic(fun (printer: out_channel -> unit) ->
doprn(succ j))
| c ->
invalid_arg ("fprintf: unknown format")
end
end
and skip_args j =
match String.unsafe_get format j with
'0' .. '9' | ' ' | '.' | '-' -> skip_args (succ j)
| c -> j
in doprn 0
let printf fmt = fprintf stdout fmt
and eprintf fmt = fprintf stderr fmt
|
d04ac76863b7c7817021be8605f82c6e02e6fc2ad44a5428c25b4b6873f3b408 | jserot/caph | options.ml | (************************************************************************************)
(* *)
(* CAPH *)
(* -bpclermont.fr *)
(* *)
(* *)
(* *)
Copyright 2011 - 2019 . All rights reserved .
This file is distributed under the terms of the Q Public License version 1.0 .
(* *)
(************************************************************************************)
(* Print the compiler version banner and exit successfully. *)
let print_version () = Printf.printf "This is the Caph compiler, version %s\n" Version.version; exit 0

(* Selected back-end / output format. *)
type output_format = NoOutput | Dot | Xdf | Dif | Systemc | Vhdl

(* Mutable option state, set by the command-line setters below. *)
let output_fmt = ref NoOutput
let output_prefix = ref ""
let prefix = ref ""
(* Dump flags for the various intermediate representations. *)
let dump_tenv = ref false
let dump_typed = ref false
let dump_senv = ref false
let dump_boxes = ref false
let dump_denv = ref false
let dump_fsms = ref false
let dump_static_fifo_sizes = ref false
(* [Some n] enables abstract interpretation bounded by [n] (meaning of
   [n] not visible here — see its consumer). *)
let abstract_interpret = ref (None : int option)
let run = ref false
let dump_sdf_fifo_sizes = ref false

(* Command-line setters.  Each one flips a flag or forwards its argument
   to the relevant compiler sub-module. *)
let ignore_pragmas () = Syntax.allow_pragmas := false
(* Registering an allowed input/output file switches safe mode on. *)
let add_allowed_input_file f =
  Genmake.safe_mode := true;
  Genmake.safe_cfg.Genmake.allowed_ifiles <- Genmake.safe_cfg.Genmake.allowed_ifiles @ [f]
let add_allowed_output_file f =
  Genmake.safe_mode := true;
  Genmake.safe_cfg.Genmake.allowed_ofiles <- Genmake.safe_cfg.Genmake.allowed_ofiles @ [f]
let set_output_prefix name = output_prefix := name
let add_include_path path = Lexer.include_path := !Lexer.include_path @ [path]
let set_prefix p = prefix := p
let set_project_file f = Genmake.target.Genmake.proj_file <- f
let set_target_dir p = Genmake.target.Genmake.dir <- p
let do_show_signness () = Const.show_signness := true
let do_flat_variants () = Expr.flat_variants := true
let do_dump_tenv () = dump_tenv := true
let do_dump_typed () = dump_typed := true
let do_dump_senv () = dump_senv := true
let do_dump_boxes () = dump_boxes := true
let do_compute_moc () = Static.compute_moc := true
let do_dump_ir () = Interm.dump_ir := true
let do_dump_denv () = dump_denv := true
let do_dump_fsms () = dump_fsms := true
let do_run () = run := true
(* Output-format selectors (mutually exclusive by last-one-wins). *)
let do_dot () = output_fmt := Dot
let do_xdf () = output_fmt := Xdf
let do_dif () = output_fmt := Dif
let do_systemc () = output_fmt := Systemc
let do_vhdl () = output_fmt := Vhdl
(* Dot rendering tweaks. *)
let do_dot_unlabeled_edges () = Dot.cfg.Dot.labeled_edges <- false
let do_dot_unboxed_ios () =
  Dot.cfg.Dot.stream_io_box_shape <- "plaintext";
  Dot.cfg.Dot.port_io_box_shape <- "plaintext"
let do_dot_show_indexes () = Dot.cfg.Dot.show_indexes <- true
let do_dot_wire_annots () = Dot.cfg.Dot.show_wire_annots <- true
let do_dot_simple_boxes () = Dot.cfg.Dot.slotted_boxes <- false
(* Simulation / tracing options. *)
let do_trace () = Trace.tracing := true
let do_trace_ports () = Ports.trace_port_value_change := true
let set_abbrev_dc_ctors () = Expr.abbrev_dc_ctors := true
let set_default_channel_cap c = Dynamic.default_channel_capacity := c
let do_dump_channel_stats () = Dynamic.dump_channel_stats := true
let do_dump_sdf_fifo_sizes () = dump_sdf_fifo_sizes := true
let set_max_run_cycles n = Process.max_run_cycles := n
let set_min_run_cycles n = Process.min_run_cycles := n
let do_warn_channels () = Process.warn_on_full_channels := true
let suppress_cast_warnings () = Error.report_cast_warnings := false
let do_phantom_types () = Pr_type.print_type_repr := true
let set_stdin f = Io.stdin_file := f
let set_stdout f = Io.stdout_file := f
let add_cmo_file f = match Misc.get_extension f with
"ml" ->
Genmake.add_to_target Genmake.target.Genmake.sim_extra_files f;
Foreign.cmo_files := (Misc.change_extension "cmo" f) :: !Foreign.cmo_files;
| _ -> Error.illegal_extra_file "-sim_extra" ".ml" f
let add_macro_defn d = Macro.add_defn d
let do_gen_make () = Misc.generate_makefiles := true
let set_split_output_frames () = Streams.split_output_frames := true
let do_abstract_interpret id = abstract_interpret := Some id
let set_ai_max_cycles n = Absint.cfg.Absint.ai_max_cycles <- n
(* SystemC related options *)
let set_sc_stop_time n = Systemc.cfg.Systemc.sc_stop_time <- n
let set_sc_stop_idle_time n = Systemc.cfg.Systemc.sc_stop_idle_time <- n
let set_sc_io_monitor () = Systemc.cfg.Systemc.sc_io_monitor <- true
let set_sc_io_monitor_file f = Systemc.cfg.Systemc.sc_io_monitor_file <- f
let set_sc_clock_period n = Systemc.cfg.Systemc.sc_clock_period_ns <- n
let set_sc_fifo_capacity n = Systemc.cfg.Systemc.sc_fifo_capacity <- n
let set_sc_trace () = Systemc.cfg.Systemc.sc_trace <- true
let set_sc_trace_fifos () = Systemc.cfg.Systemc.sc_trace_fifos <- true
let set_sc_dump_fifos () = Systemc.cfg.Systemc.sc_dump_fifos <- true
let set_sc_dump_fifo_stats () = Systemc.cfg.Systemc.sc_dump_fifo_stats <- true
let set_sc_fifo_stats_file f = Systemc.cfg.Systemc.sc_fifo_stats_file <- f
let set_sc_use_int () = Systemc.cfg.Systemc.sc_use_int <- true
let set_sc_use_templates () = Printf.printf "** Warning: option -sc_use_templates is deprecated (ignored here)\n"
let add_sc_extra_file f = match Misc.get_extension f with
"h" -> Genmake.add_to_target Genmake.target.Genmake.h_extra_files f
| "cpp" -> Genmake.add_to_target Genmake.target.Genmake.cpp_extra_files f
| _ -> Error.illegal_extra_file "-sc_extra" ".h, .cpp" f
let set_sc_abbrev_dc_ctors () = Systemc.cfg.Systemc.sc_abbrev_dc_ctors <- true
let set_sc_istream_period p = Systemc.cfg.Systemc.sc_stream_in_period <- p
let set_sc_istream_hblank p = Systemc.cfg.Systemc.sc_stream_in_hblank <- p
let set_sc_istream_vblank p = Systemc.cfg.Systemc.sc_stream_in_vblank <- p
(* VHDL related options *)
let set_vhdl_num_lib s = Vhdl.cfg.Vhdl.vhdl_num_lib <- s
let set_vhdl_default_int_size s = Vhdl.cfg.Vhdl.vhdl_default_int_size <- s
let set_vhdl_annot_file f = Vhdl.cfg.Vhdl.vhdl_annot_file <- f
let set_vhdl_fifo_offset n = Vhdl.cfg.Vhdl.vhdl_fifo_offset <- n
let set_vhdl_default_fifo_capacity s = Vhdl.cfg.Vhdl.vhdl_default_fifo_capacity <- s
let set_vhdl_big_fifo_model s = Vhdl.cfg.Vhdl.vhdl_big_fifo_model <- s
let set_vhdl_small_fifo_model s = Vhdl.cfg.Vhdl.vhdl_small_fifo_model <- s
let set_vhdl_fifo_model_threshold n = Vhdl.cfg.Vhdl.vhdl_fifo_model_threshold <- n
let set_vhdl_reset_duration s = Vhdl.cfg.Vhdl.vhdl_reset_duration_ns <- s
let set_vhdl_clock_period s = Vhdl.cfg.Vhdl.vhdl_clock_period_ns <- s
let set_vhdl_seq_delay s = Vhdl.cfg.Vhdl.vhdl_seq_delay_ns <- s
let set_vhdl_istream_period p =
if p < 1 then Printf.eprintf "Warning : pixel period for I/O streams should be > 0 !\n";
Vhdl.cfg.Vhdl.vhdl_stream_in_period <- p
let set_vhdl_istream_blanking () = Vhdl.cfg.Vhdl.vhdl_stream_in_blanking <- true
let set_vhdl_istream_skew t = Vhdl.cfg.Vhdl.vhdl_stream_in_skew <- t
let set_vhdl_trace () = Vhdl.cfg.Vhdl.vhdl_trace <- true
let set_vhdl_init_array_at_decl () = Vhdl.cfg.Vhdl.vhdl_init_array_at_decl <- true
let set_vhdl_warn_on_unsized_consts () = Vhdl.cfg.Vhdl.vhdl_warn_on_unsized_consts <- true
let set_vhdl_use_native_mult () = Vhdl.cfg.Vhdl.vhdl_use_native_mult <- true
let set_vhdl_float_support () = Vhdl.cfg.Vhdl.vhdl_float_support <- true
let set_vhdl_write_type_converters () = Vhdl.cfg.Vhdl.vhdl_write_type_converters <- true
let set_vhdl_rename_io_wires () = Vhdl.cfg.Vhdl.vhdl_rename_io_wires <- true
let add_vhdl_extra_file f = match Misc.get_extension f with
| "vhd" -> Genmake.add_to_target Genmake.target.Genmake.vhdl_extra_files f
| _ -> Error.illegal_extra_file "-vhdl_extra" ".vhd" f
let set_vhdl_tb_external_clock () = Vhdl.cfg.Vhdl.vhdl_tb_external_clock <- true
let set_vhdl_tb_inline_io () = Vhdl.cfg.Vhdl.vhdl_tb_inline_io <- true; Genmake.cfg.Genmake.tb_inline_io <- true
XDF related options
let set_xdf_package p = Xdf.cfg.Xdf.target_package <- p
| null | https://raw.githubusercontent.com/jserot/caph/2b3b241f0c32aa4fcaf60d4b8529956cca8aa6b1/compiler/options.ml | ocaml | **********************************************************************************
CAPH
-bpclermont.fr
**********************************************************************************
SystemC related options
VHDL related options |
Copyright 2011 - 2019 . All rights reserved .
This file is distributed under the terms of the Q Public License version 1.0 .
let print_version () = Printf.printf "This is the Caph compiler, version %s\n" Version.version; exit 0
type output_format = NoOutput | Dot | Xdf | Dif | Systemc | Vhdl
let output_fmt = ref NoOutput
let output_prefix = ref ""
let prefix = ref ""
let dump_tenv = ref false
let dump_typed = ref false
let dump_senv = ref false
let dump_boxes = ref false
let dump_denv = ref false
let dump_fsms = ref false
let dump_static_fifo_sizes = ref false
let abstract_interpret = ref (None : int option)
let run = ref false
let dump_sdf_fifo_sizes = ref false
let ignore_pragmas () = Syntax.allow_pragmas := false
let add_allowed_input_file f =
Genmake.safe_mode := true;
Genmake.safe_cfg.Genmake.allowed_ifiles <- Genmake.safe_cfg.Genmake.allowed_ifiles @ [f]
let add_allowed_output_file f =
Genmake.safe_mode := true;
Genmake.safe_cfg.Genmake.allowed_ofiles <- Genmake.safe_cfg.Genmake.allowed_ofiles @ [f]
let set_output_prefix name = output_prefix := name
let add_include_path path = Lexer.include_path := !Lexer.include_path @ [path]
let set_prefix p = prefix := p
let set_project_file f = Genmake.target.Genmake.proj_file <- f
let set_target_dir p = Genmake.target.Genmake.dir <- p
let do_show_signness () = Const.show_signness := true
let do_flat_variants () = Expr.flat_variants := true
let do_dump_tenv () = dump_tenv := true
let do_dump_typed () = dump_typed := true
let do_dump_senv () = dump_senv := true
let do_dump_boxes () = dump_boxes := true
let do_compute_moc () = Static.compute_moc := true
let do_dump_ir () = Interm.dump_ir := true
let do_dump_denv () = dump_denv := true
let do_dump_fsms () = dump_fsms := true
let do_run () = run := true
let do_dot () = output_fmt := Dot
let do_xdf () = output_fmt := Xdf
let do_dif () = output_fmt := Dif
let do_systemc () = output_fmt := Systemc
let do_vhdl () = output_fmt := Vhdl
let do_dot_unlabeled_edges () = Dot.cfg.Dot.labeled_edges <- false
let do_dot_unboxed_ios () =
Dot.cfg.Dot.stream_io_box_shape <- "plaintext";
Dot.cfg.Dot.port_io_box_shape <- "plaintext"
let do_dot_show_indexes () = Dot.cfg.Dot.show_indexes <- true
let do_dot_wire_annots () = Dot.cfg.Dot.show_wire_annots <- true
let do_dot_simple_boxes () = Dot.cfg.Dot.slotted_boxes <- false
let do_trace () = Trace.tracing := true
let do_trace_ports () = Ports.trace_port_value_change := true
let set_abbrev_dc_ctors () = Expr.abbrev_dc_ctors := true
let set_default_channel_cap c = Dynamic.default_channel_capacity := c
let do_dump_channel_stats () = Dynamic.dump_channel_stats := true
let do_dump_sdf_fifo_sizes () = dump_sdf_fifo_sizes := true
let set_max_run_cycles n = Process.max_run_cycles := n
let set_min_run_cycles n = Process.min_run_cycles := n
let do_warn_channels () = Process.warn_on_full_channels := true
let suppress_cast_warnings () = Error.report_cast_warnings := false
let do_phantom_types () = Pr_type.print_type_repr := true
let set_stdin f = Io.stdin_file := f
let set_stdout f = Io.stdout_file := f
let add_cmo_file f = match Misc.get_extension f with
"ml" ->
Genmake.add_to_target Genmake.target.Genmake.sim_extra_files f;
Foreign.cmo_files := (Misc.change_extension "cmo" f) :: !Foreign.cmo_files;
| _ -> Error.illegal_extra_file "-sim_extra" ".ml" f
let add_macro_defn d = Macro.add_defn d
let do_gen_make () = Misc.generate_makefiles := true
let set_split_output_frames () = Streams.split_output_frames := true
let do_abstract_interpret id = abstract_interpret := Some id
let set_ai_max_cycles n = Absint.cfg.Absint.ai_max_cycles <- n
let set_sc_stop_time n = Systemc.cfg.Systemc.sc_stop_time <- n
let set_sc_stop_idle_time n = Systemc.cfg.Systemc.sc_stop_idle_time <- n
let set_sc_io_monitor () = Systemc.cfg.Systemc.sc_io_monitor <- true
let set_sc_io_monitor_file f = Systemc.cfg.Systemc.sc_io_monitor_file <- f
let set_sc_clock_period n = Systemc.cfg.Systemc.sc_clock_period_ns <- n
let set_sc_fifo_capacity n = Systemc.cfg.Systemc.sc_fifo_capacity <- n
let set_sc_trace () = Systemc.cfg.Systemc.sc_trace <- true
let set_sc_trace_fifos () = Systemc.cfg.Systemc.sc_trace_fifos <- true
let set_sc_dump_fifos () = Systemc.cfg.Systemc.sc_dump_fifos <- true
let set_sc_dump_fifo_stats () = Systemc.cfg.Systemc.sc_dump_fifo_stats <- true
let set_sc_fifo_stats_file f = Systemc.cfg.Systemc.sc_fifo_stats_file <- f
let set_sc_use_int () = Systemc.cfg.Systemc.sc_use_int <- true
let set_sc_use_templates () = Printf.printf "** Warning: option -sc_use_templates is deprecated (ignored here)\n"
let add_sc_extra_file f = match Misc.get_extension f with
"h" -> Genmake.add_to_target Genmake.target.Genmake.h_extra_files f
| "cpp" -> Genmake.add_to_target Genmake.target.Genmake.cpp_extra_files f
| _ -> Error.illegal_extra_file "-sc_extra" ".h, .cpp" f
let set_sc_abbrev_dc_ctors () = Systemc.cfg.Systemc.sc_abbrev_dc_ctors <- true
let set_sc_istream_period p = Systemc.cfg.Systemc.sc_stream_in_period <- p
let set_sc_istream_hblank p = Systemc.cfg.Systemc.sc_stream_in_hblank <- p
let set_sc_istream_vblank p = Systemc.cfg.Systemc.sc_stream_in_vblank <- p
let set_vhdl_num_lib s = Vhdl.cfg.Vhdl.vhdl_num_lib <- s
let set_vhdl_default_int_size s = Vhdl.cfg.Vhdl.vhdl_default_int_size <- s
let set_vhdl_annot_file f = Vhdl.cfg.Vhdl.vhdl_annot_file <- f
let set_vhdl_fifo_offset n = Vhdl.cfg.Vhdl.vhdl_fifo_offset <- n
let set_vhdl_default_fifo_capacity s = Vhdl.cfg.Vhdl.vhdl_default_fifo_capacity <- s
let set_vhdl_big_fifo_model s = Vhdl.cfg.Vhdl.vhdl_big_fifo_model <- s
let set_vhdl_small_fifo_model s = Vhdl.cfg.Vhdl.vhdl_small_fifo_model <- s
let set_vhdl_fifo_model_threshold n = Vhdl.cfg.Vhdl.vhdl_fifo_model_threshold <- n
let set_vhdl_reset_duration s = Vhdl.cfg.Vhdl.vhdl_reset_duration_ns <- s
let set_vhdl_clock_period s = Vhdl.cfg.Vhdl.vhdl_clock_period_ns <- s
let set_vhdl_seq_delay s = Vhdl.cfg.Vhdl.vhdl_seq_delay_ns <- s
let set_vhdl_istream_period p =
if p < 1 then Printf.eprintf "Warning : pixel period for I/O streams should be > 0 !\n";
Vhdl.cfg.Vhdl.vhdl_stream_in_period <- p
let set_vhdl_istream_blanking () = Vhdl.cfg.Vhdl.vhdl_stream_in_blanking <- true
let set_vhdl_istream_skew t = Vhdl.cfg.Vhdl.vhdl_stream_in_skew <- t
let set_vhdl_trace () = Vhdl.cfg.Vhdl.vhdl_trace <- true
let set_vhdl_init_array_at_decl () = Vhdl.cfg.Vhdl.vhdl_init_array_at_decl <- true
let set_vhdl_warn_on_unsized_consts () = Vhdl.cfg.Vhdl.vhdl_warn_on_unsized_consts <- true
let set_vhdl_use_native_mult () = Vhdl.cfg.Vhdl.vhdl_use_native_mult <- true
let set_vhdl_float_support () = Vhdl.cfg.Vhdl.vhdl_float_support <- true
let set_vhdl_write_type_converters () = Vhdl.cfg.Vhdl.vhdl_write_type_converters <- true
let set_vhdl_rename_io_wires () = Vhdl.cfg.Vhdl.vhdl_rename_io_wires <- true
let add_vhdl_extra_file f = match Misc.get_extension f with
| "vhd" -> Genmake.add_to_target Genmake.target.Genmake.vhdl_extra_files f
| _ -> Error.illegal_extra_file "-vhdl_extra" ".vhd" f
let set_vhdl_tb_external_clock () = Vhdl.cfg.Vhdl.vhdl_tb_external_clock <- true
let set_vhdl_tb_inline_io () = Vhdl.cfg.Vhdl.vhdl_tb_inline_io <- true; Genmake.cfg.Genmake.tb_inline_io <- true
XDF related options
let set_xdf_package p = Xdf.cfg.Xdf.target_package <- p
|
678e1d289dcb6677863bf5083cc9ef8ab927f41aa3a2c4501ce79a05ccd05f70 | simmone/racket-simple-qr | example.rkt | #lang racket
(require simple-qr)
block 's default width is 5
(qr-write "" "normal.png")
(qr-write "" "normal_color.png" #:color '("#ffbb33" . "#0d47a1"))
(qr-write "" "normal_trans.png" #:color '("#9933CC" . "transparent"))
(qr-write "" "small.png" #:module_width 2)
(qr-write "" "large.png" #:module_width 10)
(printf "~a\n~a\n~a\n"
(qr-read "normal.png")
(qr-read "small.png")
(qr-read "large.png"))
(printf "~a\n" (qr-read "damaged.png"))
(qr-write "" "normal.svg" #:output_type 'svg)
(qr-write "" "normal_color.svg" #:color '("#ffbb33" . "#0d47a1") #:output_type 'svg)
(qr-write "" "normal_trans.svg" #:color '("#9933CC" . "transparent") #:output_type 'svg)
| null | https://raw.githubusercontent.com/simmone/racket-simple-qr/904f1491bc521badeafeabd0d7d7e97e3d0ee958/simple-qr/example/example.rkt | racket | #lang racket
(require simple-qr)
block 's default width is 5
(qr-write "" "normal.png")
(qr-write "" "normal_color.png" #:color '("#ffbb33" . "#0d47a1"))
(qr-write "" "normal_trans.png" #:color '("#9933CC" . "transparent"))
(qr-write "" "small.png" #:module_width 2)
(qr-write "" "large.png" #:module_width 10)
(printf "~a\n~a\n~a\n"
(qr-read "normal.png")
(qr-read "small.png")
(qr-read "large.png"))
(printf "~a\n" (qr-read "damaged.png"))
(qr-write "" "normal.svg" #:output_type 'svg)
(qr-write "" "normal_color.svg" #:color '("#ffbb33" . "#0d47a1") #:output_type 'svg)
(qr-write "" "normal_trans.svg" #:color '("#9933CC" . "transparent") #:output_type 'svg)
| |
a50c8f8f0c80aea684fca7610dba692afc68fa47eb12cf1b5661fbcd2e1b8f85 | lgessler/glam | span.cljc | (ns glam.models.span
(:require [clojure.set :refer [rename-keys]]
[com.wsscode.pathom.connect :as pc]
[taoensso.timbre :as log]
#?(:cljs [glam.models.common :as mc]
:clj [glam.models.common :as mc :refer [server-error server-message]])
#?(:clj [glam.xtdb.token :as tok])
#?(:clj [glam.xtdb.span :as s])
#?(:clj [glam.xtdb.span-layer :as sl])
#?(:clj [glam.models.auth :as ma])
#?(:clj [glam.xtdb.easy :as gxe])
[com.fulcrologic.fulcro.algorithms.tempid :as tempid]))
#?(:cljs
(defn get-span-snapshots [fulcro-db doc-id span-layer-id]
(:document/id fulcro-db)
(let [spans (->> (:span/id fulcro-db)
vals
(filter #(not (tempid/tempid? (:span/id %))))
(filter #(= doc-id (mc/get-document fulcro-db %))))
snapshots (map #(select-keys % [:span/id :span/value :span/tokens]) spans)]
snapshots)))
;; user --------------------------------------------------------------------------------
#?(:clj
(pc/defresolver get-span [{:keys [node] :as env} {:span/keys [id]}]
{::pc/input #{:span/id}
::pc/output [:span/id :span/tokens :span/value :span/layer]
::pc/transform (ma/readable-required :span/id)}
(s/get node id)))
#?(:clj
TODO this needs span - snapshots
(pc/defmutation save-span [{:keys [node] :as env} {:span/keys [id value] :as span}]
{::pc/transform (ma/writeable-required :span/id)}
(cond
(nil? (s/get node id))
(server-error (str "Span with id " id " does not exist."))
(not (string? value))
(server-error "Value must be a string.")
:else
(if-let [result (s/merge node id {:span/value value})]
(server-message "Successfully saved span")
(server-error (str "Failed to save span " id))))))
#?(:clj
TODO this needs span - snapshots
(pc/defmutation create-span
[{:keys [node] :as env} {:span/keys [id value layer tokens] :as span}]
{::pc/transform (ma/writeable-required :span-layer/id :span/layer)}
(cond
(not (string? value))
(server-error "Value must be a string.")
(not (sl/get node layer))
(server-error "Span layer does not exist")
(some nil? (map #(tok/get node %) tokens))
(server-error "Not all tokens exist for this span")
:else
(let [{:keys [success] new-id :id} (s/create node {:span/value value
:span/layer layer
:span/tokens (into [] tokens)})]
(if-not success
(server-error "Failed to create span, please try again")
(merge {:tempids {id new-id}} (server-message "Span created")))))))
#?(:clj
(pc/defmutation batched-update
[{:keys [node] :as env} {document-id :document/id
span-layer-id :span-layer/id
span-snapshots :span-snapshots
updates :updates
:as args}]
{::pc/transform (ma/writeable-required :span-layer/id)}
(let [success (s/batched-update node document-id span-layer-id span-snapshots updates)]
(if-not success
(server-error "Failed to update document, please try again")
(server-message "Updates applied")))))
#?(:clj
(pc/defmutation multi-layer-batched-update
[{:keys [node] :as env} {document-id :document/id
batches :batches
:as args}]
{::pc/transform (ma/writeable-required :document/id)}
(let [success (s/multi-batched-update node document-id batches)]
(if-not success
(server-error "Failed to update document, please try again")
(server-message "Updates applied")))))
;; admin --------------------------------------------------------------------------------
#?(:clj
(def span-resolvers [get-span save-span create-span batched-update multi-layer-batched-update])) | null | https://raw.githubusercontent.com/lgessler/glam/871ac6e9343754755c90dc124cdb00a1cf3d8406/src/main/glam/models/span.cljc | clojure | user --------------------------------------------------------------------------------
admin -------------------------------------------------------------------------------- | (ns glam.models.span
(:require [clojure.set :refer [rename-keys]]
[com.wsscode.pathom.connect :as pc]
[taoensso.timbre :as log]
#?(:cljs [glam.models.common :as mc]
:clj [glam.models.common :as mc :refer [server-error server-message]])
#?(:clj [glam.xtdb.token :as tok])
#?(:clj [glam.xtdb.span :as s])
#?(:clj [glam.xtdb.span-layer :as sl])
#?(:clj [glam.models.auth :as ma])
#?(:clj [glam.xtdb.easy :as gxe])
[com.fulcrologic.fulcro.algorithms.tempid :as tempid]))
#?(:cljs
(defn get-span-snapshots [fulcro-db doc-id span-layer-id]
(:document/id fulcro-db)
(let [spans (->> (:span/id fulcro-db)
vals
(filter #(not (tempid/tempid? (:span/id %))))
(filter #(= doc-id (mc/get-document fulcro-db %))))
snapshots (map #(select-keys % [:span/id :span/value :span/tokens]) spans)]
snapshots)))
#?(:clj
(pc/defresolver get-span [{:keys [node] :as env} {:span/keys [id]}]
{::pc/input #{:span/id}
::pc/output [:span/id :span/tokens :span/value :span/layer]
::pc/transform (ma/readable-required :span/id)}
(s/get node id)))
#?(:clj
TODO this needs span - snapshots
(pc/defmutation save-span [{:keys [node] :as env} {:span/keys [id value] :as span}]
{::pc/transform (ma/writeable-required :span/id)}
(cond
(nil? (s/get node id))
(server-error (str "Span with id " id " does not exist."))
(not (string? value))
(server-error "Value must be a string.")
:else
(if-let [result (s/merge node id {:span/value value})]
(server-message "Successfully saved span")
(server-error (str "Failed to save span " id))))))
#?(:clj
TODO this needs span - snapshots
(pc/defmutation create-span
[{:keys [node] :as env} {:span/keys [id value layer tokens] :as span}]
{::pc/transform (ma/writeable-required :span-layer/id :span/layer)}
(cond
(not (string? value))
(server-error "Value must be a string.")
(not (sl/get node layer))
(server-error "Span layer does not exist")
(some nil? (map #(tok/get node %) tokens))
(server-error "Not all tokens exist for this span")
:else
(let [{:keys [success] new-id :id} (s/create node {:span/value value
:span/layer layer
:span/tokens (into [] tokens)})]
(if-not success
(server-error "Failed to create span, please try again")
(merge {:tempids {id new-id}} (server-message "Span created")))))))
#?(:clj
(pc/defmutation batched-update
[{:keys [node] :as env} {document-id :document/id
span-layer-id :span-layer/id
span-snapshots :span-snapshots
updates :updates
:as args}]
{::pc/transform (ma/writeable-required :span-layer/id)}
(let [success (s/batched-update node document-id span-layer-id span-snapshots updates)]
(if-not success
(server-error "Failed to update document, please try again")
(server-message "Updates applied")))))
#?(:clj
(pc/defmutation multi-layer-batched-update
[{:keys [node] :as env} {document-id :document/id
batches :batches
:as args}]
{::pc/transform (ma/writeable-required :document/id)}
(let [success (s/multi-batched-update node document-id batches)]
(if-not success
(server-error "Failed to update document, please try again")
(server-message "Updates applied")))))
#?(:clj
(def span-resolvers [get-span save-span create-span batched-update multi-layer-batched-update])) |
5c4952ab8e9175b3f620e4a988a5945473303140c6b637f880d2b2728c94078d | abyala/advent-2022-clojure | day23_test.clj | (ns advent-2022-clojure.day23-test
(:require [clojure.test :refer :all]
[advent-2022-clojure.day23 :refer :all]))
(def test-simple-data (slurp "resources/day23-test-simple.txt"))
(def test-complex-data (slurp "resources/day23-test-complex.txt"))
(def puzzle-data (slurp "resources/day23-puzzle.txt"))
(deftest elves-seq-test
(are [input expected] (every? true? (map = expected (elves-seq (parse-elves input))))
test-simple-data
[#{[2 1] [3 1] [2 2] [2 4] [3 4]}
#{[2 0] [3 0] [2 2] [3 3] [2 4]}
#{[2 1] [3 1] [1 2] [4 3] [2 5]}
#{[2 0] [4 1] [0 2] [4 3] [2 5]}]
test-complex-data
[#{[7 2] [5 3] [6 3] [7 3] [9 3] [3 4] [7 4] [9 4]
[4 5] [8 5] [9 5] [3 6] [5 6] [6 6] [7 6]
[3 7] [4 7] [6 7] [8 7] [9 7] [4 8] [7 8]}
#{[7 1] [5 2] [9 2] [3 3] [6 3] [8 3] [7 4] [10 4]
[4 5] [6 5] [8 5] [9 5] [2 6] [5 6] [7 6] [2 7] [4 7] [6 7] [8 7] [9 7]
[4 9] [7 9]}
#{[7 1] [4 2] [10 2] [3 3] [6 3] [8 3] [7 4] [11 4]
[3 5] [6 5] [8 5] [1 6] [5 6] [7 6] [9 6]
[2 8] [4 8] [6 8] [8 8] [9 8] [4 9] [7 9]}
#{[7 1] [5 2] [10 2] [2 3] [5 3] [9 3] [7 4] [11 4]
[3 5] [6 5] [8 5] [1 6] [4 6] [10 6] [7 7] [8 7]
[2 8] [3 8] [5 8] [10 8] [3 9] [7 10]}
#{[7 1] [6 2] [11 2] [2 3] [6 3] [7 3] [3 4] [9 4] [11 4]
[9 5] [1 6] [5 6] [6 6] [7 6] [10 6] [2 7] [9 7]
[4 8] [5 8] [10 8] [4 9] [7 10]}
#{[7 0] [2 2] [5 2] [11 2] [9 3] [6 4] [7 4] [11 4]
[1 5] [3 5] [5 5] [6 5] [7 5] [8 5] [11 6]
[4 7] [5 7] [8 7] [2 8] [10 9] [4 10] [7 10]}])
(is (= #{[7 0] [11 1] [2 2] [4 2] [7 2] [6 3] [3 4] [9 4] [12 4] [1 5] [8 5] [9 5]
[5 6] [6 6] [2 7] [11 7] [4 8] [6 8] [9 8] [4 10] [7 10] [10 10]}
(nth (elves-seq (parse-elves test-complex-data)) 10))))
(deftest part1-test
(are [expected input] (= expected (part1 input))
110 test-complex-data
3923 puzzle-data))
(deftest part2-test
(are [expected input] (= expected (part2 input))
20 test-complex-data
1019 puzzle-data)) | null | https://raw.githubusercontent.com/abyala/advent-2022-clojure/4dba868c2019efe8da74c82de7d82883f44be659/test/advent_2022_clojure/day23_test.clj | clojure | (ns advent-2022-clojure.day23-test
(:require [clojure.test :refer :all]
[advent-2022-clojure.day23 :refer :all]))
(def test-simple-data (slurp "resources/day23-test-simple.txt"))
(def test-complex-data (slurp "resources/day23-test-complex.txt"))
(def puzzle-data (slurp "resources/day23-puzzle.txt"))
(deftest elves-seq-test
(are [input expected] (every? true? (map = expected (elves-seq (parse-elves input))))
test-simple-data
[#{[2 1] [3 1] [2 2] [2 4] [3 4]}
#{[2 0] [3 0] [2 2] [3 3] [2 4]}
#{[2 1] [3 1] [1 2] [4 3] [2 5]}
#{[2 0] [4 1] [0 2] [4 3] [2 5]}]
test-complex-data
[#{[7 2] [5 3] [6 3] [7 3] [9 3] [3 4] [7 4] [9 4]
[4 5] [8 5] [9 5] [3 6] [5 6] [6 6] [7 6]
[3 7] [4 7] [6 7] [8 7] [9 7] [4 8] [7 8]}
#{[7 1] [5 2] [9 2] [3 3] [6 3] [8 3] [7 4] [10 4]
[4 5] [6 5] [8 5] [9 5] [2 6] [5 6] [7 6] [2 7] [4 7] [6 7] [8 7] [9 7]
[4 9] [7 9]}
#{[7 1] [4 2] [10 2] [3 3] [6 3] [8 3] [7 4] [11 4]
[3 5] [6 5] [8 5] [1 6] [5 6] [7 6] [9 6]
[2 8] [4 8] [6 8] [8 8] [9 8] [4 9] [7 9]}
#{[7 1] [5 2] [10 2] [2 3] [5 3] [9 3] [7 4] [11 4]
[3 5] [6 5] [8 5] [1 6] [4 6] [10 6] [7 7] [8 7]
[2 8] [3 8] [5 8] [10 8] [3 9] [7 10]}
#{[7 1] [6 2] [11 2] [2 3] [6 3] [7 3] [3 4] [9 4] [11 4]
[9 5] [1 6] [5 6] [6 6] [7 6] [10 6] [2 7] [9 7]
[4 8] [5 8] [10 8] [4 9] [7 10]}
#{[7 0] [2 2] [5 2] [11 2] [9 3] [6 4] [7 4] [11 4]
[1 5] [3 5] [5 5] [6 5] [7 5] [8 5] [11 6]
[4 7] [5 7] [8 7] [2 8] [10 9] [4 10] [7 10]}])
(is (= #{[7 0] [11 1] [2 2] [4 2] [7 2] [6 3] [3 4] [9 4] [12 4] [1 5] [8 5] [9 5]
[5 6] [6 6] [2 7] [11 7] [4 8] [6 8] [9 8] [4 10] [7 10] [10 10]}
(nth (elves-seq (parse-elves test-complex-data)) 10))))
(deftest part1-test
(are [expected input] (= expected (part1 input))
110 test-complex-data
3923 puzzle-data))
(deftest part2-test
(are [expected input] (= expected (part2 input))
20 test-complex-data
1019 puzzle-data)) | |
2d4d5c0505e8349b1b1d85c39e45080965a8e52f3f1ea4542b8c2e8fd765016e | simonmar/parconc-examples | logger.hs | import Control.Concurrent
import Control.Monad
-- -----------------------------------------------------------------------------
-- <<Logger
data Logger = Logger (MVar LogCommand)
data LogCommand = Message String | Stop (MVar ())
-- >>
< < initLogger
initLogger :: IO Logger
initLogger = do
m <- newEmptyMVar
let l = Logger m
forkIO (logger l)
return l
-- >>
-- <<logger
logger :: Logger -> IO ()
logger (Logger m) = loop
where
loop = do
cmd <- takeMVar m
case cmd of
Message msg -> do
putStrLn msg
loop
Stop s -> do
putStrLn "logger: stop"
putMVar s ()
-- >>
-- <<logMessage
logMessage :: Logger -> String -> IO ()
logMessage (Logger m) s = putMVar m (Message s)
-- >>
-- <<logStop
logStop :: Logger -> IO ()
logStop (Logger m) = do
s <- newEmptyMVar
putMVar m (Stop s)
takeMVar s
-- >>
-- <<main
main :: IO ()
main = do
l <- initLogger
logMessage l "hello"
logMessage l "bye"
logStop l
-- >>
| null | https://raw.githubusercontent.com/simonmar/parconc-examples/840a3f508f9bb6e03961e1b90311a1edd945adba/logger.hs | haskell | -----------------------------------------------------------------------------
<<Logger
>>
>>
<<logger
>>
<<logMessage
>>
<<logStop
>>
<<main
>> | import Control.Concurrent
import Control.Monad
data Logger = Logger (MVar LogCommand)
data LogCommand = Message String | Stop (MVar ())
< < initLogger
initLogger :: IO Logger
initLogger = do
m <- newEmptyMVar
let l = Logger m
forkIO (logger l)
return l
logger :: Logger -> IO ()
logger (Logger m) = loop
where
loop = do
cmd <- takeMVar m
case cmd of
Message msg -> do
putStrLn msg
loop
Stop s -> do
putStrLn "logger: stop"
putMVar s ()
logMessage :: Logger -> String -> IO ()
logMessage (Logger m) s = putMVar m (Message s)
logStop :: Logger -> IO ()
logStop (Logger m) = do
s <- newEmptyMVar
putMVar m (Stop s)
takeMVar s
main :: IO ()
main = do
l <- initLogger
logMessage l "hello"
logMessage l "bye"
logStop l
|
f08ac67c08549119f8a57fee0cb65343fd49507d672bf5ab47d3cb2c0ed204ad | thheller/js-framework-shadow-grove | util.cljs | (ns bench.util)
(def adjectives ["pretty", "large", "big", "small", "tall", "short", "long", "handsome", "plain", "quaint", "clean", "elegant", "easy", "angry", "crazy", "helpful", "mushy", "odd", "unsightly", "adorable", "important", "inexpensive", "cheap", "expensive", "fancy"])
(def colours ["red", "yellow", "blue", "green", "pink", "brown", "purple", "brown", "white", "black", "orange"])
(def nouns ["table", "chair", "house", "bbq", "desk", "car", "pony", "cookie", "sandwich", "burger", "pizza", "mouse", "keyboard"])
(defn make-label []
(str (rand-nth adjectives) " " (rand-nth colours) " " (rand-nth nouns)))
(defn swap-rows [data]
(if (> (count data) 998)
(-> data
(assoc 1 (get data 998))
(assoc 998 (get data 1)))
data)) | null | https://raw.githubusercontent.com/thheller/js-framework-shadow-grove/8d1cf1589d0c9f17aeae79784cb46a26239657f9/src/main/bench/util.cljs | clojure | (ns bench.util)
(def adjectives ["pretty", "large", "big", "small", "tall", "short", "long", "handsome", "plain", "quaint", "clean", "elegant", "easy", "angry", "crazy", "helpful", "mushy", "odd", "unsightly", "adorable", "important", "inexpensive", "cheap", "expensive", "fancy"])
(def colours ["red", "yellow", "blue", "green", "pink", "brown", "purple", "brown", "white", "black", "orange"])
(def nouns ["table", "chair", "house", "bbq", "desk", "car", "pony", "cookie", "sandwich", "burger", "pizza", "mouse", "keyboard"])
(defn make-label []
(str (rand-nth adjectives) " " (rand-nth colours) " " (rand-nth nouns)))
(defn swap-rows [data]
(if (> (count data) 998)
(-> data
(assoc 1 (get data 998))
(assoc 998 (get data 1)))
data)) | |
2193fc74fedd00deaef7082a4f7e1ebf3143212c1dfa876369b7dc83027762cb | rfindler/lindenmayer | lex.rkt | #lang 2d racket/base
(provide lindenmayer-lexer)
(require racket/match racket/list)
(module+ test (require rackunit))
(struct errstate (mode data) #:transparent)
(define errlabel 'errlabel)
(define errnobrk 'errnobrk)
(define errresum 'errresum)
(define errnewln 'errnewln)
(define (sec-next state data)
(define transit '(("axiom" . axiom-new) ("rules" . rules-lhs) ("variables" . vars-lhs)))
(cond
[(and (>= (length data) 3)
(equal? (list-ref data 0) (list-ref data 2))
(assoc (bytes->string/utf-8 (list-ref data 1)) transit))
=> cdr]
[else (values state 'error)]))
(struct rule (match output to-state reset) #:transparent)
(define (make-lexer-table 2d)
(define cells (drop 2d 3))
(define cell-table (make-hash))
(define rule-count
(for/fold ([h-max 0])
([cells cells])
(for ([cell (first cells)])
(hash-set! cell-table cell (cdr cells)))
(apply max h-max (map second (car cells)))))
(define rule-table (make-hash))
(for ([i (in-range rule-count)])
(for ([from-state (hash-ref cell-table (list 0 i))])
(hash-set! rule-table from-state '())))
(define (pick j i)
(define eles (hash-ref cell-table (list j i)))
(unless (pair? eles)
(error 'make-lexer-table "expected something in cell (~a,~a) but found nothing"
i j))
(car eles))
(for ([i+1 (in-range rule-count 0 -1)])
(define i (- i+1 1))
(for ([from-state (hash-ref cell-table (list 0 i))])
(hash-set! rule-table from-state
(cons
(rule (pick 1 i)
(pick 2 i)
(pick 3 i)
(pick 4 i))
(hash-ref rule-table from-state)))))
rule-table)
(define (state-reset state)
(define reset-state (map rule-reset (hash-ref lexer-fsm state)))
(first (filter (λ (x) x) reset-state)))
(define sec-regexp
#rx"^(#+)[ \t]*([a-zA-Z]+)[ \t]*(#+)[ \t]*($|\n)")
FSM transition table of the lexer . The state of the FSM is stored in the
;; mode. The error state is handled specially; it must be able to make a
;; transition for arbitrary input string.
;;
;; The current error recovery strategy tries to re-lex with the previous state
;; after every spaces (and falls back to the 'rule-reset' state of the original state
;; when hitting a new line). Thus, when designing states, it's better not to
;; have a token that spans across spaces.
(define lexer-fsm
(make-lexer-table
;; state transition regular expression output symbol next state error recovery
`#2d
╔═══════════╦════════════════════════════════════╦═════════════╦═════════════╦═══════════╗
║ ,errlabel ║ #rx"^[^ \t\n]+" ║ error ║ ,errlabel ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ ,errlabel ║ #rx"^[ \t]+" ║ ║ ,errresum ║ ║
╠═══════════╬════════════════════════════════════╣ white-space ╠═════════════╣ ║
║ ,errlabel ║ #rx"^\n[ \t]*" ║ ║ ,errnewln ║ #f ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ ,errnobrk ║ #rx"^[^\n]+" ║ error ║ ,errlabel ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ ,errnobrk ║ #px"^\n\\s*" ║ white-space ║ ,errnewln ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╬═══════════╣
║ any-new ║ ║ ║ ║ ║
║ axiom-new ║ ,sec-regexp ║ comment ║ ,sec-next ║ #f ║
║ rules-lhs ║ ║ ║ ║ ║
║ vars-lhs ║ ║ ║ ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ any-new ║ ║ ║ any-new ║ ║
╠═══════════╣ ║ ╠═════════════╣ ║
║ axiom-new ║ ║ ║ axiom-new ║ ║
╠═══════════╣ #px"^\\s+" ║ white-space ╠═════════════╣ ║
║ rules-lhs ║ ║ ║ rules-lhs ║ ║
╠═══════════╣ ║ ╠═════════════╣ ║
║ vars-lhs ║ ║ ║ vars-lhs ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╬═══════════╣
║ any-new ║ #rx"^#lang[^\n]*(\n|$)" ║ other ║ any-new ║ any-new ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╬═══════════╣
║ axiom-new ║ #px"^[^\\s#]+" ║ symbol ║ axiom-nta ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ axiom-nta ║ #rx"^[ \t]+" ║ white-space ║ ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╣ axiom-nta ║ ║
║ axiom-nta ║ #px"^[^\\s#]+" ║ symbol ║ ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ axiom-nta ║ #px"^\n\\s*" ║ white-space ║ axiom-new ║ axiom-new ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ axiom-axm ║ #rx"^[ \t]+" ║ white-space ║ axiom-axm ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ axiom-axm ║ #px"^[^\\s#]+" ║ symbol ║ axiom-nta ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ axiom-axm ║ #px"^\n\\s*" ║ white-space ║ axiom-new ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╬═══════════╣
║ rules-lhs ║ #rx"^((?!->|→)[^ \t\n])" ║ symbol ║ rules-ntl ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ rules-ntl ║ #rx"^[ \t]+" ║ white-space ║ rules-ntl ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ rules-ntl ║ #rx"^(->|→)" ║ parenthesis ║ rules-rhs ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ rules-rhs ║ #rx"^[ \t]+" ║ white-space ║ rules-rhs ║ rules-lhs ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ rules-rhs ║ #rx"^((?!->|→)[^ \t\n])+" ║ symbol ║ rules-rhs ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ rules-rhs ║ #rx"^\n[ \t]" ║ ║ rules-rhs ║ ║
╠═══════════╬════════════════════════════════════╣ ╠═════════════╣ ║
║ rules-rhs ║ #rx"^\n(?=[^ \t])" ║ white-space ║ rules-lhs ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╬═══════════╣
║ vars-lhs ║ #rx"^[^ \t\n=#]+" ║ symbol ║ vars-equ ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ vars-equ ║ #rx"^[ \t]+" ║ white-space ║ vars-equ ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ vars-equ ║ #rx"^=" ║ parenthesis ║ vars-rhs ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ vars-lhs ║
║ vars-rhs ║ #rx"^[ \t]+" ║ white-space ║ ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╣ vars-rhs ║ ║
║ vars-rhs ║ #rx"^[^ \t\n=#]+" ║ constant ║ ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ vars-rhs ║ #rx"^\n[ \t]*" ║ white-space ║ vars-lhs ║ ║
╚═══════════╩════════════════════════════════════╩═════════════╩═════════════╩═══════════╝))
(define (error-can-resume? port state)
(or (not (member state '(any-new axiom-new rules-lhs vars-lhs)))
(not (regexp-match-peek #rx"^(->|#)" port))))
(define (lindenmayer-lexer port offset mode)
(define-values (line col pos) (port-next-location port))
;; (or/c bytes syntax) natural mode -> result for the lexer
(define (make-token-values token type mode paren)
(define-values (line2 col2 pos2) (port-next-location port))
(values token type paren pos pos2 0 mode))
(define state (or mode 'any-new))
( printf " lexer : ~a ~s\n " state ( peek - string 20 0 port ) )
(cond
[(eof-object? (peek-char port))
(values (read-char port) 'eof #f 0 0 0 state)]
[(errstate? state)
(define-values (lexeme type data new-token-start new-token-end backup-delta new-mode)
(lindenmayer-lexer port offset
(if (errstate-data state)
errlabel
errnobrk)))
( printf " errstate : was : ~a , matched : ~s ; new mode : ~a\n " state lexeme new - mode )
(define old-state (errstate-mode state))
(define wrapped-mode
(cond [(equal? new-mode errlabel) state]
[(equal? new-mode errresum) old-state]
[(equal? new-mode errnewln) (state-reset old-state)]
[else (raise-result-error 'lindenmayer-lexer
"(or/c 'errlabel 'errresum 'errnewln)"
new-mode)]))
(values lexeme type data new-token-start new-token-end backup-delta wrapped-mode)]
[(for/or ([rule (hash-ref lexer-fsm state)])
(define match-result
(regexp-match-peek (rule-match rule) port))
#;(printf "lexer: ~s => ~s / ~s\n"
(rule-match rule) match-result (peek-string 20 0 port))
(and match-result (cons rule match-result)))
=>
(match-lambda
[(list rule matched-str substrs ...)
(read-bytes (bytes-length matched-str) port)
(define to-state (rule-to-state rule))
#;(printf "lexer: matched ~s; to-state: ~a\n" matched-str to-state)
(define-values (new-output new-state new-paren)
(cond
[(procedure? to-state)
(call-with-values
(λ () (to-state state substrs))
(case-lambda
[(new-state) (values (rule-output rule) new-state #f)]
[(new-state new-output) (values new-output new-state #f)]
[(new-state new-output new-info) (values new-output new-state new-info)]))]
[else (values (rule-output rule) to-state #f)]))
(make-token-values matched-str new-output new-state new-paren)])]
[else (lindenmayer-lexer port offset (errstate state (error-can-resume? port state)))]))
(module+ test
(require racket/port)
(define (test-lexer mode0 input)
(define port (open-input-string input))
(define (run* limit mode)
(define-values (lexeme type data new-token-start new-token-end backup-delta new-mode)
(lindenmayer-lexer port 0 mode))
(cond
[(or (eof-object? lexeme) (equal? limit 1)) (list mode)]
[else (cons (list mode type (bytes->string/utf-8 lexeme))
(run* (if (number? limit) (sub1 limit) #f) new-mode))]))
(run* #f mode0))
(check-equal? (test-lexer 'any-new "# axiom #")
`((any-new comment "# axiom #") axiom-new))
(check-equal? (test-lexer 'axiom-new "## variables ##\n")
`((axiom-new comment "## variables ##\n") vars-lhs))
(check-equal? (test-lexer 'rules-lhs "# rules #")
`((rules-lhs comment "# rules #") rules-lhs))
(check-equal? (test-lexer 'vars-lhs "# axiom #\t")
`((vars-lhs comment "# axiom #\t") axiom-new))
(check-equal?
(test-lexer 'any-new "#lang lindenmayer racket\n \t \n")
`((any-new other "#lang lindenmayer racket\n")
(any-new white-space " \t \n")
any-new))
(check-equal?
(test-lexer 'axiom-new " F X \t\n")
`((axiom-new white-space " ")
(axiom-new symbol "F")
(axiom-nta white-space " ")
(axiom-nta symbol "X")
(axiom-nta white-space " \t")
(axiom-nta white-space "\n")
axiom-new))
(check-equal?
(test-lexer 'rules-lhs " A ->AB\n")
`((rules-lhs white-space " ")
(rules-lhs symbol "A")
(rules-ntl white-space " ")
(rules-ntl parenthesis "->")
(rules-rhs symbol "AB")
(rules-rhs white-space "\n")
rules-lhs))
(check-equal?
(test-lexer 'rules-lhs "X→Y\n ")
`((rules-lhs symbol "X")
(rules-ntl parenthesis "→")
(rules-rhs symbol "Y")
(rules-rhs white-space "\n ")
rules-rhs))
(check-equal?
(test-lexer 'rules-lhs "X→Y\n Z W\n")
`((rules-lhs symbol "X")
(rules-ntl parenthesis "→")
(rules-rhs symbol "Y")
(rules-rhs white-space "\n ")
(rules-rhs symbol "Z")
(rules-rhs white-space " ")
(rules-rhs symbol "W")
(rules-rhs white-space "\n")
rules-lhs))
(check-equal?
(test-lexer 'vars-lhs " \t n = 8\t\n ")
`((vars-lhs white-space " \t ")
(vars-lhs symbol "n")
(vars-equ white-space " ")
(vars-equ parenthesis "=")
(vars-rhs white-space " ")
(vars-rhs constant "8")
(vars-rhs white-space "\t")
(vars-rhs white-space "\n ")
vars-lhs))
(check-equal?
(test-lexer
#f
(string-append
"## axiom ##\n"
"\n"
"A\n"
"\n"
"## rules ##\n"
"\n"
"A -> AB\n"
"B -> A\n"))
'((#f comment "## axiom ##\n")
(axiom-new white-space "\n")
(axiom-new symbol "A")
(axiom-nta white-space "\n\n")
(axiom-new comment "## rules ##\n")
(rules-lhs white-space "\n")
(rules-lhs symbol "A")
(rules-ntl white-space " ")
(rules-ntl parenthesis "->")
(rules-rhs white-space " ")
(rules-rhs symbol "AB")
(rules-rhs white-space "\n")
(rules-lhs symbol "B")
(rules-ntl white-space " ")
(rules-ntl parenthesis "->")
(rules-rhs white-space " ")
(rules-rhs symbol "A")
(rules-rhs white-space "\n")
rules-lhs))
(check-equal?
(test-lexer
#f
(string-append
"## axiom ##\n"
"\n"
"A\n"
"\n"
"## rules ##\n"
"\n"
"A -> AB\n"
"\n"
"B -> A\n"))
'((#f comment "## axiom ##\n")
(axiom-new white-space "\n")
(axiom-new symbol "A")
(axiom-nta white-space "\n\n")
(axiom-new comment "## rules ##\n")
(rules-lhs white-space "\n")
(rules-lhs symbol "A")
(rules-ntl white-space " ")
(rules-ntl parenthesis "->")
(rules-rhs white-space " ")
(rules-rhs symbol "AB")
(rules-rhs white-space "\n")
(rules-lhs white-space "\n")
(rules-lhs symbol "B")
(rules-ntl white-space " ")
(rules-ntl parenthesis "->")
(rules-rhs white-space " ")
(rules-rhs symbol "A")
(rules-rhs white-space "\n")
rules-lhs))
(check-equal?
(test-lexer
#f
(string-append
"## axiom ##\n"
"\n"
"A\n"
"\n"
"## rules ##\n"
"\n"
"A 1 -> AB\n"))
`((#f comment "## axiom ##\n")
(axiom-new white-space "\n")
(axiom-new symbol "A")
(axiom-nta white-space "\n\n")
(axiom-new comment "## rules ##\n")
(rules-lhs white-space "\n")
(rules-lhs symbol "A")
(rules-ntl white-space " ")
(rules-ntl error "1")
(,(errstate 'rules-ntl #t) white-space " ")
(rules-ntl parenthesis "->")
(rules-rhs white-space " ")
(rules-rhs symbol "AB")
(rules-rhs white-space "\n")
rules-lhs))
)
| null | https://raw.githubusercontent.com/rfindler/lindenmayer/2ef7b4535d8ae1eb7cc2e16e2b630c30a4b9a34d/simple/lex.rkt | racket | mode. The error state is handled specially; it must be able to make a
transition for arbitrary input string.
The current error recovery strategy tries to re-lex with the previous state
after every spaces (and falls back to the 'rule-reset' state of the original state
when hitting a new line). Thus, when designing states, it's better not to
have a token that spans across spaces.
state transition regular expression output symbol next state error recovery
(or/c bytes syntax) natural mode -> result for the lexer
(printf "lexer: ~s => ~s / ~s\n"
(printf "lexer: matched ~s; to-state: ~a\n" matched-str to-state) | #lang 2d racket/base
(provide lindenmayer-lexer)
(require racket/match racket/list)
(module+ test (require rackunit))
(struct errstate (mode data) #:transparent)
(define errlabel 'errlabel)
(define errnobrk 'errnobrk)
(define errresum 'errresum)
(define errnewln 'errnewln)
(define (sec-next state data)
(define transit '(("axiom" . axiom-new) ("rules" . rules-lhs) ("variables" . vars-lhs)))
(cond
[(and (>= (length data) 3)
(equal? (list-ref data 0) (list-ref data 2))
(assoc (bytes->string/utf-8 (list-ref data 1)) transit))
=> cdr]
[else (values state 'error)]))
(struct rule (match output to-state reset) #:transparent)
(define (make-lexer-table 2d)
(define cells (drop 2d 3))
(define cell-table (make-hash))
(define rule-count
(for/fold ([h-max 0])
([cells cells])
(for ([cell (first cells)])
(hash-set! cell-table cell (cdr cells)))
(apply max h-max (map second (car cells)))))
(define rule-table (make-hash))
(for ([i (in-range rule-count)])
(for ([from-state (hash-ref cell-table (list 0 i))])
(hash-set! rule-table from-state '())))
(define (pick j i)
(define eles (hash-ref cell-table (list j i)))
(unless (pair? eles)
(error 'make-lexer-table "expected something in cell (~a,~a) but found nothing"
i j))
(car eles))
(for ([i+1 (in-range rule-count 0 -1)])
(define i (- i+1 1))
(for ([from-state (hash-ref cell-table (list 0 i))])
(hash-set! rule-table from-state
(cons
(rule (pick 1 i)
(pick 2 i)
(pick 3 i)
(pick 4 i))
(hash-ref rule-table from-state)))))
rule-table)
(define (state-reset state)
(define reset-state (map rule-reset (hash-ref lexer-fsm state)))
(first (filter (λ (x) x) reset-state)))
(define sec-regexp
#rx"^(#+)[ \t]*([a-zA-Z]+)[ \t]*(#+)[ \t]*($|\n)")
FSM transition table of the lexer . The state of the FSM is stored in the
(define lexer-fsm
(make-lexer-table
`#2d
╔═══════════╦════════════════════════════════════╦═════════════╦═════════════╦═══════════╗
║ ,errlabel ║ #rx"^[^ \t\n]+" ║ error ║ ,errlabel ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ ,errlabel ║ #rx"^[ \t]+" ║ ║ ,errresum ║ ║
╠═══════════╬════════════════════════════════════╣ white-space ╠═════════════╣ ║
║ ,errlabel ║ #rx"^\n[ \t]*" ║ ║ ,errnewln ║ #f ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ ,errnobrk ║ #rx"^[^\n]+" ║ error ║ ,errlabel ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ ,errnobrk ║ #px"^\n\\s*" ║ white-space ║ ,errnewln ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╬═══════════╣
║ any-new ║ ║ ║ ║ ║
║ axiom-new ║ ,sec-regexp ║ comment ║ ,sec-next ║ #f ║
║ rules-lhs ║ ║ ║ ║ ║
║ vars-lhs ║ ║ ║ ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ any-new ║ ║ ║ any-new ║ ║
╠═══════════╣ ║ ╠═════════════╣ ║
║ axiom-new ║ ║ ║ axiom-new ║ ║
╠═══════════╣ #px"^\\s+" ║ white-space ╠═════════════╣ ║
║ rules-lhs ║ ║ ║ rules-lhs ║ ║
╠═══════════╣ ║ ╠═════════════╣ ║
║ vars-lhs ║ ║ ║ vars-lhs ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╬═══════════╣
║ any-new ║ #rx"^#lang[^\n]*(\n|$)" ║ other ║ any-new ║ any-new ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╬═══════════╣
║ axiom-new ║ #px"^[^\\s#]+" ║ symbol ║ axiom-nta ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ axiom-nta ║ #rx"^[ \t]+" ║ white-space ║ ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╣ axiom-nta ║ ║
║ axiom-nta ║ #px"^[^\\s#]+" ║ symbol ║ ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ axiom-nta ║ #px"^\n\\s*" ║ white-space ║ axiom-new ║ axiom-new ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ axiom-axm ║ #rx"^[ \t]+" ║ white-space ║ axiom-axm ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ axiom-axm ║ #px"^[^\\s#]+" ║ symbol ║ axiom-nta ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ axiom-axm ║ #px"^\n\\s*" ║ white-space ║ axiom-new ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╬═══════════╣
║ rules-lhs ║ #rx"^((?!->|→)[^ \t\n])" ║ symbol ║ rules-ntl ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ rules-ntl ║ #rx"^[ \t]+" ║ white-space ║ rules-ntl ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ rules-ntl ║ #rx"^(->|→)" ║ parenthesis ║ rules-rhs ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ rules-rhs ║ #rx"^[ \t]+" ║ white-space ║ rules-rhs ║ rules-lhs ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ rules-rhs ║ #rx"^((?!->|→)[^ \t\n])+" ║ symbol ║ rules-rhs ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ rules-rhs ║ #rx"^\n[ \t]" ║ ║ rules-rhs ║ ║
╠═══════════╬════════════════════════════════════╣ ╠═════════════╣ ║
║ rules-rhs ║ #rx"^\n(?=[^ \t])" ║ white-space ║ rules-lhs ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╬═══════════╣
║ vars-lhs ║ #rx"^[^ \t\n=#]+" ║ symbol ║ vars-equ ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ vars-equ ║ #rx"^[ \t]+" ║ white-space ║ vars-equ ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ vars-equ ║ #rx"^=" ║ parenthesis ║ vars-rhs ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ vars-lhs ║
║ vars-rhs ║ #rx"^[ \t]+" ║ white-space ║ ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╣ vars-rhs ║ ║
║ vars-rhs ║ #rx"^[^ \t\n=#]+" ║ constant ║ ║ ║
╠═══════════╬════════════════════════════════════╬═════════════╬═════════════╣ ║
║ vars-rhs ║ #rx"^\n[ \t]*" ║ white-space ║ vars-lhs ║ ║
╚═══════════╩════════════════════════════════════╩═════════════╩═════════════╩═══════════╝))
(define (error-can-resume? port state)
(or (not (member state '(any-new axiom-new rules-lhs vars-lhs)))
(not (regexp-match-peek #rx"^(->|#)" port))))
(define (lindenmayer-lexer port offset mode)
(define-values (line col pos) (port-next-location port))
(define (make-token-values token type mode paren)
(define-values (line2 col2 pos2) (port-next-location port))
(values token type paren pos pos2 0 mode))
(define state (or mode 'any-new))
( printf " lexer : ~a ~s\n " state ( peek - string 20 0 port ) )
(cond
[(eof-object? (peek-char port))
(values (read-char port) 'eof #f 0 0 0 state)]
[(errstate? state)
(define-values (lexeme type data new-token-start new-token-end backup-delta new-mode)
(lindenmayer-lexer port offset
(if (errstate-data state)
errlabel
errnobrk)))
( printf " errstate : was : ~a , matched : ~s ; new mode : ~a\n " state lexeme new - mode )
(define old-state (errstate-mode state))
(define wrapped-mode
(cond [(equal? new-mode errlabel) state]
[(equal? new-mode errresum) old-state]
[(equal? new-mode errnewln) (state-reset old-state)]
[else (raise-result-error 'lindenmayer-lexer
"(or/c 'errlabel 'errresum 'errnewln)"
new-mode)]))
(values lexeme type data new-token-start new-token-end backup-delta wrapped-mode)]
[(for/or ([rule (hash-ref lexer-fsm state)])
(define match-result
(regexp-match-peek (rule-match rule) port))
(rule-match rule) match-result (peek-string 20 0 port))
(and match-result (cons rule match-result)))
=>
(match-lambda
[(list rule matched-str substrs ...)
(read-bytes (bytes-length matched-str) port)
(define to-state (rule-to-state rule))
(define-values (new-output new-state new-paren)
(cond
[(procedure? to-state)
(call-with-values
(λ () (to-state state substrs))
(case-lambda
[(new-state) (values (rule-output rule) new-state #f)]
[(new-state new-output) (values new-output new-state #f)]
[(new-state new-output new-info) (values new-output new-state new-info)]))]
[else (values (rule-output rule) to-state #f)]))
(make-token-values matched-str new-output new-state new-paren)])]
[else (lindenmayer-lexer port offset (errstate state (error-can-resume? port state)))]))
(module+ test
(require racket/port)
(define (test-lexer mode0 input)
(define port (open-input-string input))
(define (run* limit mode)
(define-values (lexeme type data new-token-start new-token-end backup-delta new-mode)
(lindenmayer-lexer port 0 mode))
(cond
[(or (eof-object? lexeme) (equal? limit 1)) (list mode)]
[else (cons (list mode type (bytes->string/utf-8 lexeme))
(run* (if (number? limit) (sub1 limit) #f) new-mode))]))
(run* #f mode0))
(check-equal? (test-lexer 'any-new "# axiom #")
`((any-new comment "# axiom #") axiom-new))
(check-equal? (test-lexer 'axiom-new "## variables ##\n")
`((axiom-new comment "## variables ##\n") vars-lhs))
(check-equal? (test-lexer 'rules-lhs "# rules #")
`((rules-lhs comment "# rules #") rules-lhs))
(check-equal? (test-lexer 'vars-lhs "# axiom #\t")
`((vars-lhs comment "# axiom #\t") axiom-new))
(check-equal?
(test-lexer 'any-new "#lang lindenmayer racket\n \t \n")
`((any-new other "#lang lindenmayer racket\n")
(any-new white-space " \t \n")
any-new))
(check-equal?
(test-lexer 'axiom-new " F X \t\n")
`((axiom-new white-space " ")
(axiom-new symbol "F")
(axiom-nta white-space " ")
(axiom-nta symbol "X")
(axiom-nta white-space " \t")
(axiom-nta white-space "\n")
axiom-new))
(check-equal?
(test-lexer 'rules-lhs " A ->AB\n")
`((rules-lhs white-space " ")
(rules-lhs symbol "A")
(rules-ntl white-space " ")
(rules-ntl parenthesis "->")
(rules-rhs symbol "AB")
(rules-rhs white-space "\n")
rules-lhs))
(check-equal?
(test-lexer 'rules-lhs "X→Y\n ")
`((rules-lhs symbol "X")
(rules-ntl parenthesis "→")
(rules-rhs symbol "Y")
(rules-rhs white-space "\n ")
rules-rhs))
(check-equal?
(test-lexer 'rules-lhs "X→Y\n Z W\n")
`((rules-lhs symbol "X")
(rules-ntl parenthesis "→")
(rules-rhs symbol "Y")
(rules-rhs white-space "\n ")
(rules-rhs symbol "Z")
(rules-rhs white-space " ")
(rules-rhs symbol "W")
(rules-rhs white-space "\n")
rules-lhs))
(check-equal?
(test-lexer 'vars-lhs " \t n = 8\t\n ")
`((vars-lhs white-space " \t ")
(vars-lhs symbol "n")
(vars-equ white-space " ")
(vars-equ parenthesis "=")
(vars-rhs white-space " ")
(vars-rhs constant "8")
(vars-rhs white-space "\t")
(vars-rhs white-space "\n ")
vars-lhs))
(check-equal?
(test-lexer
#f
(string-append
"## axiom ##\n"
"\n"
"A\n"
"\n"
"## rules ##\n"
"\n"
"A -> AB\n"
"B -> A\n"))
'((#f comment "## axiom ##\n")
(axiom-new white-space "\n")
(axiom-new symbol "A")
(axiom-nta white-space "\n\n")
(axiom-new comment "## rules ##\n")
(rules-lhs white-space "\n")
(rules-lhs symbol "A")
(rules-ntl white-space " ")
(rules-ntl parenthesis "->")
(rules-rhs white-space " ")
(rules-rhs symbol "AB")
(rules-rhs white-space "\n")
(rules-lhs symbol "B")
(rules-ntl white-space " ")
(rules-ntl parenthesis "->")
(rules-rhs white-space " ")
(rules-rhs symbol "A")
(rules-rhs white-space "\n")
rules-lhs))
(check-equal?
(test-lexer
#f
(string-append
"## axiom ##\n"
"\n"
"A\n"
"\n"
"## rules ##\n"
"\n"
"A -> AB\n"
"\n"
"B -> A\n"))
'((#f comment "## axiom ##\n")
(axiom-new white-space "\n")
(axiom-new symbol "A")
(axiom-nta white-space "\n\n")
(axiom-new comment "## rules ##\n")
(rules-lhs white-space "\n")
(rules-lhs symbol "A")
(rules-ntl white-space " ")
(rules-ntl parenthesis "->")
(rules-rhs white-space " ")
(rules-rhs symbol "AB")
(rules-rhs white-space "\n")
(rules-lhs white-space "\n")
(rules-lhs symbol "B")
(rules-ntl white-space " ")
(rules-ntl parenthesis "->")
(rules-rhs white-space " ")
(rules-rhs symbol "A")
(rules-rhs white-space "\n")
rules-lhs))
(check-equal?
(test-lexer
#f
(string-append
"## axiom ##\n"
"\n"
"A\n"
"\n"
"## rules ##\n"
"\n"
"A 1 -> AB\n"))
`((#f comment "## axiom ##\n")
(axiom-new white-space "\n")
(axiom-new symbol "A")
(axiom-nta white-space "\n\n")
(axiom-new comment "## rules ##\n")
(rules-lhs white-space "\n")
(rules-lhs symbol "A")
(rules-ntl white-space " ")
(rules-ntl error "1")
(,(errstate 'rules-ntl #t) white-space " ")
(rules-ntl parenthesis "->")
(rules-rhs white-space " ")
(rules-rhs symbol "AB")
(rules-rhs white-space "\n")
rules-lhs))
)
|
9f734882eb1f8b052f22a536112358595111a70f563fa608be963beeb49fe96c | logicblocks/salutem | async.clj | (ns salutem.test.support.async
(:require
[clojure.core.async :as async]
[tick.alpha.api :as t]))
(defn <!!-or-timeout
([chan]
(<!!-or-timeout chan (t/new-duration 100 :millis)))
([chan timeout]
(async/alt!!
chan ([v] v)
(async/timeout (t/millis timeout))
(throw (ex-info "Timed out waiting on channel."
{:channel chan
:timeout (t/millis timeout)})))))
| null | https://raw.githubusercontent.com/logicblocks/salutem/7ed95354d84b505d8c5d4ebeaad9b77ddf22b479/core/test/shared/salutem/test/support/async.clj | clojure | (ns salutem.test.support.async
(:require
[clojure.core.async :as async]
[tick.alpha.api :as t]))
(defn <!!-or-timeout
([chan]
(<!!-or-timeout chan (t/new-duration 100 :millis)))
([chan timeout]
(async/alt!!
chan ([v] v)
(async/timeout (t/millis timeout))
(throw (ex-info "Timed out waiting on channel."
{:channel chan
:timeout (t/millis timeout)})))))
| |
edb09b44e5d79695694d4e1544921f7dc3a6b9b39087d3ad44a4ffb4e7ec713c | tsloughter/rebar3_tests | git_plugin_upgrade.erl | -module(git_plugin_upgrade).
-export([]).
| null | https://raw.githubusercontent.com/tsloughter/rebar3_tests/090bfef7d3a4790bb6b16e4c38df6e4c0460b4b2/git_plugin_upgrade/src/git_plugin_upgrade.erl | erlang | -module(git_plugin_upgrade).
-export([]).
| |
5e7316dce2c803dde3cd3c2e714ce2a6434a85b353397a1cf31e92b61c49c5ed | blockfrost/blockfrost-haskell | Pools.hs | -- | Cardano Pools reponses
module Blockfrost.Types.Cardano.Pools
( PoolEpoch (..)
, PoolInfo (..)
, PoolHistory (..)
, PoolMetadata (..)
, PoolRelay (..)
, PoolDelegator (..)
, PoolUpdate (..)
, PoolRegistrationAction (..)
, samplePoolRelay
) where
import Data.Aeson (FromJSON (..), ToJSON (..), object, pairs, withText)
import Data.Text (Text)
import Deriving.Aeson
import Servant.Docs (ToSample (..), samples, singleSample)
import Blockfrost.Types.Shared
-- | Retirement epoch for pool
data PoolEpoch = PoolEpoch
{ _poolEpochPoolId :: PoolId -- ^ Bech32 encoded pool ID
, _poolEpochEpoch :: Epoch -- ^ Retirement epoch number
}
deriving stock (Show, Eq, Generic)
deriving (FromJSON, ToJSON)
via CustomJSON '[FieldLabelModifier '[StripPrefix "_poolEpoch", CamelToSnake]] PoolEpoch
instance ToSample PoolEpoch where
toSamples = pure $ samples
[ PoolEpoch "pool19u64770wqp6s95gkajc8udheske5e6ljmpq33awxk326zjaza0q" 225
, PoolEpoch "pool1dvla4zq98hpvacv20snndupjrqhuc79zl6gjap565nku6et5zdx" 215
, PoolEpoch "pool1wvccajt4eugjtf3k0ja3exjqdj7t8egsujwhcw4tzj4rzsxzw5w" 231
]
-- | Detailed pool information
data PoolInfo = PoolInfo
{ _poolInfoPoolId :: PoolId -- ^ Bech32 encoded pool ID
, _poolInfoHex :: Text -- ^ Hexadecimal pool ID.
, _poolInfoVrfKey :: Text -- ^ VRF key hash
, _poolInfoBlocksMinted :: Integer -- ^ Total minted blocks
, _poolInfoBlocksEpoch :: Integer -- ^ Number of blocks minted in the current epoch
, _poolInfoLiveStake :: Lovelaces
, _poolInfoLiveSize :: Double
, _poolInfoLiveSaturation :: Double
, _poolInfoLiveDelegators :: Double
, _poolInfoActiveStake :: Lovelaces
, _poolInfoActiveSize :: Double
, _poolInfoDeclaredPledge :: Lovelaces -- ^ Stake pool certificate pledge
, _poolInfoLivePledge :: Lovelaces -- ^ Stake pool current pledge
, _poolInfoMarginCost :: Rational -- ^ Margin tax cost of the stake pool
, _poolInfoFixedCost :: Lovelaces -- ^ Fixed tax cost of the stake pool
, _poolInfoRewardAccount :: Address -- ^ Bech32 reward account of the stake pool
, _poolInfoOwners :: [Address]
, _poolInfoRegistration :: [Text]
, _poolInfoRetirement :: [Text]
}
deriving stock (Show, Eq, Generic)
deriving (FromJSON, ToJSON)
via CustomJSON '[FieldLabelModifier '[StripPrefix "_poolInfo", CamelToSnake]] PoolInfo
instance ToSample PoolInfo where
toSamples = pure $ singleSample
PoolInfo
{ _poolInfoPoolId = "pool1pu5jlj4q9w9jlxeu370a3c9myx47md5j5m2str0naunn2q3lkdy"
, _poolInfoHex = "0f292fcaa02b8b2f9b3c8f9fd8e0bb21abedb692a6d5058df3ef2735"
, _poolInfoVrfKey = "0b5245f9934ec2151116fb8ec00f35fd00e0aa3b075c4ed12cce440f999d8233"
, _poolInfoBlocksMinted = 69
, _poolInfoBlocksEpoch = 4
, _poolInfoLiveStake = 6900000000
, _poolInfoLiveSize = 0.42
, _poolInfoLiveSaturation = 0.93
, _poolInfoLiveDelegators = 127
, _poolInfoActiveStake = 4200000000
, _poolInfoActiveSize = 0.43
, _poolInfoDeclaredPledge = 5000000000
, _poolInfoLivePledge = 5000000001
, _poolInfoMarginCost = 0.05
, _poolInfoFixedCost = 340000000
, _poolInfoRewardAccount = "stake1uxkptsa4lkr55jleztw43t37vgdn88l6ghclfwuxld2eykgpgvg3f"
, _poolInfoOwners = [ "stake1u98nnlkvkk23vtvf9273uq7cph5ww6u2yq2389psuqet90sv4xv9v" ]
, _poolInfoRegistration =
[ "9f83e5484f543e05b52e99988272a31da373f3aab4c064c76db96643a355d9dc"
, "7ce3b8c433bf401a190d58c8c483d8e3564dfd29ae8633c8b1b3e6c814403e95"
, "3e6e1200ce92977c3fe5996bd4d7d7e192bcb7e231bc762f9f240c76766535b9"
]
, _poolInfoRetirement = [ "252f622976d39e646815db75a77289cf16df4ad2b287dd8e3a889ce14c13d1a8" ]
}
-- | History of a stake pool parameters over epochs
data PoolHistory = PoolHistory
{ _poolHistoryEpoch :: Epoch -- ^ Epoch number
, _poolHistoryBlocks :: Integer -- ^ Number of blocks created by pool
^ Active ( Snapshot of live stake 2 epochs ago ) stake in Lovelaces
, _poolHistoryActiveSize :: Double -- ^ Pool size (percentage) of overall active stake at that epoch
, _poolHistoryDelegatorsCount :: Integer -- ^ Number of delegators for epoch
, _poolHistoryRewards :: Lovelaces -- ^ Total rewards received before distribution to delegators
, _poolHistoryFees :: Lovelaces -- ^ Pool operator rewards
}
deriving stock (Show, Eq, Generic)
deriving (FromJSON, ToJSON)
via CustomJSON '[FieldLabelModifier '[StripPrefix "_poolHistory", CamelToSnake]] PoolHistory
instance ToSample PoolHistory where
toSamples = pure $ singleSample
PoolHistory
{ _poolHistoryEpoch = 233
, _poolHistoryBlocks = 22
, _poolHistoryActiveStake = 20485965693569
, _poolHistoryActiveSize = 1.2345
, _poolHistoryDelegatorsCount = 115
, _poolHistoryRewards = 206936253674159
, _poolHistoryFees = 1290968354
}
-- | Stake pool registration metadata
data PoolMetadata = PoolMetadata
{ _poolMetadataPoolId :: PoolId -- ^ Bech32 pool ID
, _poolMetadataHex :: Text -- ^ Hexadecimal pool ID
, _poolMetadataUrl :: Maybe Text -- ^ URL to the stake pool metadata
, _poolMetadataHash :: Maybe Text -- ^ Hash of the metadata file
, _poolMetadataTicker :: Maybe Text -- ^ Ticker of the stake pool
, _poolMetadataName :: Maybe Text -- ^ Name of the stake pool
, _poolMetadataDescription :: Maybe Text -- ^ Description of the stake pool
, _poolMetadataHomepage :: Maybe Text -- ^ Home page of the stake pool
}
deriving stock (Show, Eq, Generic)
deriving (FromJSON, ToJSON)
via CustomJSON '[FieldLabelModifier '[StripPrefix "_poolMetadata", CamelToSnake]] PoolMetadata
-- We need this more specific
-- instance since API returns
-- empty object if there's no metadata
instance {-# OVERLAPS #-} ToJSON (Maybe PoolMetadata) where
toJSON Nothing = object mempty
toJSON (Just pm) = toJSON pm
toEncoding Nothing = pairs mempty
toEncoding (Just pm) = toEncoding pm
instance {-# OVERLAPS #-} FromJSON (Maybe PoolMetadata) where
parseJSON x | x == object [] = pure Nothing
parseJSON x = Just <$> parseJSON x
instance ToSample PoolMetadata where
toSamples = pure $ singleSample samplePoolMetadata
samplePoolMetadata :: PoolMetadata
samplePoolMetadata =
PoolMetadata
{ _poolMetadataPoolId = "pool1pu5jlj4q9w9jlxeu370a3c9myx47md5j5m2str0naunn2q3lkdy"
, _poolMetadataHex = "0f292fcaa02b8b2f9b3c8f9fd8e0bb21abedb692a6d5058df3ef2735"
, _poolMetadataUrl = Just ""
, _poolMetadataHash = Just "47c0c68cb57f4a5b4a87bad896fc274678e7aea98e200fa14a1cb40c0cab1d8c"
, _poolMetadataTicker = Just "NUTS"
, _poolMetadataName = Just "Stake Nuts"
, _poolMetadataDescription = Just "The best pool ever"
, _poolMetadataHomepage = Just "/"
}
-- | Relays of a stake pool
data PoolRelay = PoolRelay
^ IPv4 address of the relay
, _poolRelayIpv6 :: Maybe Text -- ^ IPv6 address of the relay
, _poolRelayDns :: Maybe Text -- ^ DNS name of the relay
, _poolRelayDnsSrv :: Maybe Text -- ^ DNS SRV entry of the relay
, _poolRelayPort :: Integer -- ^ Network port of the relay
}
deriving stock (Show, Eq, Generic)
deriving (FromJSON, ToJSON)
via CustomJSON '[FieldLabelModifier '[StripPrefix "_poolRelay", CamelToSnake]] PoolRelay
instance ToSample PoolRelay where
toSamples = pure $ singleSample samplePoolRelay
| Example of ` PoolRelay `
samplePoolRelay :: PoolRelay
samplePoolRelay =
PoolRelay
{ _poolRelayIpv4 = Just "4.4.4.4"
, _poolRelayIpv6 = Just ""
, _poolRelayDns = Just "relay1.stakenuts.com"
, _poolRelayDnsSrv = Just "_relays._tcp.relays.stakenuts.com"
, _poolRelayPort = 3001
}
-- | Stake pool delegator
data PoolDelegator = PoolDelegator
{ _poolDelegatorAddress :: Text -- ^ Bech32 encoded stake addresses
, _poolDelegatorLiveStake :: Lovelaces -- ^ Currently delegated amount
}
deriving stock (Show, Eq, Generic)
deriving (FromJSON, ToJSON)
via CustomJSON '[FieldLabelModifier '[StripPrefix "_poolDelegator", CamelToSnake]] PoolDelegator
instance ToSample PoolDelegator where
toSamples = pure $ samples
[ PoolDelegator
{ _poolDelegatorAddress = "stake1ux4vspfvwuus9uwyp5p3f0ky7a30jq5j80jxse0fr7pa56sgn8kha"
, _poolDelegatorLiveStake = 1137959159981411
}
, PoolDelegator
{ _poolDelegatorAddress = "stake1uylayej7esmarzd4mk4aru37zh9yz0luj3g9fsvgpfaxulq564r5u"
, _poolDelegatorLiveStake = 16958865648
}
, PoolDelegator
{ _poolDelegatorAddress = "stake1u8lr2pnrgf8f7vrs9lt79hc3sxm8s2w4rwvgpncks3axx6q93d4ck"
, _poolDelegatorLiveStake = 18605647
}
]
-- | Registration action of a pool
data PoolRegistrationAction = PoolRegistered | PoolDeregistered
deriving stock (Show, Eq, Generic)
instance ToJSON PoolRegistrationAction where
toJSON PoolRegistered = toJSON ("registered" :: Text)
toJSON PoolDeregistered = toJSON ("deregistered" :: Text)
toEncoding PoolRegistered = toEncoding ("registered" :: Text)
toEncoding PoolDeregistered = toEncoding ("deregistered" :: Text)
instance FromJSON PoolRegistrationAction where
parseJSON = withText "action" $ \case
"registered" -> pure PoolRegistered
"deregistered" -> pure PoolDeregistered
x -> fail ("Expected registration action got " ++ show x)
instance ToSample PoolRegistrationAction where
toSamples = pure $ samples [ PoolRegistered, PoolDeregistered ]
-- | Certificate update to the stake pool
data PoolUpdate = PoolUpdate
{ _poolUpdateTxHash :: TxHash -- ^ Transaction ID
, _poolUpdateCertIndex :: Integer -- ^ Certificate within the transaction
, _poolUpdateAction :: PoolRegistrationAction -- ^ Action in the certificate
}
deriving stock (Show, Eq, Generic)
deriving (FromJSON, ToJSON)
via CustomJSON '[FieldLabelModifier '[StripPrefix "_poolUpdate", CamelToSnake]] PoolUpdate
instance ToSample PoolUpdate where
toSamples = pure $ samples
[ PoolUpdate
{ _poolUpdateTxHash = "6804edf9712d2b619edb6ac86861fe93a730693183a262b165fcc1ba1bc99cad"
, _poolUpdateCertIndex = 0
, _poolUpdateAction = PoolRegistered
}
, PoolUpdate
{ _poolUpdateTxHash = "9c190bc1ac88b2ab0c05a82d7de8b71b67a9316377e865748a89d4426c0d3005"
, _poolUpdateCertIndex = 0
, _poolUpdateAction = PoolDeregistered
}
, PoolUpdate
{ _poolUpdateTxHash = "e14a75b0eb2625de7055f1f580d70426311b78e0d36dd695a6bdc96c7b3d80e0"
, _poolUpdateCertIndex = 1
, _poolUpdateAction = PoolRegistered
}
]
| null | https://raw.githubusercontent.com/blockfrost/blockfrost-haskell/5ced57686d95e7b14569e96f4244b701f1e321e4/blockfrost-api/src/Blockfrost/Types/Cardano/Pools.hs | haskell | | Cardano Pools reponses
| Retirement epoch for pool
^ Bech32 encoded pool ID
^ Retirement epoch number
| Detailed pool information
^ Bech32 encoded pool ID
^ Hexadecimal pool ID.
^ VRF key hash
^ Total minted blocks
^ Number of blocks minted in the current epoch
^ Stake pool certificate pledge
^ Stake pool current pledge
^ Margin tax cost of the stake pool
^ Fixed tax cost of the stake pool
^ Bech32 reward account of the stake pool
| History of a stake pool parameters over epochs
^ Epoch number
^ Number of blocks created by pool
^ Pool size (percentage) of overall active stake at that epoch
^ Number of delegators for epoch
^ Total rewards received before distribution to delegators
^ Pool operator rewards
| Stake pool registration metadata
^ Bech32 pool ID
^ Hexadecimal pool ID
^ URL to the stake pool metadata
^ Hash of the metadata file
^ Ticker of the stake pool
^ Name of the stake pool
^ Description of the stake pool
^ Home page of the stake pool
We need this more specific
instance since API returns
empty object if there's no metadata
# OVERLAPS #
# OVERLAPS #
| Relays of a stake pool
^ IPv6 address of the relay
^ DNS name of the relay
^ DNS SRV entry of the relay
^ Network port of the relay
| Stake pool delegator
^ Bech32 encoded stake addresses
^ Currently delegated amount
| Registration action of a pool
| Certificate update to the stake pool
^ Transaction ID
^ Certificate within the transaction
^ Action in the certificate |
module Blockfrost.Types.Cardano.Pools
( PoolEpoch (..)
, PoolInfo (..)
, PoolHistory (..)
, PoolMetadata (..)
, PoolRelay (..)
, PoolDelegator (..)
, PoolUpdate (..)
, PoolRegistrationAction (..)
, samplePoolRelay
) where
import Data.Aeson (FromJSON (..), ToJSON (..), object, pairs, withText)
import Data.Text (Text)
import Deriving.Aeson
import Servant.Docs (ToSample (..), samples, singleSample)
import Blockfrost.Types.Shared
data PoolEpoch = PoolEpoch
}
deriving stock (Show, Eq, Generic)
deriving (FromJSON, ToJSON)
via CustomJSON '[FieldLabelModifier '[StripPrefix "_poolEpoch", CamelToSnake]] PoolEpoch
instance ToSample PoolEpoch where
toSamples = pure $ samples
[ PoolEpoch "pool19u64770wqp6s95gkajc8udheske5e6ljmpq33awxk326zjaza0q" 225
, PoolEpoch "pool1dvla4zq98hpvacv20snndupjrqhuc79zl6gjap565nku6et5zdx" 215
, PoolEpoch "pool1wvccajt4eugjtf3k0ja3exjqdj7t8egsujwhcw4tzj4rzsxzw5w" 231
]
data PoolInfo = PoolInfo
, _poolInfoLiveStake :: Lovelaces
, _poolInfoLiveSize :: Double
, _poolInfoLiveSaturation :: Double
, _poolInfoLiveDelegators :: Double
, _poolInfoActiveStake :: Lovelaces
, _poolInfoActiveSize :: Double
, _poolInfoOwners :: [Address]
, _poolInfoRegistration :: [Text]
, _poolInfoRetirement :: [Text]
}
deriving stock (Show, Eq, Generic)
deriving (FromJSON, ToJSON)
via CustomJSON '[FieldLabelModifier '[StripPrefix "_poolInfo", CamelToSnake]] PoolInfo
instance ToSample PoolInfo where
toSamples = pure $ singleSample
PoolInfo
{ _poolInfoPoolId = "pool1pu5jlj4q9w9jlxeu370a3c9myx47md5j5m2str0naunn2q3lkdy"
, _poolInfoHex = "0f292fcaa02b8b2f9b3c8f9fd8e0bb21abedb692a6d5058df3ef2735"
, _poolInfoVrfKey = "0b5245f9934ec2151116fb8ec00f35fd00e0aa3b075c4ed12cce440f999d8233"
, _poolInfoBlocksMinted = 69
, _poolInfoBlocksEpoch = 4
, _poolInfoLiveStake = 6900000000
, _poolInfoLiveSize = 0.42
, _poolInfoLiveSaturation = 0.93
, _poolInfoLiveDelegators = 127
, _poolInfoActiveStake = 4200000000
, _poolInfoActiveSize = 0.43
, _poolInfoDeclaredPledge = 5000000000
, _poolInfoLivePledge = 5000000001
, _poolInfoMarginCost = 0.05
, _poolInfoFixedCost = 340000000
, _poolInfoRewardAccount = "stake1uxkptsa4lkr55jleztw43t37vgdn88l6ghclfwuxld2eykgpgvg3f"
, _poolInfoOwners = [ "stake1u98nnlkvkk23vtvf9273uq7cph5ww6u2yq2389psuqet90sv4xv9v" ]
, _poolInfoRegistration =
[ "9f83e5484f543e05b52e99988272a31da373f3aab4c064c76db96643a355d9dc"
, "7ce3b8c433bf401a190d58c8c483d8e3564dfd29ae8633c8b1b3e6c814403e95"
, "3e6e1200ce92977c3fe5996bd4d7d7e192bcb7e231bc762f9f240c76766535b9"
]
, _poolInfoRetirement = [ "252f622976d39e646815db75a77289cf16df4ad2b287dd8e3a889ce14c13d1a8" ]
}
data PoolHistory = PoolHistory
^ Active ( Snapshot of live stake 2 epochs ago ) stake in Lovelaces
}
deriving stock (Show, Eq, Generic)
deriving (FromJSON, ToJSON)
via CustomJSON '[FieldLabelModifier '[StripPrefix "_poolHistory", CamelToSnake]] PoolHistory
instance ToSample PoolHistory where
toSamples = pure $ singleSample
PoolHistory
{ _poolHistoryEpoch = 233
, _poolHistoryBlocks = 22
, _poolHistoryActiveStake = 20485965693569
, _poolHistoryActiveSize = 1.2345
, _poolHistoryDelegatorsCount = 115
, _poolHistoryRewards = 206936253674159
, _poolHistoryFees = 1290968354
}
data PoolMetadata = PoolMetadata
}
deriving stock (Show, Eq, Generic)
deriving (FromJSON, ToJSON)
via CustomJSON '[FieldLabelModifier '[StripPrefix "_poolMetadata", CamelToSnake]] PoolMetadata
toJSON Nothing = object mempty
toJSON (Just pm) = toJSON pm
toEncoding Nothing = pairs mempty
toEncoding (Just pm) = toEncoding pm
parseJSON x | x == object [] = pure Nothing
parseJSON x = Just <$> parseJSON x
instance ToSample PoolMetadata where
toSamples = pure $ singleSample samplePoolMetadata
samplePoolMetadata :: PoolMetadata
samplePoolMetadata =
PoolMetadata
{ _poolMetadataPoolId = "pool1pu5jlj4q9w9jlxeu370a3c9myx47md5j5m2str0naunn2q3lkdy"
, _poolMetadataHex = "0f292fcaa02b8b2f9b3c8f9fd8e0bb21abedb692a6d5058df3ef2735"
, _poolMetadataUrl = Just ""
, _poolMetadataHash = Just "47c0c68cb57f4a5b4a87bad896fc274678e7aea98e200fa14a1cb40c0cab1d8c"
, _poolMetadataTicker = Just "NUTS"
, _poolMetadataName = Just "Stake Nuts"
, _poolMetadataDescription = Just "The best pool ever"
, _poolMetadataHomepage = Just "/"
}
data PoolRelay = PoolRelay
^ IPv4 address of the relay
}
deriving stock (Show, Eq, Generic)
deriving (FromJSON, ToJSON)
via CustomJSON '[FieldLabelModifier '[StripPrefix "_poolRelay", CamelToSnake]] PoolRelay
instance ToSample PoolRelay where
toSamples = pure $ singleSample samplePoolRelay
| Example of ` PoolRelay `
samplePoolRelay :: PoolRelay
samplePoolRelay =
PoolRelay
{ _poolRelayIpv4 = Just "4.4.4.4"
, _poolRelayIpv6 = Just ""
, _poolRelayDns = Just "relay1.stakenuts.com"
, _poolRelayDnsSrv = Just "_relays._tcp.relays.stakenuts.com"
, _poolRelayPort = 3001
}
data PoolDelegator = PoolDelegator
}
deriving stock (Show, Eq, Generic)
deriving (FromJSON, ToJSON)
via CustomJSON '[FieldLabelModifier '[StripPrefix "_poolDelegator", CamelToSnake]] PoolDelegator
instance ToSample PoolDelegator where
toSamples = pure $ samples
[ PoolDelegator
{ _poolDelegatorAddress = "stake1ux4vspfvwuus9uwyp5p3f0ky7a30jq5j80jxse0fr7pa56sgn8kha"
, _poolDelegatorLiveStake = 1137959159981411
}
, PoolDelegator
{ _poolDelegatorAddress = "stake1uylayej7esmarzd4mk4aru37zh9yz0luj3g9fsvgpfaxulq564r5u"
, _poolDelegatorLiveStake = 16958865648
}
, PoolDelegator
{ _poolDelegatorAddress = "stake1u8lr2pnrgf8f7vrs9lt79hc3sxm8s2w4rwvgpncks3axx6q93d4ck"
, _poolDelegatorLiveStake = 18605647
}
]
data PoolRegistrationAction = PoolRegistered | PoolDeregistered
deriving stock (Show, Eq, Generic)
instance ToJSON PoolRegistrationAction where
toJSON PoolRegistered = toJSON ("registered" :: Text)
toJSON PoolDeregistered = toJSON ("deregistered" :: Text)
toEncoding PoolRegistered = toEncoding ("registered" :: Text)
toEncoding PoolDeregistered = toEncoding ("deregistered" :: Text)
instance FromJSON PoolRegistrationAction where
parseJSON = withText "action" $ \case
"registered" -> pure PoolRegistered
"deregistered" -> pure PoolDeregistered
x -> fail ("Expected registration action got " ++ show x)
instance ToSample PoolRegistrationAction where
toSamples = pure $ samples [ PoolRegistered, PoolDeregistered ]
data PoolUpdate = PoolUpdate
}
deriving stock (Show, Eq, Generic)
deriving (FromJSON, ToJSON)
via CustomJSON '[FieldLabelModifier '[StripPrefix "_poolUpdate", CamelToSnake]] PoolUpdate
instance ToSample PoolUpdate where
toSamples = pure $ samples
[ PoolUpdate
{ _poolUpdateTxHash = "6804edf9712d2b619edb6ac86861fe93a730693183a262b165fcc1ba1bc99cad"
, _poolUpdateCertIndex = 0
, _poolUpdateAction = PoolRegistered
}
, PoolUpdate
{ _poolUpdateTxHash = "9c190bc1ac88b2ab0c05a82d7de8b71b67a9316377e865748a89d4426c0d3005"
, _poolUpdateCertIndex = 0
, _poolUpdateAction = PoolDeregistered
}
, PoolUpdate
{ _poolUpdateTxHash = "e14a75b0eb2625de7055f1f580d70426311b78e0d36dd695a6bdc96c7b3d80e0"
, _poolUpdateCertIndex = 1
, _poolUpdateAction = PoolRegistered
}
]
|
dedbe422df5036805fd4f9d2d02841dbb0558178dc84f9467f053db5156d9355 | RolfRolles/PandemicML | Z3Stuff.ml | Functionality provided in this module:
* Making a new context -- no dependencies
* Making "sorts", given a context and an IR.typereg size -- needs a context
* Mapping IR.var terms into Z3.ast terms -- relies upon previous function
* Making a new variable, given a context, a name, and a type -- needs a context
* Making numerals from integers, and given a size -- needs a context
* Making integer booleans from Z3's internal representation -- needs a context
* Making Z3.ast terms from IR.expr terms -- needs a context and all of the above
* Asserting Z3 statements from IR.instr terms (assert a postcondition too)
* Checking the validity of the context
* Retrieving the result
open IR
let mk_context () = Z3.mk_context_x [|("MODEL", "true")|]
let mk_var ctx name ty = Z3.mk_const ctx (Z3.mk_string_symbol ctx name) ty
let sort_of_typereg ctx =
let bv1sort = Z3.mk_bv_sort ctx 1 in
let bv8sort = Z3.mk_bv_sort ctx 8 in
let bv16sort = Z3.mk_bv_sort ctx 16 in
let bv32sort = Z3.mk_bv_sort ctx 32 in
let bv64sort = Z3.mk_bv_sort ctx 64 in
function
| TypeReg_1 -> bv1sort
| TypeReg_8 -> bv8sort
| TypeReg_16 -> bv16sort
| TypeReg_32 -> bv32sort
| TypeReg_64 -> bv64sort
let z3var_of_variable tr2sort ctx =
let htbl = Hashtbl.create 50 in
(fun v ->
let p = PpIR.ppVar v in
let r = try Hashtbl.find htbl p with Not_found ->
(let s =
match v with
| Variable(_,s) -> tr2sort s
| Mem(_,_,_) -> Z3.mk_array_sort ctx (tr2sort (TypeReg_32)) (tr2sort (TypeReg_8))
in
mk_var ctx p s) in
Hashtbl.replace htbl p r; r)
let symbolic_execute instrs postcondition =
let ctx = mk_context () in
let tr2s = sort_of_typereg ctx in
let z3var_of_variable = z3var_of_variable tr2s ctx in
let typereg_of_intexpr e = IRTypeCheck.type_of_integer_type (IRTypeCheck.typecheck_expr e) in
let bits_of_intexpr e = IRUtil.bits (typereg_of_intexpr e) in
let z3num_of_int i s = Z3.mk_numeral ctx (Printf.sprintf "%d" i) (tr2s s) in
let z3num_of_int64 i s = Z3.mk_numeral ctx (Printf.sprintf "%Ld" i) (tr2s s) in
let z3boolify expr = Z3.mk_ite ctx expr (z3num_of_int 1 (TypeReg_1)) (z3num_of_int 0 (TypeReg_1)) in
let rec z3ast_of_irexpr = function
| Binop(l,o,r) ->
let z3l = z3ast_of_irexpr l in
let d f = f ctx z3l (z3ast_of_irexpr r) in
(match o with
| Add -> d Z3.mk_bvadd
| Sub -> d Z3.mk_bvsub
| Mul -> d Z3.mk_bvmul
| SDiv -> d Z3.mk_bvsdiv
| UDiv -> d Z3.mk_bvudiv
| SMod -> d Z3.mk_bvsrem
| UMod -> d Z3.mk_bvurem
| Shl | Shr | Sar ->
let open IRTypeCheck in
let z3r = z3ast_of_irexpr (IRUtil.mk_unsigned_cast (typereg_of_intexpr l) r) in
(match o with
| Shl -> Z3.mk_bvshl ctx z3l z3r
| Shr -> Z3.mk_bvlshr ctx z3l z3r
| Sar -> Z3.mk_bvashr ctx z3l z3r
| _ -> failwith "impossible")
| And -> d Z3.mk_bvand
| Or -> d Z3.mk_bvor
| Xor -> d Z3.mk_bvxor
| EQ -> z3boolify (d Z3.mk_eq)
| NE -> Z3.mk_ite ctx (d Z3.mk_eq) (z3num_of_int 0 (TypeReg_1)) (z3num_of_int 1 (TypeReg_1))
| ULT -> z3boolify (d Z3.mk_bvult)
| ULE -> z3boolify (d Z3.mk_bvule)
| SLT -> z3boolify (d Z3.mk_bvslt)
| SLE -> z3boolify (d Z3.mk_bvsle))
| Unop(Neg,e) -> Z3.mk_bvneg ctx (z3ast_of_irexpr e)
| Unop(Not,e) -> Z3.mk_bvnot ctx (z3ast_of_irexpr e)
| Cast(Unsigned,s,e) -> Z3.mk_zero_ext ctx (IRUtil.bits s - (bits_of_intexpr e)) (z3ast_of_irexpr e)
| Cast(Signed,s,e) -> Z3.mk_sign_ext ctx (IRUtil.bits s - (bits_of_intexpr e)) (z3ast_of_irexpr e)
| Cast(High,s,e) -> let h = IRUtil.bits s in Z3.mk_extract ctx (h-1) (h - bits_of_intexpr e) (z3ast_of_irexpr e)
| Cast(Low,s,e) -> let l = IRUtil.bits s in Z3.mk_extract ctx (l-1) 0 (z3ast_of_irexpr e)
| Load(m,a,s) -> if s = (TypeReg_1) then failwith "z3ast_of_irexpr: 1-bit load";
let m = z3ast_of_irexpr m in
let a = z3ast_of_irexpr a in
let n1 = IRUtil.bits s in
let n = n1 / 8 in
(* Read n bytes linearly (a[i] where 0 <= i < n) and store them into a list *)
let rec aux l i =
if i > (n-1)
then List.rev l (* Remove List.rev for big-endian; TypeReg_32 index might break later assumptions *)
else aux ((Z3.mk_select ctx m (Z3.mk_bvadd ctx (z3num_of_int i (TypeReg_32)) a))::l) (i+1)
in
let l = aux [] 0 in
(* Map these bytes into bit-vectors of size n1 (the size of the read) *)
let l = List.map (fun z3e -> Z3.mk_zero_ext ctx (n1-8) z3e) l in
Shift each byte ( now byte / word / dword ) left by a multiple of 8 and OR them together
let res,_ = List.fold_left
(fun (folded,sf) z3expr -> (Z3.mk_bvor ctx (Z3.mk_bvshl ctx z3expr (z3num_of_int sf (TypeReg_32))) folded,sf+8))
(List.hd l,8)
(List.tl l)
in res
| Store(m,a,t,s) -> if s = (TypeReg_1) then failwith "z3ast_of_irexpr: 1-bit store";
let m = z3ast_of_irexpr m in
let a = z3ast_of_irexpr a in
let t = z3ast_of_irexpr t in
let n1 = IRUtil.bits s in
let n = n1 / 8 in
(* Extract n bytes (BV(8)s) from the bitvector specified in t *)
let rec aux l i =
if i > (n-1)
then List.rev l (* Remove List.rev for big-endian; TypeReg_32 index might break later assumptions *)
else aux ((Z3.mk_extract ctx 7 0 (Z3.mk_bvlshr ctx t (z3num_of_int (i*8) s)))::l) (i+1)
in
let l = aux [] 0 in
Store byte # i to a[i ] , where 0 < = i < n.
let res,_ = List.fold_left
(fun (folded,i) z3byte -> (Z3.mk_store ctx folded (Z3.mk_bvadd ctx (z3num_of_int i (TypeReg_32)) a) z3byte,i+1))
(m,0)
l
in res
| Var(v) -> z3var_of_variable v
| Const(i,s) -> z3num_of_int64 i s
| Let(v,eexpr,ein) -> invalid_arg "z3ast_of_irexpr: Let"
in
let instr_to_z3 = function
| Assign(v,e) -> Z3.assert_cnstr ctx (Z3.mk_eq ctx (z3var_of_variable v) (z3ast_of_irexpr e))
| Assert(e) -> Z3.assert_cnstr ctx (z3ast_of_irexpr e)
| Label(_) -> ()
| Comment(_) -> ()
| Special(_) -> invalid_arg "instr_to_z3: Special"
| Jmp(e) -> invalid_arg "instr_to_z3: Jmp"
| CJmp(eb,et,en) -> invalid_arg "instr_to_z3: CJmp"
| Halt(e) -> invalid_arg "instr_to_z3: Halt"
in
let ssa_instrs,ssactxt = IRSSA.bb_to_ssa 38 instrs in
List.iter instr_to_z3 ssa_instrs;
let postcondition = IRLocalOpt.replace_var_with_var ssactxt postcondition in
Z3.assert_cnstr ctx (Z3.mk_eq ctx (z3num_of_int 1 (TypeReg_1)) (z3ast_of_irexpr postcondition));
let (result,m) = Z3.check_and_get_model ctx in
let rsyns = match result with
| Z3.L_FALSE -> "unsat"
| Z3.L_UNDEF -> "unknown"
| Z3.L_TRUE -> "sat"
in
let rmod = (Z3.model_to_string ctx m) in
Z3.del_model ctx m;
Z3.del_context ctx;
(rsyns^"\n"^rmod)
let mk_dword ctx i = Z3.mk_numeral ctx (Printf.sprintf "%ld" i) (Z3.mk_bv_sort ctx 32)
let store_byte_at mem (addr:int32) (b:int32) = (fun a -> if a = addr then b else mem a)
Assert that address ` addr ` in array ` memvar ` is equal to 8 - bit ( int32 ) value ` b ` in context ` ctx `
let assert_byte_at ctx memvar addr b =
Z3.assert_cnstr
ctx
(Z3.mk_eq
ctx
(Z3.mk_select ctx memvar (mk_dword ctx addr))
(Z3.mk_numeral ctx (Printf.sprintf "%ld" b) (Z3.mk_bv_sort ctx 8)))
let assert_and_store_byte_at mem ctx memvar addr b =
assert_byte_at ctx memvar addr b;
store_byte_at mem addr b
let gb d fac = Int32.logand (Int32.shift_right d fac) 0xffl
let gw d fac = Int32.logand (Int32.shift_right d fac) 0xffffl
let aa a i = Int32.add a i
let sbm mem a i s d = store_byte_at mem (aa a i) (gb d s)
let abm ctx m a i s d = assert_byte_at ctx m (aa a i) (gb d s)
let store_word_at mem addr w =
let mem0 = sbm mem addr 0l 00 w in
let mem1 = sbm mem0 addr 1l 08 w in
mem1
let assert_word_at ctx memvar addr w =
abm ctx memvar addr 0l 00 w;
abm ctx memvar addr 1l 08 w
let assert_and_store_word_at mem ctx memvar addr w =
assert_word_at ctx memvar addr w;
store_word_at mem addr w
let store_dword_at mem addr d =
let mem0 = store_word_at mem addr d in
let mem1 = store_word_at mem0 (aa addr 2l) (gw d 16) in
mem1
let assert_dword_at ctx memvar addr d =
assert_word_at ctx memvar addr d;
assert_word_at ctx memvar (aa addr 2l) (gw d 16)
let assert_and_store_dword_at mem ctx memvar addr d =
assert_dword_at ctx memvar addr d;
store_dword_at mem addr d
let store_string_at mem addr str =
let l = String.length str in
let rec aux mem i =
if i < l
then
aux (sbm mem addr (Int32.of_int i) 0 (Int32.of_int (Char.code str.[i]))) (i+1)
else mem
in
sbm (aux mem 0) addr (Int32.of_int l) 0 0l
let assert_string_at ctx memvar addr str =
let l = String.length str in
let rec aux i =
if i < l
then
(abm ctx memvar addr (Int32.of_int i) 0 (Int32.of_int (Char.code str.[i]));
aux (i+1))
else ()
in
aux 0;
abm ctx memvar addr (Int32.of_int l) 0 0l
let assert_and_store_string_at mem ctx memvar addr str =
assert_string_at ctx memvar addr str;
store_string_at mem addr str
| null | https://raw.githubusercontent.com/RolfRolles/PandemicML/9c31ecaf9c782dbbeb6cf502bc2a6730316d681e/Incubator/Z3Stuff.ml | ocaml | Read n bytes linearly (a[i] where 0 <= i < n) and store them into a list
Remove List.rev for big-endian; TypeReg_32 index might break later assumptions
Map these bytes into bit-vectors of size n1 (the size of the read)
Extract n bytes (BV(8)s) from the bitvector specified in t
Remove List.rev for big-endian; TypeReg_32 index might break later assumptions | Functionality provided in this module:
* Making a new context -- no dependencies
* Making "sorts", given a context and an IR.typereg size -- needs a context
* Mapping IR.var terms into Z3.ast terms -- relies upon previous function
* Making a new variable, given a context, a name, and a type -- needs a context
* Making numerals from integers, and given a size -- needs a context
* Making integer booleans from Z3's internal representation -- needs a context
* Making Z3.ast terms from IR.expr terms -- needs a context and all of the above
* Asserting Z3 statements from IR.instr terms (assert a postcondition too)
* Checking the validity of the context
* Retrieving the result
open IR
let mk_context () = Z3.mk_context_x [|("MODEL", "true")|]
let mk_var ctx name ty = Z3.mk_const ctx (Z3.mk_string_symbol ctx name) ty
let sort_of_typereg ctx =
let bv1sort = Z3.mk_bv_sort ctx 1 in
let bv8sort = Z3.mk_bv_sort ctx 8 in
let bv16sort = Z3.mk_bv_sort ctx 16 in
let bv32sort = Z3.mk_bv_sort ctx 32 in
let bv64sort = Z3.mk_bv_sort ctx 64 in
function
| TypeReg_1 -> bv1sort
| TypeReg_8 -> bv8sort
| TypeReg_16 -> bv16sort
| TypeReg_32 -> bv32sort
| TypeReg_64 -> bv64sort
let z3var_of_variable tr2sort ctx =
let htbl = Hashtbl.create 50 in
(fun v ->
let p = PpIR.ppVar v in
let r = try Hashtbl.find htbl p with Not_found ->
(let s =
match v with
| Variable(_,s) -> tr2sort s
| Mem(_,_,_) -> Z3.mk_array_sort ctx (tr2sort (TypeReg_32)) (tr2sort (TypeReg_8))
in
mk_var ctx p s) in
Hashtbl.replace htbl p r; r)
let symbolic_execute instrs postcondition =
let ctx = mk_context () in
let tr2s = sort_of_typereg ctx in
let z3var_of_variable = z3var_of_variable tr2s ctx in
let typereg_of_intexpr e = IRTypeCheck.type_of_integer_type (IRTypeCheck.typecheck_expr e) in
let bits_of_intexpr e = IRUtil.bits (typereg_of_intexpr e) in
let z3num_of_int i s = Z3.mk_numeral ctx (Printf.sprintf "%d" i) (tr2s s) in
let z3num_of_int64 i s = Z3.mk_numeral ctx (Printf.sprintf "%Ld" i) (tr2s s) in
let z3boolify expr = Z3.mk_ite ctx expr (z3num_of_int 1 (TypeReg_1)) (z3num_of_int 0 (TypeReg_1)) in
let rec z3ast_of_irexpr = function
| Binop(l,o,r) ->
let z3l = z3ast_of_irexpr l in
let d f = f ctx z3l (z3ast_of_irexpr r) in
(match o with
| Add -> d Z3.mk_bvadd
| Sub -> d Z3.mk_bvsub
| Mul -> d Z3.mk_bvmul
| SDiv -> d Z3.mk_bvsdiv
| UDiv -> d Z3.mk_bvudiv
| SMod -> d Z3.mk_bvsrem
| UMod -> d Z3.mk_bvurem
| Shl | Shr | Sar ->
let open IRTypeCheck in
let z3r = z3ast_of_irexpr (IRUtil.mk_unsigned_cast (typereg_of_intexpr l) r) in
(match o with
| Shl -> Z3.mk_bvshl ctx z3l z3r
| Shr -> Z3.mk_bvlshr ctx z3l z3r
| Sar -> Z3.mk_bvashr ctx z3l z3r
| _ -> failwith "impossible")
| And -> d Z3.mk_bvand
| Or -> d Z3.mk_bvor
| Xor -> d Z3.mk_bvxor
| EQ -> z3boolify (d Z3.mk_eq)
| NE -> Z3.mk_ite ctx (d Z3.mk_eq) (z3num_of_int 0 (TypeReg_1)) (z3num_of_int 1 (TypeReg_1))
| ULT -> z3boolify (d Z3.mk_bvult)
| ULE -> z3boolify (d Z3.mk_bvule)
| SLT -> z3boolify (d Z3.mk_bvslt)
| SLE -> z3boolify (d Z3.mk_bvsle))
| Unop(Neg,e) -> Z3.mk_bvneg ctx (z3ast_of_irexpr e)
| Unop(Not,e) -> Z3.mk_bvnot ctx (z3ast_of_irexpr e)
| Cast(Unsigned,s,e) -> Z3.mk_zero_ext ctx (IRUtil.bits s - (bits_of_intexpr e)) (z3ast_of_irexpr e)
| Cast(Signed,s,e) -> Z3.mk_sign_ext ctx (IRUtil.bits s - (bits_of_intexpr e)) (z3ast_of_irexpr e)
| Cast(High,s,e) -> let h = IRUtil.bits s in Z3.mk_extract ctx (h-1) (h - bits_of_intexpr e) (z3ast_of_irexpr e)
| Cast(Low,s,e) -> let l = IRUtil.bits s in Z3.mk_extract ctx (l-1) 0 (z3ast_of_irexpr e)
| Load(m,a,s) -> if s = (TypeReg_1) then failwith "z3ast_of_irexpr: 1-bit load";
let m = z3ast_of_irexpr m in
let a = z3ast_of_irexpr a in
let n1 = IRUtil.bits s in
let n = n1 / 8 in
let rec aux l i =
if i > (n-1)
else aux ((Z3.mk_select ctx m (Z3.mk_bvadd ctx (z3num_of_int i (TypeReg_32)) a))::l) (i+1)
in
let l = aux [] 0 in
let l = List.map (fun z3e -> Z3.mk_zero_ext ctx (n1-8) z3e) l in
Shift each byte ( now byte / word / dword ) left by a multiple of 8 and OR them together
let res,_ = List.fold_left
(fun (folded,sf) z3expr -> (Z3.mk_bvor ctx (Z3.mk_bvshl ctx z3expr (z3num_of_int sf (TypeReg_32))) folded,sf+8))
(List.hd l,8)
(List.tl l)
in res
| Store(m,a,t,s) -> if s = (TypeReg_1) then failwith "z3ast_of_irexpr: 1-bit store";
let m = z3ast_of_irexpr m in
let a = z3ast_of_irexpr a in
let t = z3ast_of_irexpr t in
let n1 = IRUtil.bits s in
let n = n1 / 8 in
let rec aux l i =
if i > (n-1)
else aux ((Z3.mk_extract ctx 7 0 (Z3.mk_bvlshr ctx t (z3num_of_int (i*8) s)))::l) (i+1)
in
let l = aux [] 0 in
Store byte # i to a[i ] , where 0 < = i < n.
let res,_ = List.fold_left
(fun (folded,i) z3byte -> (Z3.mk_store ctx folded (Z3.mk_bvadd ctx (z3num_of_int i (TypeReg_32)) a) z3byte,i+1))
(m,0)
l
in res
| Var(v) -> z3var_of_variable v
| Const(i,s) -> z3num_of_int64 i s
| Let(v,eexpr,ein) -> invalid_arg "z3ast_of_irexpr: Let"
in
let instr_to_z3 = function
| Assign(v,e) -> Z3.assert_cnstr ctx (Z3.mk_eq ctx (z3var_of_variable v) (z3ast_of_irexpr e))
| Assert(e) -> Z3.assert_cnstr ctx (z3ast_of_irexpr e)
| Label(_) -> ()
| Comment(_) -> ()
| Special(_) -> invalid_arg "instr_to_z3: Special"
| Jmp(e) -> invalid_arg "instr_to_z3: Jmp"
| CJmp(eb,et,en) -> invalid_arg "instr_to_z3: CJmp"
| Halt(e) -> invalid_arg "instr_to_z3: Halt"
in
let ssa_instrs,ssactxt = IRSSA.bb_to_ssa 38 instrs in
List.iter instr_to_z3 ssa_instrs;
let postcondition = IRLocalOpt.replace_var_with_var ssactxt postcondition in
Z3.assert_cnstr ctx (Z3.mk_eq ctx (z3num_of_int 1 (TypeReg_1)) (z3ast_of_irexpr postcondition));
let (result,m) = Z3.check_and_get_model ctx in
let rsyns = match result with
| Z3.L_FALSE -> "unsat"
| Z3.L_UNDEF -> "unknown"
| Z3.L_TRUE -> "sat"
in
let rmod = (Z3.model_to_string ctx m) in
Z3.del_model ctx m;
Z3.del_context ctx;
(rsyns^"\n"^rmod)
let mk_dword ctx i = Z3.mk_numeral ctx (Printf.sprintf "%ld" i) (Z3.mk_bv_sort ctx 32)
let store_byte_at mem (addr:int32) (b:int32) = (fun a -> if a = addr then b else mem a)
Assert that address ` addr ` in array ` memvar ` is equal to 8 - bit ( int32 ) value ` b ` in context ` ctx `
let assert_byte_at ctx memvar addr b =
Z3.assert_cnstr
ctx
(Z3.mk_eq
ctx
(Z3.mk_select ctx memvar (mk_dword ctx addr))
(Z3.mk_numeral ctx (Printf.sprintf "%ld" b) (Z3.mk_bv_sort ctx 8)))
let assert_and_store_byte_at mem ctx memvar addr b =
assert_byte_at ctx memvar addr b;
store_byte_at mem addr b
let gb d fac = Int32.logand (Int32.shift_right d fac) 0xffl
let gw d fac = Int32.logand (Int32.shift_right d fac) 0xffffl
let aa a i = Int32.add a i
let sbm mem a i s d = store_byte_at mem (aa a i) (gb d s)
let abm ctx m a i s d = assert_byte_at ctx m (aa a i) (gb d s)
let store_word_at mem addr w =
let mem0 = sbm mem addr 0l 00 w in
let mem1 = sbm mem0 addr 1l 08 w in
mem1
let assert_word_at ctx memvar addr w =
abm ctx memvar addr 0l 00 w;
abm ctx memvar addr 1l 08 w
let assert_and_store_word_at mem ctx memvar addr w =
assert_word_at ctx memvar addr w;
store_word_at mem addr w
let store_dword_at mem addr d =
let mem0 = store_word_at mem addr d in
let mem1 = store_word_at mem0 (aa addr 2l) (gw d 16) in
mem1
let assert_dword_at ctx memvar addr d =
assert_word_at ctx memvar addr d;
assert_word_at ctx memvar (aa addr 2l) (gw d 16)
let assert_and_store_dword_at mem ctx memvar addr d =
assert_dword_at ctx memvar addr d;
store_dword_at mem addr d
let store_string_at mem addr str =
let l = String.length str in
let rec aux mem i =
if i < l
then
aux (sbm mem addr (Int32.of_int i) 0 (Int32.of_int (Char.code str.[i]))) (i+1)
else mem
in
sbm (aux mem 0) addr (Int32.of_int l) 0 0l
let assert_string_at ctx memvar addr str =
let l = String.length str in
let rec aux i =
if i < l
then
(abm ctx memvar addr (Int32.of_int i) 0 (Int32.of_int (Char.code str.[i]));
aux (i+1))
else ()
in
aux 0;
abm ctx memvar addr (Int32.of_int l) 0 0l
let assert_and_store_string_at mem ctx memvar addr str =
assert_string_at ctx memvar addr str;
store_string_at mem addr str
|
439abb8c729405b156484e0983d2865cbf37d0111cf596a54d8a283add2fabfa | GlideAngle/flare-timing | SpeedFraction.hs | module SpeedFraction (speedFractionUnits, speedFractionInputs, speedFraction) where
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit as HU ((@?=), testCase)
import Data.Ratio ((%))
import Data.UnitsOfMeasure (u, convert)
import Data.UnitsOfMeasure.Internal (Quantity(..))
import Flight.Units ()
import qualified "flight-gap-allot" Flight.Score as FS
import "flight-gap-allot" Flight.Score
( BestTime(..)
, PilotTime(..)
, SpeedFraction(..)
, PowerExponent(..)
, isNormal
)
import TestNewtypes
maxS:: SpeedFraction
maxS = SpeedFraction (1 % 1)
minS :: SpeedFraction
minS = SpeedFraction (0 % 1)
halfS :: SpeedFraction
halfS = SpeedFraction (1 % 2)
point8S :: SpeedFraction
point8S = SpeedFraction (4 % 5)
powerExp :: PowerExponent
powerExp = FS.powerExp23
hms :: Integer -> Integer -> Integer -> Quantity Double [u| h |]
hms h m s =
convert secs
where
secs :: Quantity Double [u| s |]
secs = MkQuantity . fromIntegral $ ((h * 60 + m) * 60 + s)
speedFractionUnits :: TestTree
speedFractionUnits = testGroup "Speed fraction unit tests"
[ timeUnits
, maxUnits
, minUnits
, point5Units
, point8Units
]
timeUnits :: TestTree
timeUnits = testGroup "Time tests"
[ HU.testCase "1 hr = 1:00:00" $ [u| 1h |] `compare` hms 1 0 0 @?= EQ
, HU.testCase "2 hr = 2:00:00" $ [u| 2h |] `compare` hms 2 0 0 @?= EQ
, HU.testCase "3 hr = 3:00:00" $ [u| 3h |] `compare` hms 3 0 0 @?= EQ
]
maxUnits :: TestTree
maxUnits = testGroup "Maximum tests"
[ HU.testCase "1 hr best time, 1:00:00 pilot time = 1 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 1h |])
(PilotTime $ hms 1 0 0) `compare` maxS
@?= EQ
, HU.testCase "2 hr best time, 2:00:00 pilot time = 1 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 2h |])
(PilotTime $ hms 2 0 0) `compare` maxS
@?= EQ
, HU.testCase "3 hr best time, 3:00:00 pilot time = 1 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 3h |])
(PilotTime $ hms 3 0 0) `compare` maxS
@?= EQ
, HU.testCase "1 hr best time, 1:00:01 pilot time < 1 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 1h |])
(PilotTime $ hms 1 0 1) `compare` maxS
@?= LT
, HU.testCase "2 hr best time, 2:00:01 pilot time < 1 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 2h |])
(PilotTime $ hms 2 0 1) `compare` maxS
@?= LT
, HU.testCase "3 hr best time, 3:00:01 pilot time < 1 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 3h |])
(PilotTime $ hms 3 0 1) `compare` maxS
@?= LT
]
minUnits :: TestTree
minUnits = testGroup "Minimum tests" (map check cases)
    where
        -- Build one HUnit case: the speed fraction computed from the
        -- given best time and the pilot time h:m:s must compare to the
        -- minimum fraction with the expected ordering.
        check (label, best, (h, m, s), expected) =
            HU.testCase label $
                FS.speedFraction
                    powerExp
                    (fromHour best)
                    (PilotTime $ hms h m s) `compare` minS
                @?= expected

        -- (label, best time, pilot time as (h, m, s), expected ordering vs minS)
        cases =
            [ ("1 hr best time, 1:59:59 pilot time > 0 speed fraction", [u| 1h |], (1, 59, 59), GT)
            , ("1 hr best time, 2:00:00 pilot time = 0 speed fraction", [u| 1h |], (2, 0, 0), EQ)
            , ("2 hr best time, 3:24:51 pilot time > 0 speed fraction", [u| 2h |], (3, 24, 51), GT)
            , ("2 hr best time, 3:24:52 pilot time = 0 speed fraction", [u| 2h |], (3, 24, 52), EQ)
            , ("3 hr best time, 4:43:55 pilot time > 0 speed fraction", [u| 3h |], (4, 43, 55), GT)
            , ("3 hr best time, 4:43:56 pilot time = 0 speed fraction", [u| 3h |], (4, 43, 56), EQ)
            ]
point5Units :: TestTree
point5Units = testGroup "50 % tests" (map check cases)
    where
        -- Build one HUnit case: the speed fraction computed from the
        -- given best time and the pilot time h:m:s must compare to the
        -- half fraction with the expected ordering.
        check (label, best, (h, m, s), expected) =
            HU.testCase label $
                FS.speedFraction
                    powerExp
                    (fromHour best)
                    (PilotTime $ hms h m s) `compare` halfS
                @?= expected

        -- (label, best time, pilot time as (h, m, s), expected ordering vs halfS)
        cases =
            [ ("1 hr best time, 1:21:12 pilot time > 0.5 speed fraction", [u| 1h |], (1, 21, 12), GT)
            , ("1 hr best time, 1:21:13 pilot time < 0.5 speed fraction", [u| 1h |], (1, 21, 13), LT)
            , ("2 hr best time, 2:29:59 pilot time > 0.5 speed fraction", [u| 2h |], (2, 29, 59), GT)
            , ("2 hr best time, 2:30:00 pilot time = 0.5 speed fraction", [u| 2h |], (2, 30, 0), EQ)
            , ("2 hr best time, 2:30:01 pilot time < 0.5 speed fraction", [u| 2h |], (2, 30, 1), LT)
            , ("3 hr best time, 3:36:44 pilot time > 0.5 speed fraction", [u| 3h |], (3, 36, 44), GT)
            , ("3 hr best time, 3:36:45 pilot time < 0.5 speed fraction", [u| 3h |], (3, 36, 45), LT)
            ]
point8Units :: TestTree
point8Units = testGroup "80 % tests" (map check cases)
    where
        -- Build one HUnit case: the speed fraction computed from the
        -- given best time and the pilot time h:m:s must compare to the
        -- 0.8 fraction with the expected ordering.
        check (label, best, (h, m, s), expected) =
            HU.testCase label $
                FS.speedFraction
                    powerExp
                    (fromHour best)
                    (PilotTime $ hms h m s) `compare` point8S
                @?= expected

        -- (label, best time, pilot time as (h, m, s), expected ordering vs point8S)
        cases =
            [ ("1 hr best time, 1:05:21 pilot time > 0.8 speed fraction", [u| 1h |], (1, 5, 21), GT)
            , ("1 hr best time, 1:05:22 pilot time < 0.8 speed fraction", [u| 1h |], (1, 5, 22), LT)
            , ("2 hr best time, 2:07:35 pilot time > 0.8 speed fraction", [u| 2h |], (2, 7, 35), GT)
            , ("2 hr best time, 2:07:36 pilot time < 0.8 speed fraction", [u| 2h |], (2, 7, 36), LT)
            , ("3 hr best time, 3:09:17 pilot time > 0.8 speed fraction", [u| 3h |], (3, 9, 17), GT)
            , ("3 hr best time, 3:09:18 pilot time < 0.8 speed fraction", [u| 3h |], (3, 9, 18), LT)
            ]
-- | Wrap an hour quantity as a 'BestTime'.
fromHour :: Quantity Double [u| h |] -> BestTime (Quantity Double [u| h |])
fromHour h = BestTime h
-- | Input invariant of an 'SfTest': the pilot time is never shorter
-- than the best time.
speedFractionInputs :: SfTest -> Bool
speedFractionInputs
    (SfTest (BestTime best, PilotTime pilot)) =
    pilot >= best
-- | Property: the computed speed fraction always satisfies 'isNormal'.
speedFraction :: SfTest -> Bool
speedFraction (SfTest (best, pilot)) =
    isNormal x
    where
        SpeedFraction x = FS.speedFraction powerExp best pilot
| null | https://raw.githubusercontent.com/GlideAngle/flare-timing/172a9b199eb1ff72c967669dc349cbf8d9c4bc52/lang-haskell/gap-math/test-suite-math/SpeedFraction.hs | haskell | module SpeedFraction (speedFractionUnits, speedFractionInputs, speedFraction) where
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit as HU ((@?=), testCase)
import Data.Ratio ((%))
import Data.UnitsOfMeasure (u, convert)
import Data.UnitsOfMeasure.Internal (Quantity(..))
import Flight.Units ()
import qualified "flight-gap-allot" Flight.Score as FS
import "flight-gap-allot" Flight.Score
( BestTime(..)
, PilotTime(..)
, SpeedFraction(..)
, PowerExponent(..)
, isNormal
)
import TestNewtypes
maxS:: SpeedFraction
maxS = SpeedFraction (1 % 1)
minS :: SpeedFraction
minS = SpeedFraction (0 % 1)
halfS :: SpeedFraction
halfS = SpeedFraction (1 % 2)
point8S :: SpeedFraction
point8S = SpeedFraction (4 % 5)
powerExp :: PowerExponent
powerExp = FS.powerExp23
hms :: Integer -> Integer -> Integer -> Quantity Double [u| h |]
hms h m s =
convert secs
where
secs :: Quantity Double [u| s |]
secs = MkQuantity . fromIntegral $ ((h * 60 + m) * 60 + s)
speedFractionUnits :: TestTree
speedFractionUnits = testGroup "Speed fraction unit tests"
[ timeUnits
, maxUnits
, minUnits
, point5Units
, point8Units
]
timeUnits :: TestTree
timeUnits = testGroup "Time tests"
[ HU.testCase "1 hr = 1:00:00" $ [u| 1h |] `compare` hms 1 0 0 @?= EQ
, HU.testCase "2 hr = 2:00:00" $ [u| 2h |] `compare` hms 2 0 0 @?= EQ
, HU.testCase "3 hr = 3:00:00" $ [u| 3h |] `compare` hms 3 0 0 @?= EQ
]
maxUnits :: TestTree
maxUnits = testGroup "Maximum tests"
[ HU.testCase "1 hr best time, 1:00:00 pilot time = 1 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 1h |])
(PilotTime $ hms 1 0 0) `compare` maxS
@?= EQ
, HU.testCase "2 hr best time, 2:00:00 pilot time = 1 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 2h |])
(PilotTime $ hms 2 0 0) `compare` maxS
@?= EQ
, HU.testCase "3 hr best time, 3:00:00 pilot time = 1 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 3h |])
(PilotTime $ hms 3 0 0) `compare` maxS
@?= EQ
, HU.testCase "1 hr best time, 1:00:01 pilot time < 1 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 1h |])
(PilotTime $ hms 1 0 1) `compare` maxS
@?= LT
, HU.testCase "2 hr best time, 2:00:01 pilot time < 1 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 2h |])
(PilotTime $ hms 2 0 1) `compare` maxS
@?= LT
, HU.testCase "3 hr best time, 3:00:01 pilot time < 1 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 3h |])
(PilotTime $ hms 3 0 1) `compare` maxS
@?= LT
]
minUnits :: TestTree
minUnits = testGroup "Minimum tests"
[ HU.testCase "1 hr best time, 1:59:59 pilot time > 0 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 1h |])
(PilotTime $ hms 1 59 59) `compare` minS
@?= GT
, HU.testCase "1 hr best time, 2:00:00 pilot time = 0 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 1h |])
(PilotTime $ hms 2 0 0) `compare` minS
@?= EQ
, HU.testCase "2 hr best time, 3:24:51 pilot time > 0 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 2h |])
(PilotTime $ hms 3 24 51) `compare` minS
@?= GT
, HU.testCase "2 hr best time, 3:24:52 pilot time = 0 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 2h |])
(PilotTime $ hms 3 24 52) `compare` minS
@?= EQ
, HU.testCase "3 hr best time, 4:43:55 pilot time > 0 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 3h |])
(PilotTime $ hms 4 43 55) `compare` minS
@?= GT
, HU.testCase "3 hr best time, 4:43:56 pilot time = 0 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 3h |])
(PilotTime $ hms 4 43 56) `compare` minS
@?= EQ
]
point5Units :: TestTree
point5Units = testGroup "50 % tests"
[ HU.testCase "1 hr best time, 1:21:12 pilot time > 0.5 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 1h |])
(PilotTime $ hms 1 21 12) `compare` halfS
@?= GT
, HU.testCase "1 hr best time, 1:21:13 pilot time < 0.5 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 1h |])
(PilotTime $ hms 1 21 13) `compare` halfS
@?= LT
, HU.testCase "2 hr best time, 2:29:59 pilot time > 0.5 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 2h |])
(PilotTime $ hms 2 29 59) `compare` halfS
@?= GT
, HU.testCase "2 hr best time, 2:30:00 pilot time = 0.5 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 2h |])
(PilotTime $ hms 2 30 0) `compare` halfS
@?= EQ
, HU.testCase "2 hr best time, 2:30:01 pilot time < 0.5 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 2h |])
(PilotTime $ hms 2 30 1) `compare` halfS
@?= LT
, HU.testCase "3 hr best time, 3:36:44 pilot time > 0.5 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 3h |])
(PilotTime $ hms 3 36 44) `compare` halfS
@?= GT
, HU.testCase "3 hr best time, 3:36:45 pilot time < 0.5 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 3h |])
(PilotTime $ hms 3 36 45) `compare` halfS
@?= LT
]
point8Units :: TestTree
point8Units = testGroup "80 % tests"
[ HU.testCase "1 hr best time, 1:05:21 pilot time > 0.8 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 1h |])
(PilotTime $ hms 1 5 21) `compare` point8S
@?= GT
, HU.testCase "1 hr best time, 1:05:22 pilot time < 0.8 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 1h |])
(PilotTime $ hms 1 5 22) `compare` point8S
@?= LT
, HU.testCase "2 hr best time, 2:07:35 pilot time > 0.8 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 2h |])
(PilotTime $ hms 2 7 35) `compare` point8S
@?= GT
, HU.testCase "2 hr best time, 2:07:36 pilot time < 0.8 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 2h |])
(PilotTime $ hms 2 7 36) `compare` point8S
@?= LT
, HU.testCase "3 hr best time, 3:09:17 pilot time > 0.8 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 3h |])
(PilotTime $ hms 3 9 17) `compare` point8S
@?= GT
, HU.testCase "3 hr best time, 3:09:18 pilot time < 0.8 speed fraction" $
FS.speedFraction
powerExp
(fromHour [u| 3h |])
(PilotTime $ hms 3 9 18) `compare` point8S
@?= LT
]
fromHour :: Quantity Double [u| h |] -> BestTime (Quantity Double [u| h |])
fromHour = BestTime
speedFractionInputs :: SfTest -> Bool
speedFractionInputs
(SfTest (BestTime best, PilotTime pilot)) =
best <= pilot
speedFraction :: SfTest -> Bool
speedFraction (SfTest (best, pilot)) =
(\(SpeedFraction x) -> isNormal x) $ FS.speedFraction powerExp best pilot
| |
d5860000b9485083b2c1df3ea090374a9d80b2ebd02836809d92c28177f02130 | fab13n/lamtez | typecheck.ml | open Utils
module Ctx = Typecheck_ctx
module A = Ast
module P = String_of_ast
module StringSet = Set.Make(String)
type typed_contract = {
ctx: Ctx.t;
storage_type: A.etype;
param_type: A.etype;
result_type: A.etype;
storage_init: A.expr option;
code: A.expr }
let _DEBUG_ = ref false
let debug_indent = ref 0
(* Translates a pattern, and the type it matches, into type constraints
* added to `ctx`;
* Pushes the pattern variables into ctx' expression variables;
* Keeps a bookmark to allow the removal of those evars.
*)
let rec push_pattern_bindings ctx pattern etype : (Ctx.t*Ctx.bookmark) =
match pattern with
| A.PId id ->
let scheme = ([], etype) in
Ctx.push_evars [id, scheme] ctx
| A.PAny -> ctx, Ctx.bookmark_empty
| A.PTuple plist ->
let tlist = List.map (fun _ -> A.fresh_tvar ~prefix:"tuple" ()) plist in
let ctx, _ = Ctx.unify ctx etype (A.ttuple tlist) in
let fold (ctx, bmrk) p t =
let ctx, bmrk' = push_pattern_bindings ctx p t in
ctx, bmrk @ bmrk' in
List.fold_left2 fold (ctx, Ctx.bookmark_empty) plist tlist
| A.PProduct tagged_pattern_list ->
(* Assume that etype has the corresponding product's type,
* and that every field has the corresponding field type *)
let pname = Ctx.name_of_product_tag ctx (fst @@ List.hd tagged_pattern_list) in
let tprod, tagged_type_list = Ctx.instantiate_composite pname @@ Ctx.product_of_name ctx pname in
let ctx, _ = Ctx.unify ctx etype tprod in
let fold (ctx, bmrk) (tag, pattern) =
let etype = List.assoc tag tagged_type_list in
let ctx, bmrk' = push_pattern_bindings ctx pattern etype in
ctx, bmrk@bmrk' in
List.fold_left fold (ctx, Ctx.bookmark_empty) tagged_pattern_list
let rec typecheck_expr ctx expr =
if !_DEBUG_ then begin
print_endline (String.make (2 * !debug_indent) ' '^"Typing "^P.string_of_expr expr);
incr debug_indent
end;
let ctx, t = match expr with
| A.ELit(_, c) -> begin match c with
| A.LNat _ -> ctx, A.tprim "nat"
| A.LInt _ -> ctx, A.tprim "int"
| A.LString _ -> ctx, A.tprim "string"
| A.LTez _ -> ctx, A.tprim "tez"
| A.LSig _ -> ctx, A.tprim "sig"
| A.LTime _ -> ctx, A.tprim "time"
| A.LKey _ -> ctx, A.tprim "key"
end
| A.EColl(_, A.CList, list) -> typecheck_EColl_CList ctx list
| A.EColl(_, A.CMap, list) -> typecheck_EColl_CMap ctx list
| A.EColl(_, A.CSet, list) -> typecheck_EColl_CSet ctx list
| A.EId(_, id) ->
let scheme = Ctx.scheme_of_evar ctx id in
ctx, Ctx.instantiate_scheme scheme
| A.ELambda(_, p_prm, t_prm, e_res) -> typecheck_ELambda ctx expr p_prm t_prm e_res
| A.ELet(_, id, t_id, e0, e1) -> typecheck_ELetIn ctx id t_id e0 e1
| A.EApp(_, f, arg) -> typecheck_EApp ctx f arg
| A.ETypeAnnot(_, e, t) -> let ctx, te = typecheck_expr ctx e in Ctx.unify ctx te t
| A.ETuple(_, list) -> typecheck_ETuple ctx list
| A.ESequence(_, list) -> typecheck_ESequence ctx list
| A.ETupleGet(_, e, n) -> typecheck_ETupleGet ctx e n
| A.EProduct(_, pairs) -> typecheck_EProduct ctx pairs
| A.EProductGet(_, e, tag) -> typecheck_EProductGet ctx e tag
| A.EProductSet(_, e0, tag, e1) -> typecheck_EProductSet ctx e0 tag e1
| A.EStoreSet(_, v, e0, e1) -> typecheck_EStoreSet ctx v e0 e1
| A.ESum(_, tag, e) -> typecheck_ESum ctx tag e
| A.ESumCase(_, e, cases) -> typecheck_ESumCase ctx e cases
| A.EBinOp(loc, a, op, b) -> typecheck_EBinOp ctx loc a op b
| A.EUnOp(_, op, a) -> typecheck_EUnOp ctx op a
in
let t = Ctx.expand_type ctx t in
let ctx = Ctx.save_type expr t ctx in
if !_DEBUG_ then begin
decr debug_indent;
print_endline (String.make (2 * !debug_indent) ' '^"Result "^P.string_of_expr expr^" :: "^P.string_of_type t);
end;
ctx, t
and typecheck_EColl_CList ctx elts =
let fold (ctx, t0) elt =
let ctx, t1 = typecheck_expr ctx elt in
TODO order ?
in
let ctx, elt_type = List.fold_left fold (ctx, A.fresh_tvar ~prefix:"elt" ()) elts in
ctx, A.TApp(A.noloc, "list", [elt_type])
and typecheck_EColl_CMap ctx elts =
let rec split (klist, vlist) = function
| k :: v :: rest -> split (k :: klist, v :: vlist) rest
| [] -> (klist, vlist)
| [_] -> assert false
in
let klist, vlist = split ([], []) elts in
let ctx, types = list_fold_map typecheck_expr ctx elts in
let ctx, types = list_fold_map typecheck_expr ctx elts in
let fold (ctx, t0) elt =
let ctx, t1 = typecheck_expr ctx elt in
TODO order ?
in
let ctx, k_type = List.fold_left fold (ctx, A.fresh_tvar ~prefix:"key" ()) klist in
let ctx, v_type = List.fold_left fold (ctx, A.fresh_tvar ~prefix:"val" ()) vlist in
ctx, A.TApp(A.noloc, "map", [k_type; v_type])
and typecheck_EColl_CSet ctx elts =
let fold (ctx, t0) elt =
let ctx, t1 = typecheck_expr ctx elt in
TODO order ?
in
let ctx, elt_type = List.fold_left fold (ctx, A.fresh_tvar ~prefix:"elt" ()) elts in
ctx, A.TApp(A.noloc, "set", [elt_type])
and typecheck_ELambda ctx l p_prm t_prm e_res =
TODO forbid global vars shadowing ?
(* Type e supposing that id has type t_arg. *)
let cmb = (* Combinator or closure? *)
let prm = A.pattern_binds_list p_prm in
let globals = Standard_ctx.globals in
let free_evars = A.get_free_evars ~except:(prm@globals) e_res in
A.M.is_empty free_evars in
let ctx, bmrk = push_pattern_bindings ctx p_prm t_prm in
let ctx, t_res = typecheck_expr ctx e_res in
let ctx = Ctx.pop_evars bmrk ctx in
let tlambda = A.TLambda(A.noloc, t_prm, t_res, cmb) in
ctx, tlambda
and typecheck_ELetIn ctx pattern sp e0 e1 =
TODO forbid global vars shadowing ?
if fst sp <> [] then unsupported "Polymorphic types";
let ctx, t0 = typecheck_expr ctx e0 in
let ctx, t0 = Ctx.unify ctx t0 (snd sp) in
let ctx, bmrk = push_pattern_bindings ctx pattern (snd sp) in
(* TODO let-gen: tvars in bookmarked evars which don't occur anywhere else
* in `ctx` can be generalized in these evars' type schemes. *)
let ctx, t1 = typecheck_expr ctx e1 in
let ctx = Ctx.pop_evars bmrk ctx in
ctx, t1
and typecheck_ETuple ctx list =
let ctx, types = list_fold_map typecheck_expr ctx list in
ctx, A.TTuple(A.noloc, types)
and typecheck_ESequence ctx list =
let rlist = List.rev list in
let last = List.hd rlist in
let but_last = List.rev (List.tl rlist) in
let fold ctx e =
let ctx, t = typecheck_expr ctx e in
let ctx, _ = Ctx.unify ctx t A.tunit in
ctx in
let ctx = List.fold_left fold ctx but_last in
typecheck_expr ctx last
and typecheck_EApp ctx f arg =
let ctx, t_f = typecheck_expr ctx f in
let ctx, t_arg = typecheck_expr ctx arg in
let t_prm, t_res = match t_f with
| A.TLambda(_, t_prm, t_res, _) -> t_prm, t_res
| _ -> type_error (A.loc_of_expr f) "Applying a non-function" in
let ctx, _ = Ctx.unify ctx t_arg t_prm in
ctx, t_res
and typecheck_ETupleGet ctx e n =
let ctx, t_e = typecheck_expr ctx e in
begin match t_e with
| A.TTuple(_, types) ->
begin try ctx, List.nth types n
with Failure _ -> type_error (A.loc_of_expr e) "Out of tuple index" end
| _ -> type_error (A.loc_of_expr e) "Not a tuple"
end
and typecheck_EProduct ctx e_pairs =
let tag0 = fst (List.hd e_pairs) in
let name = Ctx.name_of_product_tag ctx tag0 in
let t_result, t_items = Ctx.instantiate_composite name (Ctx.product_of_name ctx name) in
let f ctx (tag, e) =
let ctx, t = typecheck_expr ctx e in
let ctx, t = Ctx.unify ctx t (List.assoc tag t_items) in
ctx, (tag, t) in
let ctx, t_pairs = list_fold_map f ctx e_pairs in
ctx, t_result
and typecheck_ESumCase ctx e e_cases =
let tag0, _ = List.hd e_cases in
let name = try Ctx.name_of_sum_tag ctx tag0
with Not_found -> type_error (A.loc_of_expr e) (tag0^" is not a sum tag") in
let t_sum, case_types = Ctx.instantiate_composite name (Ctx.sum_of_name ctx name) in
let ctx, t_e = typecheck_expr ctx e in
let ctx, _ = Ctx.unify ctx t_e t_sum in
TODO check that declaration and case domains are equal .
let ctx, t_pairs = list_fold_map
(fun ctx (tag, (p, e)) ->
TODO forbid global vars shadowing ?
let t = List.assoc tag case_types in
let ctx, bmrk = push_pattern_bindings ctx p t in
let ctx, t = typecheck_expr ctx e in
let ctx = Ctx.pop_evars bmrk ctx in
ctx, (tag, t))
ctx e_cases in
let ctx, t = List.fold_left
TODO order ?
(ctx, snd(List.hd t_pairs)) (List.tl t_pairs) in
ctx, t
and typecheck_EProductGet ctx e_product tag =
let name = try Ctx.name_of_product_tag ctx tag
with Not_found -> type_error (A.loc_of_expr e_product) (tag^" is not a product tag") in
let t_product0, field_types = Ctx.instantiate_composite name (Ctx.product_of_name ctx name) in
let ctx, t_product1 = typecheck_expr ctx e_product in
let ctx, _ = Ctx.unify ctx t_product1 t_product0 in
let t = List.assoc tag field_types in
ctx, t
and typecheck_EProductSet ctx e_product tag e_field =
let name = try Ctx.name_of_product_tag ctx tag
with Not_found -> type_error (A.loc_of_expr e_product) (tag^" is not a product tag") in
let t_product0, field_types = Ctx.instantiate_composite name (Ctx.product_of_name ctx name) in
let ctx, t_product1 = typecheck_expr ctx e_product in
let ctx, t_product2 = Ctx.unify ctx t_product1 t_product0 in
let t_field0 = List.assoc tag field_types in
let ctx, t_field1 = typecheck_expr ctx e_field in
let ctx, _ = Ctx.unify ctx t_field1 t_field0 in
ctx, t_product2
and typecheck_EStoreSet ctx v e_field e =
let _, field_types = Ctx.instantiate_composite "@" (Ctx.product_of_name ctx "@") in
let t_field0 = List.assoc v field_types in
let ctx, t_field1 = typecheck_expr ctx e_field in
let ctx, _ = Ctx.unify ctx t_field1 t_field0 in
typecheck_expr ctx e
and typecheck_ESum ctx tag e =
let name = try Ctx.name_of_sum_tag ctx tag
with Not_found -> type_error (A.loc_of_expr e) (tag^" is not a sum tag") in
let t_sum, case_types = Ctx.instantiate_composite name (Ctx.sum_of_name ctx name) in
let ctx, t_e = typecheck_expr ctx e in
let ctx, _ = Ctx.unify ctx t_e (List.assoc tag case_types) in
ctx, t_sum
and typecheck_EBinOp ctx loc a op b =
let prims_in candidates responses = List.for_all (fun t-> List.mem t responses) candidates in
let p n = A.TApp(A.noloc, n, []) in
let ctx, ta = typecheck_expr ctx a in
let ctx, tb = typecheck_expr ctx b in
let error op = type_error loc ("Cannot "^op^" "^P.string_of_type ta^" and "^P.string_of_type tb) in
match op with
| A.BConcat ->
let ctx, _ = Ctx.unify ctx ta (p "string") in
let ctx, _ = Ctx.unify ctx tb (p "string") in
ctx, A.TApp(A.noloc, "string", [])
| A.BAdd ->
(* nat² -> nat | (nat|int)² -> int | nat time -> time | tez² -> tez *)
begin match ta, tb with
| A.TApp(_, "nat", []), A.TApp(_, "nat", []) -> ctx, p "nat"
| A.TApp(_, t0, []), A.TApp(_, t1, []) when prims_in [t0; t1] ["int"; "nat"] -> ctx, p "int"
TODO should n't this be time int->time instead ?
| A.TApp(_, "nat", []), A.TApp(_, "time", []) | A.TApp(_, "time", []), A.TApp(_, "nat", []) -> ctx, p "time"
| A.TApp(_, "tez", []), A.TApp(_, "tez", []) -> ctx, p "tez"
| A.TId(_, id), A.TApp(_, "nat", []) | A.TApp(_, "nat", []), A.TId(_, id) ->
type_error loc ("Need more type annotation to determine wether addition is "^
"(nat, int) -> int, (nat, nat) -> nat or (nat, time) -> time.")
(* let ctx, _ = Ctx.unify ctx (A.TId(_, id)) (p "int") in ctx, p "int" *)
| (A.TId _ as tid), A.TApp(_, "int", []) | A.TApp(_, "int", []), (A.TId _ as tid) ->
let ctx, _ = Ctx.unify ctx tid (p "int") in ctx, p "int"
| (A.TId _ as tid), A.TApp(_, "tez", []) | A.TApp(_, "tez", []), (A.TId _ as tid) ->
let ctx, _ = Ctx.unify ctx tid (p "tez") in ctx, p "tez"
| (A.TId _ as tid), A.TApp(_, "time", []) | A.TApp(_, "time", []), (A.TId _ as tid) ->
let ctx, _ = Ctx.unify ctx tid (p "nat") in ctx, p "nat"
| A.TId(_, id0), A.TId(_, id1) ->
type_error loc ("Need more type annotation to determine addition type.")
| _ -> error "add"
end
| A.BSub ->
(* (int|nat)² -> int | tez² -> tez *)
begin match ta, tb with
| A.TApp(_, t0, []), A.TApp(_, t1, []) when prims_in [t0; t1] ["int"; "nat"] -> ctx, p "int"
| A.TApp(_, "tez", []), A.TApp(_, "tez", []) -> ctx, p "tez"
| (A.TId _ as tid), A.TApp(_, t, []) | A.TApp(_, t, []), (A.TId _ as tid) when prims_in [t] ["nat"; "int"] ->
let ctx, _ = Ctx.unify ctx tid (p "int") in ctx, p "int"
| (A.TId _ as tid), A.TApp(_, "tez", []) | A.TApp(_, "tez", []), (A.TId _ as tid) ->
let ctx, _ = Ctx.unify ctx tid (p "tez") in ctx, p "tez"
| A.TId(_, id0), A.TId(_, id1) ->
type_error loc ("Need more annotations to determine substraction type.")
(* let ctx, _ = Ctx.unify ctx ta (p "int") in
let ctx, _ = Ctx.unify ctx tb (p "int") in
ctx, p "int" *)
| _ -> error "substract"
end
| A.BMul ->
nat² - > nat | ( int|nat)² - > int | tez
begin match ta, tb with
| A.TApp(_, "nat", []), A.TApp(_, "nat", []) -> ctx, p "nat"
| A.TApp(_, t0, []), A.TApp(_, t1, []) when prims_in [t0; t1] ["int"; "nat"] -> ctx, p "int"
| A.TApp(_, "tez", []), A.TApp(_, "nat", []) | A.TApp(_, "nat", []), A.TApp(_, "tez", []) -> ctx, p "tez"
| A.TId(_, id), A.TApp(_, "nat", []) | A.TApp(_, "nat", []), A.TId(_, id) ->
type_error loc ("Need more type annotation to determine wether multiplication is "^
"(nat, int) -> int, (nat, nat) -> nat or (nat, tez) -> tez.")
(* let ctx, _ = Ctx.unify ctx (A.TId(_, id)) (p "int") in ctx, p "int" *)
| (A.TId _ as tid), A.TApp(_, "int", []) | A.TApp(_, "int", []), (A.TId _ as tid) ->
let ctx, _ = Ctx.unify ctx tid (p "int") in ctx, p "int"
| (A.TId _ as tid), A.TApp(_, "tez", []) | A.TApp(_, "tez", []), (A.TId _ as tid) ->
let ctx, _ = Ctx.unify ctx tid (p "nat") in ctx, p "tez"
| A.TId(_, id0), A.TId(_, id1) ->
type_error loc ("Need more annotations to determine multiplication type.")
(* let ctx, _ = Ctx.unify ctx ta (p "int") in
let ctx, _ = Ctx.unify ctx tb (p "int") in
ctx, p "int" *)
| _ -> error "multiply"
end
| A.BDiv ->
nat² - > option ( nat*nat ) | ( nat|int)² - > option(int*nat )
| tez nat - > option(tez*tez ) | tez tez - > option(nat*tez )
| tez nat -> option(tez*tez) | tez tez -> option(nat*tez) *)
let op x y = A.TApp(A.noloc, "option", [A.TTuple(A.noloc, [A.TApp(A.noloc, x, []); A.TApp(A.noloc, y, [])])]) in
begin match ta, tb with
| A.TApp(_, "nat", []), A.TApp(_, "nat", []) -> ctx, op "nat" "nat"
| A.TApp(_, t0, []), A.TApp(_, t1, []) when prims_in [t0; t1] ["int"; "nat"] -> ctx, op "int" "nat"
| A.TApp(_, "tez", []), A.TApp(_, "nat", []) -> ctx, op "tez" "tez"
| A.TApp(_, "tez", []), A.TApp(_, "tez", []) -> ctx, op "nat" "tez"
| (A.TId _ as tid), A.TApp(_, t, []) | A.TApp(_, t, []), (A.TId _ as tid) when prims_in [t] ["int"; "nat"] ->
let ctx, _ = Ctx.unify ctx tid (p "int") in ctx, op "int" "nat"
| (A.TId _ as tid), A.TApp(_, "tez", []) ->
let ctx, _ = Ctx.unify ctx tid (p "tez") in ctx, op "nat" "tez"
` t1 ` Could be either tez or nat ; let 's arbitrarily pick
let ctx, _ = Ctx.unify ctx tid (p "nat") in ctx, op "tez" "tez"
| A.TId(_, id0), A.TId(_, id1) ->
let ctx, _ = Ctx.unify ctx ta (p "int") in
let ctx, _ = Ctx.unify ctx tb (p "int") in
ctx, p "int"
| _ -> error "divide"
end
| A.BEq | A.BNeq | A.BLt | A.BLe | A.BGt | A.BGe ->
(* a² -> bool *)
let ctx, _ = Ctx.unify ctx ta tb in ctx, p "bool"
| A.BOr | A.BAnd | A.BXor ->
(* bool² -> bool | nat² -> nat *)
begin match ta, tb with
| A.TApp(_, "bool", []), A.TApp(_, "bool", []) -> ctx, p "bool"
| A.TApp(_, "nat", []), A.TApp(_, "nat", []) -> ctx, p "nat"
| A.TId(_, id), A.TApp(_, t, []) | A.TApp(_, t, []), A.TId(_, id) when prims_in [t] ["nat"; "bool"] ->
let ctx, _ = Ctx.unify ctx ta tb in ctx, p t
have to choose arbitrarily between bool and
let ctx, _ = Ctx.unify ctx ta (p "bool") in
let ctx, _ = Ctx.unify ctx tb (p "bool") in
ctx, p "bool"
| _ -> error "apply logical operator"
end
| A.BLsl | A.BLsr ->
(* nat² -> nat *)
begin match ta, tb with
| A.TApp(_, "nat", []), A.TApp(_, "nat", []) -> ctx, p "nat"
| A.TId(_, id), A.TApp(_, "nat", []) | A.TApp(_, "nat", []), A.TId(_, id) ->
let ctx, _ = Ctx.unify ctx ta tb in ctx, p "nat"
have to choose arbitrarily between bool and
let ctx, _ = Ctx.unify ctx ta (p "nat") in
let ctx, _ = Ctx.unify ctx tb (p "nat") in
ctx, p "nat"
| _ -> error "bit-shift"
end
and typecheck_EUnOp ctx op a =
let p n = A.TApp(A.noloc, n, []) in
let ctx, ta = typecheck_expr ctx a in
match op with
| A.UAbs ->
(* int -> nat *)
begin match ta with
| A.TApp(_, "int", []) -> ctx, p "nat"
| A.TApp(_, "nat", []) -> type_error (A.loc_of_expr a) "no point in getting the absolute val of a nat"
| A.TId(_, id) -> let ctx, _ = Ctx.unify ctx ta (p "int") in ctx, p "nat"
| _ -> type_error (A.loc_of_expr a) "Cannot get abs of that"
end
| A.UNot ->
(* bool -> bool | (nat|int) -> int *)
begin match ta with
| A.TApp(_, "int", []) | A.TApp(_, "nat", []) -> ctx, p "int"
| A.TApp(_, "bool", []) -> ctx, p "bool"
| A.TId(_, id) -> let ctx, _ = Ctx.unify ctx ta (p "bool") in ctx, p "bool"
| _ -> type_error (A.loc_of_expr a) "Cannot get opposite of that"
end
| A.UNeg ->
(* (nat|int) -> int *)
begin match ta with
| A.TApp(_, "int", []) | A.TApp(_, "nat", []) -> ctx, p "int"
| A.TId(_, id) -> let ctx, _ = Ctx.unify ctx ta (p "int") in ctx, p "int"
| _ -> type_error (A.loc_of_expr a) "Cannot get the negation of that"
end
let typecheck_decl ctx = function
| A.DPrim(_, var, params) -> Ctx.add_prim var params ctx
| A.DAlias(_, var, params, t) -> Ctx.add_alias var (params, t) ctx
| A.DProduct(_, var, params, cases) -> Ctx.add_product var params cases ctx
| A.DSum(_, var, params, cases) -> Ctx.add_sum var params cases ctx
let typecheck_store (tag, etype, init) (ctx, fields, inits) =
if List.mem_assoc tag fields then unsound("Storage field "^tag^" redefined");
let ctx, inits = match inits, init with
| None, _ | _, None -> ctx, None
| Some inits, Some init ->
let ctx, t_init = typecheck_expr ctx init in
let ctx, _ = Ctx.unify ctx etype t_init in
ctx, Some ((tag, init)::inits)
in
(ctx, (tag, etype)::fields, inits)
let check_contract_calls expr =
let rec forbidden list where =
if List.exists f list
then unsupported ("Contract calls forbidden in "^where)
else false
and f = function
| A.ELit _ | A.EId _ -> false
| A.EProductGet(_, e, _) | A.ESum(_, _, e) | A.EUnOp(_, _, e) | A.ETypeAnnot(_, e, _) -> f e
| A.ESumCase(_, e, list) -> List.exists (fun (v, (_, e)) -> v<>"call-contract" && f e) list
| A.ESequence(_, list) -> List.exists f list
| A.EColl(_, _, list) -> forbidden list "collections"
| A.ELambda(_, A.PId "call-contract", _, _) -> false
| A.ELambda(_, _, _, e) -> forbidden [e] "functions"
| A.EApp(_, e0, e1) -> forbidden [e0; e1] "function applications"
| A.EBinOp(_, e0, _, e1) -> forbidden [e0; e1] "binary operators"
| A.EProductSet(_, e0, _, e1) -> forbidden [e0; e1] "product updates"
| A.EStoreSet(_, _, e0, e1) -> forbidden [e0; e1] "stored field updates"
| A.ETuple(_, list) -> forbidden list "tuples"
| A.EProduct(_, list) -> forbidden (List.map snd list) "product types"
| A.ETupleGet(_, e, _) -> f e
| A.ELet(_, A.PId "call-contract", _, _, _) -> false
| A.ELet(_, _, _, e0, e1) -> f e0 || f e1
in let _ = f expr in
()
let check_store_set expr =
let rec forbidden list where =
if List.exists f list
then unsupported ("Storage updates forbidden in "^where)
else false
and f = function
| A.ELit _ | A.EId _ -> false
| A.EProductGet(_, e, _) | A.ESum(_, _, e) | A.EUnOp(_, _, e) | A.ETypeAnnot(_, e, _) -> f e
| A.ESumCase(_, e, list) -> f e || List.exists (fun (v, (_, e)) -> f e) list
| A.ESequence(_, list) -> List.exists f list
| A.EColl(_, _, list) -> forbidden list "collections"
| A.ELambda(_, _, _, e) -> forbidden [e] "functions"
| A.EApp(_, e0, e1) -> forbidden [e0; e1] "function applications"
| A.EBinOp(_, e0, _, e1) -> forbidden [e0; e1] "binary operators"
| A.EProductSet(_, e0, _, e1) -> forbidden [e0; e1] "product updates"
| A.EStoreSet(_, _, e0, e1) -> forbidden [e0; e1] " surrounding updates"
| A.ETuple(_, list) -> forbidden list "tuples"
| A.ETupleGet(_, e, _) -> f e
| A.EProduct(_, list) -> forbidden (List.map snd list) "product types"
| A.ELet(_, A.PId"call-contract", _, _, _) -> false
| A.ELet(_, _, _, e0, e1) -> f e0 || f e1
in let _ = f expr in
()
let typecheck_contract ctx (type_declarations, storage_fields, code) =
TODO is the arity of A.TApp ( ) type properly checked ?
(* Incorporate type declarations in the context. *)
let ctx = List.fold_left typecheck_decl ctx type_declarations in
(* Turn store declarations into a sum declaration and product. *)
let ctx, store_fields, init_fields = List.fold_right typecheck_store storage_fields (ctx, [], Some []) in
let ctx = match store_fields with
| [] -> let ctx = Ctx.add_alias "@" ([], A.tunit) ctx in
Ctx.add_evar "@" ([], A.tunit) ctx
| _ -> let ctx = Ctx.add_product "@" [] store_fields ctx in
Ctx.add_evar "@" ([], A.tprim "@") ctx in
let ctx, storage_init = match init_fields with None -> ctx, None | Some fields ->
(* The expression must be typechecked, in order to be registered for Ctx.retrive_type. *)
let e = if fields=[] then A.eunit else A.EProduct(A.noloc, fields) in
let ctx, _ = typecheck_expr ctx e in
ctx, Some e
in
(* Compile the code itself *)
let ctx, t_code = typecheck_expr ctx code in
let t_prm, t_res = match t_code with
(* type will be combinator or closure depending on whether `@` is used in it. *)
| A.TLambda(_, t_prm, t_res, _) -> t_prm, t_res
| _ -> type_error A.noloc
("Bad contract type "^String_of_ast.string_of_type t_code)
in
let t_store = Ctx.expand_type ctx (A.tid "@") in
let ctx = Ctx.add_evar "@" ([], A.TApp(A.noloc, "@", [])) ctx in
begin match code with
| A.ELambda(_, _, _, res) -> check_contract_calls res; check_store_set res;
| _ -> unsupported "Contract code must be a litteral lambda"
end;
(* Check for unresolved polymorphism. *)
TODO reassociate TId with their EId . reverse lookup in ctx ? Or
* just reference them when first met in Typecheck .
* just reference them when first met in Typecheck. *)
TODO Tolerate the parameter to be untypable , and make it a unit .
let f_code = A.get_free_tvars t_code in
if f_code <> [] then type_error
(A.loc_of_expr code)
("Unresolved types "^String.concat ", " f_code^
" in code type: "^P.string_of_type t_code^"; add type annotations.");
let f_store = A.get_free_tvars t_store in
if f_store <> [] then type_error
(A.loc_of_expr code)
("Unresolved types "^String.concat ", " f_store^
" in storage type: "^P.string_of_type t_code^"; add type annotations.");
TODO migrate contract - call and EStoreSet checks here .
{ ctx = ctx;
storage_type = t_store;
param_type = t_prm;
result_type = t_res;
storage_init = storage_init;
code = code }
| null | https://raw.githubusercontent.com/fab13n/lamtez/ec0aab3093ca8380a4cd364f21cf763d729de25f/typecheck.ml | ocaml | Translates a pattern, and the type it matches, into type constraints
* added to `ctx`;
* Pushes the pattern variables into ctx' expression variables;
* Keeps a bookmark to allow the removal of those evars.
Assume that etype has the corresponding product's type,
* and that every field has the corresponding field type
Type e supposing that id has type t_arg.
Combinator or closure?
TODO let-gen: tvars in bookmarked evars which don't occur anywhere else
* in `ctx` can be generalized in these evars' type schemes.
nat² -> nat | (nat|int)² -> int | nat time -> time | tez² -> tez
let ctx, _ = Ctx.unify ctx (A.TId(_, id)) (p "int") in ctx, p "int"
(int|nat)² -> int | tez² -> tez
let ctx, _ = Ctx.unify ctx ta (p "int") in
let ctx, _ = Ctx.unify ctx tb (p "int") in
ctx, p "int"
let ctx, _ = Ctx.unify ctx (A.TId(_, id)) (p "int") in ctx, p "int"
let ctx, _ = Ctx.unify ctx ta (p "int") in
let ctx, _ = Ctx.unify ctx tb (p "int") in
ctx, p "int"
a² -> bool
bool² -> bool | nat² -> nat
nat² -> nat
int -> nat
bool -> bool | (nat|int) -> int
(nat|int) -> int
Incorporate type declarations in the context.
Turn store declarations into a sum declaration and product.
The expression must be typechecked, in order to be registered for Ctx.retrive_type.
Compile the code itself
type will be combinator or closure depending on whether `@` is used in it.
Check for unresolved polymorphism. | open Utils
module Ctx = Typecheck_ctx
module A = Ast
module P = String_of_ast
module StringSet = Set.Make(String)
type typed_contract = {
ctx: Ctx.t;
storage_type: A.etype;
param_type: A.etype;
result_type: A.etype;
storage_init: A.expr option;
code: A.expr }
let _DEBUG_ = ref false
let debug_indent = ref 0
let rec push_pattern_bindings ctx pattern etype : (Ctx.t*Ctx.bookmark) =
match pattern with
| A.PId id ->
let scheme = ([], etype) in
Ctx.push_evars [id, scheme] ctx
| A.PAny -> ctx, Ctx.bookmark_empty
| A.PTuple plist ->
let tlist = List.map (fun _ -> A.fresh_tvar ~prefix:"tuple" ()) plist in
let ctx, _ = Ctx.unify ctx etype (A.ttuple tlist) in
let fold (ctx, bmrk) p t =
let ctx, bmrk' = push_pattern_bindings ctx p t in
ctx, bmrk @ bmrk' in
List.fold_left2 fold (ctx, Ctx.bookmark_empty) plist tlist
| A.PProduct tagged_pattern_list ->
let pname = Ctx.name_of_product_tag ctx (fst @@ List.hd tagged_pattern_list) in
let tprod, tagged_type_list = Ctx.instantiate_composite pname @@ Ctx.product_of_name ctx pname in
let ctx, _ = Ctx.unify ctx etype tprod in
let fold (ctx, bmrk) (tag, pattern) =
let etype = List.assoc tag tagged_type_list in
let ctx, bmrk' = push_pattern_bindings ctx pattern etype in
ctx, bmrk@bmrk' in
List.fold_left fold (ctx, Ctx.bookmark_empty) tagged_pattern_list
let rec typecheck_expr ctx expr =
if !_DEBUG_ then begin
print_endline (String.make (2 * !debug_indent) ' '^"Typing "^P.string_of_expr expr);
incr debug_indent
end;
let ctx, t = match expr with
| A.ELit(_, c) -> begin match c with
| A.LNat _ -> ctx, A.tprim "nat"
| A.LInt _ -> ctx, A.tprim "int"
| A.LString _ -> ctx, A.tprim "string"
| A.LTez _ -> ctx, A.tprim "tez"
| A.LSig _ -> ctx, A.tprim "sig"
| A.LTime _ -> ctx, A.tprim "time"
| A.LKey _ -> ctx, A.tprim "key"
end
| A.EColl(_, A.CList, list) -> typecheck_EColl_CList ctx list
| A.EColl(_, A.CMap, list) -> typecheck_EColl_CMap ctx list
| A.EColl(_, A.CSet, list) -> typecheck_EColl_CSet ctx list
| A.EId(_, id) ->
let scheme = Ctx.scheme_of_evar ctx id in
ctx, Ctx.instantiate_scheme scheme
| A.ELambda(_, p_prm, t_prm, e_res) -> typecheck_ELambda ctx expr p_prm t_prm e_res
| A.ELet(_, id, t_id, e0, e1) -> typecheck_ELetIn ctx id t_id e0 e1
| A.EApp(_, f, arg) -> typecheck_EApp ctx f arg
| A.ETypeAnnot(_, e, t) -> let ctx, te = typecheck_expr ctx e in Ctx.unify ctx te t
| A.ETuple(_, list) -> typecheck_ETuple ctx list
| A.ESequence(_, list) -> typecheck_ESequence ctx list
| A.ETupleGet(_, e, n) -> typecheck_ETupleGet ctx e n
| A.EProduct(_, pairs) -> typecheck_EProduct ctx pairs
| A.EProductGet(_, e, tag) -> typecheck_EProductGet ctx e tag
| A.EProductSet(_, e0, tag, e1) -> typecheck_EProductSet ctx e0 tag e1
| A.EStoreSet(_, v, e0, e1) -> typecheck_EStoreSet ctx v e0 e1
| A.ESum(_, tag, e) -> typecheck_ESum ctx tag e
| A.ESumCase(_, e, cases) -> typecheck_ESumCase ctx e cases
| A.EBinOp(loc, a, op, b) -> typecheck_EBinOp ctx loc a op b
| A.EUnOp(_, op, a) -> typecheck_EUnOp ctx op a
in
let t = Ctx.expand_type ctx t in
let ctx = Ctx.save_type expr t ctx in
if !_DEBUG_ then begin
decr debug_indent;
print_endline (String.make (2 * !debug_indent) ' '^"Result "^P.string_of_expr expr^" :: "^P.string_of_type t);
end;
ctx, t
and typecheck_EColl_CList ctx elts =
let fold (ctx, t0) elt =
let ctx, t1 = typecheck_expr ctx elt in
TODO order ?
in
let ctx, elt_type = List.fold_left fold (ctx, A.fresh_tvar ~prefix:"elt" ()) elts in
ctx, A.TApp(A.noloc, "list", [elt_type])
and typecheck_EColl_CMap ctx elts =
let rec split (klist, vlist) = function
| k :: v :: rest -> split (k :: klist, v :: vlist) rest
| [] -> (klist, vlist)
| [_] -> assert false
in
let klist, vlist = split ([], []) elts in
let ctx, types = list_fold_map typecheck_expr ctx elts in
let ctx, types = list_fold_map typecheck_expr ctx elts in
let fold (ctx, t0) elt =
let ctx, t1 = typecheck_expr ctx elt in
TODO order ?
in
let ctx, k_type = List.fold_left fold (ctx, A.fresh_tvar ~prefix:"key" ()) klist in
let ctx, v_type = List.fold_left fold (ctx, A.fresh_tvar ~prefix:"val" ()) vlist in
ctx, A.TApp(A.noloc, "map", [k_type; v_type])
and typecheck_EColl_CSet ctx elts =
let fold (ctx, t0) elt =
let ctx, t1 = typecheck_expr ctx elt in
TODO order ?
in
let ctx, elt_type = List.fold_left fold (ctx, A.fresh_tvar ~prefix:"elt" ()) elts in
ctx, A.TApp(A.noloc, "set", [elt_type])
and typecheck_ELambda ctx l p_prm t_prm e_res =
TODO forbid global vars shadowing ?
let prm = A.pattern_binds_list p_prm in
let globals = Standard_ctx.globals in
let free_evars = A.get_free_evars ~except:(prm@globals) e_res in
A.M.is_empty free_evars in
let ctx, bmrk = push_pattern_bindings ctx p_prm t_prm in
let ctx, t_res = typecheck_expr ctx e_res in
let ctx = Ctx.pop_evars bmrk ctx in
let tlambda = A.TLambda(A.noloc, t_prm, t_res, cmb) in
ctx, tlambda
and typecheck_ELetIn ctx pattern sp e0 e1 =
TODO forbid global vars shadowing ?
if fst sp <> [] then unsupported "Polymorphic types";
let ctx, t0 = typecheck_expr ctx e0 in
let ctx, t0 = Ctx.unify ctx t0 (snd sp) in
let ctx, bmrk = push_pattern_bindings ctx pattern (snd sp) in
let ctx, t1 = typecheck_expr ctx e1 in
let ctx = Ctx.pop_evars bmrk ctx in
ctx, t1
and typecheck_ETuple ctx list =
let ctx, types = list_fold_map typecheck_expr ctx list in
ctx, A.TTuple(A.noloc, types)
and typecheck_ESequence ctx list =
let rlist = List.rev list in
let last = List.hd rlist in
let but_last = List.rev (List.tl rlist) in
let fold ctx e =
let ctx, t = typecheck_expr ctx e in
let ctx, _ = Ctx.unify ctx t A.tunit in
ctx in
let ctx = List.fold_left fold ctx but_last in
typecheck_expr ctx last
and typecheck_EApp ctx f arg =
let ctx, t_f = typecheck_expr ctx f in
let ctx, t_arg = typecheck_expr ctx arg in
let t_prm, t_res = match t_f with
| A.TLambda(_, t_prm, t_res, _) -> t_prm, t_res
| _ -> type_error (A.loc_of_expr f) "Applying a non-function" in
let ctx, _ = Ctx.unify ctx t_arg t_prm in
ctx, t_res
and typecheck_ETupleGet ctx e n =
let ctx, t_e = typecheck_expr ctx e in
begin match t_e with
| A.TTuple(_, types) ->
begin try ctx, List.nth types n
with Failure _ -> type_error (A.loc_of_expr e) "Out of tuple index" end
| _ -> type_error (A.loc_of_expr e) "Not a tuple"
end
and typecheck_EProduct ctx e_pairs =
let tag0 = fst (List.hd e_pairs) in
let name = Ctx.name_of_product_tag ctx tag0 in
let t_result, t_items = Ctx.instantiate_composite name (Ctx.product_of_name ctx name) in
let f ctx (tag, e) =
let ctx, t = typecheck_expr ctx e in
let ctx, t = Ctx.unify ctx t (List.assoc tag t_items) in
ctx, (tag, t) in
let ctx, t_pairs = list_fold_map f ctx e_pairs in
ctx, t_result
and typecheck_ESumCase ctx e e_cases =
let tag0, _ = List.hd e_cases in
let name = try Ctx.name_of_sum_tag ctx tag0
with Not_found -> type_error (A.loc_of_expr e) (tag0^" is not a sum tag") in
let t_sum, case_types = Ctx.instantiate_composite name (Ctx.sum_of_name ctx name) in
let ctx, t_e = typecheck_expr ctx e in
let ctx, _ = Ctx.unify ctx t_e t_sum in
TODO check that declaration and case domains are equal .
let ctx, t_pairs = list_fold_map
(fun ctx (tag, (p, e)) ->
TODO forbid global vars shadowing ?
let t = List.assoc tag case_types in
let ctx, bmrk = push_pattern_bindings ctx p t in
let ctx, t = typecheck_expr ctx e in
let ctx = Ctx.pop_evars bmrk ctx in
ctx, (tag, t))
ctx e_cases in
let ctx, t = List.fold_left
TODO order ?
(ctx, snd(List.hd t_pairs)) (List.tl t_pairs) in
ctx, t
and typecheck_EProductGet ctx e_product tag =
let name = try Ctx.name_of_product_tag ctx tag
with Not_found -> type_error (A.loc_of_expr e_product) (tag^" is not a product tag") in
let t_product0, field_types = Ctx.instantiate_composite name (Ctx.product_of_name ctx name) in
let ctx, t_product1 = typecheck_expr ctx e_product in
let ctx, _ = Ctx.unify ctx t_product1 t_product0 in
let t = List.assoc tag field_types in
ctx, t
and typecheck_EProductSet ctx e_product tag e_field =
let name = try Ctx.name_of_product_tag ctx tag
with Not_found -> type_error (A.loc_of_expr e_product) (tag^" is not a product tag") in
let t_product0, field_types = Ctx.instantiate_composite name (Ctx.product_of_name ctx name) in
let ctx, t_product1 = typecheck_expr ctx e_product in
let ctx, t_product2 = Ctx.unify ctx t_product1 t_product0 in
let t_field0 = List.assoc tag field_types in
let ctx, t_field1 = typecheck_expr ctx e_field in
let ctx, _ = Ctx.unify ctx t_field1 t_field0 in
ctx, t_product2
and typecheck_EStoreSet ctx v e_field e =
let _, field_types = Ctx.instantiate_composite "@" (Ctx.product_of_name ctx "@") in
let t_field0 = List.assoc v field_types in
let ctx, t_field1 = typecheck_expr ctx e_field in
let ctx, _ = Ctx.unify ctx t_field1 t_field0 in
typecheck_expr ctx e
and typecheck_ESum ctx tag e =
let name = try Ctx.name_of_sum_tag ctx tag
with Not_found -> type_error (A.loc_of_expr e) (tag^" is not a sum tag") in
let t_sum, case_types = Ctx.instantiate_composite name (Ctx.sum_of_name ctx name) in
let ctx, t_e = typecheck_expr ctx e in
let ctx, _ = Ctx.unify ctx t_e (List.assoc tag case_types) in
ctx, t_sum
and typecheck_EBinOp ctx loc a op b =
let prims_in candidates responses = List.for_all (fun t-> List.mem t responses) candidates in
let p n = A.TApp(A.noloc, n, []) in
let ctx, ta = typecheck_expr ctx a in
let ctx, tb = typecheck_expr ctx b in
let error op = type_error loc ("Cannot "^op^" "^P.string_of_type ta^" and "^P.string_of_type tb) in
match op with
| A.BConcat ->
let ctx, _ = Ctx.unify ctx ta (p "string") in
let ctx, _ = Ctx.unify ctx tb (p "string") in
ctx, A.TApp(A.noloc, "string", [])
| A.BAdd ->
begin match ta, tb with
| A.TApp(_, "nat", []), A.TApp(_, "nat", []) -> ctx, p "nat"
| A.TApp(_, t0, []), A.TApp(_, t1, []) when prims_in [t0; t1] ["int"; "nat"] -> ctx, p "int"
TODO should n't this be time int->time instead ?
| A.TApp(_, "nat", []), A.TApp(_, "time", []) | A.TApp(_, "time", []), A.TApp(_, "nat", []) -> ctx, p "time"
| A.TApp(_, "tez", []), A.TApp(_, "tez", []) -> ctx, p "tez"
| A.TId(_, id), A.TApp(_, "nat", []) | A.TApp(_, "nat", []), A.TId(_, id) ->
type_error loc ("Need more type annotation to determine wether addition is "^
"(nat, int) -> int, (nat, nat) -> nat or (nat, time) -> time.")
| (A.TId _ as tid), A.TApp(_, "int", []) | A.TApp(_, "int", []), (A.TId _ as tid) ->
let ctx, _ = Ctx.unify ctx tid (p "int") in ctx, p "int"
| (A.TId _ as tid), A.TApp(_, "tez", []) | A.TApp(_, "tez", []), (A.TId _ as tid) ->
let ctx, _ = Ctx.unify ctx tid (p "tez") in ctx, p "tez"
| (A.TId _ as tid), A.TApp(_, "time", []) | A.TApp(_, "time", []), (A.TId _ as tid) ->
let ctx, _ = Ctx.unify ctx tid (p "nat") in ctx, p "nat"
| A.TId(_, id0), A.TId(_, id1) ->
type_error loc ("Need more type annotation to determine addition type.")
| _ -> error "add"
end
| A.BSub ->
begin match ta, tb with
| A.TApp(_, t0, []), A.TApp(_, t1, []) when prims_in [t0; t1] ["int"; "nat"] -> ctx, p "int"
| A.TApp(_, "tez", []), A.TApp(_, "tez", []) -> ctx, p "tez"
| (A.TId _ as tid), A.TApp(_, t, []) | A.TApp(_, t, []), (A.TId _ as tid) when prims_in [t] ["nat"; "int"] ->
let ctx, _ = Ctx.unify ctx tid (p "int") in ctx, p "int"
| (A.TId _ as tid), A.TApp(_, "tez", []) | A.TApp(_, "tez", []), (A.TId _ as tid) ->
let ctx, _ = Ctx.unify ctx tid (p "tez") in ctx, p "tez"
| A.TId(_, id0), A.TId(_, id1) ->
type_error loc ("Need more annotations to determine substraction type.")
| _ -> error "substract"
end
| A.BMul ->
nat² - > nat | ( int|nat)² - > int | tez
begin match ta, tb with
| A.TApp(_, "nat", []), A.TApp(_, "nat", []) -> ctx, p "nat"
| A.TApp(_, t0, []), A.TApp(_, t1, []) when prims_in [t0; t1] ["int"; "nat"] -> ctx, p "int"
| A.TApp(_, "tez", []), A.TApp(_, "nat", []) | A.TApp(_, "nat", []), A.TApp(_, "tez", []) -> ctx, p "tez"
| A.TId(_, id), A.TApp(_, "nat", []) | A.TApp(_, "nat", []), A.TId(_, id) ->
type_error loc ("Need more type annotation to determine wether multiplication is "^
"(nat, int) -> int, (nat, nat) -> nat or (nat, tez) -> tez.")
| (A.TId _ as tid), A.TApp(_, "int", []) | A.TApp(_, "int", []), (A.TId _ as tid) ->
let ctx, _ = Ctx.unify ctx tid (p "int") in ctx, p "int"
| (A.TId _ as tid), A.TApp(_, "tez", []) | A.TApp(_, "tez", []), (A.TId _ as tid) ->
let ctx, _ = Ctx.unify ctx tid (p "nat") in ctx, p "tez"
| A.TId(_, id0), A.TId(_, id1) ->
type_error loc ("Need more annotations to determine multiplication type.")
| _ -> error "multiply"
end
| A.BDiv ->
nat² - > option ( nat*nat ) | ( nat|int)² - > option(int*nat )
| tez nat - > option(tez*tez ) | tez tez - > option(nat*tez )
| tez nat -> option(tez*tez) | tez tez -> option(nat*tez) *)
let op x y = A.TApp(A.noloc, "option", [A.TTuple(A.noloc, [A.TApp(A.noloc, x, []); A.TApp(A.noloc, y, [])])]) in
begin match ta, tb with
| A.TApp(_, "nat", []), A.TApp(_, "nat", []) -> ctx, op "nat" "nat"
| A.TApp(_, t0, []), A.TApp(_, t1, []) when prims_in [t0; t1] ["int"; "nat"] -> ctx, op "int" "nat"
| A.TApp(_, "tez", []), A.TApp(_, "nat", []) -> ctx, op "tez" "tez"
| A.TApp(_, "tez", []), A.TApp(_, "tez", []) -> ctx, op "nat" "tez"
| (A.TId _ as tid), A.TApp(_, t, []) | A.TApp(_, t, []), (A.TId _ as tid) when prims_in [t] ["int"; "nat"] ->
let ctx, _ = Ctx.unify ctx tid (p "int") in ctx, op "int" "nat"
| (A.TId _ as tid), A.TApp(_, "tez", []) ->
let ctx, _ = Ctx.unify ctx tid (p "tez") in ctx, op "nat" "tez"
` t1 ` Could be either tez or nat ; let 's arbitrarily pick
let ctx, _ = Ctx.unify ctx tid (p "nat") in ctx, op "tez" "tez"
| A.TId(_, id0), A.TId(_, id1) ->
let ctx, _ = Ctx.unify ctx ta (p "int") in
let ctx, _ = Ctx.unify ctx tb (p "int") in
ctx, p "int"
| _ -> error "divide"
end
| A.BEq | A.BNeq | A.BLt | A.BLe | A.BGt | A.BGe ->
let ctx, _ = Ctx.unify ctx ta tb in ctx, p "bool"
| A.BOr | A.BAnd | A.BXor ->
begin match ta, tb with
| A.TApp(_, "bool", []), A.TApp(_, "bool", []) -> ctx, p "bool"
| A.TApp(_, "nat", []), A.TApp(_, "nat", []) -> ctx, p "nat"
| A.TId(_, id), A.TApp(_, t, []) | A.TApp(_, t, []), A.TId(_, id) when prims_in [t] ["nat"; "bool"] ->
let ctx, _ = Ctx.unify ctx ta tb in ctx, p t
have to choose arbitrarily between bool and
let ctx, _ = Ctx.unify ctx ta (p "bool") in
let ctx, _ = Ctx.unify ctx tb (p "bool") in
ctx, p "bool"
| _ -> error "apply logical operator"
end
| A.BLsl | A.BLsr ->
begin match ta, tb with
| A.TApp(_, "nat", []), A.TApp(_, "nat", []) -> ctx, p "nat"
| A.TId(_, id), A.TApp(_, "nat", []) | A.TApp(_, "nat", []), A.TId(_, id) ->
let ctx, _ = Ctx.unify ctx ta tb in ctx, p "nat"
have to choose arbitrarily between bool and
let ctx, _ = Ctx.unify ctx ta (p "nat") in
let ctx, _ = Ctx.unify ctx tb (p "nat") in
ctx, p "nat"
| _ -> error "bit-shift"
end
and typecheck_EUnOp ctx op a =
let p n = A.TApp(A.noloc, n, []) in
let ctx, ta = typecheck_expr ctx a in
match op with
| A.UAbs ->
begin match ta with
| A.TApp(_, "int", []) -> ctx, p "nat"
| A.TApp(_, "nat", []) -> type_error (A.loc_of_expr a) "no point in getting the absolute val of a nat"
| A.TId(_, id) -> let ctx, _ = Ctx.unify ctx ta (p "int") in ctx, p "nat"
| _ -> type_error (A.loc_of_expr a) "Cannot get abs of that"
end
| A.UNot ->
begin match ta with
| A.TApp(_, "int", []) | A.TApp(_, "nat", []) -> ctx, p "int"
| A.TApp(_, "bool", []) -> ctx, p "bool"
| A.TId(_, id) -> let ctx, _ = Ctx.unify ctx ta (p "bool") in ctx, p "bool"
| _ -> type_error (A.loc_of_expr a) "Cannot get opposite of that"
end
| A.UNeg ->
begin match ta with
| A.TApp(_, "int", []) | A.TApp(_, "nat", []) -> ctx, p "int"
| A.TId(_, id) -> let ctx, _ = Ctx.unify ctx ta (p "int") in ctx, p "int"
| _ -> type_error (A.loc_of_expr a) "Cannot get the negation of that"
end
let typecheck_decl ctx = function
| A.DPrim(_, var, params) -> Ctx.add_prim var params ctx
| A.DAlias(_, var, params, t) -> Ctx.add_alias var (params, t) ctx
| A.DProduct(_, var, params, cases) -> Ctx.add_product var params cases ctx
| A.DSum(_, var, params, cases) -> Ctx.add_sum var params cases ctx
let typecheck_store (tag, etype, init) (ctx, fields, inits) =
if List.mem_assoc tag fields then unsound("Storage field "^tag^" redefined");
let ctx, inits = match inits, init with
| None, _ | _, None -> ctx, None
| Some inits, Some init ->
let ctx, t_init = typecheck_expr ctx init in
let ctx, _ = Ctx.unify ctx etype t_init in
ctx, Some ((tag, init)::inits)
in
(ctx, (tag, etype)::fields, inits)
let check_contract_calls expr =
let rec forbidden list where =
if List.exists f list
then unsupported ("Contract calls forbidden in "^where)
else false
and f = function
| A.ELit _ | A.EId _ -> false
| A.EProductGet(_, e, _) | A.ESum(_, _, e) | A.EUnOp(_, _, e) | A.ETypeAnnot(_, e, _) -> f e
| A.ESumCase(_, e, list) -> List.exists (fun (v, (_, e)) -> v<>"call-contract" && f e) list
| A.ESequence(_, list) -> List.exists f list
| A.EColl(_, _, list) -> forbidden list "collections"
| A.ELambda(_, A.PId "call-contract", _, _) -> false
| A.ELambda(_, _, _, e) -> forbidden [e] "functions"
| A.EApp(_, e0, e1) -> forbidden [e0; e1] "function applications"
| A.EBinOp(_, e0, _, e1) -> forbidden [e0; e1] "binary operators"
| A.EProductSet(_, e0, _, e1) -> forbidden [e0; e1] "product updates"
| A.EStoreSet(_, _, e0, e1) -> forbidden [e0; e1] "stored field updates"
| A.ETuple(_, list) -> forbidden list "tuples"
| A.EProduct(_, list) -> forbidden (List.map snd list) "product types"
| A.ETupleGet(_, e, _) -> f e
| A.ELet(_, A.PId "call-contract", _, _, _) -> false
| A.ELet(_, _, _, e0, e1) -> f e0 || f e1
in let _ = f expr in
()
let check_store_set expr =
let rec forbidden list where =
if List.exists f list
then unsupported ("Storage updates forbidden in "^where)
else false
and f = function
| A.ELit _ | A.EId _ -> false
| A.EProductGet(_, e, _) | A.ESum(_, _, e) | A.EUnOp(_, _, e) | A.ETypeAnnot(_, e, _) -> f e
| A.ESumCase(_, e, list) -> f e || List.exists (fun (v, (_, e)) -> f e) list
| A.ESequence(_, list) -> List.exists f list
| A.EColl(_, _, list) -> forbidden list "collections"
| A.ELambda(_, _, _, e) -> forbidden [e] "functions"
| A.EApp(_, e0, e1) -> forbidden [e0; e1] "function applications"
| A.EBinOp(_, e0, _, e1) -> forbidden [e0; e1] "binary operators"
| A.EProductSet(_, e0, _, e1) -> forbidden [e0; e1] "product updates"
| A.EStoreSet(_, _, e0, e1) -> forbidden [e0; e1] " surrounding updates"
| A.ETuple(_, list) -> forbidden list "tuples"
| A.ETupleGet(_, e, _) -> f e
| A.EProduct(_, list) -> forbidden (List.map snd list) "product types"
| A.ELet(_, A.PId"call-contract", _, _, _) -> false
| A.ELet(_, _, _, e0, e1) -> f e0 || f e1
in let _ = f expr in
()
let typecheck_contract ctx (type_declarations, storage_fields, code) =
TODO is the arity of A.TApp ( ) type properly checked ?
let ctx = List.fold_left typecheck_decl ctx type_declarations in
let ctx, store_fields, init_fields = List.fold_right typecheck_store storage_fields (ctx, [], Some []) in
let ctx = match store_fields with
| [] -> let ctx = Ctx.add_alias "@" ([], A.tunit) ctx in
Ctx.add_evar "@" ([], A.tunit) ctx
| _ -> let ctx = Ctx.add_product "@" [] store_fields ctx in
Ctx.add_evar "@" ([], A.tprim "@") ctx in
let ctx, storage_init = match init_fields with None -> ctx, None | Some fields ->
let e = if fields=[] then A.eunit else A.EProduct(A.noloc, fields) in
let ctx, _ = typecheck_expr ctx e in
ctx, Some e
in
let ctx, t_code = typecheck_expr ctx code in
let t_prm, t_res = match t_code with
| A.TLambda(_, t_prm, t_res, _) -> t_prm, t_res
| _ -> type_error A.noloc
("Bad contract type "^String_of_ast.string_of_type t_code)
in
let t_store = Ctx.expand_type ctx (A.tid "@") in
let ctx = Ctx.add_evar "@" ([], A.TApp(A.noloc, "@", [])) ctx in
begin match code with
| A.ELambda(_, _, _, res) -> check_contract_calls res; check_store_set res;
| _ -> unsupported "Contract code must be a litteral lambda"
end;
TODO reassociate TId with their EId . reverse lookup in ctx ? Or
* just reference them when first met in Typecheck .
* just reference them when first met in Typecheck. *)
TODO Tolerate the parameter to be untypable , and make it a unit .
let f_code = A.get_free_tvars t_code in
if f_code <> [] then type_error
(A.loc_of_expr code)
("Unresolved types "^String.concat ", " f_code^
" in code type: "^P.string_of_type t_code^"; add type annotations.");
let f_store = A.get_free_tvars t_store in
if f_store <> [] then type_error
(A.loc_of_expr code)
("Unresolved types "^String.concat ", " f_store^
" in storage type: "^P.string_of_type t_code^"; add type annotations.");
TODO migrate contract - call and EStoreSet checks here .
{ ctx = ctx;
storage_type = t_store;
param_type = t_prm;
result_type = t_res;
storage_init = storage_init;
code = code }
|
e1c6ff87113cae73399aa222ec7b57db1178bd7e805c091d374ee9543ed94efc | brawnski/git-annex | Uninit.hs | git - annex command
-
- Copyright 2010 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2010 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Command.Uninit where
import Control.Monad.State (liftIO)
import System.Directory
import System.Exit
import Command
import Messages
import Types
import Utility
import qualified Git
import qualified Annex
import qualified Command.Unannex
import qualified Command.Init
import qualified Branch
import Content
import Locations
command :: [Command]
command = [repoCommand "uninit" paramPath seek
"de-initialize git-annex and clean out repository"]
seek :: [CommandSeek]
seek = [withFilesInGit startUnannex, withNothing start]
startUnannex :: CommandStartString
startUnannex file = do
Force fast mode before running unannex . This way , if multiple
files link to a key , it will be left in the annex and hardlinked
-- to by each.
Annex.changeState $ \s -> s { Annex.fast = True }
Command.Unannex.start file
start :: CommandStartNothing
start = next perform
perform :: CommandPerform
perform = next cleanup
cleanup :: CommandCleanup
cleanup = do
g <- Annex.gitRepo
gitPreCommitHookUnWrite g
mapM_ removeAnnex =<< getKeysPresent
liftIO $ removeDirectoryRecursive (gitAnnexDir g)
-- avoid normal shutdown
saveState
liftIO $ do
Git.run g "branch" [Param "-D", Param Branch.name]
exitSuccess
gitPreCommitHookUnWrite :: Git.Repo -> Annex ()
gitPreCommitHookUnWrite repo = do
let hook = Command.Init.preCommitHook repo
whenM (liftIO $ doesFileExist hook) $ do
c <- liftIO $ readFile hook
if c == Command.Init.preCommitScript
then liftIO $ removeFile hook
else warning $ "pre-commit hook (" ++ hook ++
") contents modified; not deleting." ++
" Edit it to remove call to git annex."
| null | https://raw.githubusercontent.com/brawnski/git-annex/8b847517a810d384a79178124b9766141b89bc17/Command/Uninit.hs | haskell | to by each.
avoid normal shutdown | git - annex command
-
- Copyright 2010 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2010 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Command.Uninit where
import Control.Monad.State (liftIO)
import System.Directory
import System.Exit
import Command
import Messages
import Types
import Utility
import qualified Git
import qualified Annex
import qualified Command.Unannex
import qualified Command.Init
import qualified Branch
import Content
import Locations
command :: [Command]
command = [repoCommand "uninit" paramPath seek
"de-initialize git-annex and clean out repository"]
seek :: [CommandSeek]
seek = [withFilesInGit startUnannex, withNothing start]
startUnannex :: CommandStartString
startUnannex file = do
Force fast mode before running unannex . This way , if multiple
files link to a key , it will be left in the annex and hardlinked
Annex.changeState $ \s -> s { Annex.fast = True }
Command.Unannex.start file
start :: CommandStartNothing
start = next perform
perform :: CommandPerform
perform = next cleanup
cleanup :: CommandCleanup
cleanup = do
g <- Annex.gitRepo
gitPreCommitHookUnWrite g
mapM_ removeAnnex =<< getKeysPresent
liftIO $ removeDirectoryRecursive (gitAnnexDir g)
saveState
liftIO $ do
Git.run g "branch" [Param "-D", Param Branch.name]
exitSuccess
gitPreCommitHookUnWrite :: Git.Repo -> Annex ()
gitPreCommitHookUnWrite repo = do
let hook = Command.Init.preCommitHook repo
whenM (liftIO $ doesFileExist hook) $ do
c <- liftIO $ readFile hook
if c == Command.Init.preCommitScript
then liftIO $ removeFile hook
else warning $ "pre-commit hook (" ++ hook ++
") contents modified; not deleting." ++
" Edit it to remove call to git annex."
|
cd3b39ff1f696041de583c11c26409dc446e653df6a8980d17779b25c29d8c3d | logicmoo/wam_common_lisp | system.lsp | ;;;
;;; **********************************************************************
( c ) Copyright , 1993 . All rights reserved .
;;; **********************************************************************
;;;
A simple minded System Builder Tool .
;;;
;;; ----------------------------------------------------------------------
;;; Use:
;;;
( defsystem name
;;;
;;; :modules
;;; -----------------------------------------------------------------
;;; file | load | compile | files which force
| environment | environment | recompilations of
;;; -----------------------------------------------------------------
` ( , @patches
;;; (pkg ,(car patches) ,(car patches) ())
;;; (macros (pkg macros) (pkg macros) ())
;;; (low (pkg walk) (pkg macros) (macros))
;;; (,xxx-low (low) (macros low) (low))
;;; (boot (,xxx) (macros low ,xxx) (low ,xxx))
;;; (last t t (boot)))
;;;
;;; If the value specified for :directory is a CONS, then the CAR is
;;; used as the source file directory and the CDR is used as the binary
;;; file directory.
;;; :directory "/usr/src/"
;;;
;;; :pathname-types ("lisp" . "bin"))
;;;
;;; ----------------------------------------------------------------------
(in-package 'sbt :use '(lisp))
(export '(defsystem
build-system
compile-system
load-system))
(defmacro defsystem (name &key modules directory pathname-types)
`(defparameter ,name ; rather then defvar
(make-system :name ',name
:modules ,modules
:source-directory ,directory
:pathname-types ,pathname-types)))
;;; ----------------------------------------------------------------------
(defstruct (system (:type vector) :named)
name
modules
source-directory
(pathname-types '("lsp" "o")))
(defun make-source-pathname (name system)
(make-pathname-internal name system :source))
(defun make-binary-pathname (name system)
(make-pathname-internal name system :binary))
(defun make-pathname-internal (name system type)
(let* ((extension (ecase type
(:SOURCE (car (system-pathname-types system)))
(:BINARY (cdr (system-pathname-types system)))))
(directory (etypecase (system-source-directory system)
(STRING (pathname (system-source-directory system)))
(CONS (ecase type
(:SOURCE (pathname
(car (system-source-directory system))))
(:BINARY (pathname
(cdr (system-source-directory system))))))))
(pathname
(make-pathname
:name #-VMS (string-downcase (string name))
#+VMS (string-downcase (substitute #\_ #\- (string name)))
:type extension
:defaults directory)))
pathname))
;;; ----------------------------------------------------------------------
;;; Operations on modules
;;;
(defstruct (module (:TYPE vector) :NAMED
(:CONSTRUCTOR make-module (name))
; (:PRINT-FUNCTION
; (lambda (m s d)
; (declare (ignore d))
; (format s "#<Module ~A>" (module-name m))))
)
name
load-env
comp-env
recomp-reasons)
(defun make-modules (system-description)
(let ((modules ()))
(labels ((get-module (name)
(or (find name modules :key #'module-name)
(progn (setq modules (cons (make-module name) modules))
(car modules))))
(parse-spec (spec)
(if (eq spec 't)
(reverse (cdr modules))
(mapcar #'get-module spec))))
(dolist (file system-description)
(let* ((name (car file))
(module (get-module name)))
(setf (module-load-env module) (parse-spec (second file))
(module-comp-env module) (parse-spec (third file))
(module-recomp-reasons module) (parse-spec (fourth file))))))
(reverse modules)))
(defun make-transformations (system filter make-transform)
(let ((transforms (list nil)))
(dolist (m (make-modules (system-modules system)))
(when (funcall filter system m transforms)
(funcall make-transform m transforms)))
(nreverse (cdr transforms))))
(defun make-compile-transformation (module transforms)
(unless (dolist (trans transforms)
(and (eq (car trans) :COMPILE)
(eq (second trans) module)
(return trans)))
(dolist (c (module-comp-env module))
(make-load-transformation c transforms))
(push `(:COMPILE ,module) (cdr transforms))))
(defun make-load-transformation (module transforms)
(unless (dolist (trans transforms)
(when (eq (second trans) module)
(case (car trans)
(:COMPILE (return nil))
(:LOAD (return trans)))))
(dolist (l (module-load-env module))
(make-load-transformation l transforms))
(push `(:LOAD ,module) (cdr transforms))))
(defun make-load-without-dependencies-transformation (module transforms)
(unless (dolist (trans transforms)
(and (eq (car trans) :LOAD)
(eq (second trans) module)
(return trans)))
(push `(:LOAD ,module) (cdr transforms))))
(defun compile-filter (system module transforms)
(or (dolist (r (module-recomp-reasons module))
(when (dolist (transform transforms)
(when (and (eq (car transform) :COMPILE)
(eq (second transform) r))
(return t)))
(return t)))
(null (probe-file (make-binary-pathname (module-name module) system)))
(> (file-write-date (make-source-pathname (module-name module) system))
(file-write-date (make-binary-pathname (module-name module) system)))))
(defun operate-on-system (system mode &optional arg print-only)
(let ((transformations ()))
(flet ((load-module (m s)
(let ((name (module-name m))
(*load-verbose* nil))
(if (or (eq mode :source)
(dolist (trans transformations)
(and (eq (first trans) :compile)
(eq (second trans) m)
(return trans))))
(progn (format t "~&Loading source of ~A..." name)
(unless print-only
(load (make-source-pathname name s))))
(progn (format t "~&Loading binary of ~A..." name)
(unless print-only
(load (make-binary-pathname name s)))))))
(compile-module (m s)
(format t "~&Compiling ~A..." (module-name m))
(unless print-only
(let ((name (module-name m)))
(compile-file (make-source-pathname name s)
:output-file (make-binary-pathname name s)))))
(true (&rest ignore) (declare (ignore ignore)) 't))
(setq transformations
(ecase mode
(:COMPILE
(make-transformations system
#'compile-filter
#'make-compile-transformation))
(:RECOMPILE
(make-transformations system
#'true
#'make-compile-transformation))
(:QUERY-COMPILE
(make-transformations system
#'(lambda (s m transforms)
(or (compile-filter s m transforms)
(y-or-n-p "Compile ~A?"
(module-name m))))
#'make-compile-transformation))
(:COMPILE-FROM
(make-transformations system
#'(lambda (s m transforms)
(or (member (module-name m) arg)
(compile-filter s m transforms)))
#'make-compile-transformation))
((:LOAD :SOURCE)
(make-transformations system
#'true
#'make-load-transformation))
(:QUERY-LOAD
(make-transformations system
#'(lambda (s m transforms)
(declare (ignore s transforms))
(y-or-n-p "Load ~A?" (module-name m)))
#'make-load-without-dependencies-transformation))))
(loop (when (null transformations) (return t))
(let ((transform (pop transformations)))
(ecase (first transform)
(:COMPILE (compile-module (second transform) system))
(:LOAD (load-module (second transform) system))))))))
(defun compile-system (system &optional m)
  ;; Compile SYSTEM.  The optional argument M selects the mode:
  ;;   NIL              - compile only what is out of date
  ;;   T                - force recompilation of every module
  ;;   :PRINT           - dry run: show what would be compiled
  ;;   :QUERY           - ask before compiling each module
  ;;   a symbol or list - recompile starting from the named module(s)
  ;; Any other value returns NIL without doing anything.
  (case m
    ((nil)    (operate-on-system system :COMPILE))
    ((t)      (operate-on-system system :RECOMPILE))
    ((:PRINT) (operate-on-system system :COMPILE () t))
    ((:QUERY) (operate-on-system system :QUERY-COMPILE))
    (otherwise
     (cond ((symbolp m) (operate-on-system system :COMPILE-FROM (list m)))
           ((listp m)   (operate-on-system system :COMPILE-FROM m))))))
(defun load-system (system &optional mode)
  ;; Load SYSTEM.  MODE is NIL (load binaries, the default), :SOURCE
  ;; (load source files), or :QUERY-LOAD (ask before loading each
  ;; module).  Any other MODE is a no-op and returns NIL.
  (cond ((null mode)           (operate-on-system system :LOAD))
        ((eq mode :SOURCE)     (operate-on-system system :SOURCE))
        ((eq mode :QUERY-LOAD) (operate-on-system system :QUERY-LOAD))))
;;;----------------------------------------------------------------------
;;; User interface
;; User-level entry point.  Expands into a call to LOAD-SYSTEM or
;; COMPILE-SYSTEM according to OP (:LOAD, :COMPILE, :PRINT; anything
;; else means plain load) and MODE (:QUERY, :SOURCE, :FORCE).
;; OP and MODE are inspected at macroexpansion time, so they must be
;; literal keywords, not expressions.
(defmacro build-system (system &optional op mode)
  (case op
    (:LOAD
     ;; MODE other than :QUERY/:SOURCE expands to NIL => default load.
     `(load-system ,system ,(case mode
                              (:QUERY :QUERY-LOAD)
                              (:SOURCE :SOURCE))))
    (:COMPILE
     `(compile-system ,system ,(case mode
                                 (:QUERY :QUERY-COMPILE)
                                 (:FORCE :RECOMPILE))))
    (:PRINT
     `(compile-system ,system :PRINT))
    (otherwise
     `(load-system ,system))))
| null | https://raw.githubusercontent.com/logicmoo/wam_common_lisp/4396d9e26b050f68182d65c9a2d5a939557616dd/prolog/wam_cl/src/util/system.lsp | lisp |
**********************************************************************
**********************************************************************
----------------------------------------------------------------------
Use:
:modules
-----------------------------------------------------------------
file | load | compile | files which force
-----------------------------------------------------------------
(pkg ,(car patches) ,(car patches) ())
(macros (pkg macros) (pkg macros) ())
(low (pkg walk) (pkg macros) (macros))
(,xxx-low (low) (macros low) (low))
(boot (,xxx) (macros low ,xxx) (low ,xxx))
(last t t (boot)))
If the value specified for :directory is a CONS, then the CAR is
used as the source file directory and the CDR is used as the binary
file directory.
:directory "/usr/src/"
:pathname-types ("lisp" . "bin"))
----------------------------------------------------------------------
rather then defvar
----------------------------------------------------------------------
----------------------------------------------------------------------
Operations on modules
(:PRINT-FUNCTION
(lambda (m s d)
(declare (ignore d))
(format s "#<Module ~A>" (module-name m))))
----------------------------------------------------------------------
User interface | ( c ) Copyright , 1993 . All rights reserved .
A simple minded System Builder Tool .
( defsystem name
| environment | environment | recompilations of
` ( , @patches
(in-package 'sbt :use '(lisp))
(export '(defsystem
build-system
compile-system
load-system))
(defmacro defsystem (name &key modules directory pathname-types)
(make-system :name ',name
:modules ,modules
:source-directory ,directory
:pathname-types ,pathname-types)))
(defstruct (system (:type vector) :named)
name
modules
source-directory
(pathname-types '("lsp" "o")))
(defun make-source-pathname (name system)
(make-pathname-internal name system :source))
(defun make-binary-pathname (name system)
(make-pathname-internal name system :binary))
(defun make-pathname-internal (name system type)
(let* ((extension (ecase type
(:SOURCE (car (system-pathname-types system)))
(:BINARY (cdr (system-pathname-types system)))))
(directory (etypecase (system-source-directory system)
(STRING (pathname (system-source-directory system)))
(CONS (ecase type
(:SOURCE (pathname
(car (system-source-directory system))))
(:BINARY (pathname
(cdr (system-source-directory system))))))))
(pathname
(make-pathname
:name #-VMS (string-downcase (string name))
#+VMS (string-downcase (substitute #\_ #\- (string name)))
:type extension
:defaults directory)))
pathname))
(defstruct (module (:TYPE vector) :NAMED
(:CONSTRUCTOR make-module (name))
)
name
load-env
comp-env
recomp-reasons)
(defun make-modules (system-description)
(let ((modules ()))
(labels ((get-module (name)
(or (find name modules :key #'module-name)
(progn (setq modules (cons (make-module name) modules))
(car modules))))
(parse-spec (spec)
(if (eq spec 't)
(reverse (cdr modules))
(mapcar #'get-module spec))))
(dolist (file system-description)
(let* ((name (car file))
(module (get-module name)))
(setf (module-load-env module) (parse-spec (second file))
(module-comp-env module) (parse-spec (third file))
(module-recomp-reasons module) (parse-spec (fourth file))))))
(reverse modules)))
(defun make-transformations (system filter make-transform)
(let ((transforms (list nil)))
(dolist (m (make-modules (system-modules system)))
(when (funcall filter system m transforms)
(funcall make-transform m transforms)))
(nreverse (cdr transforms))))
(defun make-compile-transformation (module transforms)
(unless (dolist (trans transforms)
(and (eq (car trans) :COMPILE)
(eq (second trans) module)
(return trans)))
(dolist (c (module-comp-env module))
(make-load-transformation c transforms))
(push `(:COMPILE ,module) (cdr transforms))))
(defun make-load-transformation (module transforms)
(unless (dolist (trans transforms)
(when (eq (second trans) module)
(case (car trans)
(:COMPILE (return nil))
(:LOAD (return trans)))))
(dolist (l (module-load-env module))
(make-load-transformation l transforms))
(push `(:LOAD ,module) (cdr transforms))))
(defun make-load-without-dependencies-transformation (module transforms)
(unless (dolist (trans transforms)
(and (eq (car trans) :LOAD)
(eq (second trans) module)
(return trans)))
(push `(:LOAD ,module) (cdr transforms))))
(defun compile-filter (system module transforms)
(or (dolist (r (module-recomp-reasons module))
(when (dolist (transform transforms)
(when (and (eq (car transform) :COMPILE)
(eq (second transform) r))
(return t)))
(return t)))
(null (probe-file (make-binary-pathname (module-name module) system)))
(> (file-write-date (make-source-pathname (module-name module) system))
(file-write-date (make-binary-pathname (module-name module) system)))))
;; Core driver: compute the list of (:COMPILE module) / (:LOAD module)
;; transformations implied by MODE, then execute them in order.
;; ARG is used only by :COMPILE-FROM (a list of module names);
;; PRINT-ONLY suppresses the actual LOAD/COMPILE-FILE calls so that
;; only the progress messages are printed.  Returns T when the plan
;; has been fully executed.
(defun operate-on-system (system mode &optional arg print-only)
  (let ((transformations ()))
    (flet ((load-module (m s)
             ;; Load the source of M when in :SOURCE mode or when M is
             ;; scheduled for compilation later in this run (its old
             ;; binary would be stale); otherwise load the binary.
             (let ((name (module-name m))
                   (*load-verbose* nil))
               (if (or (eq mode :source)
                       (dolist (trans transformations)
                         (and (eq (first trans) :compile)
                              (eq (second trans) m)
                              (return trans))))
                   (progn (format t "~&Loading source of ~A..." name)
                          (unless print-only
                            (load (make-source-pathname name s))))
                   (progn (format t "~&Loading binary of ~A..." name)
                          (unless print-only
                            (load (make-binary-pathname name s)))))))
           (compile-module (m s)
             ;; Compile M's source file into its binary pathname.
             (format t "~&Compiling ~A..." (module-name m))
             (unless print-only
               (let ((name (module-name m)))
                 (compile-file (make-source-pathname name s)
                               :output-file (make-binary-pathname name s)))))
           ;; Filter that accepts every module.
           (true (&rest ignore) (declare (ignore ignore)) 't))
      ;; Build the transformation plan for this MODE.
      (setq transformations
            (ecase mode
              (:COMPILE
               (make-transformations system
                                     #'compile-filter
                                     #'make-compile-transformation))
              (:RECOMPILE
               (make-transformations system
                                     #'true
                                     #'make-compile-transformation))
              (:QUERY-COMPILE
               (make-transformations system
                                     #'(lambda (s m transforms)
                                         (or (compile-filter s m transforms)
                                             (y-or-n-p "Compile ~A?"
                                                       (module-name m))))
                                     #'make-compile-transformation))
              (:COMPILE-FROM
               ;; ARG holds the module names to start recompiling from.
               (make-transformations system
                                     #'(lambda (s m transforms)
                                         (or (member (module-name m) arg)
                                             (compile-filter s m transforms)))
                                     #'make-compile-transformation))
              ((:LOAD :SOURCE)
               (make-transformations system
                                     #'true
                                     #'make-load-transformation))
              (:QUERY-LOAD
               (make-transformations system
                                     #'(lambda (s m transforms)
                                         (declare (ignore s transforms))
                                         (y-or-n-p "Load ~A?" (module-name m)))
                                     #'make-load-without-dependencies-transformation))))
      ;; Execute the plan destructively (POP), so LOAD-MODULE sees only
      ;; the transformations that are still pending.
      (loop (when (null transformations) (return t))
            (let ((transform (pop transformations)))
              (ecase (first transform)
                (:COMPILE (compile-module (second transform) system))
                (:LOAD (load-module (second transform) system))))))))
;; Compile SYSTEM.  M selects the mode: NIL = only what is out of
;; date, T = force everything, :PRINT = dry run, :QUERY = ask per
;; module, a symbol or list = recompile from the named module(s).
;; Clause order matters: T, :PRINT and :QUERY are symbols too, so
;; they must be tested before the generic SYMBOLP clause.
(defun compile-system (system &optional m)
  (cond ((null m) (operate-on-system system :COMPILE))
        ((eq m 't) (operate-on-system system :RECOMPILE))
        ((eq m :PRINT) (operate-on-system system :COMPILE () t))
        ((eq m :QUERY) (operate-on-system system :QUERY-COMPILE))
        ((symbolp m) (operate-on-system system :COMPILE-FROM (list m)))
        ((listp m) (operate-on-system system :COMPILE-FROM m))))
;; Load SYSTEM.  MODE is NIL (load binaries, default), :SOURCE (load
;; source files), or :QUERY-LOAD (ask per module); any other value
;; does nothing and returns NIL.
(defun load-system (system &optional mode)
  (case mode
    ((NIL) (operate-on-system system :LOAD))
    (:SOURCE (operate-on-system system :SOURCE))
    (:QUERY-LOAD (operate-on-system system :QUERY-LOAD))))
;; User interface macro.  OP and MODE are examined at macroexpansion
;; time (so they must be literal keywords) and the form expands into
;; the appropriate LOAD-SYSTEM / COMPILE-SYSTEM call.
(defmacro build-system (system &optional op mode)
  (case op
    (:LOAD
     ;; Unknown MODE expands to NIL, meaning the default binary load.
     `(load-system ,system ,(case mode
                              (:QUERY :QUERY-LOAD)
                              (:SOURCE :SOURCE))))
    (:COMPILE
     `(compile-system ,system ,(case mode
                                 (:QUERY :QUERY-COMPILE)
                                 (:FORCE :RECOMPILE))))
    (:PRINT
     `(compile-system ,system :PRINT))
    (otherwise
     `(load-system ,system))))
|
8ba49a04ae6ad9d2743294db8dbdfe3bbdcbade7f85c4551257426b39c92b753 | philnguyen/soft-contract | length.rkt | #lang racket/base
(require racket/match
soft-contract/fake-contract)
;; Count the pairs along the spine of L: zero for the empty list (or
;; any non-pair tail), otherwise one more than the length of the
;; rest.  Total on all inputs; shadows Racket's built-in `length`.
(define (length l)
  (match l
    [(cons _ rest) (add1 (length rest))]
    [_ 0]))
(provide
(contract-out
[length ((listof any/c) . -> . integer? #:total? #t)]))
| null | https://raw.githubusercontent.com/philnguyen/soft-contract/5e07dc2d622ee80b961f4e8aebd04ce950720239/soft-contract/test/programs/safe/termination/misc/length.rkt | racket | #lang racket/base
(require racket/match
soft-contract/fake-contract)
;; Structural length: 0 for any non-pair (including '()), otherwise
;; one plus the length of the cdr.  Total on all inputs; shadows the
;; built-in `length`.
(define (length l)
  (if (pair? l)
      (+ 1 (length (cdr l)))
      0))
(provide
(contract-out
[length ((listof any/c) . -> . integer? #:total? #t)]))
| |
0597caf3d1434c2aedcc57c6bdbd6aec3bdb10c3d606cd240e4b949a334a71bf | ecraven/r7rs-benchmarks | nboyer.scm | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; File: nboyer.sch
Description : The benchmark
Author :
Created : 5 - Apr-85
Modified : 10 - Apr-85 14:52:20 ( )
22 - Jul-87 ( )
2 - Jul-88 ( -- distinguished # f and the empty list )
13 - Feb-97 ( -- fixed bugs in unifier and rules ,
;; rewrote to eliminate property lists, and added
a scaling parameter suggested by )
19 - Mar-99 ( -- cleaned up comments )
24 - Nov-07 ( -- converted to R6RS )
;; Language: Scheme
;; Status: Public Domain
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
NBOYER -- Logic programming benchmark , originally written by .
;;; Fairly CONS intensive.
Note : The version of this benchmark that appears in book
;; contained several bugs that are corrected here. These bugs are discussed
by , " The Boyer Benchmark Meets Linear Logic " , ACM SIGPLAN Lisp
Pointers 6(4 ) , October - December 1993 , pages 3 - 10 . The fixed bugs are :
;;
;; The benchmark now returns a boolean result.
FALSEP and TRUEP use TERM - MEMBER ? rather than MEMV ( which is called MEMBER
in )
ONE - WAY - UNIFY1 now treats numbers correctly
ONE - WAY - UNIFY1 - LST now treats empty lists correctly
Rule 19 has been corrected ( this rule was not touched by the original
;; benchmark, but is used by this version)
Rules 84 and 101 have been corrected ( but these rules are never touched
;; by the benchmark)
;;
According to , these bug fixes make the benchmark 10 - 25 % slower .
;; Please do not compare the timings from this benchmark against those of
;; the original benchmark.
;;
;; This version of the benchmark also prints the number of rewrites as a sanity
;; check, because it is too easy for a buggy version to return the correct
;; boolean result. The correct number of rewrites is
;;
;; n rewrites peak live storage (approximate, in bytes)
0 95024 520,000
1 591777 2,085,000
;; 2 1813975 5,175,000
3 5375678
4 16445406
5 51507739
Nboyer is a 2 - phase benchmark .
The first phase attaches lemmas to symbols . This phase is not timed ,
;; but it accounts for very little of the runtime anyway.
The second phase creates the test problem , and tests to see
;; whether it is implied by the lemmas.
(import (scheme base) (scheme cxr) (scheme read) (scheme write) (scheme time))
;; Benchmark entry point.  Reads three data in order from the current
;; input port -- the repetition count, the problem-size input, and the
;; expected rewrite count -- then hands the job to the harness.
;; `run-r7rs-benchmark` and `hide` are supplied by the benchmark
;; harness outside this file (assumed interface -- confirm against the
;; harness sources).
(define (run-benchmark)
  (let* ((count (read))
         (input (read))
         (output (read))
         (s2 (number->string count))
         (s1 (number->string input))
         (name "nboyer"))
    (run-r7rs-benchmark
     (string-append name ":" s1 ":" s2)
     count
     (lambda ()
       (setup-boyer)
       (test-boyer alist term (hide count input)))
     ;; Success check: test-boyer returns the rewrite count on a true
     ;; result, #f otherwise; it must equal the expected count.
     (lambda (rewrites)
       (and (number? rewrites) (= rewrites output))))))
;; Variable bindings for the test problem: each propositional
;; variable appearing in `term` abbreviates the term given here.
(define alist
  (quote ((x f (plus (plus a b)
                     (plus c (zero))))
          (y f (times (times a b)
                      (plus c d)))
          (z f (reverse (append (append a b)
                                (nil))))
          (u equal (plus a b)
               (difference x y))
          (w lessp (remainder a b)
               (member a (length b))))))

;; The propositional skeleton to prove: a chain of implications over
;; the variables bound in `alist`.
(define term
  (quote (implies (and (implies x y)
                       (and (implies y z)
                            (and (implies z u)
                                 (implies u w))))
                  (implies x w))))

;; Placeholders; both are re-bound via set! at the end of the big
;; (let () ...) block below, once the real definitions exist.
(define (setup-boyer . args) #t);; assigned below
(define (test-boyer . args) #t);; assigned below
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
The first phase .
;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; In the original benchmark, it stored a list of lemmas on the
;; property lists of symbols.
;; In the new benchmark, it maintains an association list of
;; symbols and symbol-records, and stores the list of lemmas
;; within the symbol-records.
(let ()
(define (setup)
(add-lemma-lst
(quote ((equal (compile form)
(reverse (codegen (optimize form)
(nil))))
(equal (eqp x y)
(equal (fix x)
(fix y)))
(equal (greaterp x y)
(lessp y x))
(equal (lesseqp x y)
(not (lessp y x)))
(equal (greatereqp x y)
(not (lessp x y)))
(equal (boolean x)
(or (equal x (t))
(equal x (f))))
(equal (iff x y)
(and (implies x y)
(implies y x)))
(equal (even1 x)
(if (zerop x)
(t)
(odd (_1- x))))
(equal (countps- l pred)
(countps-loop l pred (zero)))
(equal (fact- i)
(fact-loop i 1))
(equal (reverse- x)
(reverse-loop x (nil)))
(equal (divides x y)
(zerop (remainder y x)))
(equal (assume-true var alist)
(cons (cons var (t))
alist))
(equal (assume-false var alist)
(cons (cons var (f))
alist))
(equal (tautology-checker x)
(tautologyp (normalize x)
(nil)))
(equal (falsify x)
(falsify1 (normalize x)
(nil)))
(equal (prime x)
(and (not (zerop x))
(not (equal x (add1 (zero))))
(prime1 x (_1- x))))
(equal (and p q)
(if p (if q (t)
(f))
(f)))
(equal (or p q)
(if p (t)
(if q (t)
(f))))
(equal (not p)
(if p (f)
(t)))
(equal (implies p q)
(if p (if q (t)
(f))
(t)))
(equal (fix x)
(if (numberp x)
x
(zero)))
(equal (if (if a b c)
d e)
(if a (if b d e)
(if c d e)))
(equal (zerop x)
(or (equal x (zero))
(not (numberp x))))
(equal (plus (plus x y)
z)
(plus x (plus y z)))
(equal (equal (plus a b)
(zero))
(and (zerop a)
(zerop b)))
(equal (difference x x)
(zero))
(equal (equal (plus a b)
(plus a c))
(equal (fix b)
(fix c)))
(equal (equal (zero)
(difference x y))
(not (lessp y x)))
(equal (equal x (difference x y))
(and (numberp x)
(or (equal x (zero))
(zerop y))))
(equal (meaning (plus-tree (append x y))
a)
(plus (meaning (plus-tree x)
a)
(meaning (plus-tree y)
a)))
(equal (meaning (plus-tree (plus-fringe x))
a)
(fix (meaning x a)))
(equal (append (append x y)
z)
(append x (append y z)))
(equal (reverse (append a b))
(append (reverse b)
(reverse a)))
(equal (times x (plus y z))
(plus (times x y)
(times x z)))
(equal (times (times x y)
z)
(times x (times y z)))
(equal (equal (times x y)
(zero))
(or (zerop x)
(zerop y)))
(equal (exec (append x y)
pds envrn)
(exec y (exec x pds envrn)
envrn))
(equal (mc-flatten x y)
(append (flatten x)
y))
(equal (member x (append a b))
(or (member x a)
(member x b)))
(equal (member x (reverse y))
(member x y))
(equal (length (reverse x))
(length x))
(equal (member a (intersect b c))
(and (member a b)
(member a c)))
(equal (nth (zero)
i)
(zero))
(equal (exp i (plus j k))
(times (exp i j)
(exp i k)))
(equal (exp i (times j k))
(exp (exp i j)
k))
(equal (reverse-loop x y)
(append (reverse x)
y))
(equal (reverse-loop x (nil))
(reverse x))
(equal (count-list z (sort-lp x y))
(plus (count-list z x)
(count-list z y)))
(equal (equal (append a b)
(append a c))
(equal b c))
(equal (plus (remainder x y)
(times y (quotient x y)))
(fix x))
(equal (power-eval (big-plus1 l i base)
base)
(plus (power-eval l base)
i))
(equal (power-eval (big-plus x y i base)
base)
(plus i (plus (power-eval x base)
(power-eval y base))))
(equal (remainder y 1)
(zero))
(equal (lessp (remainder x y)
y)
(not (zerop y)))
(equal (remainder x x)
(zero))
(equal (lessp (quotient i j)
i)
(and (not (zerop i))
(or (zerop j)
(not (equal j 1)))))
(equal (lessp (remainder x y)
x)
(and (not (zerop y))
(not (zerop x))
(not (lessp x y))))
(equal (power-eval (power-rep i base)
base)
(fix i))
(equal (power-eval (big-plus (power-rep i base)
(power-rep j base)
(zero)
base)
base)
(plus i j))
(equal (gcd x y)
(gcd y x))
(equal (nth (append a b)
i)
(append (nth a i)
(nth b (difference i (length a)))))
(equal (difference (plus x y)
x)
(fix y))
(equal (difference (plus y x)
x)
(fix y))
(equal (difference (plus x y)
(plus x z))
(difference y z))
(equal (times x (difference c w))
(difference (times c x)
(times w x)))
(equal (remainder (times x z)
z)
(zero))
(equal (difference (plus b (plus a c))
a)
(plus b c))
(equal (difference (add1 (plus y z))
z)
(add1 y))
(equal (lessp (plus x y)
(plus x z))
(lessp y z))
(equal (lessp (times x z)
(times y z))
(and (not (zerop z))
(lessp x y)))
(equal (lessp y (plus x y))
(not (zerop x)))
(equal (gcd (times x z)
(times y z))
(times z (gcd x y)))
(equal (value (normalize x)
a)
(value x a))
(equal (equal (flatten x)
(cons y (nil)))
(and (nlistp x)
(equal x y)))
(equal (listp (gopher x))
(listp x))
(equal (samefringe x y)
(equal (flatten x)
(flatten y)))
(equal (equal (greatest-factor x y)
(zero))
(and (or (zerop y)
(equal y 1))
(equal x (zero))))
(equal (equal (greatest-factor x y)
1)
(equal x 1))
(equal (numberp (greatest-factor x y))
(not (and (or (zerop y)
(equal y 1))
(not (numberp x)))))
(equal (times-list (append x y))
(times (times-list x)
(times-list y)))
(equal (prime-list (append x y))
(and (prime-list x)
(prime-list y)))
(equal (equal z (times w z))
(and (numberp z)
(or (equal z (zero))
(equal w 1))))
(equal (greatereqp x y)
(not (lessp x y)))
(equal (equal x (times x y))
(or (equal x (zero))
(and (numberp x)
(equal y 1))))
(equal (remainder (times y x)
y)
(zero))
(equal (equal (times a b)
1)
(and (not (equal a (zero)))
(not (equal b (zero)))
(numberp a)
(numberp b)
(equal (_1- a)
(zero))
(equal (_1- b)
(zero))))
(equal (lessp (length (delete x l))
(length l))
(member x l))
(equal (sort2 (delete x l))
(delete x (sort2 l)))
(equal (dsort x)
(sort2 x))
(equal (length (cons x1
(cons x2
(cons x3 (cons x4
(cons x5
(cons x6 x7)))))))
(plus 6 (length x7)))
(equal (difference (add1 (add1 x))
2)
(fix x))
(equal (quotient (plus x (plus x y))
2)
(plus x (quotient y 2)))
(equal (sigma (zero)
i)
(quotient (times i (add1 i))
2))
(equal (plus x (add1 y))
(if (numberp y)
(add1 (plus x y))
(add1 x)))
(equal (equal (difference x y)
(difference z y))
(if (lessp x y)
(not (lessp y z))
(if (lessp z y)
(not (lessp y x))
(equal (fix x)
(fix z)))))
(equal (meaning (plus-tree (delete x y))
a)
(if (member x y)
(difference (meaning (plus-tree y)
a)
(meaning x a))
(meaning (plus-tree y)
a)))
(equal (times x (add1 y))
(if (numberp y)
(plus x (times x y))
(fix x)))
(equal (nth (nil)
i)
(if (zerop i)
(nil)
(zero)))
(equal (last (append a b))
(if (listp b)
(last b)
(if (listp a)
(cons (car (last a))
b)
b)))
(equal (equal (lessp x y)
z)
(if (lessp x y)
(equal (t) z)
(equal (f) z)))
(equal (assignment x (append a b))
(if (assignedp x a)
(assignment x a)
(assignment x b)))
(equal (car (gopher x))
(if (listp x)
(car (flatten x))
(zero)))
(equal (flatten (cdr (gopher x)))
(if (listp x)
(cdr (flatten x))
(cons (zero)
(nil))))
(equal (quotient (times y x)
y)
(if (zerop y)
(zero)
(fix x)))
(equal (get j (set i val mem))
(if (eqp j i)
val
(get j mem)))))))
(define (add-lemma-lst lst)
(cond ((null? lst)
#t)
(else (add-lemma (car lst))
(add-lemma-lst (cdr lst)))))
(define (add-lemma term)
(cond ((and (pair? term)
(eq? (car term)
(quote equal))
(pair? (cadr term)))
(put (car (cadr term))
(quote lemmas)
(cons
(translate-term term)
(get (car (cadr term)) (quote lemmas)))))
(else (error #f "ADD-LEMMA did not like term: " term))))
;; Translates a term by replacing its constructor symbols by symbol-records.
(define (translate-term term)
(cond ((not (pair? term))
term)
(else (cons (symbol->symbol-record (car term))
(translate-args (cdr term))))))
(define (translate-args lst)
(cond ((null? lst)
'())
(else (cons (translate-term (car lst))
(translate-args (cdr lst))))))
For debugging only , so the use of MAP does not change
the first - order character of the benchmark .
(define (untranslate-term term)
(cond ((not (pair? term))
term)
(else (cons (get-name (car term))
(map untranslate-term (cdr term))))))
A symbol - record is represented as a vector with two fields :
;; the symbol (for debugging) and
;; the list of lemmas associated with the symbol.
(define (put sym property value)
(put-lemmas! (symbol->symbol-record sym) value))
(define (get sym property)
(get-lemmas (symbol->symbol-record sym)))
(define (symbol->symbol-record sym)
(let ((x (assq sym *symbol-records-alist*)))
(if x
(cdr x)
(let ((r (make-symbol-record sym)))
(set! *symbol-records-alist*
(cons (cons sym r)
*symbol-records-alist*))
r))))
;; Association list of symbols and symbol-records.
(define *symbol-records-alist* '())
A symbol - record is represented as a vector with two fields :
;; the symbol (for debugging) and
;; the list of lemmas associated with the symbol.
;; A symbol-record is a two-slot vector: slot 0 holds the symbol
;; (kept only for debugging) and slot 1 holds the list of lemmas
;; attached to that symbol.

(define (make-symbol-record sym) (vector sym '()))

(define (put-lemmas! record lemmas) (vector-set! record 1 lemmas))

(define (get-lemmas record) (vector-ref record 1))

(define (get-name record) (vector-ref record 0))

;; Records are compared by identity, not structure: two records built
;; from the same symbol are distinct unless they are the same object.
(define (symbol-record-equal? a b) (eq? a b))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
The second phase .
;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; One benchmark run: wrap TERM in n extra layers of (or ... (f)) to
;; scale the problem size, translate it to symbol-record form, apply
;; the variable bindings from ALIST, and check for tautology.
(define (test alist term n)
  (let ((term
         (apply-subst
          (translate-alist alist)
          (translate-term
           ;; The DO loop builds (or (or ... (or term (f)) ...) (f)),
           ;; n layers deep; n = 0 leaves term unchanged.
           (do ((term term (list 'or term '(f)))
                (n n (- n 1)))
               ((zero? n) term))))))
    (tautp term)))
(define (translate-alist alist)
(cond ((null? alist)
'())
(else (cons (cons (caar alist)
(translate-term (cdar alist)))
(translate-alist (cdr alist))))))
;; Apply a substitution (an association list mapping variables to
;; terms, compared with assq) to TERM.  The constructor position (the
;; car of a compound term) is never substituted; only the argument
;; positions are walked.  Unbound atoms are returned unchanged.
(define (apply-subst alist term)
  (if (pair? term)
      (cons (car term)
            (apply-subst-lst alist (cdr term)))
      (let ((binding (assq term alist)))
        (if binding (cdr binding) term))))

;; Map apply-subst over a list of argument terms.
(define (apply-subst-lst alist lst)
  (if (null? lst)
      '()
      (cons (apply-subst alist (car lst))
            (apply-subst-lst alist (cdr lst)))))
(define (tautp x)
(tautologyp (rewrite x)
'() '()))
(define (tautologyp x true-lst false-lst)
(cond ((truep x true-lst)
#t)
((falsep x false-lst)
#f)
((not (pair? x))
#f)
((eq? (car x) if-constructor)
(cond ((truep (cadr x)
true-lst)
(tautologyp (caddr x)
true-lst false-lst))
((falsep (cadr x)
false-lst)
(tautologyp (cadddr x)
true-lst false-lst))
(else (and (tautologyp (caddr x)
(cons (cadr x)
true-lst)
false-lst)
(tautologyp (cadddr x)
true-lst
(cons (cadr x)
false-lst))))))
(else #f)))
(define if-constructor '*);; becomes (symbol->symbol-record 'if)
(define rewrite-count 0);; sanity check
(define (rewrite term)
(set! rewrite-count (+ rewrite-count 1))
(cond ((not (pair? term))
term)
(else (rewrite-with-lemmas (cons (car term)
(rewrite-args (cdr term)))
(get-lemmas (car term))))))
(define (rewrite-args lst)
(cond ((null? lst)
'())
(else (cons (rewrite (car lst))
(rewrite-args (cdr lst))))))
(define (rewrite-with-lemmas term lst)
(cond ((null? lst)
term)
((one-way-unify term (cadr (car lst)))
(rewrite (apply-subst unify-subst (caddr (car lst)))))
(else (rewrite-with-lemmas term (cdr lst)))))
(define unify-subst '*)
(define (one-way-unify term1 term2)
(begin (set! unify-subst '())
(one-way-unify1 term1 term2)))
(define (one-way-unify1 term1 term2)
(cond ((not (pair? term2))
(let ((temp-temp (assq term2 unify-subst)))
(cond (temp-temp
(term-equal? term1 (cdr temp-temp)))
((number? term2);; This bug fix makes
nboyer 10 - 25 % slower !
(else
(set! unify-subst (cons (cons term2 term1)
unify-subst))
#t))))
((not (pair? term1))
#f)
((eq? (car term1)
(car term2))
(one-way-unify1-lst (cdr term1)
(cdr term2)))
(else #f)))
(define (one-way-unify1-lst lst1 lst2)
(cond ((null? lst1)
(null? lst2))
((null? lst2)
#f)
((one-way-unify1 (car lst1)
(car lst2))
(one-way-unify1-lst (cdr lst1)
(cdr lst2)))
(else #f)))
;; A term is "false" when it is literally the translated (f) term or
;; appears in LST, the list of terms currently assumed false; truep
;; is the dual for (t) and the assumed-true list.
(define (falsep x lst)
  (or (term-equal? x false-term)
      (term-member? x lst)))

(define (truep x lst)
  (or (term-equal? x true-term)
      (term-member? x lst)))

(define false-term '*);; becomes (translate-term '(f))
(define true-term '*);; becomes (translate-term '(t))
The next two procedures were in the original benchmark
;; but were never used.
;; Build (implies <chain> (implies 0 n)) where <chain> is the
;; right-nested conjunction produced by trans-of-implies1, and return
;; its translated (symbol-record) form.  Kept from the original
;; benchmark even though nothing here calls it.
(define (trans-of-implies n)
  (translate-term
   `(implies ,(trans-of-implies1 n) (implies 0 ,n))))

;; Chain of implications (implies k-1 k) for k = n down to 1,
;; right-nested under `and`:
;;   (and (implies n-1 n) (and ... (implies 0 1))).
(define (trans-of-implies1 n)
  (if (equal? n 1)
      '(implies 0 1)
      `(and (implies ,(- n 1) ,n)
            ,(trans-of-implies1 (- n 1)))))
;; Translated terms can be circular structures, which can't be
compared using Scheme 's equal ? and member procedures , so we
;; use these instead.
;; Structural equality on translated terms.  Translated terms can be
;; cyclic, so plain equal? is unsafe on compound terms; the
;; constructor (car) slots hold symbol-records and are compared by
;; identity via symbol-record-equal?.  Atoms fall back to equal?.
(define (term-equal? x y)
  (if (pair? x)
      (and (pair? y)
           (symbol-record-equal? (car x) (car y))
           (term-args-equal? (cdr x) (cdr y)))
      (equal? x y)))

;; Pairwise term-equal? over two argument lists; lists of different
;; lengths are unequal.
(define (term-args-equal? lst1 lst2)
  (cond ((null? lst1) (null? lst2))
        ((null? lst2) #f)
        ((term-equal? (car lst1) (car lst2))
         (term-args-equal? (cdr lst1) (cdr lst2)))
        (else #f)))

;; Like member, but using term-equal? as the equivalence predicate.
(define (term-member? x lst)
  (cond ((null? lst) #f)
        ((term-equal? x (car lst)) #t)
        (else (term-member? x (cdr lst)))))
(set! setup-boyer
(lambda ()
(set! *symbol-records-alist* '())
(set! if-constructor (symbol->symbol-record 'if))
(set! false-term (translate-term '(f)))
(set! true-term (translate-term '(t)))
(setup)))
(set! test-boyer
(lambda (alist term n)
(set! rewrite-count 0)
(let ((answer (test alist term n)))
;; (write rewrite-count)
;; (display " rewrites")
;; (newline)
(if answer
rewrite-count
#f)))))
| null | https://raw.githubusercontent.com/ecraven/r7rs-benchmarks/b0910c4759629fcd2fb84a323a4eeef3b6ffc6bf/src/nboyer.scm | scheme |
File: nboyer.sch
rewrote to eliminate property lists, and added
Language: Scheme
Status: Public Domain
Fairly CONS intensive.
contained several bugs that are corrected here. These bugs are discussed
The benchmark now returns a boolean result.
benchmark, but is used by this version)
by the benchmark)
Please do not compare the timings from this benchmark against those of
the original benchmark.
This version of the benchmark also prints the number of rewrites as a sanity
check, because it is too easy for a buggy version to return the correct
boolean result. The correct number of rewrites is
n rewrites peak live storage (approximate, in bytes)
2 1813975 5,175,000
but it accounts for very little of the runtime anyway.
whether it is implied by the lemmas.
assigned below
assigned below
In the original benchmark, it stored a list of lemmas on the
property lists of symbols.
In the new benchmark, it maintains an association list of
symbols and symbol-records, and stores the list of lemmas
within the symbol-records.
Translates a term by replacing its constructor symbols by symbol-records.
the symbol (for debugging) and
the list of lemmas associated with the symbol.
Association list of symbols and symbol-records.
the symbol (for debugging) and
the list of lemmas associated with the symbol.
becomes (symbol->symbol-record 'if)
sanity check
This bug fix makes
becomes (translate-term '(f))
becomes (translate-term '(t))
but were never used.
Translated terms can be circular structures, which can't be
use these instead.
(write rewrite-count)
(display " rewrites")
(newline) | Description : The benchmark
Author :
Created : 5 - Apr-85
Modified : 10 - Apr-85 14:52:20 ( )
22 - Jul-87 ( )
2 - Jul-88 ( -- distinguished # f and the empty list )
13 - Feb-97 ( -- fixed bugs in unifier and rules ,
a scaling parameter suggested by )
19 - Mar-99 ( -- cleaned up comments )
24 - Nov-07 ( -- converted to R6RS )
NBOYER -- Logic programming benchmark , originally written by .
Note : The version of this benchmark that appears in book
by , " The Boyer Benchmark Meets Linear Logic " , ACM SIGPLAN Lisp
Pointers 6(4 ) , October - December 1993 , pages 3 - 10 . The fixed bugs are :
FALSEP and TRUEP use TERM - MEMBER ? rather than MEMV ( which is called MEMBER
in )
ONE - WAY - UNIFY1 now treats numbers correctly
ONE - WAY - UNIFY1 - LST now treats empty lists correctly
Rule 19 has been corrected ( this rule was not touched by the original
Rules 84 and 101 have been corrected ( but these rules are never touched
According to , these bug fixes make the benchmark 10 - 25 % slower .
0 95024 520,000
1 591777 2,085,000
3 5375678
4 16445406
5 51507739
Nboyer is a 2 - phase benchmark .
The first phase attaches lemmas to symbols . This phase is not timed ,
The second phase creates the test problem , and tests to see
(import (scheme base) (scheme cxr) (scheme read) (scheme write) (scheme time))
(define (run-benchmark)
(let* ((count (read))
(input (read))
(output (read))
(s2 (number->string count))
(s1 (number->string input))
(name "nboyer"))
(run-r7rs-benchmark
(string-append name ":" s1 ":" s2)
count
(lambda ()
(setup-boyer)
(test-boyer alist term (hide count input)))
(lambda (rewrites)
(and (number? rewrites) (= rewrites output))))))
(define alist
(quote ((x f (plus (plus a b)
(plus c (zero))))
(y f (times (times a b)
(plus c d)))
(z f (reverse (append (append a b)
(nil))))
(u equal (plus a b)
(difference x y))
(w lessp (remainder a b)
(member a (length b))))))
(define term
(quote (implies (and (implies x y)
(and (implies y z)
(and (implies z u)
(implies u w))))
(implies x w))))
The first phase .
(let ()
(define (setup)
(add-lemma-lst
(quote ((equal (compile form)
(reverse (codegen (optimize form)
(nil))))
(equal (eqp x y)
(equal (fix x)
(fix y)))
(equal (greaterp x y)
(lessp y x))
(equal (lesseqp x y)
(not (lessp y x)))
(equal (greatereqp x y)
(not (lessp x y)))
(equal (boolean x)
(or (equal x (t))
(equal x (f))))
(equal (iff x y)
(and (implies x y)
(implies y x)))
(equal (even1 x)
(if (zerop x)
(t)
(odd (_1- x))))
(equal (countps- l pred)
(countps-loop l pred (zero)))
(equal (fact- i)
(fact-loop i 1))
(equal (reverse- x)
(reverse-loop x (nil)))
(equal (divides x y)
(zerop (remainder y x)))
(equal (assume-true var alist)
(cons (cons var (t))
alist))
(equal (assume-false var alist)
(cons (cons var (f))
alist))
(equal (tautology-checker x)
(tautologyp (normalize x)
(nil)))
(equal (falsify x)
(falsify1 (normalize x)
(nil)))
(equal (prime x)
(and (not (zerop x))
(not (equal x (add1 (zero))))
(prime1 x (_1- x))))
(equal (and p q)
(if p (if q (t)
(f))
(f)))
(equal (or p q)
(if p (t)
(if q (t)
(f))))
(equal (not p)
(if p (f)
(t)))
(equal (implies p q)
(if p (if q (t)
(f))
(t)))
(equal (fix x)
(if (numberp x)
x
(zero)))
(equal (if (if a b c)
d e)
(if a (if b d e)
(if c d e)))
(equal (zerop x)
(or (equal x (zero))
(not (numberp x))))
(equal (plus (plus x y)
z)
(plus x (plus y z)))
(equal (equal (plus a b)
(zero))
(and (zerop a)
(zerop b)))
(equal (difference x x)
(zero))
(equal (equal (plus a b)
(plus a c))
(equal (fix b)
(fix c)))
(equal (equal (zero)
(difference x y))
(not (lessp y x)))
(equal (equal x (difference x y))
(and (numberp x)
(or (equal x (zero))
(zerop y))))
(equal (meaning (plus-tree (append x y))
a)
(plus (meaning (plus-tree x)
a)
(meaning (plus-tree y)
a)))
(equal (meaning (plus-tree (plus-fringe x))
a)
(fix (meaning x a)))
(equal (append (append x y)
z)
(append x (append y z)))
(equal (reverse (append a b))
(append (reverse b)
(reverse a)))
(equal (times x (plus y z))
(plus (times x y)
(times x z)))
(equal (times (times x y)
z)
(times x (times y z)))
(equal (equal (times x y)
(zero))
(or (zerop x)
(zerop y)))
(equal (exec (append x y)
pds envrn)
(exec y (exec x pds envrn)
envrn))
(equal (mc-flatten x y)
(append (flatten x)
y))
(equal (member x (append a b))
(or (member x a)
(member x b)))
(equal (member x (reverse y))
(member x y))
(equal (length (reverse x))
(length x))
(equal (member a (intersect b c))
(and (member a b)
(member a c)))
(equal (nth (zero)
i)
(zero))
(equal (exp i (plus j k))
(times (exp i j)
(exp i k)))
(equal (exp i (times j k))
(exp (exp i j)
k))
(equal (reverse-loop x y)
(append (reverse x)
y))
(equal (reverse-loop x (nil))
(reverse x))
(equal (count-list z (sort-lp x y))
(plus (count-list z x)
(count-list z y)))
(equal (equal (append a b)
(append a c))
(equal b c))
(equal (plus (remainder x y)
(times y (quotient x y)))
(fix x))
(equal (power-eval (big-plus1 l i base)
base)
(plus (power-eval l base)
i))
(equal (power-eval (big-plus x y i base)
base)
(plus i (plus (power-eval x base)
(power-eval y base))))
(equal (remainder y 1)
(zero))
(equal (lessp (remainder x y)
y)
(not (zerop y)))
(equal (remainder x x)
(zero))
(equal (lessp (quotient i j)
i)
(and (not (zerop i))
(or (zerop j)
(not (equal j 1)))))
(equal (lessp (remainder x y)
x)
(and (not (zerop y))
(not (zerop x))
(not (lessp x y))))
(equal (power-eval (power-rep i base)
base)
(fix i))
(equal (power-eval (big-plus (power-rep i base)
(power-rep j base)
(zero)
base)
base)
(plus i j))
(equal (gcd x y)
(gcd y x))
(equal (nth (append a b)
i)
(append (nth a i)
(nth b (difference i (length a)))))
(equal (difference (plus x y)
x)
(fix y))
(equal (difference (plus y x)
x)
(fix y))
(equal (difference (plus x y)
(plus x z))
(difference y z))
(equal (times x (difference c w))
(difference (times c x)
(times w x)))
(equal (remainder (times x z)
z)
(zero))
(equal (difference (plus b (plus a c))
a)
(plus b c))
(equal (difference (add1 (plus y z))
z)
(add1 y))
(equal (lessp (plus x y)
(plus x z))
(lessp y z))
(equal (lessp (times x z)
(times y z))
(and (not (zerop z))
(lessp x y)))
(equal (lessp y (plus x y))
(not (zerop x)))
(equal (gcd (times x z)
(times y z))
(times z (gcd x y)))
(equal (value (normalize x)
a)
(value x a))
(equal (equal (flatten x)
(cons y (nil)))
(and (nlistp x)
(equal x y)))
(equal (listp (gopher x))
(listp x))
(equal (samefringe x y)
(equal (flatten x)
(flatten y)))
(equal (equal (greatest-factor x y)
(zero))
(and (or (zerop y)
(equal y 1))
(equal x (zero))))
(equal (equal (greatest-factor x y)
1)
(equal x 1))
(equal (numberp (greatest-factor x y))
(not (and (or (zerop y)
(equal y 1))
(not (numberp x)))))
(equal (times-list (append x y))
(times (times-list x)
(times-list y)))
(equal (prime-list (append x y))
(and (prime-list x)
(prime-list y)))
(equal (equal z (times w z))
(and (numberp z)
(or (equal z (zero))
(equal w 1))))
(equal (greatereqp x y)
(not (lessp x y)))
(equal (equal x (times x y))
(or (equal x (zero))
(and (numberp x)
(equal y 1))))
(equal (remainder (times y x)
y)
(zero))
(equal (equal (times a b)
1)
(and (not (equal a (zero)))
(not (equal b (zero)))
(numberp a)
(numberp b)
(equal (_1- a)
(zero))
(equal (_1- b)
(zero))))
(equal (lessp (length (delete x l))
(length l))
(member x l))
(equal (sort2 (delete x l))
(delete x (sort2 l)))
(equal (dsort x)
(sort2 x))
(equal (length (cons x1
(cons x2
(cons x3 (cons x4
(cons x5
(cons x6 x7)))))))
(plus 6 (length x7)))
(equal (difference (add1 (add1 x))
2)
(fix x))
(equal (quotient (plus x (plus x y))
2)
(plus x (quotient y 2)))
(equal (sigma (zero)
i)
(quotient (times i (add1 i))
2))
(equal (plus x (add1 y))
(if (numberp y)
(add1 (plus x y))
(add1 x)))
(equal (equal (difference x y)
(difference z y))
(if (lessp x y)
(not (lessp y z))
(if (lessp z y)
(not (lessp y x))
(equal (fix x)
(fix z)))))
(equal (meaning (plus-tree (delete x y))
a)
(if (member x y)
(difference (meaning (plus-tree y)
a)
(meaning x a))
(meaning (plus-tree y)
a)))
(equal (times x (add1 y))
(if (numberp y)
(plus x (times x y))
(fix x)))
(equal (nth (nil)
i)
(if (zerop i)
(nil)
(zero)))
(equal (last (append a b))
(if (listp b)
(last b)
(if (listp a)
(cons (car (last a))
b)
b)))
(equal (equal (lessp x y)
z)
(if (lessp x y)
(equal (t) z)
(equal (f) z)))
(equal (assignment x (append a b))
(if (assignedp x a)
(assignment x a)
(assignment x b)))
(equal (car (gopher x))
(if (listp x)
(car (flatten x))
(zero)))
(equal (flatten (cdr (gopher x)))
(if (listp x)
(cdr (flatten x))
(cons (zero)
(nil))))
(equal (quotient (times y x)
y)
(if (zerop y)
(zero)
(fix x)))
(equal (get j (set i val mem))
(if (eqp j i)
val
(get j mem)))))))
(define (add-lemma-lst lst)
(cond ((null? lst)
#t)
(else (add-lemma (car lst))
(add-lemma-lst (cdr lst)))))
(define (add-lemma term)
(cond ((and (pair? term)
(eq? (car term)
(quote equal))
(pair? (cadr term)))
(put (car (cadr term))
(quote lemmas)
(cons
(translate-term term)
(get (car (cadr term)) (quote lemmas)))))
(else (error #f "ADD-LEMMA did not like term: " term))))
(define (translate-term term)
(cond ((not (pair? term))
term)
(else (cons (symbol->symbol-record (car term))
(translate-args (cdr term))))))
(define (translate-args lst)
(cond ((null? lst)
'())
(else (cons (translate-term (car lst))
(translate-args (cdr lst))))))
For debugging only , so the use of MAP does not change
the first - order character of the benchmark .
(define (untranslate-term term)
(cond ((not (pair? term))
term)
(else (cons (get-name (car term))
(map untranslate-term (cdr term))))))
A symbol - record is represented as a vector with two fields :
(define (put sym property value)
(put-lemmas! (symbol->symbol-record sym) value))
(define (get sym property)
(get-lemmas (symbol->symbol-record sym)))
(define (symbol->symbol-record sym)
(let ((x (assq sym *symbol-records-alist*)))
(if x
(cdr x)
(let ((r (make-symbol-record sym)))
(set! *symbol-records-alist*
(cons (cons sym r)
*symbol-records-alist*))
r))))
(define *symbol-records-alist* '())
A symbol - record is represented as a vector with two fields :
(define (make-symbol-record sym)
(vector sym '()))
(define (put-lemmas! symbol-record lemmas)
(vector-set! symbol-record 1 lemmas))
(define (get-lemmas symbol-record)
(vector-ref symbol-record 1))
(define (get-name symbol-record)
(vector-ref symbol-record 0))
(define (symbol-record-equal? r1 r2)
(eq? r1 r2))
The second phase .
(define (test alist term n)
(let ((term
(apply-subst
(translate-alist alist)
(translate-term
(do ((term term (list 'or term '(f)))
(n n (- n 1)))
((zero? n) term))))))
(tautp term)))
(define (translate-alist alist)
(cond ((null? alist)
'())
(else (cons (cons (caar alist)
(translate-term (cdar alist)))
(translate-alist (cdr alist))))))
(define (apply-subst alist term)
(cond ((not (pair? term))
(let ((temp-temp (assq term alist)))
(if temp-temp
(cdr temp-temp)
term)))
(else (cons (car term)
(apply-subst-lst alist (cdr term))))))
(define (apply-subst-lst alist lst)
(cond ((null? lst)
'())
(else (cons (apply-subst alist (car lst))
(apply-subst-lst alist (cdr lst))))))
(define (tautp x)
(tautologyp (rewrite x)
'() '()))
(define (tautologyp x true-lst false-lst)
(cond ((truep x true-lst)
#t)
((falsep x false-lst)
#f)
((not (pair? x))
#f)
((eq? (car x) if-constructor)
(cond ((truep (cadr x)
true-lst)
(tautologyp (caddr x)
true-lst false-lst))
((falsep (cadr x)
false-lst)
(tautologyp (cadddr x)
true-lst false-lst))
(else (and (tautologyp (caddr x)
(cons (cadr x)
true-lst)
false-lst)
(tautologyp (cadddr x)
true-lst
(cons (cadr x)
false-lst))))))
(else #f)))
(define (rewrite term)
(set! rewrite-count (+ rewrite-count 1))
(cond ((not (pair? term))
term)
(else (rewrite-with-lemmas (cons (car term)
(rewrite-args (cdr term)))
(get-lemmas (car term))))))
(define (rewrite-args lst)
(cond ((null? lst)
'())
(else (cons (rewrite (car lst))
(rewrite-args (cdr lst))))))
(define (rewrite-with-lemmas term lst)
(cond ((null? lst)
term)
((one-way-unify term (cadr (car lst)))
(rewrite (apply-subst unify-subst (caddr (car lst)))))
(else (rewrite-with-lemmas term (cdr lst)))))
(define unify-subst '*)
(define (one-way-unify term1 term2)
(begin (set! unify-subst '())
(one-way-unify1 term1 term2)))
(define (one-way-unify1 term1 term2)
(cond ((not (pair? term2))
(let ((temp-temp (assq term2 unify-subst)))
(cond (temp-temp
(term-equal? term1 (cdr temp-temp)))
nboyer 10 - 25 % slower !
(else
(set! unify-subst (cons (cons term2 term1)
unify-subst))
#t))))
((not (pair? term1))
#f)
((eq? (car term1)
(car term2))
(one-way-unify1-lst (cdr term1)
(cdr term2)))
(else #f)))
(define (one-way-unify1-lst lst1 lst2)
(cond ((null? lst1)
(null? lst2))
((null? lst2)
#f)
((one-way-unify1 (car lst1)
(car lst2))
(one-way-unify1-lst (cdr lst1)
(cdr lst2)))
(else #f)))
(define (falsep x lst)
(or (term-equal? x false-term)
(term-member? x lst)))
(define (truep x lst)
(or (term-equal? x true-term)
(term-member? x lst)))
The next two procedures were in the original benchmark
(define (trans-of-implies n)
(translate-term
(list (quote implies)
(trans-of-implies1 n)
(list (quote implies)
0 n))))
(define (trans-of-implies1 n)
(cond ((equal? n 1)
(list (quote implies)
0 1))
(else (list (quote and)
(list (quote implies)
(- n 1)
n)
(trans-of-implies1 (- n 1))))))
compared using Scheme 's equal ? and member procedures , so we
(define (term-equal? x y)
(cond ((pair? x)
(and (pair? y)
(symbol-record-equal? (car x) (car y))
(term-args-equal? (cdr x) (cdr y))))
(else (equal? x y))))
(define (term-args-equal? lst1 lst2)
(cond ((null? lst1)
(null? lst2))
((null? lst2)
#f)
((term-equal? (car lst1) (car lst2))
(term-args-equal? (cdr lst1) (cdr lst2)))
(else #f)))
(define (term-member? x lst)
(cond ((null? lst)
#f)
((term-equal? x (car lst))
#t)
(else (term-member? x (cdr lst)))))
(set! setup-boyer
(lambda ()
(set! *symbol-records-alist* '())
(set! if-constructor (symbol->symbol-record 'if))
(set! false-term (translate-term '(f)))
(set! true-term (translate-term '(t)))
(setup)))
(set! test-boyer
(lambda (alist term n)
(set! rewrite-count 0)
(let ((answer (test alist term n)))
(if answer
rewrite-count
#f)))))
|
82fbdc92ccfaf90e4d8ee71f9d4eef16260c238a9a9850c031b6d6953abb3b66 | haskell/haskell-language-server | Main.hs | {-# LANGUAGE BlockArguments #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
{-# LANGUAGE TypeOperators #-}
module Main (main) where
import Data.Foldable (find)
import Data.Text (Text)
import qualified Ide.Plugin.QualifyImportedNames as QualifyImportedNames
import System.FilePath ((</>))
import Test.Hls (CodeAction (CodeAction, _title),
Command (Command), IdeState,
MonadIO (liftIO),
PluginDescriptor,
PluginTestDescriptor,
Position (Position),
Range (Range), Session,
TestName, TestTree,
TextDocumentIdentifier,
assertBool, assertFailure,
defaultTestRunner,
executeCodeAction,
getCodeActions,
goldenWithHaskellDoc,
mkPluginTestDescriptor',
openDoc, rename,
runSessionWithServer,
testCase, testGroup,
type (|?) (InR), (@?=))
import Prelude
-- 1's based
data Point = Point {
line :: !Int,
column :: !Int
}
makePoint line column
| line >= 1 && column >= 1 = Point line column
| otherwise = error "Line or column is less than 1."
isNotEmpty :: Foldable f => f a -> Bool
isNotEmpty = not . isEmpty
isEmpty :: Foldable f => f a -> Bool
isEmpty = null
makeCodeActionNotFoundAtString :: Point -> String
makeCodeActionNotFoundAtString Point {..} =
"CodeAction not found at line: " <> show line <> ", column: " <> show column
makeCodeActionFoundAtString :: Point -> String
makeCodeActionFoundAtString Point {..} =
"CodeAction found at line: " <> show line <> ", column: " <> show column
main :: IO ()
main = defaultTestRunner $ testGroup "Qualify Imported Names"
[
testCase "No CodeAction when not at import" $
runSessionWithServer pluginDescriptor testDataDir $ do
let point = makePoint 1 1
document <- openDoc "NoImport.hs" "haskell"
actions <- getCodeActions document $ pointToRange point
liftIO $ assertBool (makeCodeActionFoundAtString point) (isEmpty actions)
, testCase "No CodeAction when import is qualified" $
runSessionWithServer pluginDescriptor testDataDir $ do
let point = makePoint 3 1
document <- openDoc "QualifiedImport.hs" "haskell"
actions <- getCodeActions document $ pointToRange point
liftIO $ assertBool (makeCodeActionFoundAtString point) (isEmpty actions)
, codeActionGoldenTest
"CodeAction qualifies names with alias if imported module is aliased"
"AliasedImport"
(makePoint 3 1)
, codeActionGoldenTest
"CodeAction qualifies names with module name if imported module is not aliased"
"UnaliasedImport"
(makePoint 3 1)
, codeActionGoldenTest
"CodeAction qualifies only names in import's explicit non-hiding list"
"ExplicitImport"
(makePoint 4 1)
, codeActionGoldenTest
"CodeAction qualifies only names outside of import's explicit hiding list"
"ExplicitHidingImport"
(makePoint 4 1)
, codeActionGoldenTest
"CodeAction can qualify names not defined in modules they are imported from"
"Reexported"
(makePoint 3 1)
, codeActionGoldenTest
"CodeAction can qualify explicitly imported Prelude"
"ExplicitPrelude"
(makePoint 3 1)
, codeActionGoldenTest
"CodeAction qualifies only imported names"
"OnlyImportedNames"
(makePoint 3 1)
, codeActionGoldenTest
"CodeAction qualifies parenthesized operators properly"
"Parenthesized"
(makePoint 3 1)
, codeActionGoldenTest
"CodeAction qualifies backticked operators properly"
"Backticked"
(makePoint 3 1)
, codeActionGoldenTest
"CodeAction qualifies parenthesized and backticked operators on the same line properly"
"SameLine"
(makePoint 3 1)
, codeActionGoldenTest
"CodeAction doesn't qualify already qualified names"
"NoDoubleQualify"
(makePoint 3 1)
]
codeActionGoldenTest :: TestName -> FilePath -> Point -> TestTree
codeActionGoldenTest testCaseName goldenFilename point =
goldenWithQualifyImportedNames testCaseName goldenFilename $ \document -> do
actions <- getCodeActions document $ pointToRange point
case find ((== Just "Qualify imported names") . getCodeActionTitle) actions of
Just (InR codeAction) -> executeCodeAction codeAction
_ -> liftIO $ assertFailure $ makeCodeActionNotFoundAtString point
testDataDir :: String
testDataDir = "test" </> "data"
pluginDescriptor :: PluginTestDescriptor ()
pluginDescriptor = mkPluginTestDescriptor' QualifyImportedNames.descriptor "qualifyImportedNames"
getCodeActionTitle :: (Command |? CodeAction) -> Maybe Text
getCodeActionTitle commandOrCodeAction
| InR CodeAction {_title} <- commandOrCodeAction = Just _title
| otherwise = Nothing
goldenWithQualifyImportedNames :: TestName -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree
goldenWithQualifyImportedNames testName path =
goldenWithHaskellDoc pluginDescriptor testName testDataDir path "expected" "hs"
pointToRange :: Point -> Range
pointToRange Point {..}
| line <- fromIntegral $ subtract 1 line
, column <- fromIntegral $ subtract 1 column =
Range (Position line column) (Position line $ column + 1)
| null | https://raw.githubusercontent.com/haskell/haskell-language-server/aeb57a8eb56964c8666d7cd05b6ba46d531de7c7/plugins/hls-qualify-imported-names-plugin/test/Main.hs | haskell | # LANGUAGE BlockArguments #
# LANGUAGE MultiWayIf #
# LANGUAGE NamedFieldPuns #
# LANGUAGE OverloadedStrings #
# LANGUAGE TypeOperators #
1's based | # LANGUAGE RecordWildCards #
module Main (main) where
import Data.Foldable (find)
import Data.Text (Text)
import qualified Ide.Plugin.QualifyImportedNames as QualifyImportedNames
import System.FilePath ((</>))
import Test.Hls (CodeAction (CodeAction, _title),
Command (Command), IdeState,
MonadIO (liftIO),
PluginDescriptor,
PluginTestDescriptor,
Position (Position),
Range (Range), Session,
TestName, TestTree,
TextDocumentIdentifier,
assertBool, assertFailure,
defaultTestRunner,
executeCodeAction,
getCodeActions,
goldenWithHaskellDoc,
mkPluginTestDescriptor',
openDoc, rename,
runSessionWithServer,
testCase, testGroup,
type (|?) (InR), (@?=))
import Prelude
data Point = Point {
line :: !Int,
column :: !Int
}
makePoint line column
| line >= 1 && column >= 1 = Point line column
| otherwise = error "Line or column is less than 1."
isNotEmpty :: Foldable f => f a -> Bool
isNotEmpty = not . isEmpty
isEmpty :: Foldable f => f a -> Bool
isEmpty = null
makeCodeActionNotFoundAtString :: Point -> String
makeCodeActionNotFoundAtString Point {..} =
"CodeAction not found at line: " <> show line <> ", column: " <> show column
makeCodeActionFoundAtString :: Point -> String
makeCodeActionFoundAtString Point {..} =
"CodeAction found at line: " <> show line <> ", column: " <> show column
main :: IO ()
main = defaultTestRunner $ testGroup "Qualify Imported Names"
[
testCase "No CodeAction when not at import" $
runSessionWithServer pluginDescriptor testDataDir $ do
let point = makePoint 1 1
document <- openDoc "NoImport.hs" "haskell"
actions <- getCodeActions document $ pointToRange point
liftIO $ assertBool (makeCodeActionFoundAtString point) (isEmpty actions)
, testCase "No CodeAction when import is qualified" $
runSessionWithServer pluginDescriptor testDataDir $ do
let point = makePoint 3 1
document <- openDoc "QualifiedImport.hs" "haskell"
actions <- getCodeActions document $ pointToRange point
liftIO $ assertBool (makeCodeActionFoundAtString point) (isEmpty actions)
, codeActionGoldenTest
"CodeAction qualifies names with alias if imported module is aliased"
"AliasedImport"
(makePoint 3 1)
, codeActionGoldenTest
"CodeAction qualifies names with module name if imported module is not aliased"
"UnaliasedImport"
(makePoint 3 1)
, codeActionGoldenTest
"CodeAction qualifies only names in import's explicit non-hiding list"
"ExplicitImport"
(makePoint 4 1)
, codeActionGoldenTest
"CodeAction qualifies only names outside of import's explicit hiding list"
"ExplicitHidingImport"
(makePoint 4 1)
, codeActionGoldenTest
"CodeAction can qualify names not defined in modules they are imported from"
"Reexported"
(makePoint 3 1)
, codeActionGoldenTest
"CodeAction can qualify explicitly imported Prelude"
"ExplicitPrelude"
(makePoint 3 1)
, codeActionGoldenTest
"CodeAction qualifies only imported names"
"OnlyImportedNames"
(makePoint 3 1)
, codeActionGoldenTest
"CodeAction qualifies parenthesized operators properly"
"Parenthesized"
(makePoint 3 1)
, codeActionGoldenTest
"CodeAction qualifies backticked operators properly"
"Backticked"
(makePoint 3 1)
, codeActionGoldenTest
"CodeAction qualifies parenthesized and backticked operators on the same line properly"
"SameLine"
(makePoint 3 1)
, codeActionGoldenTest
"CodeAction doesn't qualify already qualified names"
"NoDoubleQualify"
(makePoint 3 1)
]
codeActionGoldenTest :: TestName -> FilePath -> Point -> TestTree
codeActionGoldenTest testCaseName goldenFilename point =
goldenWithQualifyImportedNames testCaseName goldenFilename $ \document -> do
actions <- getCodeActions document $ pointToRange point
case find ((== Just "Qualify imported names") . getCodeActionTitle) actions of
Just (InR codeAction) -> executeCodeAction codeAction
_ -> liftIO $ assertFailure $ makeCodeActionNotFoundAtString point
testDataDir :: String
testDataDir = "test" </> "data"
pluginDescriptor :: PluginTestDescriptor ()
pluginDescriptor = mkPluginTestDescriptor' QualifyImportedNames.descriptor "qualifyImportedNames"
getCodeActionTitle :: (Command |? CodeAction) -> Maybe Text
getCodeActionTitle commandOrCodeAction
| InR CodeAction {_title} <- commandOrCodeAction = Just _title
| otherwise = Nothing
goldenWithQualifyImportedNames :: TestName -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree
goldenWithQualifyImportedNames testName path =
goldenWithHaskellDoc pluginDescriptor testName testDataDir path "expected" "hs"
pointToRange :: Point -> Range
pointToRange Point {..}
| line <- fromIntegral $ subtract 1 line
, column <- fromIntegral $ subtract 1 column =
Range (Position line column) (Position line $ column + 1)
|
a2c89358dede9bbfde2b78016f888eb4f4acac31cdd223139ddee6871d8770cb | MinaProtocol/mina | proof.mli | module Base : sig
module Messages_for_next_proof_over_same_field =
Reduced_messages_for_next_proof_over_same_field
module Step : sig
type ( 's
, 'unfinalized_proofs
, 'sgs
, 'bp_chals
, 'messages_for_next_wrap_proof
, 'prev_evals )
t =
{ statement :
( 'unfinalized_proofs
, ('s, 'sgs, 'bp_chals) Messages_for_next_proof_over_same_field.Step.t
, 'messages_for_next_wrap_proof )
Import.Types.Step.Statement.t
; index : int
; prev_evals : 'prev_evals
; proof : Backend.Tick.Proof.t
}
end
module Wrap : sig
[%%versioned:
module Stable : sig
module V2 : sig
type ('messages_for_next_wrap_proof, 'messages_for_next_step_proof) t =
( 'messages_for_next_wrap_proof
, 'messages_for_next_step_proof )
Mina_wire_types.Pickles.Concrete_.Proof.Base.Wrap.V2.t =
{ statement :
( Limb_vector.Constant.Hex64.Stable.V1.t
Pickles_types.Vector.Vector_2.Stable.V1.t
, Limb_vector.Constant.Hex64.Stable.V1.t
Pickles_types.Vector.Vector_2.Stable.V1.t
Import.Scalar_challenge.Stable.V2.t
, Backend.Tick.Field.Stable.V1.t
Pickles_types.Shifted_value.Type1.Stable.V1.t
, bool
, 'messages_for_next_wrap_proof
, Import.Digest.Constant.Stable.V1.t
, 'messages_for_next_step_proof
, Limb_vector.Constant.Hex64.Stable.V1.t
Pickles_types.Vector.Vector_2.Stable.V1.t
Import.Scalar_challenge.Stable.V2.t
Import.Bulletproof_challenge.Stable.V1.t
Import.Step_bp_vec.Stable.V1.t
, Import.Branch_data.Stable.V1.t )
Import.Types.Wrap.Statement.Minimal.Stable.V1.t
; prev_evals :
( Backend.Tick.Field.Stable.V1.t
, Backend.Tick.Field.Stable.V1.t array )
Pickles_types.Plonk_types.All_evals.Stable.V1.t
; proof : Backend.Tock.Proof.Stable.V2.t
}
[@@deriving compare, sexp, yojson, hash, equal]
end
end]
type ('messages_for_next_wrap_proof, 'messages_for_next_step_proof) t =
( 'messages_for_next_wrap_proof
, 'messages_for_next_step_proof )
Stable.Latest.t =
{ statement :
( Import.Challenge.Constant.t
, Import.Challenge.Constant.t Import.Scalar_challenge.t
, Backend.Tick.Field.t Pickles_types.Shifted_value.Type1.t
, bool
, 'messages_for_next_wrap_proof
, Import.Digest.Constant.t
, 'messages_for_next_step_proof
, Import.Challenge.Constant.t Import.Scalar_challenge.t
Import.Bulletproof_challenge.t
Import.Step_bp_vec.t
, Import.Branch_data.t )
Import.Types.Wrap.Statement.Minimal.t
; prev_evals :
( Backend.Tick.Field.t
, Backend.Tick.Field.t array )
Pickles_types.Plonk_types.All_evals.t
; proof : Backend.Tock.Proof.t
}
[@@deriving compare, sexp, yojson, hash, equal]
end
end
type ('s, 'mlmb, 'c) with_data =
('s, 'mlmb, 'c) Mina_wire_types.Pickles.Concrete_.Proof.with_data =
| T :
( 'mlmb Base.Messages_for_next_proof_over_same_field.Wrap.t
, ( 's
, ( Backend.Tock.Curve.Affine.t
, 'most_recent_width )
Pickles_types.Vector.t
, ( Import.Challenge.Constant.t Import.Scalar_challenge.Stable.Latest.t
Import.Bulletproof_challenge.t
Import.Step_bp_vec.t
, 'most_recent_width )
Pickles_types.Vector.t )
Base.Messages_for_next_proof_over_same_field.Step.t )
Base.Wrap.t
-> ('s, 'mlmb, _) with_data
type ('max_width, 'mlmb) t = (unit, 'mlmb, 'max_width) with_data
val dummy :
'w Pickles_types.Nat.t
-> 'h Pickles_types.Nat.t
-> 'r Pickles_types.Nat.t
-> domain_log2:int
-> ('w, 'h) t
module Make (W : Pickles_types.Nat.Intf) (MLMB : Pickles_types.Nat.Intf) : sig
module Max_proofs_verified_at_most :
module type of Pickles_types.At_most.With_length (W)
module MLMB_vec : module type of Import.Nvector (MLMB)
module Repr : sig
type t =
( ( Backend.Tock.Inner_curve.Affine.t
, Reduced_messages_for_next_proof_over_same_field.Wrap.Challenges_vector
.t
MLMB_vec.t )
Import.Types.Wrap.Proof_state.Messages_for_next_wrap_proof.t
, ( unit
, Backend.Tock.Curve.Affine.t Max_proofs_verified_at_most.t
, Import.Challenge.Constant.t Import.Scalar_challenge.t
Import.Bulletproof_challenge.t
Import.Step_bp_vec.t
Max_proofs_verified_at_most.t )
Base.Messages_for_next_proof_over_same_field.Step.t )
Base.Wrap.t
[@@deriving compare, sexp, yojson, hash, equal]
end
type nonrec t = (W.n, MLMB.n) t [@@deriving compare, sexp, hash, equal]
val to_base64 : t -> string
val of_base64 : string -> (t, string) result
val to_yojson : t -> [> `String of string ]
val to_yojson_full : t Pickles_types.Sigs.jsonable
val of_yojson : [> `String of string ] -> (t, string) result
end
module Proofs_verified_2 : sig
module T : module type of Make (Pickles_types.Nat.N2) (Pickles_types.Nat.N2)
[%%versioned:
module Stable : sig
module V2 : sig
include module type of T with module Repr := T.Repr
include Pickles_types.Sigs.VERSIONED
include Pickles_types.Sigs.Binable.S with type t := t
end
end]
include module type of (T : module type of T with module Repr := T.Repr)
end
module Proofs_verified_max : sig
module T :
module type of
Make
(Side_loaded_verification_key.Width.Max)
(Side_loaded_verification_key.Width.Max)
[%%versioned:
module Stable : sig
module V2 : sig
include module type of T with module Repr := T.Repr
include Pickles_types.Sigs.VERSIONED
include Pickles_types.Sigs.Binable.S with type t := t
end
end]
include module type of (T : module type of T with module Repr := T.Repr)
end
| null | https://raw.githubusercontent.com/MinaProtocol/mina/8403887a428f56e91bfdf4187c6b8dc260e5424a/src/lib/pickles/proof.mli | ocaml | module Base : sig
module Messages_for_next_proof_over_same_field =
Reduced_messages_for_next_proof_over_same_field
module Step : sig
type ( 's
, 'unfinalized_proofs
, 'sgs
, 'bp_chals
, 'messages_for_next_wrap_proof
, 'prev_evals )
t =
{ statement :
( 'unfinalized_proofs
, ('s, 'sgs, 'bp_chals) Messages_for_next_proof_over_same_field.Step.t
, 'messages_for_next_wrap_proof )
Import.Types.Step.Statement.t
; index : int
; prev_evals : 'prev_evals
; proof : Backend.Tick.Proof.t
}
end
module Wrap : sig
[%%versioned:
module Stable : sig
module V2 : sig
type ('messages_for_next_wrap_proof, 'messages_for_next_step_proof) t =
( 'messages_for_next_wrap_proof
, 'messages_for_next_step_proof )
Mina_wire_types.Pickles.Concrete_.Proof.Base.Wrap.V2.t =
{ statement :
( Limb_vector.Constant.Hex64.Stable.V1.t
Pickles_types.Vector.Vector_2.Stable.V1.t
, Limb_vector.Constant.Hex64.Stable.V1.t
Pickles_types.Vector.Vector_2.Stable.V1.t
Import.Scalar_challenge.Stable.V2.t
, Backend.Tick.Field.Stable.V1.t
Pickles_types.Shifted_value.Type1.Stable.V1.t
, bool
, 'messages_for_next_wrap_proof
, Import.Digest.Constant.Stable.V1.t
, 'messages_for_next_step_proof
, Limb_vector.Constant.Hex64.Stable.V1.t
Pickles_types.Vector.Vector_2.Stable.V1.t
Import.Scalar_challenge.Stable.V2.t
Import.Bulletproof_challenge.Stable.V1.t
Import.Step_bp_vec.Stable.V1.t
, Import.Branch_data.Stable.V1.t )
Import.Types.Wrap.Statement.Minimal.Stable.V1.t
; prev_evals :
( Backend.Tick.Field.Stable.V1.t
, Backend.Tick.Field.Stable.V1.t array )
Pickles_types.Plonk_types.All_evals.Stable.V1.t
; proof : Backend.Tock.Proof.Stable.V2.t
}
[@@deriving compare, sexp, yojson, hash, equal]
end
end]
type ('messages_for_next_wrap_proof, 'messages_for_next_step_proof) t =
( 'messages_for_next_wrap_proof
, 'messages_for_next_step_proof )
Stable.Latest.t =
{ statement :
( Import.Challenge.Constant.t
, Import.Challenge.Constant.t Import.Scalar_challenge.t
, Backend.Tick.Field.t Pickles_types.Shifted_value.Type1.t
, bool
, 'messages_for_next_wrap_proof
, Import.Digest.Constant.t
, 'messages_for_next_step_proof
, Import.Challenge.Constant.t Import.Scalar_challenge.t
Import.Bulletproof_challenge.t
Import.Step_bp_vec.t
, Import.Branch_data.t )
Import.Types.Wrap.Statement.Minimal.t
; prev_evals :
( Backend.Tick.Field.t
, Backend.Tick.Field.t array )
Pickles_types.Plonk_types.All_evals.t
; proof : Backend.Tock.Proof.t
}
[@@deriving compare, sexp, yojson, hash, equal]
end
end
type ('s, 'mlmb, 'c) with_data =
('s, 'mlmb, 'c) Mina_wire_types.Pickles.Concrete_.Proof.with_data =
| T :
( 'mlmb Base.Messages_for_next_proof_over_same_field.Wrap.t
, ( 's
, ( Backend.Tock.Curve.Affine.t
, 'most_recent_width )
Pickles_types.Vector.t
, ( Import.Challenge.Constant.t Import.Scalar_challenge.Stable.Latest.t
Import.Bulletproof_challenge.t
Import.Step_bp_vec.t
, 'most_recent_width )
Pickles_types.Vector.t )
Base.Messages_for_next_proof_over_same_field.Step.t )
Base.Wrap.t
-> ('s, 'mlmb, _) with_data
type ('max_width, 'mlmb) t = (unit, 'mlmb, 'max_width) with_data
val dummy :
'w Pickles_types.Nat.t
-> 'h Pickles_types.Nat.t
-> 'r Pickles_types.Nat.t
-> domain_log2:int
-> ('w, 'h) t
module Make (W : Pickles_types.Nat.Intf) (MLMB : Pickles_types.Nat.Intf) : sig
module Max_proofs_verified_at_most :
module type of Pickles_types.At_most.With_length (W)
module MLMB_vec : module type of Import.Nvector (MLMB)
module Repr : sig
type t =
( ( Backend.Tock.Inner_curve.Affine.t
, Reduced_messages_for_next_proof_over_same_field.Wrap.Challenges_vector
.t
MLMB_vec.t )
Import.Types.Wrap.Proof_state.Messages_for_next_wrap_proof.t
, ( unit
, Backend.Tock.Curve.Affine.t Max_proofs_verified_at_most.t
, Import.Challenge.Constant.t Import.Scalar_challenge.t
Import.Bulletproof_challenge.t
Import.Step_bp_vec.t
Max_proofs_verified_at_most.t )
Base.Messages_for_next_proof_over_same_field.Step.t )
Base.Wrap.t
[@@deriving compare, sexp, yojson, hash, equal]
end
type nonrec t = (W.n, MLMB.n) t [@@deriving compare, sexp, hash, equal]
val to_base64 : t -> string
val of_base64 : string -> (t, string) result
val to_yojson : t -> [> `String of string ]
val to_yojson_full : t Pickles_types.Sigs.jsonable
val of_yojson : [> `String of string ] -> (t, string) result
end
module Proofs_verified_2 : sig
module T : module type of Make (Pickles_types.Nat.N2) (Pickles_types.Nat.N2)
[%%versioned:
module Stable : sig
module V2 : sig
include module type of T with module Repr := T.Repr
include Pickles_types.Sigs.VERSIONED
include Pickles_types.Sigs.Binable.S with type t := t
end
end]
include module type of (T : module type of T with module Repr := T.Repr)
end
module Proofs_verified_max : sig
module T :
module type of
Make
(Side_loaded_verification_key.Width.Max)
(Side_loaded_verification_key.Width.Max)
[%%versioned:
module Stable : sig
module V2 : sig
include module type of T with module Repr := T.Repr
include Pickles_types.Sigs.VERSIONED
include Pickles_types.Sigs.Binable.S with type t := t
end
end]
include module type of (T : module type of T with module Repr := T.Repr)
end
| |
5755cd50b94c7f40fc5ee03d128c948013662950198c9e130d1def9e6a821ad1 | sbcl/sbcl | stubs.lisp | ;;;; miscellaneous primitive stubs (ordinary FDEFINITIONs for full
;;;; call defined in terms of fundamental definitions of inline
;;;; expansions)
This software is part of the SBCL system . See the README file for
;;;; more information.
;;;;
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
;;;; public domain. The software is in the public domain and is
;;;; provided with absolutely no warranty. See the COPYING and CREDITS
;;;; files for more information.
(in-package "SB-IMPL")
(defun (cas symbol-value) (old new symbol)
(cas (symbol-value symbol) old new))
(defun (cas svref) (old new vector index)
(cas (svref vector index) old new))
#+(or ppc64 x86-64)
(macrolet ((def (name)
`(defun (cas ,name) (old new sap index)
(funcall #'(cas ,name) old new sap index))))
(def sb-sys:sap-ref-8)
(def sb-sys:sap-ref-16)
(def sb-sys:sap-ref-32)
(def sb-sys:sap-ref-64)
(def sb-sys:signed-sap-ref-64)
(def sb-sys:sap-ref-sap)
(def sb-sys:sap-ref-lispobj))
(macrolet ((def (name &rest args)
`(defun ,name ,args
(,name ,@args))))
(def word-logical-not x)
(def word-logical-and x y)
(def word-logical-or x y)
(def word-logical-xor x y)
(def word-logical-nor x y)
(def word-logical-eqv x y)
(def word-logical-nand x y)
(def word-logical-andc1 x y)
(def word-logical-andc2 x y)
(def word-logical-orc1 x y)
(def word-logical-orc2 x y))
(macrolet ((def (name &optional (args '(x)))
`(defun ,name ,args
(,@(if (listp name) `(funcall #',name) `(,name)) ,@args)))
(def* (&rest defs)
`(progn ,@(mapcar (lambda (x) `(def ,@x)) defs))))
SAP arithmetic and accessors
(def sap< (x y))
(def sap<= (x y))
(def sap= (x y))
(def sap>= (x y))
(def sap> (x y))
(def sap+ (sap offset))
(def sap- (x y))
(def sap-int)
(def int-sap)
(macrolet ((def-accessor (name)
;; the low-level %SET functions should not need stubs
`(progn (def (setf ,name) (value sap offset))
(def ,name (sap offset)))))
(def-accessor sap-ref-8)
(def-accessor sap-ref-16)
(def-accessor sap-ref-32)
(def-accessor sap-ref-64)
(def-accessor sap-ref-word)
(def-accessor signed-sap-ref-8)
(def-accessor signed-sap-ref-16)
(def-accessor signed-sap-ref-32)
(def-accessor signed-sap-ref-64)
(def-accessor signed-sap-ref-word)
(def-accessor sap-ref-sap)
(def-accessor sap-ref-lispobj)
(def-accessor sap-ref-single)
(def-accessor sap-ref-double))
(def %byte-blt (src src-start dst dst-start dst-end))
(def shift-towards-start (number count))
(def shift-towards-end (number count))
(def get-header-data)
(def set-header-data (x val))
(def widetag-of)
(def %other-pointer-widetag)
(def pointer-hash)
(def vector-sap)
(def binding-stack-pointer-sap ())
#+cheneygc (def dynamic-space-free-pointer ())
(def control-stack-pointer-sap ())
(def sb-c:safe-fdefn-fun)
(def %fun-pointer-widetag)
(def %closure-fun)
(def %closure-index-ref (closure index))
(def fdefn-name)
(def fdefn-fun)
(def fdefn-makunbound)
(def sb-c::vector-length)
(def make-array-header (type rank))
(def code-instructions)
#-untagged-fdefns (def code-header-ref (code-obj index))
(def %vector-raw-bits (object offset))
(def %set-vector-raw-bits (object offset value))
(def single-float-bits)
(def double-float-high-bits)
#+64-bit
(def double-float-bits)
(def double-float-low-bits)
(def value-cell-ref)
(def %caller-frame ())
(def %caller-pc ())
(def %code-debug-info)
(def sb-vm::%code-fixups)
#+x86-64 (def pointerp)
(def sb-bignum:%bignum-length)
;; instances
(def %make-instance) ; Allocate a new instance with X data slots.
(def %make-instance/mixed)
(def %instance-length) ; Given an instance, return its length.
(def %instance-layout)
(def %instance-wrapper)
(def %set-instance-layout (instance new-value))
; (def %instance-ref (instance index)) ; defined in 'target-defstruct'
(def %instance-set (instance index new-value))
;; funcallable instances
(def %make-funcallable-instance)
(def %fun-layout)
(def %fun-wrapper)
(def %set-fun-layout (fin new-value))
(def %funcallable-instance-fun)
(def (setf %funcallable-instance-fun) (fin new-value))
(def %funcallable-instance-info (fin i))
#+compact-instance-header (progn (def wrapper-of)
(def %instanceoid-layout))
;; lists
(def %rplaca (x val))
(def %rplacd (x val))
#+compare-and-swap-vops
(def* (%array-atomic-incf/word (array index diff))
(%raw-instance-atomic-incf/word (instance index diff)))
#+sb-simd-pack
(def* (%make-simd-pack (tag low high))
(%make-simd-pack-single (x y z w))
(%make-simd-pack-double (low high))
(%make-simd-pack-ub64 (low high))
(%simd-pack-tag))
#+sb-simd-pack-256
(def* (%make-simd-pack-256 (tag p0 p1 p2 p3))
(%make-simd-pack-256-single (a b c d e f g h))
(%make-simd-pack-256-double (a b c d))
(%make-simd-pack-256-ub64 (a b c d))
(%simd-pack-256-tag))
#+sb-thread (def sb-vm::current-thread-offset-sap)
(def current-sp ())
(def current-fp ())
(def stack-ref (s n))
(def fun-code-header)
(def symbol-package-id)
(def symbol-hash)
(def symbol-%info) ; primitive reader always needs a stub
;; but the "wrapped" reader might not need a stub.
;; If it's already a proper function, then it doesn't.
#.(if (fboundp 'symbol-dbinfo) (values) '(def symbol-dbinfo))
#-(or x86 x86-64) (def lra-code-header)
(def %make-lisp-obj)
(def get-lisp-obj-address)
#+x86-64
(def single-float-copysign (float float2))
#+x86-64
(def single-float-sign))
#+sb-simd-pack
(macrolet ((def (name)
`(defun ,name (pack)
(sb-vm::simd-pack-dispatch pack
(,name pack)))))
(def %simd-pack-low)
(def %simd-pack-high))
#+sb-simd-pack-256
(macrolet ((def (name)
`(defun ,name (pack)
(sb-vm::simd-pack-256-dispatch pack
(,name pack)))))
(def %simd-pack-256-0)
(def %simd-pack-256-1)
(def %simd-pack-256-2)
(def %simd-pack-256-3))
(defun spin-loop-hint ()
"Hints the processor that the current thread is spin-looping."
(spin-loop-hint))
;;; The stub for sb-c::%structure-is-a should really use layout-id in the same way
that the vop does , however , because the all 64 - bit architectures other than
;;; x86-64 need to use with-pinned-objects to extract a layout-id, it is cheaper not to.
I should add a vop for uint32 access to raw slots .
(defun sb-c::%structure-is-a (object-layout test-layout)
(or (eq object-layout test-layout)
(let ((depthoid (wrapper-depthoid test-layout))
(inherits (wrapper-inherits object-layout)))
(and (> (length inherits) depthoid)
(eq (svref inherits depthoid) test-layout)))))
(defun sb-c::structure-typep (object test-layout)
(and (%instancep object)
(let ((object-layout (%instance-layout object)))
(or (eq object-layout test-layout)
(let ((depthoid (wrapper-depthoid test-layout))
(inherits (wrapper-inherits object-layout)))
(and (> (length inherits) depthoid)
(eq (svref inherits depthoid) test-layout)))))))
(defun %other-pointer-subtype-p (x choices)
(and (%other-pointer-p x)
(member (%other-pointer-widetag x) choices)
t))
#+x86-64
(defun symbol-hash* (x satisfies)
(declare (explicit-check)) ; actually, not
(declare (ignore satisfies))
(symbol-hash* x nil))
TYPECASE could expand to contain a call to this function .
;;; The interpreter can ignore it, it is just compiler magic.
(defun sb-c::%type-constraint (var type)
(declare (ignore var type))
nil)
(eval-when (:compile-toplevel)
Defining % TYPE - CONSTRAINT issues a full warning because TYPE 's type
;; is (OR TYPE-SPECIFIER CTYPE), and TYPE-SPECIFIER is
;; (OR LIST SYMBOL CLASSOID CLASS), and CLASS isn't known, and you can't
;; define it because it's a standard symbol.
(setq sb-c::*undefined-warnings* nil))
| null | https://raw.githubusercontent.com/sbcl/sbcl/01674dd5e771680564602b923385c10d7dee1abc/src/code/stubs.lisp | lisp | miscellaneous primitive stubs (ordinary FDEFINITIONs for full
call defined in terms of fundamental definitions of inline
expansions)
more information.
public domain. The software is in the public domain and is
provided with absolutely no warranty. See the COPYING and CREDITS
files for more information.
the low-level %SET functions should not need stubs
instances
Allocate a new instance with X data slots.
Given an instance, return its length.
(def %instance-ref (instance index)) ; defined in 'target-defstruct'
funcallable instances
lists
primitive reader always needs a stub
but the "wrapped" reader might not need a stub.
If it's already a proper function, then it doesn't.
The stub for sb-c::%structure-is-a should really use layout-id in the same way
x86-64 need to use with-pinned-objects to extract a layout-id, it is cheaper not to.
actually, not
The interpreter can ignore it, it is just compiler magic.
is (OR TYPE-SPECIFIER CTYPE), and TYPE-SPECIFIER is
(OR LIST SYMBOL CLASSOID CLASS), and CLASS isn't known, and you can't
define it because it's a standard symbol. |
This software is part of the SBCL system . See the README file for
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
(in-package "SB-IMPL")
(defun (cas symbol-value) (old new symbol)
(cas (symbol-value symbol) old new))
(defun (cas svref) (old new vector index)
(cas (svref vector index) old new))
#+(or ppc64 x86-64)
(macrolet ((def (name)
`(defun (cas ,name) (old new sap index)
(funcall #'(cas ,name) old new sap index))))
(def sb-sys:sap-ref-8)
(def sb-sys:sap-ref-16)
(def sb-sys:sap-ref-32)
(def sb-sys:sap-ref-64)
(def sb-sys:signed-sap-ref-64)
(def sb-sys:sap-ref-sap)
(def sb-sys:sap-ref-lispobj))
(macrolet ((def (name &rest args)
`(defun ,name ,args
(,name ,@args))))
(def word-logical-not x)
(def word-logical-and x y)
(def word-logical-or x y)
(def word-logical-xor x y)
(def word-logical-nor x y)
(def word-logical-eqv x y)
(def word-logical-nand x y)
(def word-logical-andc1 x y)
(def word-logical-andc2 x y)
(def word-logical-orc1 x y)
(def word-logical-orc2 x y))
(macrolet ((def (name &optional (args '(x)))
`(defun ,name ,args
(,@(if (listp name) `(funcall #',name) `(,name)) ,@args)))
(def* (&rest defs)
`(progn ,@(mapcar (lambda (x) `(def ,@x)) defs))))
SAP arithmetic and accessors
(def sap< (x y))
(def sap<= (x y))
(def sap= (x y))
(def sap>= (x y))
(def sap> (x y))
(def sap+ (sap offset))
(def sap- (x y))
(def sap-int)
(def int-sap)
(macrolet ((def-accessor (name)
`(progn (def (setf ,name) (value sap offset))
(def ,name (sap offset)))))
(def-accessor sap-ref-8)
(def-accessor sap-ref-16)
(def-accessor sap-ref-32)
(def-accessor sap-ref-64)
(def-accessor sap-ref-word)
(def-accessor signed-sap-ref-8)
(def-accessor signed-sap-ref-16)
(def-accessor signed-sap-ref-32)
(def-accessor signed-sap-ref-64)
(def-accessor signed-sap-ref-word)
(def-accessor sap-ref-sap)
(def-accessor sap-ref-lispobj)
(def-accessor sap-ref-single)
(def-accessor sap-ref-double))
(def %byte-blt (src src-start dst dst-start dst-end))
(def shift-towards-start (number count))
(def shift-towards-end (number count))
(def get-header-data)
(def set-header-data (x val))
(def widetag-of)
(def %other-pointer-widetag)
(def pointer-hash)
(def vector-sap)
(def binding-stack-pointer-sap ())
#+cheneygc (def dynamic-space-free-pointer ())
(def control-stack-pointer-sap ())
(def sb-c:safe-fdefn-fun)
(def %fun-pointer-widetag)
(def %closure-fun)
(def %closure-index-ref (closure index))
(def fdefn-name)
(def fdefn-fun)
(def fdefn-makunbound)
(def sb-c::vector-length)
(def make-array-header (type rank))
(def code-instructions)
#-untagged-fdefns (def code-header-ref (code-obj index))
(def %vector-raw-bits (object offset))
(def %set-vector-raw-bits (object offset value))
(def single-float-bits)
(def double-float-high-bits)
#+64-bit
(def double-float-bits)
(def double-float-low-bits)
(def value-cell-ref)
(def %caller-frame ())
(def %caller-pc ())
(def %code-debug-info)
(def sb-vm::%code-fixups)
#+x86-64 (def pointerp)
(def sb-bignum:%bignum-length)
(def %make-instance/mixed)
(def %instance-layout)
(def %instance-wrapper)
(def %set-instance-layout (instance new-value))
(def %instance-set (instance index new-value))
(def %make-funcallable-instance)
(def %fun-layout)
(def %fun-wrapper)
(def %set-fun-layout (fin new-value))
(def %funcallable-instance-fun)
(def (setf %funcallable-instance-fun) (fin new-value))
(def %funcallable-instance-info (fin i))
#+compact-instance-header (progn (def wrapper-of)
(def %instanceoid-layout))
(def %rplaca (x val))
(def %rplacd (x val))
#+compare-and-swap-vops
(def* (%array-atomic-incf/word (array index diff))
(%raw-instance-atomic-incf/word (instance index diff)))
#+sb-simd-pack
(def* (%make-simd-pack (tag low high))
(%make-simd-pack-single (x y z w))
(%make-simd-pack-double (low high))
(%make-simd-pack-ub64 (low high))
(%simd-pack-tag))
#+sb-simd-pack-256
(def* (%make-simd-pack-256 (tag p0 p1 p2 p3))
(%make-simd-pack-256-single (a b c d e f g h))
(%make-simd-pack-256-double (a b c d))
(%make-simd-pack-256-ub64 (a b c d))
(%simd-pack-256-tag))
#+sb-thread (def sb-vm::current-thread-offset-sap)
(def current-sp ())
(def current-fp ())
(def stack-ref (s n))
(def fun-code-header)
(def symbol-package-id)
(def symbol-hash)
#.(if (fboundp 'symbol-dbinfo) (values) '(def symbol-dbinfo))
#-(or x86 x86-64) (def lra-code-header)
(def %make-lisp-obj)
(def get-lisp-obj-address)
#+x86-64
(def single-float-copysign (float float2))
#+x86-64
(def single-float-sign))
#+sb-simd-pack
(macrolet ((def (name)
`(defun ,name (pack)
(sb-vm::simd-pack-dispatch pack
(,name pack)))))
(def %simd-pack-low)
(def %simd-pack-high))
#+sb-simd-pack-256
(macrolet ((def (name)
`(defun ,name (pack)
(sb-vm::simd-pack-256-dispatch pack
(,name pack)))))
(def %simd-pack-256-0)
(def %simd-pack-256-1)
(def %simd-pack-256-2)
(def %simd-pack-256-3))
(defun spin-loop-hint ()
"Hints the processor that the current thread is spin-looping."
(spin-loop-hint))
that the vop does , however , because the all 64 - bit architectures other than
I should add a vop for uint32 access to raw slots .
(defun sb-c::%structure-is-a (object-layout test-layout)
(or (eq object-layout test-layout)
(let ((depthoid (wrapper-depthoid test-layout))
(inherits (wrapper-inherits object-layout)))
(and (> (length inherits) depthoid)
(eq (svref inherits depthoid) test-layout)))))
(defun sb-c::structure-typep (object test-layout)
(and (%instancep object)
(let ((object-layout (%instance-layout object)))
(or (eq object-layout test-layout)
(let ((depthoid (wrapper-depthoid test-layout))
(inherits (wrapper-inherits object-layout)))
(and (> (length inherits) depthoid)
(eq (svref inherits depthoid) test-layout)))))))
(defun %other-pointer-subtype-p (x choices)
(and (%other-pointer-p x)
(member (%other-pointer-widetag x) choices)
t))
#+x86-64
(defun symbol-hash* (x satisfies)
(declare (ignore satisfies))
(symbol-hash* x nil))
TYPECASE could expand to contain a call to this function .
(defun sb-c::%type-constraint (var type)
(declare (ignore var type))
nil)
(eval-when (:compile-toplevel)
Defining % TYPE - CONSTRAINT issues a full warning because TYPE 's type
(setq sb-c::*undefined-warnings* nil))
|
d73bfcdffc6e26e747d97e1e26a25df245c2fc24a59001ef0b0b15e1c3d11002 | erlang/otp | snmpm_mpd.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2004 - 2023 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
-module(snmpm_mpd).
-export([init/1,
process_msg/7, process_msg/6,
generate_msg/5, generate_response_msg/4,
next_msg_id/0,
next_req_id/0,
reset/1,
inc/1]).
-define(SNMP_USE_V3, true).
-include("snmp_types.hrl").
-include("snmpm_internal.hrl").
-include("SNMP-MPD-MIB.hrl").
-include("SNMPv2-TM.hrl").
-define(VMODULE,"MPD").
-include("snmp_verbosity.hrl").
-define(empty_msg_size, 24).
-record(state, {v1 = false, v2c = false, v3 = false}).
%%%-----------------------------------------------------------------
This module implements the Message Processing and Dispatch part of
%%% the multi-lingual SNMP agent.
%%%
The MPD is responsible for :
%%% *) call the security module (auth/priv).
* ) decoding the message into a PDU .
* ) decide a suitable Access Control Model , and provide it with
%%% the data it needs.
%%% *) maintaining SNMP counters.
%%%
%%% In order to take care of the different versions of counters, it
%%% implements and maintains the union of all SNMP counters (i.e. from
%%% rfc1213 and from rfc1907). It is up to the administrator of the
agent to load the correct MIB . Note that this module implements
%%% the counters only, it does not provide instrumentation functions
%%% for the counters.
%%%
%%% With the terms defined in rfc2271, this module implements part
of the and the Message Processing functionality .
%%%-----------------------------------------------------------------
init(Vsns) ->
?vdebug("init -> entry with ~p", [Vsns]),
?SNMP_RAND_SEED(),
%% rand:seed(exrop,
%% {erlang:phash2([node()]),
: monotonic_time ( ) ,
: unique_integer ( ) } ) ,
snmpm_config:cre_counter(msg_id, rand:uniform(2147483647)),
snmpm_config:cre_counter(req_id, rand:uniform(2147483647)),
init_counters(),
State = init_versions(Vsns, #state{}),
init_usm(State#state.v3),
?vtrace("init -> done when ~p", [State]),
State.
reset(#state{v3 = V3}) ->
reset_counters(),
reset_usm(V3).
%%-----------------------------------------------------------------
Func : process_msg(Packet , TDomain , TAddress , State ) - >
{ ok , SnmpVsn , Pdu , PduMS , ACMData } | { discarded , Reason }
%% Types: Packet = binary()
= snmpUDPDomain | atom ( )
%% TAddress = {Ip, Udp}
%% State = #state
%% Purpose: This is the main Message Dispatching function. (see
section 4.2.1 in rfc2272 )
%%-----------------------------------------------------------------
process_msg(Msg, Domain, Ip, Port, State, NoteStore, Logger) ->
process_msg(Msg, Domain, {Ip, Port}, State, NoteStore, Logger).
process_msg(Msg, Domain, Addr, State, NoteStore, Logger) ->
inc(snmpInPkts),
case (catch snmp_pdus:dec_message_only(binary_to_list(Msg))) of
%% Version 1
#message{version = 'version-1', vsn_hdr = Community, data = Data}
when State#state.v1 =:= true ->
HS = ?empty_msg_size + length(Community),
process_v1_v2c_msg(
'version-1', NoteStore, Msg, Domain, Addr,
Community, Data, HS, Logger);
Version 2
#message{version = 'version-2', vsn_hdr = Community, data = Data}
when State#state.v2c =:= true ->
HS = ?empty_msg_size + length(Community),
process_v1_v2c_msg(
'version-2', NoteStore, Msg, Domain, Addr,
Community, Data, HS, Logger);
Version 3
#message{version = 'version-3', vsn_hdr = H, data = Data}
when State#state.v3 =:= true ->
?vlog("v3:"
"~n msgID: ~p"
"~n msgFlags: ~p"
"~n msgSecModel: ~p",
[H#v3_hdr.msgID,H#v3_hdr.msgFlags,H#v3_hdr.msgSecurityModel]),
process_v3_msg(NoteStore, Msg, H, Data, Addr, Logger);
%% Crap
{'EXIT', {bad_version, Vsn}} ->
?vinfo("exit: bad version: ~p",[Vsn]),
inc(snmpInBadVersions),
{discarded, snmpInBadVersions};
%% More crap
{'EXIT', Reason} ->
?vinfo("exit: ~p",[Reason]),
inc(snmpInASNParseErrs),
{discarded, Reason};
%% Really crap
Crap ->
?vinfo("unknown message: "
"~n ~p",[Crap]),
inc(snmpInBadVersions),
{discarded, snmpInBadVersions}
end.
%%-----------------------------------------------------------------
%% Handles a Community based message (v1 or v2c).
%%-----------------------------------------------------------------
process_v1_v2c_msg(
Vsn, _NoteStore, Msg, Domain, Addr, Community, Data, HS, Log) ->
?vdebug("process_v1_v2c_msg -> entry with"
"~n Vsn: ~p"
"~n Domain: ~p"
"~n Addr: ~p"
"~n Community: ~p"
"~n HS: ~p", [Vsn, Domain, Addr, Community, HS]),
{TDomain, TAddress} =
try
{snmp_conf:mk_tdomain(Domain),
snmp_conf:mk_taddress(Domain, Addr)}
catch
throw:{error, TReason} ->
throw({discarded, {badarg, Domain, TReason}})
end,
Max = get_max_message_size(),
AgentMax = get_agent_max_message_size(Domain, Addr),
PduMS = pdu_ms(Max, AgentMax, HS),
?vtrace("process_v1_v2c_msg -> PduMS: ~p", [PduMS]),
case (catch snmp_pdus:dec_pdu(Data)) of
Pdu when is_record(Pdu, pdu) ->
?vtrace("process_v1_v2c_msg -> was a pdu", []),
Log(Msg),
inc_snmp_in(Pdu),
MsgData = {Community, sec_model(Vsn), TDomain, TAddress},
{ok, Vsn, Pdu, PduMS, MsgData};
Trap when is_record(Trap, trappdu) ->
?vtrace("process_v1_v2c_msg -> was a trap", []),
Log(Msg),
inc_snmp_in(Trap),
MsgData = {Community, sec_model(Vsn), TDomain, TAddress},
{ok, Vsn, Trap, PduMS, MsgData};
{'EXIT', Reason} ->
?vlog("process_v1_v2c_msg -> failed decoding PDU: "
"~n Reason: ~p", [Reason]),
inc(snmpInASNParseErrs),
{discarded, Reason}
end.
pdu_ms(MgrMMS, AgentMMS, HS) when AgentMMS < MgrMMS ->
AgentMMS - HS;
pdu_ms(MgrMMS, _AgentMMS, HS) ->
MgrMMS - HS.
sec_model('version-1') -> ?SEC_V1;
sec_model('version-2') -> ?SEC_V2C.
%%-----------------------------------------------------------------
%% Handles a SNMPv3 Message, following the procedures in rfc2272,
section 4.2 and 7.2
%%-----------------------------------------------------------------
process_v3_msg(NoteStore, Msg, Hdr, Data, Address, Log) ->
?vdebug(
"process_v3_msg -> entry with~n"
" Hdr: ~p~n"
" Address: ~p", [Hdr, Address]),
%% 7.2.3
#v3_hdr{msgID = MsgID,
msgMaxSize = MMS,
msgFlags = MsgFlags,
msgSecurityModel = MsgSecModel,
msgSecurityParameters = SecParams,
hdr_size = HdrSize} = Hdr,
7.2.4
SecModule = get_security_module(MsgSecModel),
?vtrace("process_v3_msg -> 7.2.4: "
"~n SecModule: ~p", [SecModule]),
7.2.5
SecLevel = check_sec_level(MsgFlags),
IsReportable = is_reportable(MsgFlags),
?vtrace("process_v3_msg -> 7.2.5: "
"~n SecLevel: ~p"
"~n IsReportable: ~p", [SecLevel, IsReportable]),
%% 7.2.6
SecRes = (catch SecModule:process_incoming_msg(Msg, Data,
SecParams, SecLevel)),
?vtrace("process_v3_msg -> 7.2.6 - message processing result: "
"~n ~p",[SecRes]),
{SecEngineID, SecName, ScopedPDUBytes, SecData} =
check_sec_module_result(SecRes, Hdr, Data, IsReportable, Log),
?vtrace("process_v3_msg -> 7.2.6 - checked module result: "
"~n SecEngineID: ~p"
"~n SecName: ~p",[SecEngineID, SecName]),
7.2.7
#scopedPdu{contextEngineID = CtxEngineID,
contextName = CtxName,
data = PDU} =
case (catch snmp_pdus:dec_scoped_pdu(ScopedPDUBytes)) of
ScopedPDU when is_record(ScopedPDU, scopedPdu) ->
ScopedPDU;
{'EXIT', Reason} ->
?vlog("failed decoding scoped pdu: "
"~n ~p",[Reason]),
inc(snmpInASNParseErrs),
discard(Reason)
end,
?vlog("7.2.7"
"~n ContextEngineID: ~p "
"~n context: \"~s\" ",
[CtxEngineID, CtxName]),
if
encrypted message - log decrypted pdu
Log({Hdr, ScopedPDUBytes});
true -> % otherwise, log binary
Log(Msg)
end,
%% Make sure a get_bulk doesn't get too big.
MgrMMS = get_max_message_size(),
%% PduMMS is supposed to be the maximum total length of the response
PDU we can send . From the MMS , we need to subtract everything before
the PDU , i.e. Message and ScopedPDU .
Message : [ 48 , TotalLen , , [ Tag , LH , ] , [ Tag , LM , MsgSec ] , Data ]
1 3 < ----------- HdrSize ----------- >
HdrSize = everything up to and including msgSecurityParameters .
%% ScopedPduData follows. This is
[ Tag , , [ Tag , L1 , CtxName ] , [ Tag , L2 , CtxEID ] ]
i.e. 6 + length(CtxName ) + length(CtxEID )
%%
Total : 1 + TotalLenOctets + 3 + ScopedPduDataLen
TotMMS = tot_mms(MgrMMS, MMS),
TotalLenOctets = snmp_pdus:get_encoded_length(TotMMS - 1),
PduMMS = TotMMS - TotalLenOctets - 10 - HdrSize -
length(CtxName) - length(CtxEngineID),
?vtrace("process_v3_msg -> PduMMS = ~p", [PduMMS]),
Type = PDU#pdu.type,
?vdebug("process_v3_msg -> PDU type: ~p",[Type]),
case Type of
report ->
7.2.10 & 11
%% BMK BMK BMK: discovery?
Note = snmp_note_store:get_note(NoteStore, MsgID),
case Note of
{SecEngineID, MsgSecModel, SecName, SecLevel,
CtxEngineID, CtxName, _ReqId} ->
?vtrace("process_v3_msg -> 7.2.11b: ok", []),
%% BMK BMK: Should we discard the cached info
%% BMK BMK: or do we let the gc deal with it?
{ok, 'version-3', PDU, PduMMS, ok};
_ when is_tuple(Note) ->
?vlog("process_v3_msg -> 7.2.11b: error"
"~n Note: ~p"
"~n SecEngineID: ~p"
"~n MsgSecModel: ~p"
"~n SecName: ~p"
"~n SecLevel: ~p"
"~n CtxEngineID: ~p"
"~n CtxName: ~p",
[Note,
SecEngineID, MsgSecModel, SecName, SecLevel,
CtxEngineID, CtxName]),
Recv = {SecEngineID, MsgSecModel, SecName, SecLevel,
CtxEngineID, CtxName, PDU#pdu.request_id},
Err = sec_error(Note, Recv),
ACM = {invalid_sec_info, Err},
ReqId = element(tuple_size(Note), Note),
{ok, 'version-3', PDU, PduMMS, {error, ReqId, ACM}};
_NoFound ->
?vtrace("process_v3_msg -> _NoFound: "
"~p", [_NoFound]),
inc(snmpUnknownPDUHandlers),
discard({no_outstanding_req, MsgID})
end;
'get-response' ->
7.2.10 & 12
case snmp_note_store:get_note(NoteStore, MsgID) of
{SecEngineID, MsgSecModel, SecName, SecLevel,
CtxEngineID, CtxName, _} ->
7.2.12.d
{ok, 'version-3', PDU, PduMMS, undefined};
_ ->
%% 7.2.12.b
%% BMK BMK: Should we not discard the cached info??
inc(snmpUnknownPDUHandlers),
discard({no_outstanding_req, MsgID})
end;
'snmpv2-trap' ->
%% 7.2.14
{ok, 'version-3', PDU, PduMMS, undefined};
'inform-request' ->
7.2.13
SnmpEngineID = get_engine_id(),
case SecEngineID of
7.2.13.b
?vtrace("7.2.13d - valid securityEngineID: ~p",
[SecEngineID]),
4.2.2.1.1 - we do n't handle proxys yet = > we only
%% handle CtxEngineID to ourselves
%% Check that we actually know of an agent with this
%% CtxEngineID and Address
case is_known_engine_id(CtxEngineID, Address) of
true ->
?vtrace("and the agent EngineID (~p) "
"is know to us", [CtxEngineID]),
Uses ACMData that snmpm_acm knows of .
ACMData =
{MsgID, MsgSecModel, SecName, SecLevel,
CtxEngineID, CtxName, SecData},
{ok, 'version-3', PDU, PduMMS, ACMData};
UnknownEngineID ->
?vtrace("4.2.2.1.2 - UnknownEngineId: ~p",
[UnknownEngineID]),
4.2.2.1.2
NIsReportable = snmp_misc:is_reportable_pdu(Type),
Val = inc(snmpUnknownPDUHandlers),
ErrorInfo =
{#varbind{oid = ?snmpUnknownPDUHandlers,
variabletype = 'Counter32',
value = Val},
SecName,
[{securityLevel, SecLevel},
{contextEngineID, CtxEngineID},
{contextName, CtxName}]},
case generate_v3_report_msg(MsgID,
MsgSecModel,
Data,
ErrorInfo,
Log) of
{ok, Report} when NIsReportable =:= true ->
discard(snmpUnknownPDUHandlers, Report);
_ ->
discard(snmpUnknownPDUHandlers)
end
end;
_ -> % 7.2.13.a
?vinfo("7.2.13a - invalid securityEngineID: ~p",
[SecEngineID]),
discard({badSecurityEngineID, SecEngineID})
end;
_ ->
7.2.13 - This would be the requests which we should not
%% receive since we are a manager, barring possible
%% proxy...
discard(Type)
end.
sec_error(T1, T2)
when tuple_size(T1) =:= tuple_size(T2) ->
Tags = {sec_engine_id, msg_sec_model, sec_name, sec_level,
ctx_engine_id, ctx_name, request_id},
sec_error(tuple_size(T1), T1, T2, Tags, []);
sec_error(T1, T2) ->
[{internal_error, T1, T2}].
sec_error(0, _T1, _T2, _Tags, Acc) ->
Acc;
sec_error(Idx, T1, T2, Tags, Acc) ->
case element(Idx, T1) =:= element(Idx, T2) of
true ->
sec_error(Idx - 1, T1, T2, Tags, Acc);
false ->
Elem = {element(Idx, Tags), element(Idx, T1), element(Idx, T2)},
sec_error(Idx - 1, T1, T2, Tags, [Elem|Acc])
end.
tot_mms(MgrMMS, AgentMMS) when MgrMMS > AgentMMS -> AgentMMS;
tot_mms(MgrMMS, _AgentMMS) -> MgrMMS.
get_security_module(?SEC_USM) ->
snmpm_usm;
get_security_module(_) ->
inc(snmpUnknownSecurityModels),
discard(snmpUnknownSecurityModels).
check_sec_level([MsgFlag]) ->
SecLevel = MsgFlag band 3,
if
SecLevel == 2 ->
inc(snmpInvalidMsgs),
discard(snmpInvalidMsgs);
true ->
SecLevel
end;
check_sec_level(_Unknown) ->
inc(snmpInvalidMsgs),
discard(snmpInvalidMsgs).
is_reportable([MsgFlag]) ->
4 == (MsgFlag band 4).
check_sec_module_result({ok, X}, _, _, _, _) ->
X;
check_sec_module_result({error, Reason, Info}, _, _, _, _)
when is_list(Info) ->
%% case 7.2.6 b
discard({securityError, Reason, Info});
check_sec_module_result({error, Reason, ErrorInfo}, V3Hdr, Data, true, Log) ->
%% case 7.2.6 a
?vtrace("security module result:"
"~n Reason: ~p"
"~n ErrorInfo: ~p", [Reason, ErrorInfo]),
#v3_hdr{msgID = MsgID, msgSecurityModel = MsgSecModel} = V3Hdr,
Pdu = get_scoped_pdu(Data),
case generate_v3_report_msg(MsgID, MsgSecModel, Pdu, ErrorInfo, Log) of
{ok, Report} ->
discard({securityError, Reason}, Report);
{discarded, _SomeOtherReason} ->
discard({securityError, Reason})
end;
check_sec_module_result({error, Reason, _ErrorInfo}, _, _, _, _) ->
?vtrace("security module result:"
"~n Reason: ~p"
"~n _ErrorInfo: ~p", [Reason, _ErrorInfo]),
discard({securityError, Reason});
check_sec_module_result(Res, _, _, _, _) ->
?vtrace("security module result:"
"~n Res: ~p", [Res]),
discard({securityError, Res}).
get_scoped_pdu(D) when is_list(D) ->
(catch snmp_pdus:dec_scoped_pdu(D));
get_scoped_pdu(D) ->
D.
%%-----------------------------------------------------------------
%% Generate a message
%%-----------------------------------------------------------------
generate_msg('version-3', NoteStore, Pdu,
{SecModel, SecName, SecLevel, CtxEngineID, CtxName,
TargetName}, Log) ->
generate_v3_msg(NoteStore, Pdu,
SecModel, SecName, SecLevel, CtxEngineID, CtxName,
TargetName, Log);
generate_msg(Vsn, _NoteStore, Pdu, {Comm, _SecModel}, Log) ->
generate_v1_v2c_msg(Vsn, Pdu, Comm, Log).
generate_v3_msg(NoteStore, Pdu,
SecModel, SecName, SecLevel, CtxEngineID, CtxName,
TargetName, Log) ->
%% rfc2272: 7.1.6
?vdebug("generate_v3_msg -> 7.1.6", []),
ScopedPDU = #scopedPdu{contextEngineID = CtxEngineID,
contextName = CtxName,
data = Pdu},
case (catch snmp_pdus:enc_scoped_pdu(ScopedPDU)) of
{'EXIT', Reason} ->
user_err("failed encoding scoped pdu "
"~n pdu: ~w"
"~n contextName: ~w"
"~n reason: ~w", [Pdu, CtxName, Reason]),
{discarded, Reason};
ScopedPDUBytes ->
{ok, generate_v3_msg(NoteStore, Pdu, ScopedPDUBytes,
SecModel, SecName, SecLevel,
CtxEngineID, CtxName, TargetName, Log)}
end.
%% Build the v3 header, run the message through the security module,
%% and cache the security info (keyed on msgID) so a matching
%% report/response can be validated later (rfc2272 7.1).
%% Returns the encoded Packet, or throws the security-module error.
generate_v3_msg(NoteStore,
                #pdu{type = Type} = Pdu, ScopedPduBytes,
                SecModel, SecName, SecLevel, CtxEngineID, CtxName,
                TargetName, Log) ->
    %% 7.1.7
    ?vdebug("generate_v3_msg -> 7.1.7", []),
    MsgID = next_msg_id(),
    MsgFlags = snmp_misc:mk_msg_flags(Type, SecLevel),
    V3Hdr = #v3_hdr{msgID = MsgID,
                    msgMaxSize = get_max_message_size(),
                    msgFlags = MsgFlags,
                    msgSecurityModel = SecModel},
    Message = #message{version = 'version-3',
                       vsn_hdr = V3Hdr,
                       data = ScopedPduBytes},
    SecModule = sec_module(SecModel),
    %% 7.1.9a
    ?vdebug("generate_v3_msg -> 7.1.9a", []),
    SecEngineID = sec_engine_id(TargetName),
    ?vtrace("SecEngineID: ~p", [SecEngineID]),
    %% 7.1.9b
    ?vdebug("generate_v3_msg -> 7.1.9b", []),
    case generate_v3_outgoing_msg(Message, SecModule, SecEngineID,
                                  SecName, [], SecLevel) of
        {ok, Packet} ->
            %% 7.1.9c
            %% Store in cache for 150 sec.
            ?vdebug("generate_v3_msg -> 7.1.9c", []),
            %% The request id is just in the case when we receive a
            %% report with incorrect securityModel and/or securityLevel
            CacheVal = {SecEngineID, SecModel, SecName, SecLevel,
                        CtxEngineID, CtxName, Pdu#pdu.request_id},
            snmp_note_store:set_note(NoteStore, 1500, MsgID, CacheVal),
            Log(Packet),
            inc_snmp_out(Pdu),
            ?vdebug("generate_v3_msg -> done", []),
            Packet;
        Error ->
            throw(Error)
    end.
%% Map a security model constant to its implementation module.
%% Only USM is supported; any other model fails with function_clause,
%% but callers only pass validated models.
sec_module(?SEC_USM) ->
    snmpm_usm.
%% 9) If the PDU is a GetRequest-PDU, GetNextRequest-PDU,
%% GetBulkRequest-PDU, SetRequest-PDU, InformRequest-PDU, or
%% SNMPv2-Trap-PDU, then
%%
%% a) If the PDU is an SNMPv2-Trap-PDU, then securityEngineID is set
%% to the value of this entity's snmpEngineID.
%%
%% Otherwise, the snmpEngineID of the target entity is determined,
%% in an implementation-dependent manner, possibly using
%% transportDomain and transportAddress. The value of
%% securityEngineID is set to the value of the target entity's
%% snmpEngineID.
%%
%% As we never send traps, the SecEngineID is always the
%% snmpEngineID of the target entity!
sec_engine_id(TargetName) ->
    case get_agent_engine_id(TargetName) of
        {ok, EngineId} ->
            EngineId;
        _ ->
            config_err("Can't find engineID for "
                       "snmpTargetAddrName ~p", [TargetName]),
            %% this will trigger error in secmodule
            ""
    end.
%% BMK BMK BMK
%% This one looks very similar to generate_v1_v2c_response_msg!
%% Common/shared? Should there be differences?
%%
%% Encode a v1/v2c request message.  The PDU is encoded first, then
%% wrapped in a community-based message.  The result is discarded if
%% encoding fails or the packet exceeds our max message size (a
%% request cannot be shrunk, so tooBig is terminal here).
%% Returns {ok, Packet} | {discarded, Reason}.
generate_v1_v2c_msg(Vsn, Pdu, Community, Log) ->
    ?vdebug("generate_v1_v2c_msg -> encode pdu", []),
    case (catch snmp_pdus:enc_pdu(Pdu)) of
        {'EXIT', Reason} ->
            user_err("failed encoding pdu: "
                     "(pdu: ~w, community: ~w): ~n~w",
                     [Pdu, Community, Reason]),
            {discarded, Reason};
        PduBytes ->
            MMS = get_max_message_size(),
            Message = #message{version = Vsn,
                               vsn_hdr = Community,
                               data = PduBytes},
            case generate_v1_v2c_outgoing_msg(Message) of
                {error, Reason} ->
                    user_err("failed encoding message "
                             "(pdu: ~w, community: ~w): ~n~w",
                             [Pdu, Community, Reason]),
                    {discarded, Reason};
                {ok, Packet} when byte_size(Packet) =< MMS ->
                    Log(Packet),
                    inc_snmp_out(Pdu),
                    {ok, Packet};
                {ok, Packet} ->
                    ?vlog("packet max size exceeded: "
                          "~n MMS: ~p"
                          "~n Len: ~p",
                          [MMS, byte_size(Packet)]),
                    {discarded, tooBig}
            end
    end.
%% -----------------------------------------------------------------------
%% Generate a response message.  Dispatches on version: v3 uses the
%% ACM data cached when the request was received; v1/v2c use the
%% community string (the 4-tuple variant additionally carries the
%% transport domain/address, which are not needed for the response).
generate_response_msg('version-3', Pdu,
                      {MsgID, SecModel, SecName, SecLevel,
                       CtxEngineID, CtxName, SecData}, Log) ->
    generate_v3_response_msg(Pdu, MsgID, SecModel, SecName, SecLevel,
                             CtxEngineID, CtxName, SecData, Log);
generate_response_msg(Vsn, Pdu, {Comm, _SecModel}, Log) ->
    generate_v1_v2c_response_msg(Vsn, Pdu, Comm, Log);
generate_response_msg(Vsn, Pdu, {Comm, _SecModel, _TDomain, _TAddress}, Log) ->
    generate_v1_v2c_response_msg(Vsn, Pdu, Comm, Log).
%% Build and encode a 'version-3' response message for an earlier
%% request (rfc2272: 7.1 steps 6-8).  Pdu is wrapped in a scopedPDU,
%% secured by the security module for SecModel, and logged via Log.
%% Returns {ok, Packet} | {discarded, Reason}.
%% If the encoded packet exceeds our own MMS, we retry with an empty
%% tooBig error-response - unless the response already was a tooBig
%% one, in which case it is silently dropped (snmpSilentDrops).
%% Fix: the "tooBog" typo in the size-exceeded log message.
generate_v3_response_msg(#pdu{type = Type} = Pdu, MsgID,
                         SecModel, SecName, SecLevel,
                         CtxEngineID, CtxName, SecData, Log) ->
    %% rfc2272: 7.1 steps 6-8
    ScopedPdu = #scopedPdu{contextEngineID = CtxEngineID,
                           contextName = CtxName,
                           data = Pdu},
    case (catch snmp_pdus:enc_scoped_pdu(ScopedPdu)) of
        {'EXIT', Reason} ->
            user_err("failed encoded scoped pdu "
                     "(pdu: ~w, contextName: ~w): ~n~w",
                     [Pdu, CtxName, Reason]),
            {discarded, Reason};
        ScopedPduBytes ->
            MMS = get_max_message_size(),
            MsgFlags = snmp_misc:mk_msg_flags(Type, SecLevel),
            V3Hdr = #v3_hdr{msgID = MsgID,
                            msgMaxSize = MMS,
                            msgFlags = MsgFlags,
                            msgSecurityModel = SecModel},
            Message = #message{version = 'version-3',
                               vsn_hdr = V3Hdr,
                               data = ScopedPduBytes},
            %% We know that the security model is valid when we
            %% generate a response.
            SecModule = sec_module(SecModel),
            SecEngineID = get_engine_id(),
            case generate_v3_outgoing_msg(Message, SecModule, SecEngineID,
                                          SecName, SecData, SecLevel) of
                %% Check the packet size. Send the msg even
                %% if it's larger than the agent can handle -
                %% it will be dropped. Just check against the
                %% internal size.
                {ok, Packet} when byte_size(Packet) =< MMS ->
                    if
                        SecLevel == 3 ->
                            %% encrypted - log decrypted pdu
                            Log({V3Hdr, ScopedPduBytes});
                        true ->
                            %% otherwise log the entire msg
                            Log(Packet)
                    end,
                    inc_snmp_out(Pdu),
                    {ok, Packet};
                {ok, _Packet} when Pdu#pdu.error_status =:= tooBig ->
                    %% Already a tooBig response - drop it silently
                    %% and count it (snmpSilentDrops).
                    ?vlog("packet max size exceeded (tooBig): "
                          "~n MMS: ~p", [MMS]),
                    inc(snmpSilentDrops),
                    {discarded, tooBig};
                {ok, _Packet} ->
                    ?vlog("packet max size exceeded: "
                          "~n MMS: ~p", [MMS]),
                    %% Retry once with an empty tooBig error-response.
                    TooBigPdu = Pdu#pdu{error_status = tooBig,
                                        error_index = 0,
                                        varbinds = []},
                    generate_v3_response_msg(TooBigPdu, MsgID,
                                             SecModel, SecName, SecLevel,
                                             CtxEngineID,
                                             CtxName,
                                             SecData, Log);
                Error ->
                    Error
            end
    end.
%% Run an outgoing v3 message through the security module
%% (authentication/encryption).  Both exits and explicit errors are
%% reported and mapped to {discarded, Reason}; a successful result
%% is normalized to a binary.
%% Returns {ok, Binary} | {discarded, Reason} | {error, Reason}.
generate_v3_outgoing_msg(Message,
                         SecModule, SecEngineID, SecName, SecData, SecLevel) ->
    case (catch SecModule:generate_outgoing_msg(Message,
                                                SecEngineID,
                                                SecName, SecData,
                                                SecLevel)) of
        {'EXIT', Reason} ->
            config_err("~p (message: ~p)", [Reason, Message]),
            {discarded, Reason};
        {error, Reason} ->
            config_err("~p (message: ~p)", [Reason, Message]),
            {discarded, Reason};
        Bin when is_binary(Bin) ->
            {ok, Bin};
        OutMsg when is_list(OutMsg) ->
            %% Deep io-list from the encoder - flatten to a binary.
            case (catch list_to_binary(OutMsg)) of
                Bin when is_binary(Bin) ->
                    {ok, Bin};
                {'EXIT', Reason} ->
                    {error, Reason}
            end
    end.
%% Encode a v1/v2c response message.  Like generate_v1_v2c_msg/4,
%% but a too-big response is turned into a tooBig error-response
%% (see too_big/6) instead of just being dropped.
%% Returns {ok, Packet} | {discarded, Reason}.
generate_v1_v2c_response_msg(Vsn, Pdu, Comm, Log) ->
    case (catch snmp_pdus:enc_pdu(Pdu)) of
        {'EXIT', Reason} ->
            user_err("failed encoding pdu: "
                     "(pdu: ~w, community: ~w): ~n~w",
                     [Pdu, Comm, Reason]),
            {discarded, Reason};
        PduBytes ->
            MMS = get_max_message_size(),
            Message = #message{version = Vsn,
                               vsn_hdr = Comm,
                               data = PduBytes},
            case generate_v1_v2c_outgoing_msg(Message) of
                {error, Reason} ->
                    user_err("failed encoding message only "
                             "(pdu: ~w, community: ~w): ~n~w",
                             [Pdu, Comm, Reason]),
                    {discarded, Reason};
                {ok, Packet} when byte_size(Packet) =< MMS ->
                    Log(Packet),
                    inc_snmp_out(Pdu),
                    {ok, Packet};
                {ok, Packet} -> %% Too big
                    too_big(Vsn, Pdu, Comm, MMS, byte_size(Packet), Log)
            end
    end.
%% An encoded response did not fit within the max message size
%% (MMS).  For v1/v2 'get-response' PDUs we retry with a tooBig
%% error-response (with version-specific varbind handling); anything
%% else is discarded with an error report to the user.
too_big('version-1' = Vsn, #pdu{type = 'get-response'} = Pdu,
        Comm, _MMS, _Len, Log) ->
    %% In v1, the varbinds should be identical to the incoming
    %% request. It isn't identical now! Make acceptable (?)
    %% approximation.
    V = set_vb_null(Pdu#pdu.varbinds),
    TooBigPdu = Pdu#pdu{error_status = tooBig, error_index = 0, varbinds = V},
    too_big(Vsn, TooBigPdu, Comm, Log);
too_big('version-2' = Vsn, #pdu{type = 'get-response'} = Pdu,
        Comm, _MMS, _Len, Log) ->
    %% In v2, varbinds should be empty (reasonable!)
    TooBigPdu = Pdu#pdu{error_status = tooBig, error_index = 0, varbinds = []},
    too_big(Vsn, TooBigPdu, Comm, Log);
too_big(_Vsn, Pdu, _Comm, MMS, Len, _Log) ->
    %% NOTE: argument order fixed.  The caller passes
    %% (Vsn, Pdu, Comm, MMS, Len, Log); the old head
    %% (_Vsn, Pdu, _Comm, _Log, MMS, Len) bound MMS to the packet
    %% length and Len to the log fun, garbling the error report.
    user_err("encoded pdu, ~p bytes, exceeded "
             "max message size of ~p bytes. Pdu: ~n~w",
             [Len, MMS, Pdu]),
    {discarded, tooBig}.
%% Encode and send the (already prepared) tooBig error-response.
%% Returns {ok, Packet} | {discarded, Reason}.
too_big(Vsn, Pdu, Comm, Log) ->
    case (catch snmp_pdus:enc_pdu(Pdu)) of
        {'EXIT', Reason} ->
            user_err("failed encoding pdu "
                     "(pdu: ~w, community: ~w): ~n~w",
                     [Pdu, Comm, Reason]),
            {discarded, Reason};
        PduBytes ->
            Message = #message{version = Vsn,
                               vsn_hdr = Comm,
                               data = PduBytes},
            case generate_v1_v2c_outgoing_msg(Message) of
                {error, Reason} ->
                    user_err("failed encoding message only"
                             "(pdu: ~w, community: ~w): ~n~w",
                             [Pdu, Comm, Reason]),
                    {discarded, Reason};
                {ok, Bin} ->
                    %% No size check here - a tooBig response with
                    %% empty/NULL varbinds is assumed to fit.
                    Log(Bin),
                    inc_snmp_out(Pdu),
                    {ok, Bin}
            end
    end.
%% Reset every varbind to a NULL type/value - used when turning a
%% v1 response into a tooBig response, where the varbind list must
%% mirror the request's shape.
set_vb_null(Vbs) ->
    lists:map(fun(Vb) ->
                      Vb#varbind{variabletype = 'NULL', value = 'NULL'}
              end,
              Vbs).
%% Encode a community-based (v1/v2c) message, normalizing the
%% encoder result to a binary.  Returns {ok, Binary} | {error, Reason}.
generate_v1_v2c_outgoing_msg(Message) ->
    ?vdebug("generate_v1_v2c_outgoing_msg -> encode message", []),
    case (catch snmp_pdus:enc_message_only(Message)) of
        {'EXIT', Reason} ->
            {error, Reason};
        Bin when is_binary(Bin) ->
            {ok, Bin};
        Packet when is_list(Packet) ->
            %% Deep io-list from the encoder - flatten to a binary.
            case (catch list_to_binary(Packet)) of
                Bin when is_binary(Bin) ->
                    {ok, Bin};
                {'EXIT', Reason} ->
                    {error, Reason}
            end
    end.
%% Generate a report-PDU response.  The request-id is taken from the
%% decoded scopedPDU when available; otherwise 0 is used
%% (rfc2572, 7.1.3.c.4).
generate_v3_report_msg(MsgID, SecModel, ScopedPdu, ErrInfo, Log)
  when is_record(ScopedPdu, scopedPdu) ->
    ReqID = (ScopedPdu#scopedPdu.data)#pdu.request_id,
    generate_v3_report_msg2(MsgID, ReqID, SecModel, ErrInfo, Log);
generate_v3_report_msg(MsgID, SecModel, _, ErrInfo, Log) ->
    %% RFC2572, 7.1.3.c.4
    generate_v3_report_msg2(MsgID, 0, SecModel, ErrInfo, Log).
%% Build the actual report-PDU (one varbind: the error counter) and
%% send it as a v3 response.  ErrInfo carries the counter varbind,
%% the security name and an option list with security/context info.
generate_v3_report_msg2(MsgID, ReqID, SecModel, ErrInfo, Log) ->
    {Varbind, SecName, Opts} = ErrInfo,
    Pdu = #pdu{type = report,
               request_id = ReqID,
               error_status = noError,
               error_index = 0,
               varbinds = [Varbind]},
    SecLevel = snmp_misc:get_option(securityLevel, Opts, 0),
    CtxEngineID = snmp_misc:get_option(contextEngineID, Opts, get_engine_id()),
    CtxName = snmp_misc:get_option(contextName, Opts, ""),
    SecData = snmp_misc:get_option(sec_data, Opts, []),
    generate_v3_response_msg(Pdu,
                             MsgID, SecModel, SecName, SecLevel,
                             CtxEngineID, CtxName, SecData, Log).
%%-----------------------------------------------------------------
%% Get "our" (manager) MMS.  Falls back to 484 bytes (the SNMP
%% minimum message size) if the configured engine MMS cannot be
%% retrieved.
%%-----------------------------------------------------------------
get_max_message_size() ->
    case snmpm_config:get_engine_max_message_size() of
        {ok, MMS} ->
            MMS;
        Error ->
            user_err("failed retrieving engine max message size: ~w",
                     [Error]),
            484
    end.
%% The MMS of the agent, identified by transport domain + address.
%% Falls back to our own MMS for unknown agents, logging the agents
%% we do know about to ease debugging.
get_agent_max_message_size(Domain, Addr) ->
    case snmpm_config:get_agent_engine_max_message_size(Domain, Addr) of
        {ok, MMS} ->
            MMS;
        _Error ->
            %% Lookup helper: target name -> taddress (or undefined).
            TAddr = fun(TN) ->
                            case snmpm_config:agent_info(TN, taddress) of
                                {ok, TA} ->
                                    TA;
                                {error, _} ->
                                    undefined
                            end
                    end,
            KnownAgents =
                [{TargetName, TAddr(TargetName)} ||
                    TargetName <- snmpm_config:which_agents()],
            ?vlog("[agent engine max msg size lookup] unknown agent: ~s"
                  "~n Known Agents: ~p",
                  [snmp_conf:mk_addr_string({Domain, Addr}), KnownAgents]),
            get_max_message_size()
    end.
%% get_agent_max_message_size(Addr, Port) ->
%%     case snmpm_config:get_agent_engine_max_message_size(Addr, Port) of
%%         {ok, MMS} ->
%%             MMS;
%%         _Error ->
%%             ?vlog("unknown agent: ~w:~w", [Addr, Port]),
%%             get_max_message_size()
%%     end.
%% Get "our" (manager) engine id.  An empty string is returned when
%% the engine id cannot be retrieved; the security module will then
%% report the error.
get_engine_id() ->
    case snmpm_config:get_engine_id() of
        {ok, EngineId} ->
            EngineId;
        _Error ->
            ""
    end.
%% The engine id of the agent, looked up by target name.
%% Returns {ok, EngineId} | {error, Reason}.
get_agent_engine_id(Name) ->
    snmpm_config:get_agent_engine_id(Name).

%% Is this engine id already known for the given address/port?
is_known_engine_id(EngineID, {Addr, Port}) ->
    snmpm_config:is_known_engine_id(EngineID, Addr, Port).
%%-----------------------------------------------------------------
%% Sequence number (msg-id & req-id) functions
%%-----------------------------------------------------------------

%% Next v3 message id (global, monotonically increasing counter).
next_msg_id() ->
    next_id(msg_id).

%% Next PDU request id.
next_req_id() ->
    next_id(req_id).

%% Increment and return the named global counter.
next_id(Id) ->
    snmpm_config:incr_counter(Id, 1).
%%-----------------------------------------------------------------
%% Version(s) functions
%%-----------------------------------------------------------------
%% Fold the configured version list into the state record, enabling
%% one version flag per list element.  An unknown version fails with
%% function_clause (configuration error).
init_versions([], State) ->
    State;
init_versions([v1 | Rest], State) ->
    init_versions(Rest, State#state{v1 = true});
init_versions([v2 | Rest], State) ->
    init_versions(Rest, State#state{v2c = true});
init_versions([v3 | Rest], State) ->
    init_versions(Rest, State#state{v3 = true}).
%% Initialize the USM security module, but only if v3 is enabled.
init_usm(UsesV3) ->
    case UsesV3 of
        true -> snmpm_usm:init();
        _ -> ok
    end.
%%-----------------------------------------------------------------
%% Counter functions
%%-----------------------------------------------------------------
%% Create (if not already present) every SNMP stats counter,
%% initialized to zero.
init_counters() ->
    [maybe_create_counter(Counter) || Counter <- counters()].
%% Reset every SNMP stats counter to zero.
reset_counters() ->
    [snmpm_config:reset_stats_counter(Counter) || Counter <- counters()].
%% Reset the USM security module, but only if v3 is enabled.
reset_usm(UsesV3) ->
    case UsesV3 of
        true -> snmpm_usm:reset();
        _ -> ok
    end.
%% Create the stats counter (initialized to 0) unless it already
%% exists.
maybe_create_counter(Counter) ->
    snmpm_config:maybe_cre_stats_counter(Counter, 0).
%% The complete set of maintained SNMP counters - the union of the
%% counters from rfc1213 (MIB-II) and rfc1907/SNMP-MPD-MIB.
counters() ->
    [snmpInPkts,
     snmpOutPkts,
     snmpInBadVersions,
     snmpInBadCommunityNames,
     snmpInBadCommunityUses,
     snmpInASNParseErrs,
     snmpInTooBigs,
     snmpInNoSuchNames,
     snmpInBadValues,
     snmpInReadOnlys,
     snmpInGenErrs,
     snmpInTotalReqVars,
     snmpInTotalSetVars,
     snmpInGetRequests,
     snmpInGetNexts,
     snmpInSetRequests,
     snmpInGetResponses,
     snmpInTraps,
     snmpOutTooBigs,
     snmpOutNoSuchNames,
     snmpOutBadValues,
     snmpOutGenErrs,
     snmpOutGetRequests,
     snmpOutGetNexts,
     snmpOutSetRequests,
     snmpOutGetResponses,
     snmpOutTraps,
     snmpSilentDrops,
     snmpProxyDrops,
     %% From SNMP-MPD-MIB
     snmpUnknownSecurityModels,
     snmpInvalidMsgs,
     snmpUnknownPDUHandlers
    ].
%%-----------------------------------------------------------------
%% inc(VariableName) increments the variable (Counter) in
%% the local mib. (e.g. snmpInPkts)
%%-----------------------------------------------------------------

%% Increment a stats counter by 1 (inc/1) or by N (inc/2).
inc(Name) -> inc(Name, 1).
inc(Name, N) -> snmpm_config:incr_stats_counter(Name, N).
%% Count an incoming PDU: per-type counter for normal PDUs, the
%% packet + trap counters for (v1) trap PDUs.
inc_snmp_in(#pdu{type = Type}) ->
    inc_in_type(Type);
inc_snmp_in(TrapPdu) when is_record(TrapPdu, trappdu) ->
    inc(snmpInPkts),
    inc(snmpInTraps).
%% Count an outgoing PDU: total packets, per-error-status and
%% per-type counters.
inc_snmp_out(#pdu{type = Type,
                  error_status = ErrorStatus}) ->
    inc(snmpOutPkts),
    inc_out_err(ErrorStatus),
    inc_out_type(Type).
%% Per-type counter for outgoing requests (other types uncounted).
inc_out_type('get-request') -> inc(snmpOutGetRequests);
inc_out_type('get-next-request') -> inc(snmpOutGetNexts);
inc_out_type('set-request') -> inc(snmpOutSetRequests);
inc_out_type(_) -> ok.

%% Per-error-status counter for outgoing PDUs (noError etc. uncounted).
inc_out_err(genErr) -> inc(snmpOutGenErrs);
inc_out_err(tooBig) -> inc(snmpOutTooBigs);
inc_out_err(noSuchName) -> inc(snmpOutNoSuchNames);
inc_out_err(badValue) -> inc(snmpOutBadValues);
inc_out_err(_) -> ok.

%% Per-type counter for incoming PDUs (only responses counted).
inc_in_type('get-response') -> inc(snmpInGetResponses);
inc_in_type(_) -> ok.
%%-----------------------------------------------------------------
%% Abort processing of the current message by throwing a
%% {discarded, ...} term (caught further up in the message
%% processing chain).
discard(Reason) ->
    throw({discarded, Reason}).
discard(Reason, Report) ->
    throw({discarded, Reason, Report}).
%% Error reporting helpers: tag the formatted message with its
%% category and forward to the snmpm error logger.
user_err(F, A) ->
    error_msg("USER ERROR: " ++ F ++ "~n", A).
config_err(F, A) ->
    error_msg("CONFIG ERROR: " ++ F ++ "~n", A).
error_msg(F, A) ->
    ?snmpm_error("MPD: " ++ F, A).
| null | https://raw.githubusercontent.com/erlang/otp/2b397d7e5580480dc32fa9751db95f4b89ff029e/lib/snmp/src/manager/snmpm_mpd.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
-----------------------------------------------------------------
the multi-lingual SNMP agent.
*) call the security module (auth/priv).
the data it needs.
*) maintaining SNMP counters.
In order to take care of the different versions of counters, it
implements and maintains the union of all SNMP counters (i.e. from
rfc1213 and from rfc1907). It is up to the administrator of the
the counters only, it does not provide instrumentation functions
for the counters.
With the terms defined in rfc2271, this module implements part
-----------------------------------------------------------------
rand:seed(exrop,
{erlang:phash2([node()]),
-----------------------------------------------------------------
Types: Packet = binary()
TAddress = {Ip, Udp}
State = #state
Purpose: This is the main Message Dispatching function. (see
-----------------------------------------------------------------
Version 1
Crap
More crap
Really crap
-----------------------------------------------------------------
Handles a Community based message (v1 or v2c).
-----------------------------------------------------------------
-----------------------------------------------------------------
Handles a SNMPv3 Message, following the procedures in rfc2272,
-----------------------------------------------------------------
7.2.3
7.2.6
otherwise, log binary
Make sure a get_bulk doesn't get too big.
PduMMS is supposed to be the maximum total length of the response
ScopedPduData follows. This is
BMK BMK BMK: discovery?
BMK BMK: Should we discard the cached info
BMK BMK: or do we let the gc deal with it?
7.2.12.b
BMK BMK: Should we not discard the cached info??
7.2.14
handle CtxEngineID to ourselves
Check that we actually know of an agent with this
CtxEngineID and Address
7.2.13.a
receive since we are a manager, barring possible
proxy...
case 7.2.6 b
case 7.2.6 a
-----------------------------------------------------------------
Generate a message
-----------------------------------------------------------------
rfc2272: 7.1.6
7.1.9b
The request id is just in the case when we receive a
report with incorrect securityModel and/or securityLevel
SNMPv2-Trap-PDU, then
to the value of this entity's snmpEngineID.
Otherwise, the snmpEngineID of the target entity is determined,
in an implementation-dependent manner, possibly using
securityEngineID is set to the value of the target entity's
snmpEngineID.
As we never send traps, the SecEngineID is always the
snmpEngineID of the target entity!
BMK BMK BMK
This one looks very similar to link generate_v1_v2c_response_msg!
-----------------------------------------------------------------------
We know that the security model is valid when we
generate a response.
Check the packet size. Send the msg even
if it's larger than the agent can handle -
it will be dropped. Just check against the
internal size.
otherwise log the entire msg
Too big
In v1, the varbinds should be identical to the incoming
request. It isn't identical now! Make acceptable (?)
approximation.
In v2, varbinds should be empty (reasonable!)
RFC2572, 7.1.3.c.4
-----------------------------------------------------------------
The the MMS of the agent
get_agent_max_message_size(Addr, Port) ->
{ok, MMS} ->
MMS;
_Error ->
get_max_message_size()
end.
Get "our" (manager) engine id
The engine id of the agent
-----------------------------------------------------------------
Sequence number (msg-id & req-id) functions
-----------------------------------------------------------------
-----------------------------------------------------------------
Version(s) functions
-----------------------------------------------------------------
-----------------------------------------------------------------
Counter functions
-----------------------------------------------------------------
-----------------------------------------------------------------
inc(VariableName) increments the variable (Counter) in
-----------------------------------------------------------------
----------------------------------------------------------------- | Copyright Ericsson AB 2004 - 2023 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(snmpm_mpd).
-export([init/1,
process_msg/7, process_msg/6,
generate_msg/5, generate_response_msg/4,
next_msg_id/0,
next_req_id/0,
reset/1,
inc/1]).
-define(SNMP_USE_V3, true).
-include("snmp_types.hrl").
-include("snmpm_internal.hrl").
-include("SNMP-MPD-MIB.hrl").
-include("SNMPv2-TM.hrl").
-define(VMODULE,"MPD").
-include("snmp_verbosity.hrl").
-define(empty_msg_size, 24).
-record(state, {v1 = false, v2c = false, v3 = false}).
This module implements the Message Processing and Dispatch part of
The MPD is responsible for :
* ) decoding the message into a PDU .
* ) decide a suitable Access Control Model , and provide it with
agent to load the correct MIB . Note that this module implements
of the and the Message Processing functionality .
init(Vsns) ->
?vdebug("init -> entry with ~p", [Vsns]),
?SNMP_RAND_SEED(),
: monotonic_time ( ) ,
: unique_integer ( ) } ) ,
snmpm_config:cre_counter(msg_id, rand:uniform(2147483647)),
snmpm_config:cre_counter(req_id, rand:uniform(2147483647)),
init_counters(),
State = init_versions(Vsns, #state{}),
init_usm(State#state.v3),
?vtrace("init -> done when ~p", [State]),
State.
reset(#state{v3 = V3}) ->
reset_counters(),
reset_usm(V3).
Func : process_msg(Packet , TDomain , TAddress , State ) - >
{ ok , SnmpVsn , Pdu , PduMS , ACMData } | { discarded , Reason }
= snmpUDPDomain | atom ( )
section 4.2.1 in rfc2272 )
process_msg(Msg, Domain, Ip, Port, State, NoteStore, Logger) ->
process_msg(Msg, Domain, {Ip, Port}, State, NoteStore, Logger).
process_msg(Msg, Domain, Addr, State, NoteStore, Logger) ->
inc(snmpInPkts),
case (catch snmp_pdus:dec_message_only(binary_to_list(Msg))) of
#message{version = 'version-1', vsn_hdr = Community, data = Data}
when State#state.v1 =:= true ->
HS = ?empty_msg_size + length(Community),
process_v1_v2c_msg(
'version-1', NoteStore, Msg, Domain, Addr,
Community, Data, HS, Logger);
Version 2
#message{version = 'version-2', vsn_hdr = Community, data = Data}
when State#state.v2c =:= true ->
HS = ?empty_msg_size + length(Community),
process_v1_v2c_msg(
'version-2', NoteStore, Msg, Domain, Addr,
Community, Data, HS, Logger);
Version 3
#message{version = 'version-3', vsn_hdr = H, data = Data}
when State#state.v3 =:= true ->
?vlog("v3:"
"~n msgID: ~p"
"~n msgFlags: ~p"
"~n msgSecModel: ~p",
[H#v3_hdr.msgID,H#v3_hdr.msgFlags,H#v3_hdr.msgSecurityModel]),
process_v3_msg(NoteStore, Msg, H, Data, Addr, Logger);
{'EXIT', {bad_version, Vsn}} ->
?vinfo("exit: bad version: ~p",[Vsn]),
inc(snmpInBadVersions),
{discarded, snmpInBadVersions};
{'EXIT', Reason} ->
?vinfo("exit: ~p",[Reason]),
inc(snmpInASNParseErrs),
{discarded, Reason};
Crap ->
?vinfo("unknown message: "
"~n ~p",[Crap]),
inc(snmpInBadVersions),
{discarded, snmpInBadVersions}
end.
process_v1_v2c_msg(
Vsn, _NoteStore, Msg, Domain, Addr, Community, Data, HS, Log) ->
?vdebug("process_v1_v2c_msg -> entry with"
"~n Vsn: ~p"
"~n Domain: ~p"
"~n Addr: ~p"
"~n Community: ~p"
"~n HS: ~p", [Vsn, Domain, Addr, Community, HS]),
{TDomain, TAddress} =
try
{snmp_conf:mk_tdomain(Domain),
snmp_conf:mk_taddress(Domain, Addr)}
catch
throw:{error, TReason} ->
throw({discarded, {badarg, Domain, TReason}})
end,
Max = get_max_message_size(),
AgentMax = get_agent_max_message_size(Domain, Addr),
PduMS = pdu_ms(Max, AgentMax, HS),
?vtrace("process_v1_v2c_msg -> PduMS: ~p", [PduMS]),
case (catch snmp_pdus:dec_pdu(Data)) of
Pdu when is_record(Pdu, pdu) ->
?vtrace("process_v1_v2c_msg -> was a pdu", []),
Log(Msg),
inc_snmp_in(Pdu),
MsgData = {Community, sec_model(Vsn), TDomain, TAddress},
{ok, Vsn, Pdu, PduMS, MsgData};
Trap when is_record(Trap, trappdu) ->
?vtrace("process_v1_v2c_msg -> was a trap", []),
Log(Msg),
inc_snmp_in(Trap),
MsgData = {Community, sec_model(Vsn), TDomain, TAddress},
{ok, Vsn, Trap, PduMS, MsgData};
{'EXIT', Reason} ->
?vlog("process_v1_v2c_msg -> failed decoding PDU: "
"~n Reason: ~p", [Reason]),
inc(snmpInASNParseErrs),
{discarded, Reason}
end.
%% The PDU max size is the smaller of our own and the agent's max
%% message size, minus the message header size.
pdu_ms(MgrMMS, AgentMMS, HS) ->
    erlang:min(MgrMMS, AgentMMS) - HS.
sec_model('version-1') -> ?SEC_V1;
sec_model('version-2') -> ?SEC_V2C.
section 4.2 and 7.2
process_v3_msg(NoteStore, Msg, Hdr, Data, Address, Log) ->
?vdebug(
"process_v3_msg -> entry with~n"
" Hdr: ~p~n"
" Address: ~p", [Hdr, Address]),
#v3_hdr{msgID = MsgID,
msgMaxSize = MMS,
msgFlags = MsgFlags,
msgSecurityModel = MsgSecModel,
msgSecurityParameters = SecParams,
hdr_size = HdrSize} = Hdr,
7.2.4
SecModule = get_security_module(MsgSecModel),
?vtrace("process_v3_msg -> 7.2.4: "
"~n SecModule: ~p", [SecModule]),
7.2.5
SecLevel = check_sec_level(MsgFlags),
IsReportable = is_reportable(MsgFlags),
?vtrace("process_v3_msg -> 7.2.5: "
"~n SecLevel: ~p"
"~n IsReportable: ~p", [SecLevel, IsReportable]),
SecRes = (catch SecModule:process_incoming_msg(Msg, Data,
SecParams, SecLevel)),
?vtrace("process_v3_msg -> 7.2.6 - message processing result: "
"~n ~p",[SecRes]),
{SecEngineID, SecName, ScopedPDUBytes, SecData} =
check_sec_module_result(SecRes, Hdr, Data, IsReportable, Log),
?vtrace("process_v3_msg -> 7.2.6 - checked module result: "
"~n SecEngineID: ~p"
"~n SecName: ~p",[SecEngineID, SecName]),
7.2.7
#scopedPdu{contextEngineID = CtxEngineID,
contextName = CtxName,
data = PDU} =
case (catch snmp_pdus:dec_scoped_pdu(ScopedPDUBytes)) of
ScopedPDU when is_record(ScopedPDU, scopedPdu) ->
ScopedPDU;
{'EXIT', Reason} ->
?vlog("failed decoding scoped pdu: "
"~n ~p",[Reason]),
inc(snmpInASNParseErrs),
discard(Reason)
end,
?vlog("7.2.7"
"~n ContextEngineID: ~p "
"~n context: \"~s\" ",
[CtxEngineID, CtxName]),
if
encrypted message - log decrypted pdu
Log({Hdr, ScopedPDUBytes});
Log(Msg)
end,
MgrMMS = get_max_message_size(),
PDU we can send . From the MMS , we need to subtract everything before
the PDU , i.e. Message and ScopedPDU .
Message : [ 48 , TotalLen , , [ Tag , LH , ] , [ Tag , LM , MsgSec ] , Data ]
1 3 < ----------- HdrSize ----------- >
HdrSize = everything up to and including msgSecurityParameters .
[ Tag , , [ Tag , L1 , CtxName ] , [ Tag , L2 , CtxEID ] ]
i.e. 6 + length(CtxName ) + length(CtxEID )
Total : 1 + TotalLenOctets + 3 + ScopedPduDataLen
TotMMS = tot_mms(MgrMMS, MMS),
TotalLenOctets = snmp_pdus:get_encoded_length(TotMMS - 1),
PduMMS = TotMMS - TotalLenOctets - 10 - HdrSize -
length(CtxName) - length(CtxEngineID),
?vtrace("process_v3_msg -> PduMMS = ~p", [PduMMS]),
Type = PDU#pdu.type,
?vdebug("process_v3_msg -> PDU type: ~p",[Type]),
case Type of
report ->
7.2.10 & 11
Note = snmp_note_store:get_note(NoteStore, MsgID),
case Note of
{SecEngineID, MsgSecModel, SecName, SecLevel,
CtxEngineID, CtxName, _ReqId} ->
?vtrace("process_v3_msg -> 7.2.11b: ok", []),
{ok, 'version-3', PDU, PduMMS, ok};
_ when is_tuple(Note) ->
?vlog("process_v3_msg -> 7.2.11b: error"
"~n Note: ~p"
"~n SecEngineID: ~p"
"~n MsgSecModel: ~p"
"~n SecName: ~p"
"~n SecLevel: ~p"
"~n CtxEngineID: ~p"
"~n CtxName: ~p",
[Note,
SecEngineID, MsgSecModel, SecName, SecLevel,
CtxEngineID, CtxName]),
Recv = {SecEngineID, MsgSecModel, SecName, SecLevel,
CtxEngineID, CtxName, PDU#pdu.request_id},
Err = sec_error(Note, Recv),
ACM = {invalid_sec_info, Err},
ReqId = element(tuple_size(Note), Note),
{ok, 'version-3', PDU, PduMMS, {error, ReqId, ACM}};
_NoFound ->
?vtrace("process_v3_msg -> _NoFound: "
"~p", [_NoFound]),
inc(snmpUnknownPDUHandlers),
discard({no_outstanding_req, MsgID})
end;
'get-response' ->
7.2.10 & 12
case snmp_note_store:get_note(NoteStore, MsgID) of
{SecEngineID, MsgSecModel, SecName, SecLevel,
CtxEngineID, CtxName, _} ->
7.2.12.d
{ok, 'version-3', PDU, PduMMS, undefined};
_ ->
inc(snmpUnknownPDUHandlers),
discard({no_outstanding_req, MsgID})
end;
'snmpv2-trap' ->
{ok, 'version-3', PDU, PduMMS, undefined};
'inform-request' ->
7.2.13
SnmpEngineID = get_engine_id(),
case SecEngineID of
7.2.13.b
?vtrace("7.2.13d - valid securityEngineID: ~p",
[SecEngineID]),
4.2.2.1.1 - we do n't handle proxys yet = > we only
case is_known_engine_id(CtxEngineID, Address) of
true ->
?vtrace("and the agent EngineID (~p) "
"is know to us", [CtxEngineID]),
Uses ACMData that snmpm_acm knows of .
ACMData =
{MsgID, MsgSecModel, SecName, SecLevel,
CtxEngineID, CtxName, SecData},
{ok, 'version-3', PDU, PduMMS, ACMData};
UnknownEngineID ->
?vtrace("4.2.2.1.2 - UnknownEngineId: ~p",
[UnknownEngineID]),
4.2.2.1.2
NIsReportable = snmp_misc:is_reportable_pdu(Type),
Val = inc(snmpUnknownPDUHandlers),
ErrorInfo =
{#varbind{oid = ?snmpUnknownPDUHandlers,
variabletype = 'Counter32',
value = Val},
SecName,
[{securityLevel, SecLevel},
{contextEngineID, CtxEngineID},
{contextName, CtxName}]},
case generate_v3_report_msg(MsgID,
MsgSecModel,
Data,
ErrorInfo,
Log) of
{ok, Report} when NIsReportable =:= true ->
discard(snmpUnknownPDUHandlers, Report);
_ ->
discard(snmpUnknownPDUHandlers)
end
end;
?vinfo("7.2.13a - invalid securityEngineID: ~p",
[SecEngineID]),
discard({badSecurityEngineID, SecEngineID})
end;
_ ->
7.2.13 - This would be the requests which we should not
discard(Type)
end.
%% Compare the cached v3 request info with what came back in the
%% report.  Returns one {Tag, Sent, Received} triple per mismatching
%% element (in field order), or a single internal_error term when
%% the tuples cannot be compared field by field.
sec_error(T1, T2) when tuple_size(T1) =:= tuple_size(T2) ->
    Tags = {sec_engine_id, msg_sec_model, sec_name, sec_level,
            ctx_engine_id, ctx_name, request_id},
    [{element(Idx, Tags), element(Idx, T1), element(Idx, T2)} ||
        Idx <- lists:seq(1, tuple_size(T1)),
        element(Idx, T1) =/= element(Idx, T2)];
sec_error(T1, T2) ->
    [{internal_error, T1, T2}].
%% Effective total max message size: the smaller of our own MMS and
%% the agent's.
tot_mms(MgrMMS, AgentMMS) ->
    erlang:min(MgrMMS, AgentMMS).
get_security_module(?SEC_USM) ->
snmpm_usm;
get_security_module(_) ->
inc(snmpUnknownSecurityModels),
discard(snmpUnknownSecurityModels).
check_sec_level([MsgFlag]) ->
SecLevel = MsgFlag band 3,
if
SecLevel == 2 ->
inc(snmpInvalidMsgs),
discard(snmpInvalidMsgs);
true ->
SecLevel
end;
check_sec_level(_Unknown) ->
inc(snmpInvalidMsgs),
discard(snmpInvalidMsgs).
%% The reportableFlag is bit 2 (value 4) of the msgFlags octet.
is_reportable([MsgFlag]) ->
    (MsgFlag band 4) =:= 4.
check_sec_module_result({ok, X}, _, _, _, _) ->
X;
check_sec_module_result({error, Reason, Info}, _, _, _, _)
when is_list(Info) ->
discard({securityError, Reason, Info});
check_sec_module_result({error, Reason, ErrorInfo}, V3Hdr, Data, true, Log) ->
?vtrace("security module result:"
"~n Reason: ~p"
"~n ErrorInfo: ~p", [Reason, ErrorInfo]),
#v3_hdr{msgID = MsgID, msgSecurityModel = MsgSecModel} = V3Hdr,
Pdu = get_scoped_pdu(Data),
case generate_v3_report_msg(MsgID, MsgSecModel, Pdu, ErrorInfo, Log) of
{ok, Report} ->
discard({securityError, Reason}, Report);
{discarded, _SomeOtherReason} ->
discard({securityError, Reason})
end;
check_sec_module_result({error, Reason, _ErrorInfo}, _, _, _, _) ->
?vtrace("security module result:"
"~n Reason: ~p"
"~n _ErrorInfo: ~p", [Reason, _ErrorInfo]),
discard({securityError, Reason});
check_sec_module_result(Res, _, _, _, _) ->
?vtrace("security module result:"
"~n Res: ~p", [Res]),
discard({securityError, Res}).
get_scoped_pdu(D) when is_list(D) ->
(catch snmp_pdus:dec_scoped_pdu(D));
get_scoped_pdu(D) ->
D.
generate_msg('version-3', NoteStore, Pdu,
{SecModel, SecName, SecLevel, CtxEngineID, CtxName,
TargetName}, Log) ->
generate_v3_msg(NoteStore, Pdu,
SecModel, SecName, SecLevel, CtxEngineID, CtxName,
TargetName, Log);
generate_msg(Vsn, _NoteStore, Pdu, {Comm, _SecModel}, Log) ->
generate_v1_v2c_msg(Vsn, Pdu, Comm, Log).
generate_v3_msg(NoteStore, Pdu,
SecModel, SecName, SecLevel, CtxEngineID, CtxName,
TargetName, Log) ->
?vdebug("generate_v3_msg -> 7.1.6", []),
ScopedPDU = #scopedPdu{contextEngineID = CtxEngineID,
contextName = CtxName,
data = Pdu},
case (catch snmp_pdus:enc_scoped_pdu(ScopedPDU)) of
{'EXIT', Reason} ->
user_err("failed encoding scoped pdu "
"~n pdu: ~w"
"~n contextName: ~w"
"~n reason: ~w", [Pdu, CtxName, Reason]),
{discarded, Reason};
ScopedPDUBytes ->
{ok, generate_v3_msg(NoteStore, Pdu, ScopedPDUBytes,
SecModel, SecName, SecLevel,
CtxEngineID, CtxName, TargetName, Log)}
end.
%% generate_v3_msg(NoteStore, Pdu, ScopedPduBytes, SecModel, SecName,
%%                 SecLevel, CtxEngineID, CtxName, TargetName, Log)
%%
%% Build the v3 header, run the message through the security module
%% and cache the request context in the note store so the response can
%% be matched later.  Throws any error from the security step.
%%
%% FIX: the section-number annotations ("7.1.7", "7.1.9a", ...) and
%% the cache note had lost their %% comment markers, which made them
%% bare (invalid) expressions; restored as comments.
generate_v3_msg(NoteStore,
                #pdu{type = Type} = Pdu, ScopedPduBytes,
                SecModel, SecName, SecLevel, CtxEngineID, CtxName,
                TargetName, Log) ->
    %% 7.1.7
    ?vdebug("generate_v3_msg -> 7.1.7", []),
    MsgID = next_msg_id(),
    MsgFlags = snmp_misc:mk_msg_flags(Type, SecLevel),
    V3Hdr = #v3_hdr{msgID = MsgID,
                    msgMaxSize = get_max_message_size(),
                    msgFlags = MsgFlags,
                    msgSecurityModel = SecModel},
    Message = #message{version = 'version-3',
                       vsn_hdr = V3Hdr,
                       data = ScopedPduBytes},
    SecModule = sec_module(SecModel),
    %% 7.1.9a
    ?vdebug("generate_v3_msg -> 7.1.9a", []),
    SecEngineID = sec_engine_id(TargetName),
    ?vtrace("SecEngineID: ~p", [SecEngineID]),
    %% 7.1.9b
    ?vdebug("generate_v3_msg -> 7.1.9b", []),
    case generate_v3_outgoing_msg(Message, SecModule, SecEngineID,
                                  SecName, [], SecLevel) of
        {ok, Packet} ->
            %% 7.1.9c
            %% Store in cache for 150 sec (note-store timeout is in
            %% units of 0.1 s, hence 1500).
            ?vdebug("generate_v3_msg -> 7.1.9c", []),
            CacheVal = {SecEngineID, SecModel, SecName, SecLevel,
                        CtxEngineID, CtxName, Pdu#pdu.request_id},
            snmp_note_store:set_note(NoteStore, 1500, MsgID, CacheVal),
            Log(Packet),
            inc_snmp_out(Pdu),
            ?vdebug("generate_v3_msg -> done", []),
            Packet;
        Error ->
            throw(Error)
    end.
%% Map a security-model constant to the module implementing it.
%% Only the User-based Security Model (USM) is supported.
sec_module(?SEC_USM) ->
    snmpm_usm.
%% 9) If the PDU is a GetRequest-PDU, GetNextRequest-PDU,
%%    GetBulkRequest-PDU, SetRequest-PDU, InformRequest-PDU, or
%% a) If the PDU is an SNMPv2-Trap-PDU, then securityEngineID is set
%%    transportDomain and transportAddress.  The value of ...
%%
%% sec_engine_id(TargetName) -> EngineID
%%
%% Look up the engine id configured for the given target.  An unknown
%% target is logged as a configuration error and mapped to the empty
%% string, which the security module will subsequently reject.
%%
%% FIX: the quoted RFC fragment above and the inline note below had
%% lost their %% comment markers (the inline one made the function
%% body syntactically invalid); restored as comments.
sec_engine_id(TargetName) ->
    case get_agent_engine_id(TargetName) of
        {ok, EngineId} ->
            EngineId;
        _ ->
            config_err("Can't find engineID for "
                       "snmpTargetAddrName ~p", [TargetName]),
            %% this will trigger error in secmodule
            ""
    end.
%% Common/shared?  Should there be differences?
%%
%% generate_v1_v2c_msg(Vsn, Pdu, Community, Log) ->
%%         {ok, Packet} | {discarded, Reason}
%%
%% Encode an outgoing v1/v2c request.  A packet larger than our max
%% message size is discarded with reason tooBig (a request, unlike a
%% response, cannot be downgraded).
%%
%% FIX: the leading remark had lost its %% comment marker (bare text
%% between functions is a syntax error); restored as a comment.
generate_v1_v2c_msg(Vsn, Pdu, Community, Log) ->
    ?vdebug("generate_v1_v2c_msg -> encode pdu", []),
    case (catch snmp_pdus:enc_pdu(Pdu)) of
        {'EXIT', Reason} ->
            user_err("failed encoding pdu: "
                     "(pdu: ~w, community: ~w): ~n~w",
                     [Pdu, Community, Reason]),
            {discarded, Reason};
        PduBytes ->
            MMS = get_max_message_size(),
            Message = #message{version = Vsn,
                               vsn_hdr = Community,
                               data = PduBytes},
            case generate_v1_v2c_outgoing_msg(Message) of
                {error, Reason} ->
                    user_err("failed encoding message "
                             "(pdu: ~w, community: ~w): ~n~w",
                             [Pdu, Community, Reason]),
                    {discarded, Reason};
                {ok, Packet} when byte_size(Packet) =< MMS ->
                    Log(Packet),
                    inc_snmp_out(Pdu),
                    {ok, Packet};
                {ok, Packet} ->
                    ?vlog("packet max size exceeded: "
                          "~n MMS: ~p"
                          "~n Len: ~p",
                          [MMS, byte_size(Packet)]),
                    {discarded, tooBig}
            end
    end.
%% generate_response_msg(Vsn, Pdu, MsgData, Log)
%%
%% v3 responses carry the cached message context (msg id, security
%% parameters, scoping); v1/v2c responses only need the community,
%% whether or not transport info is present in MsgData.
generate_response_msg('version-3', Pdu,
                      {MsgID, SecModel, SecName, SecLevel,
                       CtxEngineID, CtxName, SecData}, Log) ->
    generate_v3_response_msg(Pdu, MsgID, SecModel, SecName, SecLevel,
                             CtxEngineID, CtxName, SecData, Log);
generate_response_msg(Vsn, Pdu, {Comm, _SecModel}, Log) ->
    generate_v1_v2c_response_msg(Vsn, Pdu, Comm, Log);
generate_response_msg(Vsn, Pdu, {Comm, _SecModel, _TDomain, _TAddress}, Log) ->
    generate_v1_v2c_response_msg(Vsn, Pdu, Comm, Log).
%% generate_v3_response_msg(Pdu, MsgID, SecModel, SecName, SecLevel,
%%                          CtxEngineID, CtxName, SecData, Log)
%%
%% rfc2272: 7.1 steps 6-8
%% Encode, secure and log an outgoing v3 response.  If the secured
%% packet exceeds our MMS the response is rebuilt as a tooBig
%% response, or silently dropped if it already was one.
%%
%% FIXES: (1) the "rfc2272" and "encrypted" annotations had lost their
%% %% comment markers (bare text = syntax error); restored.
%% (2) typo in the oversize log message: "tooBog" -> "tooBig".
generate_v3_response_msg(#pdu{type = Type} = Pdu, MsgID,
                         SecModel, SecName, SecLevel,
                         CtxEngineID, CtxName, SecData, Log) ->
    ScopedPdu = #scopedPdu{contextEngineID = CtxEngineID,
                           contextName = CtxName,
                           data = Pdu},
    case (catch snmp_pdus:enc_scoped_pdu(ScopedPdu)) of
        {'EXIT', Reason} ->
            user_err("failed encoded scoped pdu "
                     "(pdu: ~w, contextName: ~w): ~n~w",
                     [Pdu, CtxName, Reason]),
            {discarded, Reason};
        ScopedPduBytes ->
            MMS = get_max_message_size(),
            MsgFlags = snmp_misc:mk_msg_flags(Type, SecLevel),
            V3Hdr = #v3_hdr{msgID = MsgID,
                            msgMaxSize = MMS,
                            msgFlags = MsgFlags,
                            msgSecurityModel = SecModel},
            Message = #message{version = 'version-3',
                               vsn_hdr = V3Hdr,
                               data = ScopedPduBytes},
            SecModule = sec_module(SecModel),
            SecEngineID = get_engine_id(),
            case generate_v3_outgoing_msg(Message, SecModule, SecEngineID,
                                          SecName, SecData, SecLevel) of
                {ok, Packet} when byte_size(Packet) =< MMS ->
                    if
                        SecLevel == 3 ->
                            %% encrypted - log decrypted pdu
                            Log({V3Hdr, ScopedPduBytes});
                        true ->
                            Log(Packet)
                    end,
                    inc_snmp_out(Pdu),
                    {ok, Packet};
                {ok, _Packet} when Pdu#pdu.error_status =:= tooBig ->
                    %% Already a tooBig response; cannot shrink it any
                    %% further, so drop it silently.
                    ?vlog("packet max size exceeded (tooBig): "
                          "~n MMS: ~p", [MMS]),
                    inc(snmpSilentDrops),
                    {discarded, tooBig};
                {ok, _Packet} ->
                    %% Rebuild as an empty tooBig response and retry.
                    ?vlog("packet max size exceeded: "
                          "~n MMS: ~p", [MMS]),
                    TooBigPdu = Pdu#pdu{error_status = tooBig,
                                        error_index = 0,
                                        varbinds = []},
                    generate_v3_response_msg(TooBigPdu, MsgID,
                                             SecModel, SecName, SecLevel,
                                             CtxEngineID,
                                             CtxName,
                                             SecData, Log);
                Error ->
                    Error
            end
    end.
%% Run the security module's generate_outgoing_msg and normalize the
%% outcome: a crash or {error, _} is logged as a config error and
%% returned as {discarded, Reason}; iolist output is flattened so the
%% success result is always {ok, binary()}.
generate_v3_outgoing_msg(Message,
                         SecModule, SecEngineID, SecName, SecData, SecLevel) ->
    case (catch SecModule:generate_outgoing_msg(Message,
                                                SecEngineID,
                                                SecName, SecData,
                                                SecLevel)) of
        {'EXIT', Reason} ->
            config_err("~p (message: ~p)", [Reason, Message]),
            {discarded, Reason};
        {error, Reason} ->
            config_err("~p (message: ~p)", [Reason, Message]),
            {discarded, Reason};
        Bin when is_binary(Bin) ->
            {ok, Bin};
        OutMsg when is_list(OutMsg) ->
            %% list_to_binary fails on improper iodata; surface that
            %% as {error, Reason} rather than crashing.
            case (catch list_to_binary(OutMsg)) of
                Bin when is_binary(Bin) ->
                    {ok, Bin};
                {'EXIT', Reason} ->
                    {error, Reason}
            end
    end.
%% generate_v1_v2c_response_msg(Vsn, Pdu, Comm, Log) ->
%%         {ok, Packet} | {discarded, Reason}
%%
%% Encode and log an outgoing v1/v2c response.  Packets larger than
%% our max message size are handed to too_big/6, which rebuilds the
%% response with a tooBig error-status where possible.
%%
%% FIX: the oversize branch was missing its case-clause head
%% ({ok, Packet} ->), leaving the too_big/6 call dangling and the
%% case expression syntactically invalid; restored.
generate_v1_v2c_response_msg(Vsn, Pdu, Comm, Log) ->
    case (catch snmp_pdus:enc_pdu(Pdu)) of
        {'EXIT', Reason} ->
            user_err("failed encoding pdu: "
                     "(pdu: ~w, community: ~w): ~n~w",
                     [Pdu, Comm, Reason]),
            {discarded, Reason};
        PduBytes ->
            MMS = get_max_message_size(),
            Message = #message{version = Vsn,
                               vsn_hdr = Comm,
                               data = PduBytes},
            case generate_v1_v2c_outgoing_msg(Message) of
                {error, Reason} ->
                    user_err("failed encoding message only "
                             "(pdu: ~w, community: ~w): ~n~w",
                             [Pdu, Comm, Reason]),
                    {discarded, Reason};
                {ok, Packet} when byte_size(Packet) =< MMS ->
                    Log(Packet),
                    inc_snmp_out(Pdu),
                    {ok, Packet};
                {ok, Packet} ->
                    too_big(Vsn, Pdu, Comm, MMS, byte_size(Packet), Log)
            end
    end.
%% too_big(Vsn, Pdu, Comm, MMS, Len, Log)
%%
%% Handle an encoded response that exceeded the max message size.  A
%% 'get-response' can be rebuilt with error-status tooBig (v1 nulls
%% the varbind values, v2 empties the varbind list) and re-sent via
%% too_big/4; anything else is logged and discarded.
%%
%% FIX: the fallback clause head was (_Vsn, Pdu, _Comm, _Log, MMS,
%% Len), i.e. the last three parameters shifted relative to the call
%% site too_big(Vsn, Pdu, Comm, MMS, Len, Log) — so the Log fun bound
%% as Len and the log message printed garbage.  The head now matches
%% the callers' argument order.
too_big('version-1' = Vsn, #pdu{type = 'get-response'} = Pdu,
        Comm, _MMS, _Len, Log) ->
    V = set_vb_null(Pdu#pdu.varbinds),
    TooBigPdu = Pdu#pdu{error_status = tooBig, error_index = 0, varbinds = V},
    too_big(Vsn, TooBigPdu, Comm, Log);
too_big('version-2' = Vsn, #pdu{type = 'get-response'} = Pdu,
        Comm, _MMS, _Len, Log) ->
    TooBigPdu = Pdu#pdu{error_status = tooBig, error_index = 0, varbinds = []},
    too_big(Vsn, TooBigPdu, Comm, Log);
too_big(_Vsn, Pdu, _Comm, MMS, Len, _Log) ->
    user_err("encoded pdu, ~p bytes, exceeded "
             "max message size of ~p bytes. Pdu: ~n~w",
             [Len, MMS, Pdu]),
    {discarded, tooBig}.
%% Re-encode and send the (already downgraded) tooBig response PDU.
%% Encoding failures at either stage are logged and discarded.
too_big(Vsn, Pdu, Comm, Log) ->
    case (catch snmp_pdus:enc_pdu(Pdu)) of
        {'EXIT', Reason} ->
            user_err("failed encoding pdu "
                     "(pdu: ~w, community: ~w): ~n~w",
                     [Pdu, Comm, Reason]),
            {discarded, Reason};
        PduBytes ->
            Message = #message{version = Vsn,
                               vsn_hdr = Comm,
                               data = PduBytes},
            case generate_v1_v2c_outgoing_msg(Message) of
                {error, Reason} ->
                    user_err("failed encoding message only"
                             "(pdu: ~w, community: ~w): ~n~w",
                             [Pdu, Comm, Reason]),
                    {discarded, Reason};
                {ok, Bin} ->
                    Log(Bin),
                    inc_snmp_out(Pdu),
                    {ok, Bin}
            end
    end.
%% Replace the value of every varbind with 'NULL' (used when building
%% a v1 tooBig response, which must carry no values).
set_vb_null(Varbinds) ->
    lists:map(fun(Vb) ->
                      Vb#varbind{variabletype = 'NULL', value = 'NULL'}
              end,
              Varbinds).
%% BER-encode the message wrapper (the PDU inside is assumed to be
%% encoded already).  Normalizes the encoder output so success is
%% always {ok, binary()}; crashes become {error, Reason}.
generate_v1_v2c_outgoing_msg(Message) ->
    ?vdebug("generate_v1_v2c_outgoing_msg -> encode message", []),
    case (catch snmp_pdus:enc_message_only(Message)) of
        {'EXIT', Reason} ->
            {error, Reason};
        Bin when is_binary(Bin) ->
            {ok, Bin};
        Packet when is_list(Packet) ->
            %% list output is flattened; improper iodata -> {error, _}
            case (catch list_to_binary(Packet)) of
                Bin when is_binary(Bin) ->
                    {ok, Bin};
                {'EXIT', Reason} ->
                    {error, Reason}
            end
    end.
%% Build a v3 report message for a failed incoming message.  If the
%% scoped PDU was successfully decoded we echo its request-id;
%% otherwise request-id 0 is used.
generate_v3_report_msg(MsgID, SecModel, ScopedPdu, ErrInfo, Log)
  when is_record(ScopedPdu, scopedPdu) ->
    ReqID = (ScopedPdu#scopedPdu.data)#pdu.request_id,
    generate_v3_report_msg2(MsgID, ReqID, SecModel, ErrInfo, Log);
generate_v3_report_msg(MsgID, SecModel, _, ErrInfo, Log) ->
    generate_v3_report_msg2(MsgID, 0, SecModel, ErrInfo, Log).
%% Assemble the report PDU from the error info {Varbind, SecName,
%% Opts} and hand it to generate_v3_response_msg/9.  Options not
%% supplied by the security module get defaults (securityLevel 0, our
%% own engine id, empty context name, no security data).
generate_v3_report_msg2(MsgID, ReqID, SecModel, ErrInfo, Log) ->
    {Varbind, SecName, Opts} = ErrInfo,
    Pdu = #pdu{type = report,
               request_id = ReqID,
               error_status = noError,
               error_index = 0,
               varbinds = [Varbind]},
    SecLevel = snmp_misc:get_option(securityLevel, Opts, 0),
    CtxEngineID = snmp_misc:get_option(contextEngineID, Opts, get_engine_id()),
    CtxName = snmp_misc:get_option(contextName, Opts, ""),
    SecData = snmp_misc:get_option(sec_data, Opts, []),
    generate_v3_response_msg(Pdu,
                             MsgID, SecModel, SecName, SecLevel,
                             CtxEngineID, CtxName, SecData, Log).
%% Get "our" (i.e. the manager's) max message size (MMS).
%% The manager engine's max message size; if the config lookup fails
%% the error is logged and 484 (the SNMP minimum) is used instead.
get_max_message_size() ->
    case snmpm_config:get_engine_max_message_size() of
        {ok, MMS} ->
            MMS;
        E ->
            user_err("failed retrieving engine max message size: ~w", [E]),
            484
    end.
%% Max message size for a specific agent (by transport domain and
%% address).  When the agent is unknown, log the known agents (with
%% their transport addresses, where resolvable) and fall back to our
%% own engine MMS.
get_agent_max_message_size(Domain, Addr) ->
    case snmpm_config:get_agent_engine_max_message_size(Domain, Addr) of
        {ok, MMS} ->
            MMS;
        _Error ->
            Taddress =
                fun(TargetName) ->
                        case snmpm_config:agent_info(TargetName, taddress) of
                            {ok, TA} ->
                                TA;
                            {error, _} ->
                                undefined
                        end
                end,
            KnownAgents =
                lists:map(fun(TN) -> {TN, Taddress(TN)} end,
                          snmpm_config:which_agents()),
            ?vlog("[agent engine max msg size lookup] unknown agent: ~s"
                  "~n Known Agents: ~p",
                  [snmp_conf:mk_addr_string({Domain, Addr}), KnownAgents]),
            get_max_message_size()
    end.
%% case snmpm_config:get_agent_engine_max_message_size(Addr, Port) of
%%     ... agent: ~w:~w", [Addr, Port]),
%% The manager's own engine id, or the empty string if the
%% configuration lookup fails (the empty id is rejected downstream by
%% the security module).
get_engine_id() ->
    Result = snmpm_config:get_engine_id(),
    case Result of
        {ok, EngineID} ->
            EngineID;
        _Error ->
            ""
    end.
%% Engine id configured for a named agent (delegates to config).
get_agent_engine_id(Name) ->
    snmpm_config:get_agent_engine_id(Name).

%% True iff EngineID is already known for the given address/port.
is_known_engine_id(EngineID, {Addr, Port}) ->
    snmpm_config:is_known_engine_id(EngineID, Addr, Port).
%% Monotonically increasing message / request ids, backed by the
%% counters kept in snmpm_config.
next_msg_id() ->
    next_id(msg_id).

next_req_id() ->
    next_id(req_id).

next_id(Id) ->
    snmpm_config:incr_counter(Id, 1).
%% Fold the configured version list into the state record, enabling
%% the corresponding v1 / v2c / v3 flag for each entry.  As in the
%% recursive original, any atom other than v1/v2/v3 crashes with a
%% function_clause.
init_versions(Vsns, S) ->
    lists:foldl(
      fun(v1, State) -> State#state{v1 = true};
         (v2, State) -> State#state{v2c = true};
         (v3, State) -> State#state{v3 = true}
      end,
      S, Vsns).
%% Initialize the USM security module, but only when v3 is enabled.
init_usm(true) ->
    snmpm_usm:init();
init_usm(_) ->
    ok.
%% Create all stats counters (those that do not already exist).
init_counters() ->
    lists:map(fun maybe_create_counter/1, counters()).

%% Reset all stats counters to their initial values.
reset_counters() ->
    lists:map(fun snmpm_config:reset_stats_counter/1, counters()).
%% Reset the USM security module, but only when v3 is enabled.
reset_usm(true) ->
    snmpm_usm:reset();
reset_usm(_) ->
    ok.

%% Create the stats counter initialized to 0 unless it already exists.
maybe_create_counter(Counter) ->
    snmpm_config:maybe_cre_stats_counter(Counter, 0).
%% The complete set of stats counters maintained by this module; used
%% by init_counters/0 and reset_counters/0.
%%
%% FIX: the "From SNMP-MPD-MIB" annotation inside the list had lost
%% its %% comment marker, making the list literal syntactically
%% invalid; restored as a comment.
counters() ->
    [snmpInPkts,
     snmpOutPkts,
     snmpInBadVersions,
     snmpInBadCommunityNames,
     snmpInBadCommunityUses,
     snmpInASNParseErrs,
     snmpInTooBigs,
     snmpInNoSuchNames,
     snmpInBadValues,
     snmpInReadOnlys,
     snmpInGenErrs,
     snmpInTotalReqVars,
     snmpInTotalSetVars,
     snmpInGetRequests,
     snmpInGetNexts,
     snmpInSetRequests,
     snmpInGetResponses,
     snmpInTraps,
     snmpOutTooBigs,
     snmpOutNoSuchNames,
     snmpOutBadValues,
     snmpOutGenErrs,
     snmpOutGetRequests,
     snmpOutGetNexts,
     snmpOutSetRequests,
     snmpOutGetResponses,
     snmpOutTraps,
     snmpSilentDrops,
     snmpProxyDrops,
     %% From SNMP-MPD-MIB
     snmpUnknownSecurityModels,
     snmpInvalidMsgs,
     snmpUnknownPDUHandlers
    ].
%% Increment a counter in the local mib (e.g. snmpInPkts).
%% Increment a stats counter by 1, or by N.
inc(Name) -> inc(Name, 1).
inc(Name, N) -> snmpm_config:incr_stats_counter(Name, N).
%% Bump incoming-traffic counters: per-type counter for a decoded PDU,
%% packet + trap counters for a trap record.
inc_snmp_in(#pdu{type = Type}) ->
    inc_in_type(Type);
inc_snmp_in(TrapPdu) when is_record(TrapPdu, trappdu) ->
    inc(snmpInPkts),
    inc(snmpInTraps).

%% Bump outgoing-traffic counters: total packets plus the per-error
%% and per-type counters.
inc_snmp_out(#pdu{type = Type,
                  error_status = ErrorStatus}) ->
    inc(snmpOutPkts),
    inc_out_err(ErrorStatus),
    inc_out_type(Type).
%% Map an outgoing PDU type to its stats counter; types without a
%% dedicated counter are ignored.
inc_out_type(Type) ->
    case Type of
        'get-request'      -> inc(snmpOutGetRequests);
        'get-next-request' -> inc(snmpOutGetNexts);
        'set-request'      -> inc(snmpOutSetRequests);
        _                  -> ok
    end.

%% Map an outgoing error-status to its stats counter, if any.
inc_out_err(ErrorStatus) ->
    case ErrorStatus of
        genErr     -> inc(snmpOutGenErrs);
        tooBig     -> inc(snmpOutTooBigs);
        noSuchName -> inc(snmpOutNoSuchNames);
        badValue   -> inc(snmpOutBadValues);
        _          -> ok
    end.

%% Map an incoming PDU type to its stats counter, if any.
inc_in_type(Type) ->
    case Type of
        'get-response' -> inc(snmpInGetResponses);
        _              -> ok
    end.
%% Abort processing of the current message by throwing; callers of
%% the MPD entry points catch {discarded, _} / {discarded, _, _}
%% (the 3-tuple form carries a report message to send back).
discard(Reason) ->
    throw({discarded, Reason}).

discard(Reason, Report) ->
    throw({discarded, Reason, Report}).
%% Log a user-level error.
user_err(F, A) ->
    error_msg("USER ERROR: " ++ F ++ "~n", A).

%% Log a configuration error.
config_err(F, A) ->
    error_msg("CONFIG ERROR: " ++ F ++ "~n", A).

%% Common sink: prefix with the module tag and hand to the snmpm
%% error-logging macro.
error_msg(F, A) ->
    ?snmpm_error("MPD: " ++ F, A).
|
fa27fd1ca945a019d6034f127349bdf1c50800a8143a7953f31427660877f714 | shayne-fletcher/zen | c_type.mli | include C_type_sig.S
| null | https://raw.githubusercontent.com/shayne-fletcher/zen/10a1d0b9bf261bb133918dd62fb1593c3d4d21cb/ocaml/regexp/c_type.mli | ocaml | include C_type_sig.S
| |
97603671f5c8006194558464551bf65fb342564b66eed1ea2c315e9531e2780f | sellout/haskerwaul | Frobenius.hs | # language UndecidableSuperClasses #
module Haskerwaul.Monoid.Frobenius
( module Haskerwaul.Monoid.Frobenius
-- * extended modules
, module Haskerwaul.Comonoid
, module Haskerwaul.Monoid
) where
import Haskerwaul.Comonoid
import Haskerwaul.Monoid
-- | [nLab](+algebra)
class (Monoid c t a, Comonoid c t a) => FrobeniusMonoid c t a
| null | https://raw.githubusercontent.com/sellout/haskerwaul/cf54bd7ce5bf4f3d1fd0d9d991dc733785b66a73/src/Haskerwaul/Monoid/Frobenius.hs | haskell | * extended modules
| [nLab](+algebra) | # language UndecidableSuperClasses #
module Haskerwaul.Monoid.Frobenius
( module Haskerwaul.Monoid.Frobenius
, module Haskerwaul.Comonoid
, module Haskerwaul.Monoid
) where
import Haskerwaul.Comonoid
import Haskerwaul.Monoid
class (Monoid c t a, Comonoid c t a) => FrobeniusMonoid c t a
|
f9237b1fe8cf086cb4ef9889c40a88c040f561466d8ed95e30dad880ef7a0757 | Opetushallitus/aipal | i18n.clj | Copyright ( c ) 2013 The Finnish National Board of Education - Opetushallitus
;;
This program is free software : Licensed under the EUPL , Version 1.1 or - as
soon as they will be approved by the European Commission - subsequent versions
of the EUPL ( the " Licence " ) ;
;;
;; You may not use this work except in compliance with the Licence.
;; You may obtain a copy of the Licence at: /
;;
;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
European Union Public Licence for more details .
(ns aipalvastaus.rest-api.i18n
(:import (java.util Locale
ResourceBundle
ResourceBundle$Control))
(:require [compojure.api.core :refer [defroutes GET]]
[schema.core :as s]
[oph.common.util.http-util :refer [response-nocache]]
[oph.common.util.util :refer [pisteavaimet->puu]]))
(def Kieli (s/enum "fi" "sv"))
;; Load the localized UI texts for the given language code from the
;; i18n/tekstit resource bundle and return them as a nested map
;; (dotted property keys expanded into a tree by pisteavaimet->puu).
(defn hae-tekstit [kieli]
  ;; Clear the bundle cache first so edited .properties files are
  ;; re-read without a restart.
  (ResourceBundle/clearCache)
  ;; NoFallbackControl: fail instead of silently falling back to the
  ;; default locale's bundle.
  (let [bundle (ResourceBundle/getBundle "i18n/tekstit" (Locale. kieli) (ResourceBundle$Control/getNoFallbackControl ResourceBundle$Control/FORMAT_PROPERTIES))]
    (->> (for [key (.keySet bundle)]
           [(keyword key) (.getString bundle key)])
         (into {})
         pisteavaimet->puu)))
(defroutes reitit
(GET "/:kieli" []
:path-params [kieli :- Kieli]
(response-nocache (hae-tekstit kieli))))
| null | https://raw.githubusercontent.com/Opetushallitus/aipal/767bd14ec7153dc97fdf688443b9687cdb70082f/aipal-vastaus/src/clj/aipalvastaus/rest_api/i18n.clj | clojure |
You may not use this work except in compliance with the Licence.
You may obtain a copy of the Licence at: /
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | Copyright ( c ) 2013 The Finnish National Board of Education - Opetushallitus
This program is free software : Licensed under the EUPL , Version 1.1 or - as
soon as they will be approved by the European Commission - subsequent versions
European Union Public Licence for more details .
(ns aipalvastaus.rest-api.i18n
(:import (java.util Locale
ResourceBundle
ResourceBundle$Control))
(:require [compojure.api.core :refer [defroutes GET]]
[schema.core :as s]
[oph.common.util.http-util :refer [response-nocache]]
[oph.common.util.util :refer [pisteavaimet->puu]]))
(def Kieli (s/enum "fi" "sv"))
(defn hae-tekstit [kieli]
(ResourceBundle/clearCache)
(let [bundle (ResourceBundle/getBundle "i18n/tekstit" (Locale. kieli) (ResourceBundle$Control/getNoFallbackControl ResourceBundle$Control/FORMAT_PROPERTIES))]
(->> (for [key (.keySet bundle)]
[(keyword key) (.getString bundle key)])
(into {})
pisteavaimet->puu)))
(defroutes reitit
(GET "/:kieli" []
:path-params [kieli :- Kieli]
(response-nocache (hae-tekstit kieli))))
|
48664db48938a143ef12333040ca691006c36600b053637f0ea1a4d472fdb462 | janestreet/ppx_sexp_conv | test_functors.ml | open! Base
module Maybe = struct
type 'a t = 'a option [@@deriving_inline sexp_grammar]
let _ = fun (_ : 'a t) -> ()
let t_sexp_grammar : 'a. 'a Sexplib0.Sexp_grammar.t -> 'a t Sexplib0.Sexp_grammar.t =
fun _'a_sexp_grammar -> option_sexp_grammar _'a_sexp_grammar
;;
let _ = t_sexp_grammar
[@@@end]
end
module Make (T : sig
type 'a t [@@deriving sexp_grammar]
end) =
struct
[@@@warning "-37"]
type 'a t = T of 'a T.t u
and 'a u = U of 'a T.t t Maybe.t [@@deriving_inline sexp_grammar]
let _ = fun (_ : 'a t) -> ()
let _ = fun (_ : 'a u) -> ()
include struct
open struct
let (grammars__001_ : Sexplib0.Sexp_grammar.defn Stdlib.List.t Stdlib.Lazy.t) =
lazy
(let t_sexp_grammar
: 'a. 'a Sexplib0.Sexp_grammar.t -> 'a t Sexplib0.Sexp_grammar.t
=
fun _'a_sexp_grammar ->
{ untyped = Recursive ("t", [ _'a_sexp_grammar.untyped ]) }
and u_sexp_grammar
: 'a. 'a Sexplib0.Sexp_grammar.t -> 'a u Sexplib0.Sexp_grammar.t
=
fun _'a_sexp_grammar ->
{ untyped = Recursive ("u", [ _'a_sexp_grammar.untyped ]) }
in
[ { tycon = "t"
; tyvars = [ "a" ]
; grammar =
Variant
{ case_sensitivity = Case_sensitive_except_first_character
; clauses =
[ No_tag
{ name = "T"
; clause_kind =
List_clause
{ args =
Cons
( (u_sexp_grammar
(T.t_sexp_grammar { untyped = Tyvar "a" }))
.untyped
, Empty )
}
}
]
}
}
; { tycon = "u"
; tyvars = [ "a" ]
; grammar =
Variant
{ case_sensitivity = Case_sensitive_except_first_character
; clauses =
[ No_tag
{ name = "U"
; clause_kind =
List_clause
{ args =
Cons
( (Maybe.t_sexp_grammar
(t_sexp_grammar
(T.t_sexp_grammar { untyped = Tyvar "a" })))
.untyped
, Empty )
}
}
]
}
}
])
;;
let _ = grammars__001_
end
let t_sexp_grammar : 'a. 'a Sexplib0.Sexp_grammar.t -> 'a t Sexplib0.Sexp_grammar.t =
fun _'a_sexp_grammar ->
{ untyped =
Tycon ("t", [ _'a_sexp_grammar.untyped ], Stdlib.Lazy.force grammars__001_)
}
and u_sexp_grammar : 'a. 'a Sexplib0.Sexp_grammar.t -> 'a u Sexplib0.Sexp_grammar.t =
fun _'a_sexp_grammar ->
{ untyped =
Tycon ("u", [ _'a_sexp_grammar.untyped ], Stdlib.Lazy.force grammars__001_)
}
;;
let _ = t_sexp_grammar
and _ = u_sexp_grammar
end
[@@@end]
type 'a v = V of 'a t [@@deriving_inline sexp_grammar]
let _ = fun (_ : 'a v) -> ()
let v_sexp_grammar : 'a. 'a Sexplib0.Sexp_grammar.t -> 'a v Sexplib0.Sexp_grammar.t =
fun _'a_sexp_grammar ->
{ untyped =
Variant
{ case_sensitivity = Case_sensitive_except_first_character
; clauses =
[ No_tag
{ name = "V"
; clause_kind =
List_clause
{ args = Cons ((t_sexp_grammar _'a_sexp_grammar).untyped, Empty) }
}
]
}
}
;;
let _ = v_sexp_grammar
[@@@end]
end
module T1 = Make (Maybe)
module T2 = Make (T1)
type t = int T2.t * int T1.t [@@deriving_inline sexp_grammar]
let _ = fun (_ : t) -> ()
let (t_sexp_grammar : t Sexplib0.Sexp_grammar.t) =
{ untyped =
Lazy
(lazy
(List
(Cons
( (T2.t_sexp_grammar int_sexp_grammar).untyped
, Cons ((T1.t_sexp_grammar int_sexp_grammar).untyped, Empty) ))))
}
;;
let _ = t_sexp_grammar
[@@@end]
| null | https://raw.githubusercontent.com/janestreet/ppx_sexp_conv/04959fa8bd3799734ba1ee5e50b9eab9872057aa/test/sexp_grammar/test_functors.ml | ocaml | open! Base
module Maybe = struct
type 'a t = 'a option [@@deriving_inline sexp_grammar]
let _ = fun (_ : 'a t) -> ()
let t_sexp_grammar : 'a. 'a Sexplib0.Sexp_grammar.t -> 'a t Sexplib0.Sexp_grammar.t =
fun _'a_sexp_grammar -> option_sexp_grammar _'a_sexp_grammar
;;
let _ = t_sexp_grammar
[@@@end]
end
module Make (T : sig
type 'a t [@@deriving sexp_grammar]
end) =
struct
[@@@warning "-37"]
type 'a t = T of 'a T.t u
and 'a u = U of 'a T.t t Maybe.t [@@deriving_inline sexp_grammar]
let _ = fun (_ : 'a t) -> ()
let _ = fun (_ : 'a u) -> ()
include struct
open struct
let (grammars__001_ : Sexplib0.Sexp_grammar.defn Stdlib.List.t Stdlib.Lazy.t) =
lazy
(let t_sexp_grammar
: 'a. 'a Sexplib0.Sexp_grammar.t -> 'a t Sexplib0.Sexp_grammar.t
=
fun _'a_sexp_grammar ->
{ untyped = Recursive ("t", [ _'a_sexp_grammar.untyped ]) }
and u_sexp_grammar
: 'a. 'a Sexplib0.Sexp_grammar.t -> 'a u Sexplib0.Sexp_grammar.t
=
fun _'a_sexp_grammar ->
{ untyped = Recursive ("u", [ _'a_sexp_grammar.untyped ]) }
in
[ { tycon = "t"
; tyvars = [ "a" ]
; grammar =
Variant
{ case_sensitivity = Case_sensitive_except_first_character
; clauses =
[ No_tag
{ name = "T"
; clause_kind =
List_clause
{ args =
Cons
( (u_sexp_grammar
(T.t_sexp_grammar { untyped = Tyvar "a" }))
.untyped
, Empty )
}
}
]
}
}
; { tycon = "u"
; tyvars = [ "a" ]
; grammar =
Variant
{ case_sensitivity = Case_sensitive_except_first_character
; clauses =
[ No_tag
{ name = "U"
; clause_kind =
List_clause
{ args =
Cons
( (Maybe.t_sexp_grammar
(t_sexp_grammar
(T.t_sexp_grammar { untyped = Tyvar "a" })))
.untyped
, Empty )
}
}
]
}
}
])
;;
let _ = grammars__001_
end
let t_sexp_grammar : 'a. 'a Sexplib0.Sexp_grammar.t -> 'a t Sexplib0.Sexp_grammar.t =
fun _'a_sexp_grammar ->
{ untyped =
Tycon ("t", [ _'a_sexp_grammar.untyped ], Stdlib.Lazy.force grammars__001_)
}
and u_sexp_grammar : 'a. 'a Sexplib0.Sexp_grammar.t -> 'a u Sexplib0.Sexp_grammar.t =
fun _'a_sexp_grammar ->
{ untyped =
Tycon ("u", [ _'a_sexp_grammar.untyped ], Stdlib.Lazy.force grammars__001_)
}
;;
let _ = t_sexp_grammar
and _ = u_sexp_grammar
end
[@@@end]
type 'a v = V of 'a t [@@deriving_inline sexp_grammar]
let _ = fun (_ : 'a v) -> ()
let v_sexp_grammar : 'a. 'a Sexplib0.Sexp_grammar.t -> 'a v Sexplib0.Sexp_grammar.t =
fun _'a_sexp_grammar ->
{ untyped =
Variant
{ case_sensitivity = Case_sensitive_except_first_character
; clauses =
[ No_tag
{ name = "V"
; clause_kind =
List_clause
{ args = Cons ((t_sexp_grammar _'a_sexp_grammar).untyped, Empty) }
}
]
}
}
;;
let _ = v_sexp_grammar
[@@@end]
end
module T1 = Make (Maybe)
module T2 = Make (T1)
type t = int T2.t * int T1.t [@@deriving_inline sexp_grammar]
let _ = fun (_ : t) -> ()
let (t_sexp_grammar : t Sexplib0.Sexp_grammar.t) =
{ untyped =
Lazy
(lazy
(List
(Cons
( (T2.t_sexp_grammar int_sexp_grammar).untyped
, Cons ((T1.t_sexp_grammar int_sexp_grammar).untyped, Empty) ))))
}
;;
let _ = t_sexp_grammar
[@@@end]
| |
e3cdd3d6d34aa630815fceb0c1164f809d5a2cd4b1e0ce28a3dba5149b278f6f | amccausl/Swish | TraceHelpers.hs | --------------------------------------------------------------------------------
$ I d : TraceHelpers.hs , v 1.1 2004/01/13 12:31:24 graham Exp $
--
Copyright ( c ) 2003 , . All rights reserved .
-- See end of this file for licence information.
--------------------------------------------------------------------------------
-- |
Module : TraceHelpers
Copyright : ( c ) 2003 ,
License : GPL V2
--
Maintainer :
-- Stability : provisional
-- Portability : H98
--
-- This module provides access to tracing functions from the pre-2003
-- Hugs trace module. Over time, it may accumulate other tracing
-- functions that I find useful.
--
--------------------------------------------------------------------------------
module Swish.HaskellUtils.TraceHelpers
( trace, traceShow
)
where
import Debug.Trace
( trace )
------------------------------------------------------------
traceShow function from older Hugs trace module
------------------------------------------------------------
traceShow :: Show a => String -> a -> a
traceShow msg x = trace (msg ++ show x) x
--------------------------------------------------------------------------------
--
Copyright ( c ) 2003 , . All rights reserved .
--
This file is part of Swish .
--
Swish is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
-- (at your option) any later version.
--
Swish is distributed in the hope that it will be useful ,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
You should have received a copy of the GNU General Public License
along with Swish ; if not , write to :
The Free Software Foundation , Inc. ,
59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
--
--------------------------------------------------------------------------------
$ Source : /file / / HaskellUtils / TraceHelpers.hs , v $
$ Author :
$ Revision : 1.1 $
-- $Log: TraceHelpers.hs,v $
Revision 1.1 2004/01/13 12:31:24 graham
Move modules from HaskellRDF to HaskellUtils project
--
Revision 1.1 2003/12/20 12:02:08
Introduced new TraceHelpers module for Hugs-2003 compatibility .
--
Revision 1.3 2003/12/18 18:29:03 graham
-- ??????
--
| null | https://raw.githubusercontent.com/amccausl/Swish/9a7356300960c62e3f0468067bda0c34ee3606bd/Swish/HaskellUtils/TraceHelpers.hs | haskell | ------------------------------------------------------------------------------
See end of this file for licence information.
------------------------------------------------------------------------------
|
Stability : provisional
Portability : H98
This module provides access to tracing functions from the pre-2003
Hugs trace module. Over time, it may accumulate other tracing
functions that I find useful.
------------------------------------------------------------------------------
----------------------------------------------------------
----------------------------------------------------------
------------------------------------------------------------------------------
(at your option) any later version.
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
------------------------------------------------------------------------------
$Log: TraceHelpers.hs,v $
??????
| $ I d : TraceHelpers.hs , v 1.1 2004/01/13 12:31:24 graham Exp $
Copyright ( c ) 2003 , . All rights reserved .
Module : TraceHelpers
Copyright : ( c ) 2003 ,
License : GPL V2
Maintainer :
module Swish.HaskellUtils.TraceHelpers
( trace, traceShow
)
where
import Debug.Trace
( trace )
traceShow function from older Hugs trace module
traceShow :: Show a => String -> a -> a
traceShow msg x = trace (msg ++ show x) x
Copyright ( c ) 2003 , . All rights reserved .
This file is part of Swish .
Swish is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
Swish is distributed in the hope that it will be useful ,
You should have received a copy of the GNU General Public License
along with Swish ; if not , write to :
The Free Software Foundation , Inc. ,
59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
$ Source : /file / / HaskellUtils / TraceHelpers.hs , v $
$ Author :
$ Revision : 1.1 $
Revision 1.1 2004/01/13 12:31:24 graham
Move modules from HaskellRDF to HaskellUtils project
Revision 1.1 2003/12/20 12:02:08
Introduced new TraceHelpers module for Hugs-2003 compatibility .
Revision 1.3 2003/12/18 18:29:03 graham
|
2d78cb6cbdd2575cbd1208b413d018d30072d2f9c455d5a383282bd7e867155c | OCamlPro/ocplib-endian | be_ocaml_401.ml | let get_uint16 s off =
if not Sys.big_endian
then swap16 (get_16 s off)
else get_16 s off
[@@ocaml.inline]
let get_int16 s off =
((get_uint16 s off) lsl ( Sys.int_size - 16 )) asr ( Sys.int_size - 16 )
[@@ocaml.inline]
let get_int32 s off =
if not Sys.big_endian
then swap32 (get_32 s off)
else get_32 s off
[@@ocaml.inline]
let get_int64 s off =
if not Sys.big_endian
then swap64 (get_64 s off)
else get_64 s off
[@@ocaml.inline]
let set_int16 s off v =
if not Sys.big_endian
then (set_16 s off (swap16 v))
else set_16 s off v
[@@ocaml.inline]
let set_int32 s off v =
if not Sys.big_endian
then set_32 s off (swap32 v)
else set_32 s off v
[@@ocaml.inline]
let set_int64 s off v =
if not Sys.big_endian
then set_64 s off (swap64 v)
else set_64 s off v
[@@ocaml.inline]
| null | https://raw.githubusercontent.com/OCamlPro/ocplib-endian/10292cd3ffa4d23d737e3f855ad04f22d3d95460/src/be_ocaml_401.ml | ocaml | let get_uint16 s off =
if not Sys.big_endian
then swap16 (get_16 s off)
else get_16 s off
[@@ocaml.inline]
let get_int16 s off =
((get_uint16 s off) lsl ( Sys.int_size - 16 )) asr ( Sys.int_size - 16 )
[@@ocaml.inline]
let get_int32 s off =
if not Sys.big_endian
then swap32 (get_32 s off)
else get_32 s off
[@@ocaml.inline]
let get_int64 s off =
if not Sys.big_endian
then swap64 (get_64 s off)
else get_64 s off
[@@ocaml.inline]
let set_int16 s off v =
if not Sys.big_endian
then (set_16 s off (swap16 v))
else set_16 s off v
[@@ocaml.inline]
let set_int32 s off v =
if not Sys.big_endian
then set_32 s off (swap32 v)
else set_32 s off v
[@@ocaml.inline]
let set_int64 s off v =
if not Sys.big_endian
then set_64 s off (swap64 v)
else set_64 s off v
[@@ocaml.inline]
| |
d139cc8d5c832b297ec48891b4c6108712397710b9fc2a66f562f5acd7afcaf3 | penpot/penpot | align.cljs | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
;;
;; Copyright (c) KALEIDOS INC
;; Align/distribute options panel shown in the workspace sidebar.
(ns app.main.ui.workspace.sidebar.options.menus.align
  (:require
   [app.main.data.workspace :as dw]           ;; align/distribute events and predicates
   [app.main.data.workspace.shortcuts :as sc] ;; keyboard-shortcut lookup for tooltips
   [app.main.refs :as refs]
   [app.main.store :as st]
   [app.main.ui.icons :as i]
   [app.util.i18n :as i18n :refer [tr]]
   [rumext.v2 :as mf]))
(mf/defc align-options
  ;; Toolbar with two button groups: horizontal align/distribute and
  ;; vertical align/distribute.  Align buttons are disabled when the
  ;; current selection cannot be aligned; distribute buttons when it
  ;; cannot be distributed.
  []
  (let [selected (mf/deref refs/selected-shapes)
        ;; don't need to watch objects, only read the value
        objects (deref refs/workspace-page-objects)
        disabled (not (dw/can-align? selected objects))
        disabled-distribute (not(dw/can-distribute? selected))]
    [:div.align-options
     ;; Horizontal group
     [:div.align-group
      [:div.align-button.tooltip.tooltip-bottom
       {:alt (tr "workspace.align.hleft" (sc/get-tooltip :align-left))
        :class (when disabled "disabled")
        :on-click #(st/emit! (dw/align-objects :hleft))}
       i/shape-halign-left]
      [:div.align-button.tooltip.tooltip-bottom
       {:alt (tr "workspace.align.hcenter" (sc/get-tooltip :align-hcenter))
        :class (when disabled "disabled")
        :on-click #(st/emit! (dw/align-objects :hcenter))}
       i/shape-halign-center]
      [:div.align-button.tooltip.tooltip-bottom
       {:alt (tr "workspace.align.hright" (sc/get-tooltip :align-right))
        :class (when disabled "disabled")
        :on-click #(st/emit! (dw/align-objects :hright))}
       i/shape-halign-right]
      [:div.align-button.tooltip.tooltip-bottom
       {:alt (tr "workspace.align.hdistribute" (sc/get-tooltip :h-distribute))
        :class (when disabled-distribute "disabled")
        :on-click #(st/emit! (dw/distribute-objects :horizontal))}
       i/shape-hdistribute]]
     ;; Vertical group
     [:div.align-group
      [:div.align-button.tooltip.tooltip-bottom-left
       {:alt (tr "workspace.align.vtop" (sc/get-tooltip :align-top))
        :class (when disabled "disabled")
        :on-click #(st/emit! (dw/align-objects :vtop))}
       i/shape-valign-top]
      [:div.align-button.tooltip.tooltip-bottom-left
       {:alt (tr "workspace.align.vcenter" (sc/get-tooltip :align-vcenter))
        :class (when disabled "disabled")
        :on-click #(st/emit! (dw/align-objects :vcenter))}
       i/shape-valign-center]
      [:div.align-button.tooltip.tooltip-bottom-left
       {:alt (tr "workspace.align.vbottom" (sc/get-tooltip :align-bottom))
        :class (when disabled "disabled")
        :on-click #(st/emit! (dw/align-objects :vbottom))}
       i/shape-valign-bottom]
      [:div.align-button.tooltip.tooltip-bottom-left
       {:alt (tr "workspace.align.vdistribute" (sc/get-tooltip :v-distribute))
        :class (when disabled-distribute "disabled")
        :on-click #(st/emit! (dw/distribute-objects :vertical))}
       i/shape-vdistribute]]]))
| null | https://raw.githubusercontent.com/penpot/penpot/7303d311d5f23d515fa3fcdc6cd13cf7f429d1fe/frontend/src/app/main/ui/workspace/sidebar/options/menus/align.cljs | clojure |
Copyright (c) KALEIDOS INC
don't need to watch objects, only read the value | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(ns app.main.ui.workspace.sidebar.options.menus.align
(:require
[app.main.data.workspace :as dw]
[app.main.data.workspace.shortcuts :as sc]
[app.main.refs :as refs]
[app.main.store :as st]
[app.main.ui.icons :as i]
[app.util.i18n :as i18n :refer [tr]]
[rumext.v2 :as mf]))
(mf/defc align-options
[]
(let [selected (mf/deref refs/selected-shapes)
objects (deref refs/workspace-page-objects)
disabled (not (dw/can-align? selected objects))
disabled-distribute (not(dw/can-distribute? selected))]
[:div.align-options
[:div.align-group
[:div.align-button.tooltip.tooltip-bottom
{:alt (tr "workspace.align.hleft" (sc/get-tooltip :align-left))
:class (when disabled "disabled")
:on-click #(st/emit! (dw/align-objects :hleft))}
i/shape-halign-left]
[:div.align-button.tooltip.tooltip-bottom
{:alt (tr "workspace.align.hcenter" (sc/get-tooltip :align-hcenter))
:class (when disabled "disabled")
:on-click #(st/emit! (dw/align-objects :hcenter))}
i/shape-halign-center]
[:div.align-button.tooltip.tooltip-bottom
{:alt (tr "workspace.align.hright" (sc/get-tooltip :align-right))
:class (when disabled "disabled")
:on-click #(st/emit! (dw/align-objects :hright))}
i/shape-halign-right]
[:div.align-button.tooltip.tooltip-bottom
{:alt (tr "workspace.align.hdistribute" (sc/get-tooltip :h-distribute))
:class (when disabled-distribute "disabled")
:on-click #(st/emit! (dw/distribute-objects :horizontal))}
i/shape-hdistribute]]
[:div.align-group
[:div.align-button.tooltip.tooltip-bottom-left
{:alt (tr "workspace.align.vtop" (sc/get-tooltip :align-top))
:class (when disabled "disabled")
:on-click #(st/emit! (dw/align-objects :vtop))}
i/shape-valign-top]
[:div.align-button.tooltip.tooltip-bottom-left
{:alt (tr "workspace.align.vcenter" (sc/get-tooltip :align-vcenter))
:class (when disabled "disabled")
:on-click #(st/emit! (dw/align-objects :vcenter))}
i/shape-valign-center]
[:div.align-button.tooltip.tooltip-bottom-left
{:alt (tr "workspace.align.vbottom" (sc/get-tooltip :align-bottom))
:class (when disabled "disabled")
:on-click #(st/emit! (dw/align-objects :vbottom))}
i/shape-valign-bottom]
[:div.align-button.tooltip.tooltip-bottom-left
{:alt (tr "workspace.align.vdistribute" (sc/get-tooltip :v-distribute))
:class (when disabled-distribute "disabled")
:on-click #(st/emit! (dw/distribute-objects :vertical))}
i/shape-vdistribute]]]))
|
f84b897d0cd50b9d2c27282eccd80cee328f7189b76ecb4b75d82aefa25d4e25 | eslick/cl-registry | qualitymetric.lisp | ;; -*- Mode:Lisp; tab-width:2; indent-tabs-mode:nil; -*-
Copyright ( c ) 2008 - 2010 , Massachusetts Institute of;Technology . All rights reserved .
Copyright ( c ) 2008 - 2010 , LAM Treatment Alliance . All rights reserved .
;;; Released under a BSD-style license: -license.php
;;; See LICENSE file
(in-package :registry)
(define-plugin qualitymetric ()
)
;;; QualityMetric client
;; Process flow is as follows:
;;
;; 1) User browses to /qualitymetric/start and presses Submit (TBD: parameterize survey name)
;; 2) Client form connects to QualityMetric site to login and run survey
;; 3) QualityMetric calls back via GET to /qualitymetric/done and launches survey
;; 4) After survey complete QualityMetric calls back via POST to /qualitymetric/results
;;
;; Components:
;; 1) qualitymetric class to encapsulate query and results
;; 2) widget for form
;; 3) selector and handler methods for URLs to render form, process results, and handle errors
;; When true, the survey is run inside an <iframe> on our page (the start
;; form targets the "result" frame and passes NW=1); when false the form
;; targets the current window.
(defvar *qualitymetric-show-results-in-iframe* t)
;; URL path prefix under which all QualityMetric pages are served.
(defvar *qualitymetric-root-pathname* (make-pathname :directory '(:absolute "qualitymetric")))
(defun qualitymetric-pathname (pathname)
  "Return PATHNAME merged under *QUALITYMETRIC-ROOT-PATHNAME*, i.e. a
path below /qualitymetric/."
  (merge-pathnames pathname *qualitymetric-root-pathname*))
;;; Request URIs
(defun qualitymetric-page-url (pathname &key
                                        (http "http" http-supplied-p)
                                        (port (hunchentoot:acceptor-port *weblocks-server*))
                                        (address (hunchentoot:acceptor-address *weblocks-server*)))
  "Build an absolute URL scheme://ADDRESS[:PORT]/... for PATHNAME under
the QualityMetric root.  The scheme defaults to HTTP, except that when
PORT is 443 and no scheme was supplied explicitly, \"https\" is used."
  ;; NOTE(review): the lower bound of 80 presumably rejects obviously bad
  ;; port numbers -- confirm intent.
  (check-type port (integer 80.))
  (let ((httpx (cond
                 ;; Default scheme on the standard TLS port => https.
                 ((and (= port 443.) (not http-supplied-p))
                  "https")
                 ;; Otherwise use the supplied (or default) scheme string.
                 (http)
                 ;; Unreachable while HTTP defaults to a non-nil string.
                 (t "http"))))
    (format nil "~A://~A~@[:~D~]~A"
            httpx address port
            (puri:uri (qualitymetric-pathname pathname)))))
(defun qualitymetric-request-uri ()
"URL of page for current request"
(qualitymetric-page-url (weblocks:request-uri-path)))
(defun qualitymetric-start-page-pathname ()
(qualitymetric-pathname (make-pathname :directory '(:relative "start"))))
(defun qualitymetric-start-page-url ()
  "Absolute URL of the survey start page, e.g.
\"http://<host>:8080/qualitymetric/start/\"."
  ;; Fix: the original comment line here had lost its ";;" prefix, leaving
  ;; bare symbols in the function body; folded it into the docstring.
  (qualitymetric-page-url (make-pathname :directory '(:relative "start"))))
(defun qualitymetric-results-page-pathname ()
(qualitymetric-pathname (make-pathname :directory '(:relative "results"))))
(defun qualitymetric-results-page-url ()
  "Absolute URL of the survey results page, e.g.
\"http://<host>:8080/qualitymetric/results/\"."
  ;; Fix: restored the garbled (un-prefixed) comment as a docstring.
  (qualitymetric-page-url (make-pathname :directory '(:relative "results"))))
(defun qualitymetric-results-page-helper-pathname ()
(make-pathname :directory '(:absolute "qualitymetric-results-helper")))
(defun qualitymetric-results-page-helper-url ()
  "Absolute URL of the results helper page served from
QUALITYMETRIC-RESULTS-PAGE-HELPER-PATHNAME."
  ;; E.g. "/qualitymetric-results-helper/"
  ;; TODO: fill in -- the original TODO line was garbled in transit
  ;; ("TODO : Fill in >"); verify the intended host/path.
  (qualitymetric-page-url (qualitymetric-results-page-helper-pathname)))
(defun qualitymetric-done-page-pathname ()
(qualitymetric-pathname (make-pathname :directory '(:relative "done"))))
(defun qualitymetric-done-page-url ()
  "Absolute URL of the post-survey landing page, e.g.
\"http://<host>:8080/qualitymetric/done/\"."
  ;; Fix: restored the garbled (un-prefixed) comment as a docstring.
  (qualitymetric-page-url (make-pathname :directory '(:relative "done"))))
(defun qualitymetric-test-page-pathname ()
(qualitymetric-pathname (make-pathname :directory '(:relative "test"))))
(defun qualitymetric-test-page-url ()
  "Absolute URL of the test page, e.g.
\"http://<host>:8080/qualitymetric/test/\"."
  ;; Fix: restored the garbled (un-prefixed) comment as a docstring.
  (qualitymetric-page-url (make-pathname :directory '(:relative "test"))))
;;; Connect to QualityMetric server
;;
;; Encapsulate everything we need to connect to QualityMetric site and get survey results
(defclass qualitymetric-connect ()
  ((uri :accessor qmconn-uri :initarg :uri)                ; QM entry-point URL
   (referrer :accessor qmconn-referrer :initarg :referrer) ; partner/referrer id
   (survey :accessor qmconn-survey :initarg :survey)       ; numeric survey id
   (group :accessor qmconn-group :initarg :group)          ; numeric group id
   (login :accessor qmconn-login :initarg :login))         ; per-patient login name
  (:documentation "Everything needed to connect to the QualityMetric site,
run a survey, and get the results back."))
;;; Set load time global defaults
;;
;; Todo: get initial values from site config params
(defvar *qualitymetric-connect-default-obj*)
(defun qualitymetric-connect-init-defaults (&key uri referrer survey group)
  "Reset *QUALITYMETRIC-CONNECT-DEFAULT-OBJ* to a fresh connection object
built from the given keyword arguments; returns the new object."
  ;; Parameter values supplied by QualityMetric Corp.
  ;; (Fix: this comment line had lost its ";;" prefix, leaving bare
  ;; symbols in the function body.)
  (setq *qualitymetric-connect-default-obj*
        (make-instance 'qualitymetric-connect :uri uri :referrer referrer :survey survey :group group)))
;; Initialize the connection defaults at load time.
(qualitymetric-connect-init-defaults
 ;; Testing server:
 ;; :uri ""
 ;; Production server:
 ;; NOTE(review): the :uri value appears to have been lost upstream (empty
 ;; string) -- restore the real QualityMetric endpoint.
 :uri ""
 ;; Credentials for SF-36 survey licensed to LTA
 ;; (Fix: the line above had lost its ";;" prefix, which made this
 ;; keyword argument list syntactically invalid.)
 :referrer "sbillmeier" :survey 52486. :group 53167.)
(defun make-qualitymetric-connect (&key uri referrer survey group login)
  "Construct a QUALITYMETRIC-CONNECT, filling any unsupplied slot from
*QUALITYMETRIC-CONNECT-DEFAULT-OBJ* (initializing the defaults first if
necessary).  LOGIN has no default."
  (let ((qm-defaults (or *qualitymetric-connect-default-obj* (qualitymetric-connect-init-defaults))))
    (check-type qm-defaults qualitymetric-connect)
    (make-instance 'qualitymetric-connect
                   :uri (or uri (qmconn-uri qm-defaults))
                   :referrer (or referrer (qmconn-referrer qm-defaults))
                   :survey (or survey (qmconn-survey qm-defaults))
                   :group (or group (qmconn-group qm-defaults))
                   :login login)))
(defmethod qualitymetric-connect-url ((obj qualitymetric-connect))
  "Return OBJ's entry-point URL with the Partner (referrer) query parameter."
  (with-slots (uri referrer) obj
    (format nil "~A?Partner=~A" uri referrer)))
;; POST to server
(defmethod qualitymetric-connect-post ((obj qualitymetric-connect) &key (cookie-jar (make-instance 'drakma:cookie-jar)))
  "POST the login/run-survey request described by OBJ to the QualityMetric
server, using COOKIE-JAR for session cookies.  Returns the values of
DRAKMA:HTTP-REQUEST (body, status, headers, uri, stream, must-close,
reason)."
  (multiple-value-bind (result status headers uri stream must-close reason)
      (with-slots (uri referrer survey login group) obj
        (drakma:http-request (qualitymetric-connect-url obj)
                             :method :post :cookie-jar cookie-jar
                             :parameters
                             `(("LoginName" . ,login)
                               ("GroupID" . ,group)
                               ("Action" . "1") ;login and run survey
                               ("SurveyID" . ,(princ-to-string survey))
                               ;; NW=1 only when showing results in an iframe
                               ,@(if *qualitymetric-show-results-in-iframe* '(("NW" . "1")))
                               ("OUT" . "3") ;show Member Report at end of survey and return data
                               ("ErrorURL" . ,(qualitymetric-results-page-url))
                               ("NB" . ,(qualitymetric-done-page-url)) ;URL to land on after survey
                               )))
    ;; Returns
    (values result status headers uri stream must-close reason)))
;; Test driver - connect and send POST data request
(defun qmconn-test ()
  "Manual smoke test: POST a login/run-survey request for a hard-coded
login with a fresh cookie jar."
  (let ((qmconn-test (make-qualitymetric-connect :login "kmcorbett"))
        (cookie-jar (make-instance 'drakma:cookie-jar)))
    (qualitymetric-connect-post qmconn-test :cookie-jar cookie-jar)))
;;; Web app
;;; Start page - form widget
;; Widget holding the QUALITYMETRIC-CONNECT for the current patient in
;; its CONNECT slot (initarg :state).
(defwidget qualitymetric-start-form ()
  ((connect :accessor qmform-connect :initarg :state :initform nil)))
(defun make-qualitymetric-start-form ()
  "Make the start-form widget for the current patient, or a plain message
widget when there is no current patient to use as the survey login."
  (let* ((current-user t)
         (patient (and current-user (current-patient)))
         (login
          ;; TBD: unique-ify across centers, registry installations
          (and (typep patient 'patient) (id patient)))
         (widget
          (if login
              (make-instance 'qualitymetric-start-form :state (make-qualitymetric-connect :login login))
              (make-widget (f* (with-html (:P :CLASS "qualitymetric-message" "Unable to start survey - no current patient")))))))
    ;; Returns
    widget))
(defun make-qualitymetric-start-page ()
  ;; Composite page wrapping the start form widget.
  (make-instance 'composite :widgets (list (make-qualitymetric-start-form))))
(defmethod qualitymetric-start-action ((widget qualitymetric-start-form))
  "Return the form's action URL (the QualityMetric entry point with the
Partner parameter), marking the parent composite dirty first."
  (let* ((comp (widget-parent widget))
         (sel (widget-parent comp)))
    (declare (ignore sel))
    (mark-dirty comp) ;pointless?
    (qualitymetric-connect-url (qmform-connect widget))))
(defmethod render-widget-body ((widget qualitymetric-start-form) &rest args
                               &aux (counter 0.) (connect (qmform-connect widget)))
  "Render the QualityMetric launch form: a POST form of hidden fields that
logs the patient in and starts the survey, plus (when
*QUALITYMETRIC-SHOW-RESULTS-IN-IFRAME*) the iframe the survey runs in.
Fix: three commented-out (:INPUT ...) lines had been mangled into
invalid forms like (: : NAME ...); restored them as comments."
  (declare (ignore args))
  (with-main-content-area-html
   (:MIDDLE-INDENT
    (let ((debug (get-site-config-param :enable-debugging)))
      (when debug
        (if (> (incf counter) 1)
            (htm (:P (str (format nil "Rendering x~D" counter)))))))
    (with-slots (login group survey) connect
      (cond
        ((null login)
         (with-html (:P "Internal error: No current user")))
        (t
         (with-html
          (:DIV :CLASS "qualitymetric-input"
           (:FORM :CLASS "qualitymetric-form" :METHOD :POST
            :ACTION (qualitymetric-start-action widget)
            :TARGET (if *qualitymetric-show-results-in-iframe* "result" "_self")
            (:P (str (format nil "Patient: ~A" login)))
            (:INPUT :NAME "LoginName" :TYPE "hidden" :VALUE login)
            ;; (:INPUT :NAME "AuxiliaryID" :TYPE "hidden" :VALUE "")
            (:INPUT :NAME "GroupID" :TYPE "hidden" :VALUE group)
            (:INPUT :NAME "Action" :TYPE "hidden" :VALUE "1")
            (:INPUT :NAME "SurveyID" :TYPE "hidden" :VALUE (princ-to-string survey))
            (:INPUT :NAME "OUT" :TYPE "hidden" :VALUE "3") ;show Member Report at end of survey and return data
            ;; (:INPUT :NAME "SessionID" :TYPE "hidden" :VALUE "")
            ;; (:INPUT :NAME "ReportID" :TYPE "hidden" :VALUE "")
            (if *qualitymetric-show-results-in-iframe*
                (htm (:INPUT :NAME "NW" :TYPE "hidden" :VALUE "1")))
            (:INPUT :NAME "NB" :TYPE "hidden" :VALUE (qualitymetric-done-page-url))
            (:INPUT :NAME "ErrorURL"
             :TYPE "hidden"
             :VALUE (qualitymetric-results-page-url))
            (:INPUT :NAME "ConfirmExit" :TYPE "hidden" :VALUE "1")
            (render-button "Start Survey"))))
         (if *qualitymetric-show-results-in-iframe*
             (with-html
              (:DIV :CLASS "qualitymetric-survey" :ALIGN "center"
               ;; Frame to hold survey window
               (:IFRAME :SRC "about:blank" :NAME "result" :WIDTH "98%" :ALIGN "center" :HEIGHT "900px"))))))))))
;;; Results page
(defwidget qualitymetric-results-page ()
()
)
(defun qualitymetric-handle-results (&optional patient)
  "Process survey results POSTed back by the QualityMetric server.
Optionally renders the GET/POST parameters for debugging, validates the
returned status, session count, and score parameters, then records each
score as an answer for PATIENT.  Returns the number of scores on
success, or (VALUES NIL MESSAGE) on the first error (via QM-ERROR).
Fix: four comment lines inside this function had lost their \";;\"
prefixes, leaving bare symbols inside the FLET/COND bodies; restored
them as comments."
  (with-main-content-area-html
   (:MIDDLE-INDENT
    (:P "Survey results")
    (let ((debug (get-site-config-param :enable-debugging))
          (get-params (hunchentoot::get-parameters*))
          (post-params (hunchentoot::maybe-read-post-parameters :force t)))
      (when debug
        (htm (:P (str (format nil "GET parameters (~D)" (length get-params))))
             (:UL
              (dolist (param get-params)
                (htm (:LI (str (format nil "~A = ~A" (car param) (cdr param))))))))
        (htm (:P (str (format nil "POST parameters (~D)" (length post-params))))
             (:UL
              (dolist (param post-params)
                (htm (:LI (str (format nil "~A = ~A" (car param) (cdr param)))))))))
      ;; Get parameters
      (let ((stat (cdr (assoc "Stat" get-params :test #'string-equal)))
            (desc (cdr (assoc "Desc" get-params :test #'string-equal)))
            (sessions (cdr (assoc "numSessions" post-params :test #'string-equal)))
            (scores (cdr (assoc "NumScores1" post-params :test #'string-equal)))
            (loginName (cdr (assoc "LoginName" post-params :test #'string-equal)))
            (nscores nil))
        (flet ((qm-error (fmt &rest args)
                 ;; Render the error and abort the whole handler.
                 (let ((message
                        (concatenate 'string
                                     "Error: In QualityMetric survey processing: "
                                     (apply #'format nil fmt args))))
                   (htm (:P :CLASS "qualitymetric-message"
                            (str message)))
                   (return-from qualitymetric-handle-results (values nil message)))))
          ;; Workaround for bug #327 - losing current-patient when QualityMetric results returned
          (when (null patient)
            (aif (get-patient loginName nil t)
                 (setq patient it)))
          ;; Check return status now
          (cond
            ;; We should be checking status but QM is returning 21 on success not 0!!
            ((null desc)
             (qm-error "no status returned"))
            ((not (and desc (string-equal desc "Data successfully returned")))
             (qm-error "~A~@[ (~D)~]" stat desc))
            ;; Assertions about sessions: assume only one session
            ((not (string-equal sessions "1"))
             (qm-error "Invalid numSessions = ~S" sessions))
            ;; Assertions about scores
            ((or (not (stringp scores)) (zerop (length scores)))
             (qm-error "Missing NumScores"))
            ((not (typep (setq nscores (ignore-errors (read-from-string scores nil 0.)))
                         '(integer 1)))
             (qm-error "Invalid NumScores: ~A" scores))
            ;; Now check that we have a valid patient
            ((null patient)
             ;; What, still??
             (if loginName
                 (qm-error "invalid LoginName: ~A" loginName)
                 (qm-error "null LoginName")))
            ;; Success!!
            (t
             ;; Loop over questions and get answers
             (loop for num from 1 to nscores
                   as name-param = (format nil "1Name~D" num)
                   as name-value = (cdr (assoc name-param post-params :test #'string-equal))
                   as score-param = (format nil "1Score~D" num)
                   as score-valuestr = (cdr (assoc score-param post-params :test #'string-equal))
                   with score-value
                   with question
                   do (cond
                        ((null name-value)
                         (qm-error "Missing score name: ~A" name-param))
                        ((not (and (stringp name-value) (> (length name-value) 1)))
                         (qm-error "Invalid name value pair: ~A = ~S" name-param name-value))
                        ((null score-valuestr)
                         (qm-error "Missing score value: ~A" score-param))
                        ((not (and (floatp (setq score-value (ignore-errors (read-from-string score-valuestr nil nil))))
                                   (> score-value 0.0)))
                         (qm-error "Invalid score value pair ~A = ~S" score-param score-valuestr))
                        ;; Find matching survey question
                        ((null (setq question (first (get-instances-by-value 'question 'name name-value))))
                         (qm-error "Unrecognized question name: ~A" name-value))
                        ;; Finally
                        (t
                         (htm (:P (str (format nil "~A = ~A" name-value score-value))))
                         (add-answer question patient score-value))))
             ;; Returns
             (return-from qualitymetric-handle-results nscores))))))))
  ;; Returns
  nil)
(defmethod render-widget-body ((widget qualitymetric-results-page) &rest args)
  "Record the POSTed survey results for the current patient, then redirect
to the static helper page with ?results=success|failure and an optional
url-encoded message."
  (declare (ignore args))
  (multiple-value-bind (results message) (qualitymetric-handle-results (current-patient))
    ;; Redirect to results helper page
    (redirect
     (format nil "~A?results=~:[failure~;success~]~@[&message=~A~]"
             (qualitymetric-results-page-helper-url)
             results
             (and message (hunchentoot:url-encode message)))
     :defer ':post-render)))
(defun make-qualitymetric-results-page ()
(make-instance 'composite :widgets (list (make-instance 'qualitymetric-results-page))))
;;; Done page
(defwidget qualitymetric-done-page ()
()
)
(defmethod render-widget-body ((widget qualitymetric-done-page) &rest args)
(declare (ignore args))
(with-main-content-area-html
(:MIDDLE-INDENT
(:P "Running survey... Click on a navigation button when done.")
#+IFWEWANTTOPARSERAWPOSTDATA
(let ((post-data (hunchentoot:raw-post-data :force-text t)))
(if (get-site-config-param :enable-debugging)
(htm (:P (str post-data)))))
#-IFWEWANTTOPARSERAWPOSTDATA
(let ((params (hunchentoot::maybe-read-post-parameters :force t)))
(dolist (param params)
(htm (:P (str (format nil "~A = ~A" (car param) (cdr param))))))))))
(defun make-qualitymetric-done-page ()
(make-instance 'composite :widgets (list (make-instance 'qualitymetric-done-page))))
;;; Test page
(defwidget qualitymetric-test-page ()
()
)
(defmethod render-widget-body ((widget qualitymetric-test-page) &rest args)
(declare (ignore args))
(with-main-content-area-html
(:MIDDLE-INDENT
(with-html
(:P (str (qualitymetric-request-uri))))
(:P "Test page - under construction")
(:P "Test results page:"
(:FORM :METHOD :POST
:ACTION "/qualitymetric/results"
(:INPUT :NAME "RESULTS1" :TYPE "hidden" :VALUE "1")
(:INPUT :NAME "RESULTS2" :TYPE "hidden" :VALUE "2")
(:INPUT :NAME "RESULTS3" :TYPE "hidden" :VALUE "3")
(render-button "Submit")))
(:P "Test done page:"
(:FORM :METHOD :POST
:ACTION "/qualitymetric/done"
(:INPUT :NAME "DONE1" :TYPE "hidden" :VALUE "1")
(:INPUT :NAME "DONE2" :TYPE "hidden" :VALUE "2")
(:INPUT :NAME "DONE3" :TYPE "hidden" :VALUE "3")
(render-button "Submit"))))))
(defun make-qualitymetric-test-page ()
(make-instance 'composite :widgets (list (make-instance 'qualitymetric-test-page))))
Selector to handle /qualitymetric URIs
(defwidget qualitymetric-selector (static-selector)
((state :accessor qmsel-state :initform nil)))
(defun make-qualitymetric-selector ()
(let* ((qm-start (make-qualitymetric-start-page))
(qm-results (make-qualitymetric-results-page))
(qm-done (make-qualitymetric-done-page))
(qm-test (make-qualitymetric-test-page))
(qm-sel (make-instance 'qualitymetric-selector
:name "qualitymetric"
:panes `(("start" ,qm-start)
("results" ,qm-results)
("done" ,qm-done)
("test" ,qm-test)))))
(make-instance 'composite :widgets (list qm-sel))))
;;; Handler for helper page
(defun qualitymetric-results-page-helper-handler (request)
(if (string= (hunchentoot:script-name* request) (namestring (qualitymetric-results-page-helper-pathname)))
#'(lambda ()
(let* ((get-params (hunchentoot::get-parameters*))
(results (cdr (assoc "results" get-params :test #'string-equal)))
(message (cdr (assoc "message" get-params :test #'string-equal))))
(with-html-to-string
(:H2 "QualityMetric survey results")
(:P :CLASS "qualitymetric-message"
(:BR)
(str
(format nil
(cond
((string-equal results "failure")
"Error: Cannot get survey results~@[: ~A~]")
((string-equal results "success")
"Success: Survey results stored~@[: ~A~]")
(t
"Internal error: Unable to check survey results"))
message))))))))
(pushnew 'qualitymetric-results-page-helper-handler hunchentoot::*dispatch-table*)
| null | https://raw.githubusercontent.com/eslick/cl-registry/d4015c400dc6abf0eeaf908ed9056aac956eee82/src/plugins/qualitymetric/qualitymetric.lisp | lisp | -*- Mode:Lisp; tab-width:2; indent-tabs-mode:nil; -*-
Technology . All rights reserved .
Released under a BSD-style license: -license.php
See LICENSE file
QualityMetric client
Process flow is as follows:
Components:
Request URIs
E.g. "/qualitymetric/results-helper/"
Encapsulate everything we need to connect to QualityMetric site and get survey results
Set load time global defaults
Todo: get initial values from site config params
Testing server:
:uri ""
Production server:
POST to server
login and run survey
show Member Report at end of survey and return data
URL to land on after survey
Returns
Test driver - connect and send POST data request
Web app
Start page - form widget
TBD: unique-ify across centers, registry installations
Returns
pointless?
show Member Report at end of survey and return data
Frame to hold survey window
Results page
Get parameters
Check return status now
Assertions about scores
Now check that we have a valid patient
What, still??
Loop over questions and get answers
Find matching survey question
Finally
Returns
Returns
Redirect to results helper page
Done page
Test page
Handler for helper page |
Copyright ( c ) 2008 - 2010 , LAM Treatment Alliance . All rights reserved .
(in-package :registry)
(define-plugin qualitymetric ()
)
1 ) User browses to /qualitymetric / start and presses Submit ( TBD : parameterize survey name )
2 ) Client form connects to QualityMetric site to login and run survey
3 ) QualityMetric calls back via GET to /qualitymetric / done and launches survey
4 ) After survey complete QualityMetric calls back via POST to /qualitymetric / results
1 ) qualitymetric class to encapsulate query and results
2 ) widget for form
3 ) selector and hander methods for URLs to render form , process results , and handle errors
(defvar *qualitymetric-show-results-in-iframe* t)
(defvar *qualitymetric-root-pathname* (make-pathname :directory '(:absolute "qualitymetric")))
(defun qualitymetric-pathname (pathname)
(merge-pathnames pathname *qualitymetric-root-pathname*))
(defun qualitymetric-page-url (pathname &key
(http "http" http-supplied-p)
(port (hunchentoot:acceptor-port *weblocks-server*))
(address (hunchentoot:acceptor-address *weblocks-server*)))
(check-type port (integer 80.))
(let ((httpx (cond
((and (= port 443.) (not http-supplied-p))
"https")
(http)
(t "http"))))
(format nil "~A://~A~@[:~D~]~A"
httpx address port
(puri:uri (qualitymetric-pathname pathname)))))
(defun qualitymetric-request-uri ()
"URL of page for current request"
(qualitymetric-page-url (weblocks:request-uri-path)))
(defun qualitymetric-start-page-pathname ()
(qualitymetric-pathname (make-pathname :directory '(:relative "start"))))
(defun qualitymetric-start-page-url ()
E.g. " :8080 / qualitymetric / start/ "
(qualitymetric-page-url (make-pathname :directory '(:relative "start"))))
(defun qualitymetric-results-page-pathname ()
(qualitymetric-pathname (make-pathname :directory '(:relative "results"))))
(defun qualitymetric-results-page-url ()
E.g. " :8080 / qualitymetric / results/ "
(qualitymetric-page-url (make-pathname :directory '(:relative "results"))))
(defun qualitymetric-results-page-helper-pathname ()
(make-pathname :directory '(:absolute "qualitymetric-results-helper")))
(defun qualitymetric-results-page-helper-url ()
TODO : Fill in >
(qualitymetric-page-url (qualitymetric-results-page-helper-pathname)))
(defun qualitymetric-done-page-pathname ()
(qualitymetric-pathname (make-pathname :directory '(:relative "done"))))
(defun qualitymetric-done-page-url ()
E.g. " :8080 / qualitymetric / done/ "
(qualitymetric-page-url (make-pathname :directory '(:relative "done"))))
(defun qualitymetric-test-page-pathname ()
(qualitymetric-pathname (make-pathname :directory '(:relative "test"))))
(defun qualitymetric-test-page-url ()
E.g. " :8080 / qualitymetric / test/ "
(qualitymetric-page-url (make-pathname :directory '(:relative "test"))))
Connect to QualityMetric server
(defclass qualitymetric-connect ()
((uri :accessor qmconn-uri :initarg :uri)
(referrer :accessor qmconn-referrer :initarg :referrer)
(survey :accessor qmconn-survey :initarg :survey)
(group :accessor qmconn-group :initarg :group)
(login :accessor qmconn-login :initarg :login)))
(defvar *qualitymetric-connect-default-obj*)
(defun qualitymetric-connect-init-defaults (&key uri referrer survey group)
Parameter values supplied by QualityMetric Corp
(setq *qualitymetric-connect-default-obj*
(make-instance 'qualitymetric-connect :uri uri :referrer referrer :survey survey :group group)))
(qualitymetric-connect-init-defaults
:uri ""
Credentials for SF-36 survey licensed to LTA
:referrer "sbillmeier" :survey 52486. :group 53167.)
(defun make-qualitymetric-connect (&key uri referrer survey group login)
(let ((qm-defaults (or *qualitymetric-connect-default-obj* (qualitymetric-connect-init-defaults))))
(check-type qm-defaults qualitymetric-connect)
(make-instance 'qualitymetric-connect
:uri (or uri (qmconn-uri qm-defaults))
:referrer (or referrer (qmconn-referrer qm-defaults))
:survey (or survey (qmconn-survey qm-defaults))
:group (or group (qmconn-group qm-defaults))
:login login)))
(defmethod qualitymetric-connect-url ((obj qualitymetric-connect))
(with-slots (uri referrer) obj
(format nil "~A?Partner=~A" uri referrer)))
(defmethod qualitymetric-connect-post ((obj qualitymetric-connect) &key (cookie-jar (make-instance 'drakma:cookie-jar)))
(multiple-value-bind (result status headers uri stream must-close reason)
(with-slots (uri referrer survey login group) obj
(drakma:http-request (qualitymetric-connect-url obj)
:method :post :cookie-jar cookie-jar
:parameters
`(("LoginName" . ,login)
("GroupID" . ,group)
("SurveyID" . ,(princ-to-string survey))
,@(if *qualitymetric-show-results-in-iframe* '(("NW" . "1")))
("ErrorURL" . ,(qualitymetric-results-page-url))
)))
(values result status headers uri stream must-close reason)))
(defun qmconn-test ()
(let ((qmconn-test (make-qualitymetric-connect :login "kmcorbett"))
(cookie-jar (make-instance 'drakma:cookie-jar)))
(qualitymetric-connect-post qmconn-test :cookie-jar cookie-jar)))
(defwidget qualitymetric-start-form ()
((connect :accessor qmform-connect :initarg :state :initform nil)))
(defun make-qualitymetric-start-form ()
(let* ((current-user t)
(patient (and current-user (current-patient)))
(login
(and (typep patient 'patient) (id patient)))
(widget
(if login
(make-instance 'qualitymetric-start-form :state (make-qualitymetric-connect :login login))
(make-widget (f* (with-html (:P :CLASS "qualitymetric-message" "Unable to start survey - no current patient")))))))
widget))
(defun make-qualitymetric-start-page ()
(make-instance 'composite :widgets (list (make-qualitymetric-start-form))))
(defmethod qualitymetric-start-action ((widget qualitymetric-start-form))
(let* ((comp (widget-parent widget))
(sel (widget-parent comp)))
(declare (ignore sel))
(qualitymetric-connect-url (qmform-connect widget))))
(defmethod render-widget-body ((widget qualitymetric-start-form) &rest args
&aux (counter 0.) (connect (qmform-connect widget)))
(declare (ignore args))
(with-main-content-area-html
(:MIDDLE-INDENT
(let ((debug (get-site-config-param :enable-debugging)))
(when debug
(if (> (incf counter) 1)
(htm (:P (str (format nil "Rendering x~D" counter)))))))
(with-slots (login group survey) connect
(cond
((null login)
(with-html (:P "Internal error: No current user")))
(t
(with-html
(:DIV :CLASS "qualitymetric-input"
(:FORM :CLASS "qualitymetric-form" :METHOD :POST
:ACTION (qualitymetric-start-action widget)
:TARGET (if *qualitymetric-show-results-in-iframe* "result" "_self")
(:P (str (format nil "Patient: ~A" login)))
(:INPUT :NAME "LoginName" :TYPE "hidden" :VALUE login)
(: : NAME " AuxiliaryID " : TYPE " hidden " : VALUE " " )
(:INPUT :NAME "GroupID" :TYPE "hidden" :VALUE group)
(:INPUT :NAME "Action" :TYPE "hidden" :VALUE "1")
(:INPUT :NAME "SurveyID" :TYPE "hidden" :VALUE (princ-to-string survey))
(: : NAME " SessionID " : TYPE " hidden " : VALUE " " )
(: : NAME " ReportID " : TYPE " hidden " : VALUE " " )
(if *qualitymetric-show-results-in-iframe*
(htm (:INPUT :NAME "NW" :TYPE "hidden" :VALUE "1")))
(:INPUT :NAME "NB" :TYPE "hidden" :VALUE (qualitymetric-done-page-url))
(:INPUT :NAME "ErrorURL"
:TYPE "hidden"
:VALUE (qualitymetric-results-page-url))
(:INPUT :NAME "ConfirmExit" :TYPE "hidden" :VALUE "1")
(render-button "Start Survey"))))
(if *qualitymetric-show-results-in-iframe*
(with-html
(:DIV :CLASS "qualitymetric-survey" :ALIGN "center"
(:IFRAME :SRC "about:blank" :NAME "result" :WIDTH "98%" :ALIGN "center" :HEIGHT "900px"))))))))))
(defwidget qualitymetric-results-page ()
()
)
;; Process the results POST-back from the QualityMetric survey service.
;; Validates the status, session-count, and score-count parameters and, on
;; success, stores each returned (1Name<i>, 1Score<i>) pair via ADD-ANSWER
;; against the matching QUESTION instance for PATIENT.  Emits progress and
;; diagnostic HTML into the main content area as it goes.
;; Returns the number of scores stored on success, or (VALUES NIL message)
;; on any validation failure (via the local QM-ERROR helper).
(defun qualitymetric-handle-results (&optional patient)
  (with-main-content-area-html
   (:MIDDLE-INDENT
    (:P "Survey results")
    ;; When site debugging is enabled, dump every GET and POST parameter
    ;; of the incoming request.
    (let ((debug (get-site-config-param :enable-debugging))
          (get-params (hunchentoot::get-parameters*))
          (post-params (hunchentoot::maybe-read-post-parameters :force t)))
      (when debug
        (htm (:P (str (format nil "GET parameters (~D)" (length get-params))))
             (:UL
              (dolist (param get-params)
                (htm (:LI (str (format nil "~A = ~A" (car param) (cdr param))))))))
        (htm (:P (str (format nil "POST parameters (~D)" (length post-params))))
             (:UL
              (dolist (param post-params)
                (htm (:LI (str (format nil "~A = ~A" (car param) (cdr param)))))))))
      ;; Pull the fields QualityMetric sends back: status/description via
      ;; GET, session and score counts plus the login name via POST.
      (let ((stat (cdr (assoc "Stat" get-params :test #'string-equal)))
            (desc (cdr (assoc "Desc" get-params :test #'string-equal)))
            (sessions (cdr (assoc "numSessions" post-params :test #'string-equal)))
            (scores (cdr (assoc "NumScores1" post-params :test #'string-equal)))
            (loginName (cdr (assoc "LoginName" post-params :test #'string-equal)))
            (nscores nil))
        (flet ((qm-error (fmt &rest args)
                 ;; Render an error paragraph, then abort the whole
                 ;; function returning (VALUES NIL message).
                 (let ((message
                        (concatenate 'string
                                     "Error: In QualityMetric survey processing: "
                                     (apply #'format nil fmt args))))
                   (htm (:P :CLASS "qualitymetric-message"
                            (str message)))
                   (return-from qualitymetric-handle-results (values nil message)))))
          ;; Workaround for bug #327 - losing current-patient when QualityMetric results returned
          (when (null patient)
            (aif (get-patient loginName nil t)
                 (setq patient it)))
          (cond
           ;; We should be checking status but QM is returning 21 on success not 0!!
           ((null desc)
            (qm-error "no status returned"))
           ((not (and desc (string-equal desc "Data successfully returned")))
            (qm-error "~A~@[ (~D)~]" stat desc))
           ;; Assertions about sessions: assume only one session
           ((not (string-equal sessions "1"))
            (qm-error "Invalid numSessions = ~S" sessions))
           ((or (not (stringp scores)) (zerop (length scores)))
            (qm-error "Missing NumScores"))
           ;; NumScores must read as a positive integer.
           ((not (typep (setq nscores (ignore-errors (read-from-string scores nil 0.)))
                        '(integer 1)))
            (qm-error "Invalid NumScores: ~A" scores))
           ((null patient)
            (if loginName
                (qm-error "invalid LoginName: ~A" loginName)
                (qm-error "null LoginName")))
           ;; Success!!
           (t
            ;; Walk the 1Name<i>/1Score<i> parameter pairs; any malformed
            ;; pair aborts the whole batch via QM-ERROR.
            (loop for num from 1 to nscores
                  as name-param = (format nil "1Name~D" num)
                  as name-value = (cdr (assoc name-param post-params :test #'string-equal))
                  as score-param = (format nil "1Score~D" num)
                  as score-valuestr = (cdr (assoc score-param post-params :test #'string-equal))
                  with score-value
                  with question
                  do (cond
                      ((null name-value)
                       (qm-error "Missing score name: ~A" name-param))
                      ((not (and (stringp name-value) (> (length name-value) 1)))
                       (qm-error "Invalid name value pair: ~A = ~S" name-param name-value))
                      ((null score-valuestr)
                       (qm-error "Missing score value: ~A" score-param))
                      ;; Scores must read as a positive float.
                      ((not (and (floatp (setq score-value (ignore-errors (read-from-string score-valuestr nil nil))))
                                 (> score-value 0.0)))
                       (qm-error "Invalid score value pair ~A = ~S" score-param score-valuestr))
                      ((null (setq question (first (get-instances-by-value 'question 'name name-value))))
                       (qm-error "Unrecognized question name: ~A" name-value))
                      (t
                       (htm (:P (str (format nil "~A = ~A" name-value score-value))))
                       (add-answer question patient score-value))))
            (return-from qualitymetric-handle-results nscores))))))))
  nil)
;; Process the QualityMetric results POST for the current patient, then
;; redirect the browser to the helper page with a success/failure flag and
;; an optional URL-encoded failure message.
(defmethod render-widget-body ((widget qualitymetric-results-page) &rest args)
  (declare (ignore args))
  (multiple-value-bind (stored-count failure-message)
      (qualitymetric-handle-results (current-patient))
    (let ((target (format nil "~A?results=~:[failure~;success~]~@[&message=~A~]"
                          (qualitymetric-results-page-helper-url)
                          stored-count
                          (and failure-message
                               (hunchentoot:url-encode failure-message)))))
      (redirect target :defer ':post-render))))
;; Build the composite page wrapping a fresh results widget.
(defun make-qualitymetric-results-page ()
  (let ((results-widget (make-instance 'qualitymetric-results-page)))
    (make-instance 'composite :widgets (list results-widget))))
;; Widget for the /qualitymetric/done pane, shown after the survey has
;; been launched; its RENDER-WIDGET-BODY echoes received POST parameters.
(defwidget qualitymetric-done-page ()
  ()
  )
;; Render the "survey running" page and, for diagnostics, list the POST
;; parameters that came back with the request.
(defmethod render-widget-body ((widget qualitymetric-done-page) &rest args)
  (declare (ignore args))
  (with-main-content-area-html
   (:MIDDLE-INDENT
    (:P "Running survey... Click on a navigation button when done.")
    ;; Alternate (normally disabled) path: show the raw, unparsed POST
    ;; body when debugging is enabled.
    #+IFWEWANTTOPARSERAWPOSTDATA
    (let ((post-data (hunchentoot:raw-post-data :force-text t)))
      (if (get-site-config-param :enable-debugging)
          (htm (:P (str post-data)))))
    ;; Active path: parse the POST parameters and list each key = value.
    #-IFWEWANTTOPARSERAWPOSTDATA
    (let ((params (hunchentoot::maybe-read-post-parameters :force t)))
      (dolist (param params)
        (htm (:P (str (format nil "~A = ~A" (car param) (cdr param))))))))))
;; Build the composite page wrapping a fresh done-page widget.
(defun make-qualitymetric-done-page ()
  (let ((done-widget (make-instance 'qualitymetric-done-page)))
    (make-instance 'composite :widgets (list done-widget))))
;; Widget for the /qualitymetric/test pane: a developer page with forms
;; that exercise the results and done handlers (see RENDER-WIDGET-BODY).
(defwidget qualitymetric-test-page ()
  ()
  )
;; Render a developer test page: shows the QM request URI and two POST
;; forms that hit /qualitymetric/results and /qualitymetric/done with
;; dummy hidden parameters.
(defmethod render-widget-body ((widget qualitymetric-test-page) &rest args)
  (declare (ignore args))
  (with-main-content-area-html
   (:MIDDLE-INDENT
    (with-html
      (:P (str (qualitymetric-request-uri))))
    (:P "Test page - under construction")
    (:P "Test results page:"
        (:FORM :METHOD :POST
               :ACTION "/qualitymetric/results"
               (:INPUT :NAME "RESULTS1" :TYPE "hidden" :VALUE "1")
               (:INPUT :NAME "RESULTS2" :TYPE "hidden" :VALUE "2")
               (:INPUT :NAME "RESULTS3" :TYPE "hidden" :VALUE "3")
               (render-button "Submit")))
    (:P "Test done page:"
        (:FORM :METHOD :POST
               :ACTION "/qualitymetric/done"
               (:INPUT :NAME "DONE1" :TYPE "hidden" :VALUE "1")
               (:INPUT :NAME "DONE2" :TYPE "hidden" :VALUE "2")
               (:INPUT :NAME "DONE3" :TYPE "hidden" :VALUE "3")
               (render-button "Submit"))))))
(defun make-qualitymetric-test-page ()
(make-instance 'composite :widgets (list (make-instance 'qualitymetric-test-page))))
;; Selector to handle /qualitymetric URIs
;; Static selector dispatching /qualitymetric/{start,results,done,test}
;; to the corresponding pane widget.
;; NOTE(review): the STATE slot is not referenced in this section —
;; confirm whether it is used elsewhere before removing.
(defwidget qualitymetric-selector (static-selector)
  ((state :accessor qmsel-state :initform nil)))
;; Build the composite wrapping the /qualitymetric selector with its four
;; panes (start, results, done, test), created in that order.
(defun make-qualitymetric-selector ()
  (let ((selector
         (make-instance 'qualitymetric-selector
                        :name "qualitymetric"
                        :panes (list (list "start" (make-qualitymetric-start-page))
                                     (list "results" (make-qualitymetric-results-page))
                                     (list "done" (make-qualitymetric-done-page))
                                     (list "test" (make-qualitymetric-test-page))))))
    (make-instance 'composite :widgets (list selector))))
;; Hunchentoot dispatcher for the results helper page.  When the request
;; path matches the helper pathname, return a handler closure that reads
;; the "results"/"message" GET parameters and renders a success, failure,
;; or internal-error message; otherwise return NIL so dispatch continues.
(defun qualitymetric-results-page-helper-handler (request)
  (when (string= (hunchentoot:script-name* request)
                 (namestring (qualitymetric-results-page-helper-pathname)))
    (lambda ()
      (let* ((params (hunchentoot::get-parameters*))
             (outcome (cdr (assoc "results" params :test #'string-equal)))
             (extra (cdr (assoc "message" params :test #'string-equal)))
             (template
              (cond
               ((string-equal outcome "failure")
                "Error: Cannot get survey results~@[: ~A~]")
               ((string-equal outcome "success")
                "Success: Survey results stored~@[: ~A~]")
               (t
                "Internal error: Unable to check survey results"))))
        (with-html-to-string
          (:H2 "QualityMetric survey results")
          (:P :CLASS "qualitymetric-message"
              (:BR)
              (str (format nil template extra))))))))
(pushnew 'qualitymetric-results-page-helper-handler hunchentoot::*dispatch-table*)
|
b1b15f57e2622a7591a642c281eec2956cc0dec902c2a717466d4533484418eb | ocaml-multicore/tezos | client_proto_context_commands.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < >
Copyright ( c ) 2019 Nomadic Labs < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
open Protocol
open Alpha_context
open Tezos_micheline
open Client_proto_context
open Client_proto_contracts
open Client_proto_programs
open Client_keys
open Client_proto_args
(* --encrypted: store the generated key encrypted on disk. *)
let encrypted_switch =
  Clic.switch ~doc:"encrypt the key on-disk" ~long:"encrypted" ()
(* -D/--dry-run: build and display the operation without injecting it. *)
let dry_run_switch =
  Clic.switch
    ~short:'D'
    ~long:"dry-run"
    ~doc:"don't inject the operation, just display it"
    ()
(* --verbose-signing: print extra details before the signing step. *)
let verbose_signing_switch =
  Clic.switch
    ~doc:"display extra information before signing the operation"
    ~long:"verbose-signing"
    ()
(* --simulation: run the command without requiring any signatures. *)
let simulate_switch =
  Clic.switch
    ~doc:
      "Simulate the execution of the command, without needing any signatures."
    ~long:"simulation"
    ()
(* --force: inject even when simulation fails.  Because simulation cannot
   then be used to compute limits/fees, the explicit options are required
   (enforced in [transfer_command]). *)
let force_switch =
  Clic.switch
    ~doc:
      "Inject the operation even if the simulation results in a failure. This \
       switch requires --gas-limit, --storage-limit, and --fee."
    ~long:"force"
    ()
(** [report_michelson_errors ?no_print_source ~msg cctxt result] converts a
    [tzresult] into an option, reporting failures instead of propagating
    them:
    - [Ok data] yields [Some data];
    - [Error errs] prints the trace via [Michelson_v1_error_reporter]
      (with details and source excerpt unless [no_print_source]), then
      calls [cctxt#error msg].
      NOTE(review): [cctxt#error] typically aborts the command, so the
      trailing [Lwt.return_none] is likely unreachable — confirm against
      the [Client_context.printer] contract. *)
let report_michelson_errors ?(no_print_source = false) ~msg
    (cctxt : #Client_context.printer) = function
  | Error errs ->
      cctxt#warning
        "%a"
        (Michelson_v1_error_reporter.report_errors
           ~details:(not no_print_source)
           ~show_source:(not no_print_source)
           ?parsed:None)
        errs
      >>= fun () ->
      cctxt#error "%s" msg >>= fun () -> Lwt.return_none
  | Ok data -> Lwt.return_some data
(** Clic parameter accepting JSON either inline or from a file:
    - ["text:<json>"] parses the text after the prefix;
    - ["file:<path>"] reads JSON from [path];
    - otherwise, if the argument names an existing file it is read as
      JSON, else the argument itself is parsed as JSON.  Fails when none
      of these succeed. *)
let json_file_or_text_parameter =
  Clic.parameter (fun _ p ->
      match String.split ~limit:1 ':' p with
      | ["text"; text] -> return (Ezjsonm.from_string text)
      | ["file"; path] -> Lwt_utils_unix.Json.read_file path
      | _ -> (
          if Sys.file_exists p then Lwt_utils_unix.Json.read_file p
          else
            try return (Ezjsonm.from_string p)
            with Ezjsonm.Parse_error _ ->
              failwith "Neither an existing file nor valid JSON: '%s'" p))
(* Clic parameter that only accepts a non-negative integer literal. *)
let non_negative_param =
  Clic.parameter (fun _ s ->
      match int_of_string_opt s with
      | None -> failwith "Parameter should be a non-negative integer literal"
      | Some i ->
          if i >= 0 then return i
          else failwith "Parameter should be a non-negative integer literal")
(* Clic parameter parsing a Base58Check-encoded block hash. *)
let block_hash_param =
  Clic.parameter (fun _ s ->
      match Block_hash.of_b58check_exn s with
      | h -> return h
      | exception _ -> failwith "Parameter '%s' is an invalid block hash" s)
(* Command group for block-contextual queries and operations. *)
let group =
  {
    Clic.name = "context";
    title = "Block contextual commands (see option -block)";
  }
let alphanet = {Clic.name = "alphanet"; title = "Alphanet only commands"}
(* Command group for binary encoding description commands. *)
let binary_description =
  {Clic.name = "description"; title = "Binary Description"}
(** [transfer_command amount source destination cctxt options] performs a
    transfer (or a smart-contract call when [arg]/[entrypoint] are set) of
    [amount] tez from [source] to [destination].

    The tuple carries the Clic-parsed options shared by the [transfer] and
    [call] commands.  When [source] is an originated (managed) contract,
    the operation is routed through its manager key via
    [Managed_contract.transfer]; when it is an implicit account it is
    signed directly with [transfer].

    Michelson-level failures are printed via [report_michelson_errors]
    rather than propagated; the injection result itself is discarded and
    the command evaluates to [return_unit]. *)
let transfer_command amount source destination (cctxt : #Client_context.printer)
    ( fee,
      dry_run,
      verbose_signing,
      simulation,
      force,
      gas_limit,
      storage_limit,
      counter,
      arg,
      no_print_source,
      minimal_fees,
      minimal_nanotez_per_byte,
      minimal_nanotez_per_gas_unit,
      force_low_fee,
      fee_cap,
      burn_cap,
      entrypoint ) =
  (* Package the fee-model options for the injection machinery. *)
  let fee_parameter =
    {
      Injection.minimal_fees;
      minimal_nanotez_per_byte;
      minimal_nanotez_per_gas_unit;
      force_low_fee;
      fee_cap;
      burn_cap;
    }
  in
  (* When --force is used we want to inject the transfer even if it fails.
     In that case we cannot rely on simulation to compute limits and fees
     so we require the corresponding options to be set. *)
  let check_force_dependency name = function
    | None ->
        cctxt#error
          "When the --force switch is used, the %s option is required."
          name
    | _ -> Lwt.return_unit
  in
  (if force then
   check_force_dependency "--gas-limit" gas_limit >>= fun () ->
   check_force_dependency "--storage-limit" storage_limit >>= fun () ->
   check_force_dependency "--fee" fee
  else Lwt.return_unit)
  >>= fun () ->
  (match Contract.is_implicit source with
  | None ->
      (* Originated contract: sign with its manager's keys. *)
      let contract = source in
      Managed_contract.get_contract_manager cctxt source >>=? fun source ->
      Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
      Managed_contract.transfer
        cctxt
        ~chain:cctxt#chain
        ~block:cctxt#block
        ?confirmations:cctxt#confirmations
        ~dry_run
        ~verbose_signing
        ~simulation
        ~force
        ~fee_parameter
        ?fee
        ~contract
        ~source
        ~src_pk
        ~src_sk
        ~destination
        ?entrypoint
        ?arg
        ~amount
        ?gas_limit
        ?storage_limit
        ?counter
        ()
  | Some source ->
      (* Implicit account: sign directly with the account's keys. *)
      Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
      transfer
        cctxt
        ~chain:cctxt#chain
        ~block:cctxt#block
        ?confirmations:cctxt#confirmations
        ~dry_run
        ~simulation
        ~force
        ~verbose_signing
        ~fee_parameter
        ~source
        ?fee
        ~src_pk
        ~src_sk
        ~destination
        ?entrypoint
        ?arg
        ~amount
        ?gas_limit
        ?storage_limit
        ?counter
        ())
  >>= report_michelson_errors
        ~no_print_source
        ~msg:"transfer simulation failed"
        cctxt
  >>= function
  | None -> return_unit
  | Some (_res, _contracts) -> return_unit
(* Parse [s] as a tez amount; [index] and [field] locate the faulty batch
   entry in the error message on failure. *)
let tez_of_string_exn index field s =
  match Tez.of_string s with
  | None ->
      failwith
        "Invalid \xEA\x9C\xA9 notation at entry %i, field \"%s\": %s"
        index
        field
        s
  | Some amount -> return amount
(* Like [tez_of_string_exn] but threads an optional field through. *)
let tez_of_opt_string_exn index field s =
  match s with
  | Some text ->
      tez_of_string_exn index field text >>=? fun amount ->
      return (Some amount)
  | None -> return None
(** [prepare_batch_operation cctxt ?defaults source index batch] turns
    entry [batch] (position [index] of the "multiple transfers" JSON list)
    into an [Annotated_manager_operation].

    Per-entry values take precedence over the command-wide defaults
    ([?arg], [?fee], [?gas_limit], [?storage_limit], [?entrypoint]) — see
    the [Option.either] calls below.  [index] is only used to pinpoint the
    faulty entry in parse-error messages.  Originated (managed) sources go
    through [Managed_contract.build_transaction_operation]; implicit ones
    through [build_transaction_operation]. *)
let prepare_batch_operation cctxt ?arg ?fee ?gas_limit ?storage_limit
    ?entrypoint source index batch =
  Client_proto_contracts.ContractAlias.find_destination cctxt batch.destination
  >>=? fun (_, destination) ->
  tez_of_string_exn index "amount" batch.amount >>=? fun amount ->
  tez_of_opt_string_exn index "fee" batch.fee >>=? fun batch_fee ->
  (* Entry-level settings win over the command-wide defaults. *)
  let fee = Option.either batch_fee fee in
  let arg = Option.either batch.arg arg in
  let gas_limit = Option.either batch.gas_limit gas_limit in
  let storage_limit = Option.either batch.storage_limit storage_limit in
  let entrypoint = Option.either batch.entrypoint entrypoint in
  parse_arg_transfer arg >>=? fun parameters ->
  (match Contract.is_implicit source with
  | None ->
      (* Originated source: build via the managed-contract helper. *)
      Managed_contract.build_transaction_operation
        cctxt
        ~chain:cctxt#chain
        ~block:cctxt#block
        ~contract:source
        ~destination
        ?entrypoint
        ?arg
        ~amount
        ?fee
        ?gas_limit
        ?storage_limit
        ()
  | Some _ ->
      return
        (build_transaction_operation
           ~amount
           ~parameters
           ?entrypoint
           ?fee
           ?gas_limit
           ?storage_limit
           destination))
  >>=? fun operation ->
  return (Annotated_manager_operation.Annotated_manager_operation operation)
let commands network () =
let open Clic in
[
command
~group
~desc:"Access the timestamp of the block."
(args1
(switch ~doc:"output time in seconds" ~short:'s' ~long:"seconds" ()))
(fixed ["get"; "timestamp"])
(fun seconds (cctxt : Protocol_client_context.full) ->
Shell_services.Blocks.Header.shell_header
cctxt
~chain:cctxt#chain
~block:cctxt#block
()
>>=? fun {timestamp = v; _} ->
(if seconds then cctxt#message "%Ld" (Time.Protocol.to_seconds v)
else cctxt#message "%s" (Time.Protocol.to_notation v))
>>= fun () -> return_unit);
command
~group
~desc:"Lists all non empty contracts of the block."
no_options
(fixed ["list"; "contracts"])
(fun () (cctxt : Protocol_client_context.full) ->
list_contract_labels cctxt ~chain:cctxt#chain ~block:cctxt#block
>>=? fun contracts ->
List.iter_s
(fun (alias, hash, kind) -> cctxt#message "%s%s%s" hash kind alias)
contracts
>>= fun () -> return_unit);
command
~group
~desc:"Lists cached contracts and their age in LRU ordering."
no_options
(prefixes ["list"; "cached"; "contracts"] @@ stop)
(fun () (cctxt : Protocol_client_context.full) ->
cached_contracts cctxt ~chain:cctxt#chain ~block:cctxt#block
>>=? fun keys ->
List.iter_s
(fun (key, size) ->
cctxt#message "%a %d" Alpha_context.Contract.pp key size)
keys
>>= fun () -> return_unit);
command
~group
~desc:"Get the key rank of a cache key."
no_options
(prefixes ["get"; "cached"; "contract"; "rank"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"contract"
@@ stop)
(fun () (_, contract) (cctxt : Protocol_client_context.full) ->
contract_rank cctxt ~chain:cctxt#chain ~block:cctxt#block contract
>>=? fun rank ->
match rank with
| None ->
cctxt#error
"Invalid contract: %a"
Alpha_context.Contract.pp
contract
>>= fun () -> return_unit
| Some rank -> cctxt#message "%d" rank >>= fun () -> return_unit);
command
~group
~desc:"Get cache contract size."
no_options
(prefixes ["get"; "cache"; "contract"; "size"] @@ stop)
(fun () (cctxt : Protocol_client_context.full) ->
contract_cache_size cctxt ~chain:cctxt#chain ~block:cctxt#block
>>=? fun t ->
cctxt#message "%d" t >>= fun () -> return_unit);
command
~group
~desc:"Get cache contract size limit."
no_options
(prefixes ["get"; "cache"; "contract"; "size"; "limit"] @@ stop)
(fun () (cctxt : Protocol_client_context.full) ->
contract_cache_size_limit cctxt ~chain:cctxt#chain ~block:cctxt#block
>>=? fun t ->
cctxt#message "%d" t >>= fun () -> return_unit);
command
~group
~desc:"Get the balance of a contract."
no_options
(prefixes ["get"; "balance"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun () (_, contract) (cctxt : Protocol_client_context.full) ->
get_balance cctxt ~chain:cctxt#chain ~block:cctxt#block contract
>>=? fun amount ->
cctxt#answer "%a %s" Tez.pp amount Client_proto_args.tez_sym
>>= fun () -> return_unit);
command
~group
~desc:"Get the storage of a contract."
(args1 (unparsing_mode_arg ~default:"Readable"))
(prefixes ["get"; "contract"; "storage"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun unparsing_mode (_, contract) (cctxt : Protocol_client_context.full) ->
get_storage
cctxt
~chain:cctxt#chain
~block:cctxt#block
~unparsing_mode
contract
>>=? function
| None -> cctxt#error "This is not a smart contract."
| Some storage ->
cctxt#answer "%a" Michelson_v1_printer.print_expr_unwrapped storage
>>= fun () -> return_unit);
command
~group
~desc:
"Get the value associated to a key in the big map storage of a \
contract (deprecated)."
no_options
(prefixes ["get"; "big"; "map"; "value"; "for"]
@@ Clic.param ~name:"key" ~desc:"the key to look for" data_parameter
@@ prefixes ["of"; "type"]
@@ Clic.param ~name:"type" ~desc:"type of the key" data_parameter
@@ prefix "in"
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun () key key_type (_, contract) (cctxt : Protocol_client_context.full) ->
get_contract_big_map_value
cctxt
~chain:cctxt#chain
~block:cctxt#block
contract
(key.expanded, key_type.expanded)
>>=? function
| None -> cctxt#error "No value associated to this key."
| Some value ->
cctxt#answer "%a" Michelson_v1_printer.print_expr_unwrapped value
>>= fun () -> return_unit);
command
~group
~desc:"Get a value in a big map."
(args1 (unparsing_mode_arg ~default:"Readable"))
(prefixes ["get"; "element"]
@@ Clic.param
~name:"key"
~desc:"the key to look for"
(Clic.parameter (fun _ s ->
return (Script_expr_hash.of_b58check_exn s)))
@@ prefixes ["of"; "big"; "map"]
@@ Clic.param
~name:"big_map"
~desc:"identifier of the big_map"
int_parameter
@@ stop)
(fun unparsing_mode key id (cctxt : Protocol_client_context.full) ->
get_big_map_value
cctxt
~chain:cctxt#chain
~block:cctxt#block
~unparsing_mode
(Big_map.Id.parse_z (Z.of_int id))
key
>>=? fun value ->
cctxt#answer "%a" Michelson_v1_printer.print_expr_unwrapped value
>>= fun () -> return_unit);
command
~group
~desc:"Get the code of a contract."
(args1 (unparsing_mode_arg ~default:"Readable"))
(prefixes ["get"; "contract"; "code"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun unparsing_mode (_, contract) (cctxt : Protocol_client_context.full) ->
get_script
cctxt
~chain:cctxt#chain
~block:cctxt#block
~unparsing_mode
contract
>>=? function
| None -> cctxt#error "This is not a smart contract."
| Some {code; storage = _} -> (
match Script_repr.force_decode code with
| Error errs ->
cctxt#error "%a" Environment.Error_monad.pp_trace errs
| Ok code ->
let {Michelson_v1_parser.source; _} =
Michelson_v1_printer.unparse_toplevel code
in
cctxt#answer "%s" source >>= return));
command
~group
~desc:"Get the `BLAKE2B` script hash of a contract."
no_options
(prefixes ["get"; "contract"; "script"; "hash"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun () (_, contract) (cctxt : Protocol_client_context.full) ->
get_script_hash cctxt ~chain:cctxt#chain ~block:cctxt#block contract
>>= function
| Error trace -> cctxt#error "%a" pp_print_trace trace
| Ok None -> cctxt#error "This is not a smart contract."
| Ok (Some hash) -> cctxt#answer "%a" Script_expr_hash.pp hash >|= ok);
command
~group
~desc:"Get the type of an entrypoint of a contract."
no_options
(prefixes ["get"; "contract"; "entrypoint"; "type"; "of"]
@@ Clic.string ~name:"entrypoint" ~desc:"the entrypoint to describe"
@@ prefixes ["for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun () entrypoint (_, contract) (cctxt : Protocol_client_context.full) ->
Michelson_v1_entrypoints.contract_entrypoint_type
cctxt
~chain:cctxt#chain
~block:cctxt#block
~contract
~entrypoint
>>= Michelson_v1_entrypoints.print_entrypoint_type
cctxt
~emacs:false
~contract
~entrypoint);
command
~group
~desc:"Get the entrypoint list of a contract."
no_options
(prefixes ["get"; "contract"; "entrypoints"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun () (_, contract) (cctxt : Protocol_client_context.full) ->
Michelson_v1_entrypoints.list_contract_entrypoints
cctxt
~chain:cctxt#chain
~block:cctxt#block
~contract
>>= Michelson_v1_entrypoints.print_entrypoints_list
cctxt
~emacs:false
~contract);
command
~group
~desc:"Get the list of unreachable paths in a contract's parameter type."
no_options
(prefixes ["get"; "contract"; "unreachable"; "paths"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun () (_, contract) (cctxt : Protocol_client_context.full) ->
Michelson_v1_entrypoints.list_contract_unreachables
cctxt
~chain:cctxt#chain
~block:cctxt#block
~contract
>>= Michelson_v1_entrypoints.print_unreachables
cctxt
~emacs:false
~contract);
command
~group
~desc:"Get the delegate of a contract."
no_options
(prefixes ["get"; "delegate"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun () (_, contract) (cctxt : Protocol_client_context.full) ->
Client_proto_contracts.get_delegate
cctxt
~chain:cctxt#chain
~block:cctxt#block
contract
>>=? function
| None -> cctxt#message "none" >>= fun () -> return_unit
| Some delegate ->
Public_key_hash.rev_find cctxt delegate >>=? fun mn ->
Public_key_hash.to_source delegate >>=? fun m ->
cctxt#message
"%s (%s)"
m
(match mn with None -> "unknown" | Some n -> "known as " ^ n)
>>= fun () -> return_unit);
command
~group
~desc:"Set the delegate of a contract."
(args10
fee_arg
dry_run_switch
verbose_signing_switch
simulate_switch
minimal_fees_arg
minimal_nanotez_per_byte_arg
minimal_nanotez_per_gas_unit_arg
force_low_fee_arg
fee_cap_arg
burn_cap_arg)
(prefixes ["set"; "delegate"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ prefix "to"
@@ Public_key_hash.source_param
~name:"dlgt"
~desc:"new delegate of the contract"
@@ stop)
(fun ( fee,
dry_run,
verbose_signing,
simulation,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap )
(_, contract)
delegate
(cctxt : Protocol_client_context.full) ->
let fee_parameter =
{
Injection.minimal_fees;
minimal_nanotez_per_byte;
minimal_nanotez_per_gas_unit;
force_low_fee;
fee_cap;
burn_cap;
}
in
match Contract.is_implicit contract with
| None ->
Managed_contract.get_contract_manager cctxt contract
>>=? fun source ->
Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
Managed_contract.set_delegate
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~dry_run
~verbose_signing
~simulation
~fee_parameter
?fee
~source
~src_pk
~src_sk
contract
(Some delegate)
>>= fun errors ->
report_michelson_errors
~no_print_source:true
~msg:"Setting delegate through entrypoints failed."
cctxt
errors
>>= fun _ -> return_unit
| Some mgr ->
Client_keys.get_key cctxt mgr >>=? fun (_, src_pk, manager_sk) ->
set_delegate
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~dry_run
~verbose_signing
~simulation
~fee_parameter
?fee
mgr
(Some delegate)
~src_pk
~manager_sk
>>=? fun _ -> return_unit);
command
~group
~desc:"Withdraw the delegate from a contract."
(args9
fee_arg
dry_run_switch
verbose_signing_switch
minimal_fees_arg
minimal_nanotez_per_byte_arg
minimal_nanotez_per_gas_unit_arg
force_low_fee_arg
fee_cap_arg
burn_cap_arg)
(prefixes ["withdraw"; "delegate"; "from"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun ( fee,
dry_run,
verbose_signing,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap )
(_, contract)
(cctxt : Protocol_client_context.full) ->
let fee_parameter =
{
Injection.minimal_fees;
minimal_nanotez_per_byte;
minimal_nanotez_per_gas_unit;
force_low_fee;
fee_cap;
burn_cap;
}
in
match Contract.is_implicit contract with
| None ->
Managed_contract.get_contract_manager cctxt contract
>>=? fun source ->
Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
Managed_contract.set_delegate
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~dry_run
~verbose_signing
~fee_parameter
?fee
~source
~src_pk
~src_sk
contract
None
>>= fun errors ->
report_michelson_errors
~no_print_source:true
~msg:"Withdrawing delegate through entrypoints failed."
cctxt
errors
>>= fun _ -> return_unit
| Some mgr ->
Client_keys.get_key cctxt mgr >>=? fun (_, src_pk, manager_sk) ->
set_delegate
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~dry_run
~verbose_signing
~fee_parameter
mgr
None
?fee
~src_pk
~manager_sk
>>= fun _ -> return_unit);
command
~group
~desc:"Launch a smart contract on the blockchain."
(args15
fee_arg
dry_run_switch
verbose_signing_switch
gas_limit_arg
storage_limit_arg
delegate_arg
(Client_keys.force_switch ())
init_arg
no_print_source_flag
minimal_fees_arg
minimal_nanotez_per_byte_arg
minimal_nanotez_per_gas_unit_arg
force_low_fee_arg
fee_cap_arg
burn_cap_arg)
(prefixes ["originate"; "contract"]
@@ RawContractAlias.fresh_alias_param
~name:"new"
~desc:"name of the new contract"
@@ prefix "transferring"
@@ tez_param ~name:"qty" ~desc:"amount taken from source"
@@ prefix "from"
@@ ContractAlias.destination_param
~name:"src"
~desc:"name of the source contract"
@@ prefix "running"
@@ Program.source_param
~name:"prg"
~desc:
"script of the account\n\
Combine with -init if the storage type is not unit."
@@ stop)
(fun ( fee,
dry_run,
verbose_signing,
gas_limit,
storage_limit,
delegate,
force,
initial_storage,
no_print_source,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap )
alias_name
balance
(_, source)
program
(cctxt : Protocol_client_context.full) ->
RawContractAlias.of_fresh cctxt force alias_name >>=? fun alias_name ->
Lwt.return (Micheline_parser.no_parsing_error program)
>>=? fun {expanded = code; _} ->
match Contract.is_implicit source with
| None ->
failwith
"only implicit accounts can be the source of an origination"
| Some source -> (
Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
let fee_parameter =
{
Injection.minimal_fees;
minimal_nanotez_per_byte;
minimal_nanotez_per_gas_unit;
force_low_fee;
fee_cap;
burn_cap;
}
in
originate_contract
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~dry_run
~verbose_signing
?fee
?gas_limit
?storage_limit
~delegate
~initial_storage
~balance
~source
~src_pk
~src_sk
~code
~fee_parameter
()
>>= fun errors ->
report_michelson_errors
~no_print_source
~msg:"origination simulation failed"
cctxt
errors
>>= function
| None -> return_unit
| Some (_res, contract) ->
if dry_run then return_unit
else
save_contract ~force cctxt alias_name contract >>=? fun () ->
return_unit));
command
~group
~desc:
"Execute multiple transfers from a single source account.\n\
If one of the transfers fails, none of them get executed."
(args16
default_fee_arg
dry_run_switch
verbose_signing_switch
simulate_switch
default_gas_limit_arg
default_storage_limit_arg
counter_arg
default_arg_arg
no_print_source_flag
minimal_fees_arg
minimal_nanotez_per_byte_arg
minimal_nanotez_per_gas_unit_arg
force_low_fee_arg
fee_cap_arg
burn_cap_arg
default_entrypoint_arg)
(prefixes ["multiple"; "transfers"; "from"]
@@ ContractAlias.destination_param
~name:"src"
~desc:"name of the source contract"
@@ prefix "using"
@@ param
~name:"transfers.json"
~desc:
"List of operations originating from the source contract in JSON \
format (from a file or directly inlined). The input JSON must be \
an array of objects of the form: '[ {\"destination\": dst, \
\"amount\": qty (, <field>: <val> ...) } (, ...) ]', where an \
optional <field> can either be \"fee\", \"gas-limit\", \
\"storage-limit\", \"arg\", or \"entrypoint\"."
json_file_or_text_parameter
@@ stop)
(fun ( fee,
dry_run,
verbose_signing,
simulation,
gas_limit,
storage_limit,
counter,
arg,
no_print_source,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap,
entrypoint )
(_, source)
operations_json
cctxt ->
let fee_parameter =
{
Injection.minimal_fees;
minimal_nanotez_per_byte;
minimal_nanotez_per_gas_unit;
force_low_fee;
fee_cap;
burn_cap;
}
in
let prepare i =
prepare_batch_operation
cctxt
?arg
?fee
?gas_limit
?storage_limit
?entrypoint
source
i
in
match
Data_encoding.Json.destruct
(Data_encoding.list
Client_proto_context.batch_transfer_operation_encoding)
operations_json
with
| [] -> failwith "Empty operation list"
| operations ->
(match Contract.is_implicit source with
| None ->
Managed_contract.get_contract_manager cctxt source
>>=? fun source ->
Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
return (source, src_pk, src_sk)
| Some source ->
Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
return (source, src_pk, src_sk))
>>=? fun (source, src_pk, src_sk) ->
List.mapi_ep prepare operations >>=? fun contents ->
let (Manager_list contents) =
Annotated_manager_operation.manager_of_list contents
in
Injection.inject_manager_operation
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~dry_run
~verbose_signing
~simulation
~source
~fee:(Limit.of_option fee)
~gas_limit:(Limit.of_option gas_limit)
~storage_limit:(Limit.of_option storage_limit)
?counter
~src_pk
~src_sk
~fee_parameter
contents
>>= report_michelson_errors
~no_print_source
~msg:"multiple transfers simulation failed"
cctxt
>>= fun _ -> return_unit
| exception (Data_encoding.Json.Cannot_destruct (path, exn2) as exn)
-> (
match (path, operations_json) with
| ([`Index n], `A lj) -> (
match List.nth_opt lj n with
| Some j ->
failwith
"Invalid transfer at index %i: %a %a"
n
(fun ppf -> Data_encoding.Json.print_error ppf)
exn2
Data_encoding.Json.pp
j
| _ ->
failwith
"Invalid transfer at index %i: %a"
n
(fun ppf -> Data_encoding.Json.print_error ppf)
exn2)
| _ ->
failwith
"Invalid transfer file: %a %a"
(fun ppf -> Data_encoding.Json.print_error ppf)
exn
Data_encoding.Json.pp
operations_json));
command
~group
~desc:"Transfer tokens / call a smart contract."
(args17
fee_arg
dry_run_switch
verbose_signing_switch
simulate_switch
force_switch
gas_limit_arg
storage_limit_arg
counter_arg
arg_arg
no_print_source_flag
minimal_fees_arg
minimal_nanotez_per_byte_arg
minimal_nanotez_per_gas_unit_arg
force_low_fee_arg
fee_cap_arg
burn_cap_arg
entrypoint_arg)
(prefixes ["transfer"]
@@ tez_param ~name:"qty" ~desc:"amount taken from source"
@@ prefix "from"
@@ ContractAlias.destination_param
~name:"src"
~desc:"name of the source contract"
@@ prefix "to"
@@ ContractAlias.destination_param
~name:"dst"
~desc:"name/literal of the destination contract"
@@ stop)
(fun ( fee,
dry_run,
verbose_signing,
simulation,
force,
gas_limit,
storage_limit,
counter,
arg,
no_print_source,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap,
entrypoint )
amount
(_, source)
(_, destination)
cctxt ->
transfer_command
amount
source
destination
cctxt
( fee,
dry_run,
verbose_signing,
simulation,
force,
gas_limit,
storage_limit,
counter,
arg,
no_print_source,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap,
entrypoint ));
command
~group
~desc:"Register a global constant"
(args12
fee_arg
dry_run_switch
verbose_signing_switch
simulate_switch
minimal_fees_arg
minimal_nanotez_per_byte_arg
minimal_nanotez_per_gas_unit_arg
storage_limit_arg
counter_arg
force_low_fee_arg
fee_cap_arg
burn_cap_arg)
(prefixes ["register"; "global"; "constant"]
@@ global_constant_param
~name:"expression"
~desc:
"Michelson expression to register. Note the value is not \
typechecked before registration."
@@ prefix "from"
@@ ContractAlias.destination_param
~name:"src"
~desc:"name of the account registering the global constant"
@@ stop)
(fun ( fee,
dry_run,
verbose_signing,
simulation,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
storage_limit,
counter,
force_low_fee,
fee_cap,
burn_cap )
global_constant_str
(_, source)
cctxt ->
match Contract.is_implicit source with
| None ->
failwith "Only implicit accounts can register global constants"
| Some source ->
Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
let fee_parameter =
{
Injection.minimal_fees;
minimal_nanotez_per_byte;
minimal_nanotez_per_gas_unit;
force_low_fee;
fee_cap;
burn_cap;
}
in
register_global_constant
cctxt
~chain:cctxt#chain
~block:cctxt#block
?dry_run:(Some dry_run)
?verbose_signing:(Some verbose_signing)
?fee
?storage_limit
?counter
?confirmations:cctxt#confirmations
~simulation
~source
~src_pk
~src_sk
~fee_parameter
~constant:global_constant_str
()
>>= fun errors ->
report_michelson_errors
~no_print_source:false
~msg:"register global constant simulation failed"
cctxt
errors
>>= fun _ -> return_unit);
    (* CLI: "call <dst> from <src>" — invoke a smart contract. As the
       description says, this is exactly a transfer of 0 tez: it forwards
       all options unchanged to [transfer_command] with [Tez.zero]. *)
    command
      ~group
      ~desc:"Call a smart contract (same as 'transfer 0')."
      (args17
         fee_arg
         dry_run_switch
         verbose_signing_switch
         simulate_switch
         force_switch
         gas_limit_arg
         storage_limit_arg
         counter_arg
         arg_arg
         no_print_source_flag
         minimal_fees_arg
         minimal_nanotez_per_byte_arg
         minimal_nanotez_per_gas_unit_arg
         force_low_fee_arg
         fee_cap_arg
         burn_cap_arg
         entrypoint_arg)
      (prefixes ["call"]
      @@ ContractAlias.destination_param
           ~name:"dst"
           ~desc:"name/literal of the destination contract"
      @@ prefix "from"
      @@ ContractAlias.destination_param
           ~name:"src"
           ~desc:"name of the source contract"
      @@ stop)
      (fun ( fee,
             dry_run,
             verbose_signing,
             simulation,
             force,
             gas_limit,
             storage_limit,
             counter,
             arg,
             no_print_source,
             minimal_fees,
             minimal_nanotez_per_byte,
             minimal_nanotez_per_gas_unit,
             force_low_fee,
             fee_cap,
             burn_cap,
             entrypoint )
           (_, destination)
           (_, source)
           cctxt ->
        (* A "call" is a zero-amount transfer carrying the Michelson
           argument and entrypoint. *)
        let amount = Tez.zero in
        transfer_command
          amount
          source
          destination
          cctxt
          ( fee,
            dry_run,
            verbose_signing,
            simulation,
            force,
            gas_limit,
            storage_limit,
            counter,
            arg,
            no_print_source,
            minimal_fees,
            minimal_nanotez_per_byte,
            minimal_nanotez_per_gas_unit,
            force_low_fee,
            fee_cap,
            burn_cap,
            entrypoint ));
    (* CLI: "reveal key for <src>" — publish the public key of an implicit
       account's manager on-chain. Fails early for originated contracts. *)
    command
      ~group
      ~desc:"Reveal the public key of the contract manager."
      (args9
         fee_arg
         dry_run_switch
         verbose_signing_switch
         minimal_fees_arg
         minimal_nanotez_per_byte_arg
         minimal_nanotez_per_gas_unit_arg
         force_low_fee_arg
         fee_cap_arg
         burn_cap_arg)
      (prefixes ["reveal"; "key"; "for"]
      @@ ContractAlias.alias_param
           ~name:"src"
           ~desc:"name of the source contract"
      @@ stop)
      (fun ( fee,
             dry_run,
             verbose_signing,
             minimal_fees,
             minimal_nanotez_per_byte,
             minimal_nanotez_per_gas_unit,
             force_low_fee,
             fee_cap,
             burn_cap )
           (_, source)
           cctxt ->
        match Contract.is_implicit source with
        | None -> failwith "only implicit accounts can be revealed"
        | Some source ->
            (* Look up the signing material for the account, then inject
               the reveal operation with the user-supplied fee policy. *)
            Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
            let fee_parameter =
              {
                Injection.minimal_fees;
                minimal_nanotez_per_byte;
                minimal_nanotez_per_gas_unit;
                force_low_fee;
                fee_cap;
                burn_cap;
              }
            in
            reveal
              cctxt
              ~dry_run
              ~verbose_signing
              ~chain:cctxt#chain
              ~block:cctxt#block
              ?confirmations:cctxt#confirmations
              ~source
              ?fee
              ~src_pk
              ~src_sk
              ~fee_parameter
              ()
            >>=? fun _res -> return_unit);
    (* CLI: "register key <mgr> as delegate" — register the given public
       key hash as a delegate. An already-active delegate is treated as
       success (the [Active_delegate] proto error is swallowed with a
       friendly message). *)
    command
      ~group
      ~desc:"Register the public key hash as a delegate."
      (args9
         fee_arg
         dry_run_switch
         verbose_signing_switch
         minimal_fees_arg
         minimal_nanotez_per_byte_arg
         minimal_nanotez_per_gas_unit_arg
         force_low_fee_arg
         fee_cap_arg
         burn_cap_arg)
      (prefixes ["register"; "key"]
      @@ Public_key_hash.source_param ~name:"mgr" ~desc:"the delegate key"
      @@ prefixes ["as"; "delegate"]
      @@ stop)
      (fun ( fee,
             dry_run,
             verbose_signing,
             minimal_fees,
             minimal_nanotez_per_byte,
             minimal_nanotez_per_gas_unit,
             force_low_fee,
             fee_cap,
             burn_cap )
           src_pkh
           cctxt ->
        Client_keys.get_key cctxt src_pkh >>=? fun (_, src_pk, src_sk) ->
        let fee_parameter =
          {
            Injection.minimal_fees;
            minimal_nanotez_per_byte;
            minimal_nanotez_per_gas_unit;
            force_low_fee;
            fee_cap;
            burn_cap;
          }
        in
        register_as_delegate
          cctxt
          ~chain:cctxt#chain
          ~block:cctxt#block
          ?confirmations:cctxt#confirmations
          ~dry_run
          ~fee_parameter
          ~verbose_signing
          ?fee
          ~manager_sk:src_sk
          src_pk
        >>= function
        | Ok _ -> return_unit
        (* Re-registering an active delegate is not an error for the user. *)
        | Error [Environment.Ecoproto_error Delegate_storage.Active_delegate] ->
            cctxt#message "Delegate already activated." >>= fun () ->
            return_unit
        | Error el -> Lwt.return_error el);
]
  (* Test-network-only commands: faucet account activation is disabled on
     Mainnet (empty list) and available on Testnet or when the network is
     unknown. *)
  @ (match network with
    | Some `Mainnet -> []
    | Some `Testnet | None ->
        [
          command
            ~group
            ~desc:"Register and activate an Alphanet/Zeronet faucet account."
            (args2 (Secret_key.force_switch ()) encrypted_switch)
            (prefixes ["activate"; "account"]
            @@ Secret_key.fresh_alias_param
            @@ prefixes ["with"]
            @@ param
                 ~name:"activation_key"
                 ~desc:
                   "Activate an Alphanet/Zeronet faucet account from the JSON \
                    (file or directly inlined)."
                 json_file_or_text_parameter
            @@ stop)
            (fun (force, encrypted) name activation_json cctxt ->
              Secret_key.of_fresh cctxt force name >>=? fun name ->
              (* Decode the activation key JSON; a decoding failure is turned
                 into a readable error message instead of a raw exception. *)
              match
                Data_encoding.Json.destruct
                  Client_proto_context.activation_key_encoding
                  activation_json
              with
              | exception (Data_encoding.Json.Cannot_destruct _ as exn) ->
                  Format.kasprintf
                    (fun s -> failwith "%s" s)
                    "Invalid activation file: %a %a"
                    (fun ppf -> Data_encoding.Json.print_error ppf)
                    exn
                    Data_encoding.Json.pp
                    activation_json
              | key ->
                  activate_account
                    cctxt
                    ~chain:cctxt#chain
                    ~block:cctxt#block
                    ?confirmations:cctxt#confirmations
                    ~encrypted
                    ~force
                    key
                    name
                  >>=? fun _res -> return_unit);
        ])
  (* Mainnet-only commands: fundraiser account activation, which only
     makes sense on Mainnet (empty list on test networks). *)
  @ (match network with
    | Some `Testnet | None -> []
    | Some `Mainnet ->
        [
          command
            ~group
            ~desc:"Activate a fundraiser account."
            (args1 dry_run_switch)
            (prefixes ["activate"; "fundraiser"; "account"]
            @@ Public_key_hash.alias_param
            @@ prefixes ["with"]
            @@ param
                 ~name:"code"
                 (* The activation code is parsed from hex; [protect] wraps
                    any parsing exception into the error monad. *)
                 (Clic.parameter (fun _ctx code ->
                      protect (fun () ->
                          return
                            (Blinded_public_key_hash.activation_code_of_hex
                               code))))
                 ~desc:"Activation code obtained from the Tezos foundation."
            @@ stop)
            (fun dry_run (name, _pkh) code cctxt ->
              activate_existing_account
                cctxt
                ~chain:cctxt#chain
                ~block:cctxt#block
                ?confirmations:cctxt#confirmations
                ~dry_run
                name
                code
              >>=? fun _res -> return_unit);
        ])
@ [
      (* CLI: "wait for <operation> to be included" — block until the given
         operation hash appears in the chain with the requested number of
         confirmations, searching up to --check-previous blocks back
         (optionally from a given --branch block). *)
      command
        ~desc:"Wait until an operation is included in a block"
        (args3
           (default_arg
              ~long:"confirmations"
              ~placeholder:"num_blocks"
              ~doc:
                "wait until 'N' additional blocks after the operation appears \
                 in the considered chain"
              ~default:"0"
              non_negative_param)
           (default_arg
              ~long:"check-previous"
              ~placeholder:"num_blocks"
              ~doc:"number of previous blocks to check"
              ~default:"10"
              non_negative_param)
           (arg
              ~long:"branch"
              ~placeholder:"block_hash"
              ~doc:
                "hash of the oldest block where we should look for the \
                 operation"
              block_hash_param))
        (prefixes ["wait"; "for"]
        @@ param
             ~name:"operation"
             ~desc:"Operation to be included"
             (parameter (fun _ x ->
                  match Operation_hash.of_b58check_opt x with
                  | None ->
                      Error_monad.failwith "Invalid operation hash: '%s'" x
                  | Some hash -> return hash))
        @@ prefixes ["to"; "be"; "included"]
        @@ stop)
        (fun (confirmations, predecessors, branch)
             operation_hash
             (ctxt : Protocol_client_context.full) ->
          Client_confirmations.wait_for_operation_inclusion
            ctxt
            ~chain:ctxt#chain
            ~confirmations
            ~predecessors
            ?branch
            operation_hash
          >>=? fun _ -> return_unit);
      (* CLI: "get receipt for <operation>" — look up a past operation in
         the last --check-previous blocks and display its receipt. *)
      command
        ~desc:"Get receipt for past operation"
        (args1
           (default_arg
              ~long:"check-previous"
              ~placeholder:"num_blocks"
              ~doc:"number of previous blocks to check"
              ~default:"10"
              non_negative_param))
        (prefixes ["get"; "receipt"; "for"]
        @@ param
             ~name:"operation"
             ~desc:"Operation to be looked up"
             (parameter (fun _ x ->
                  match Operation_hash.of_b58check_opt x with
                  | None ->
                      Error_monad.failwith "Invalid operation hash: '%s'" x
                  | Some hash -> return hash))
        @@ stop)
        (fun predecessors operation_hash (ctxt : Protocol_client_context.full) ->
          display_receipt_for_operation
            ctxt
            ~chain:ctxt#chain
            ~predecessors
            operation_hash
          >>=? fun _ -> return_unit);
      (* CLI: "describe unsigned block header" — print the binary schema of
         an unsigned block header (documentation/debugging aid). *)
      command
        ~group:binary_description
        ~desc:"Describe unsigned block header"
        no_options
        (fixed ["describe"; "unsigned"; "block"; "header"])
        (fun () (cctxt : Protocol_client_context.full) ->
          cctxt#message
            "%a"
            Data_encoding.Binary_schema.pp
            (Data_encoding.Binary.describe
               Alpha_context.Block_header.unsigned_encoding)
          >>= fun () -> return_unit);
      (* CLI: "describe unsigned operation" — print the binary schema of an
         unsigned operation (documentation/debugging aid). *)
      command
        ~group:binary_description
        ~desc:"Describe unsigned operation"
        no_options
        (fixed ["describe"; "unsigned"; "operation"])
        (fun () (cctxt : Protocol_client_context.full) ->
          cctxt#message
            "%a"
            Data_encoding.Binary_schema.pp
            (Data_encoding.Binary.describe
               Alpha_context.Operation.unsigned_encoding)
          >>= fun () -> return_unit);
      (* CLI: "submit proposals for <delegate> <proposal>..." — submit one
         or more protocol-hash proposals on behalf of a delegate. The
         proposals are validated client-side first (non-empty list, count
         limit, no duplicates, known hashes, voting rights); --force lets
         the user bypass a failed validation. *)
      command
        ~group
        ~desc:"Submit protocol proposals"
        (args3
           dry_run_switch
           verbose_signing_switch
           (switch
              ~doc:
                "Do not fail when the checks that try to prevent the user from \
                 shooting themselves in the foot do."
              ~long:"force"
              ()))
        (prefixes ["submit"; "proposals"; "for"]
        @@ ContractAlias.destination_param
             ~name:"delegate"
             ~desc:"the delegate who makes the proposal"
        @@ seq_of_param
             (param
                ~name:"proposal"
                ~desc:"the protocol hash proposal to be submitted"
                (parameter (fun _ x ->
                     match Protocol_hash.of_b58check_opt x with
                     | None ->
                         Error_monad.failwith "Invalid proposal hash: '%s'" x
                     | Some hash -> return hash))))
        (fun (dry_run, verbose_signing, force)
             (_name, source)
             proposals
             (cctxt : Protocol_client_context.full) ->
          match Contract.is_implicit source with
          | None -> failwith "only implicit accounts can submit proposals"
          | Some src_pkh -> (
              Client_keys.get_key cctxt src_pkh
              >>=? fun (src_name, _src_pk, src_sk) ->
              get_period_info
                (* Find period info of the successor, because the operation will
                   be injected on the next block at the earliest *)
                ~successor:true
                ~chain:cctxt#chain
                ~block:cctxt#block
                cctxt
              >>=? fun info ->
              (match info.current_period_kind with
              | Proposal -> return_unit
              | _ -> cctxt#error "Not in a proposal period")
              >>=? fun () ->
              Shell_services.Protocol.list cctxt >>=? fun known_protos ->
              get_proposals ~chain:cctxt#chain ~block:cctxt#block cctxt
              >>=? fun known_proposals ->
              Alpha_services.Voting.listings cctxt (cctxt#chain, cctxt#block)
              >>=? fun listings ->
              (* For a proposal to be valid it must be a protocol that was
                 already proposed by somebody else or a protocol known by the
                 node, because the user is the first proposer and just
                 injected it with tezos-admin-client. *)
              (* [check_proposals] accumulates human-readable issues in a
                 local ref; it returns [true] iff no issue was found. *)
              let check_proposals proposals : bool tzresult Lwt.t =
                let n = List.length proposals in
                let errors = ref [] in
                let error ppf =
                  Format.kasprintf (fun s -> errors := s :: !errors) ppf
                in
                if n = 0 then error "Empty proposal list." ;
                if n > Constants.max_proposals_per_delegate then
                  error
                    "Too many proposals: %d > %d."
                    n
                    Constants.max_proposals_per_delegate ;
                (match
                   Base.List.find_all_dups
                     ~compare:Protocol_hash.compare
                     proposals
                 with
                | [] -> ()
                | dups ->
                    error
                      "There %s: %a."
                      (if Compare.List_length_with.(dups = 1) then
                        "is a duplicate proposal"
                       else "are duplicate proposals")
                      Format.(
                        pp_print_list
                          ~pp_sep:(fun ppf () -> pp_print_string ppf ", ")
                          Protocol_hash.pp)
                      dups) ;
                List.iter
                  (fun (p : Protocol_hash.t) ->
                    if
                      List.mem ~equal:Protocol_hash.equal p known_protos
                      || Environment.Protocol_hash.Map.mem p known_proposals
                    then ()
                    else
                      error
                        "Protocol %a is not a known proposal."
                        Protocol_hash.pp
                        p)
                  proposals ;
                if
                  not
                    (List.exists
                       (fun (pkh, _) ->
                         Signature.Public_key_hash.equal pkh src_pkh)
                       listings)
                then
                  error
                    "Public-key-hash `%a` from account `%s` does not appear to \
                     have voting rights."
                    Signature.Public_key_hash.pp
                    src_pkh
                    src_name ;
                if !errors <> [] then
                  cctxt#message
                    "There %s with the submission:%t"
                    (if Compare.List_length_with.(!errors = 1) then
                      "is an issue"
                     else "are issues")
                    Format.(
                      fun ppf ->
                        pp_print_cut ppf () ;
                        pp_open_vbox ppf 0 ;
                        List.iter
                          (fun msg ->
                            pp_open_hovbox ppf 2 ;
                            pp_print_string ppf "* " ;
                            pp_print_text ppf msg ;
                            pp_close_box ppf () ;
                            pp_print_cut ppf ())
                          !errors ;
                        pp_close_box ppf ())
                  >>= fun () -> return_false
                else return_true
              in
              check_proposals proposals >>=? fun all_valid ->
              (if all_valid then cctxt#message "All proposals are valid."
               else if force then
                 cctxt#message
                   "Some proposals are not valid, but `--force` was used."
               else cctxt#error "Submission failed because of invalid proposals.")
              >>= fun () ->
              submit_proposals
                ~dry_run
                ~verbose_signing
                cctxt
                ~chain:cctxt#chain
                ~block:cctxt#block
                ~src_sk
                src_pkh
                proposals
              >>= function
              | Ok _res -> return_unit
              | Error errs ->
                  (* Special-case a generic unregistered error so its message
                     is re-flowed into readable text before failing. *)
                  (match errs with
                  | [
                   Unregistered_error
                     (`O [("kind", `String "generic"); ("error", `String msg)]);
                  ] ->
                      cctxt#message
                        "Error:@[<hov>@.%a@]"
                        Format.pp_print_text
                        (String.split_on_char ' ' msg
                        |> List.filter (function
                               | "" | "\n" -> false
                               | _ -> true)
                        |> String.concat " "
                        |> String.map (function '\n' | '\t' -> ' ' | c -> c))
                  | trace -> cctxt#message "Error:@ %a" pp_print_trace trace)
                  >>= fun () -> failwith "Failed to submit proposals"));
      (* CLI: "submit ballot for <delegate> <proposal> <ballot>" — cast a
         yea/nay/pass vote on a proposal during an Exploration or Promotion
         period. *)
      command
        ~group
        ~desc:"Submit a ballot"
        (args2 verbose_signing_switch dry_run_switch)
        (prefixes ["submit"; "ballot"; "for"]
        @@ ContractAlias.destination_param
             ~name:"delegate"
             ~desc:"the delegate who votes"
        @@ param
             ~name:"proposal"
             ~desc:"the protocol hash proposal to vote for"
             (parameter (fun _ x ->
                  match Protocol_hash.of_b58check_opt x with
                  | None -> failwith "Invalid proposal hash: '%s'" x
                  | Some hash -> return hash))
        @@ param
             ~name:"ballot"
             ~desc:"the ballot value (yea/yay, nay, or pass)"
             (parameter
                ~autocomplete:(fun _ -> return ["yea"; "nay"; "pass"])
                (fun _ s ->
                  (* We should have [Vote.of_string]. *)
                  match String.lowercase_ascii s with
                  | "yay" | "yea" -> return Vote.Yay
                  | "nay" -> return Vote.Nay
                  | "pass" -> return Vote.Pass
                  | s -> failwith "Invalid ballot: '%s'" s))
        @@ stop)
        (fun (verbose_signing, dry_run)
             (_name, source)
             proposal
             ballot
             (cctxt : Protocol_client_context.full) ->
          match Contract.is_implicit source with
          | None -> failwith "only implicit accounts can submit ballot"
          | Some src_pkh ->
              Client_keys.get_key cctxt src_pkh
              >>=? fun (_src_name, _src_pk, src_sk) ->
              get_period_info
                (* Find period info of the successor, because the operation will
                   be injected on the next block at the earliest *)
                ~successor:true
                ~chain:cctxt#chain
                ~block:cctxt#block
                cctxt
              >>=? fun info ->
              (match info.current_period_kind with
              | Exploration | Promotion -> return_unit
              | _ -> cctxt#error "Not in Exploration or Promotion period")
              >>=? fun () ->
              submit_ballot
                cctxt
                ~chain:cctxt#chain
                ~block:cctxt#block
                ~src_sk
                src_pkh
                ~verbose_signing
                ~dry_run
                proposal
                ballot
              >>=? fun _res -> return_unit);
      (* CLI: "show voting period" — summarize the current voting period:
         its kind, remaining blocks, and depending on the period either the
         list of proposals (sorted by descending vote weight) or the
         current ballots and quorum figures. *)
      command
        ~group
        ~desc:"Summarize the current voting period"
        no_options
        (fixed ["show"; "voting"; "period"])
        (fun () (cctxt : Protocol_client_context.full) ->
          get_period_info ~chain:cctxt#chain ~block:cctxt#block cctxt
          >>=? fun info ->
          cctxt#message
            "Current period: %a\nBlocks remaining until end of period: %ld"
            Data_encoding.Json.pp
            (Data_encoding.Json.construct
               Alpha_context.Voting_period.kind_encoding
               info.current_period_kind)
            info.remaining
          >>= fun () ->
          Shell_services.Protocol.list cctxt >>=? fun known_protos ->
          get_proposals ~chain:cctxt#chain ~block:cctxt#block cctxt
          >>=? fun props ->
          (* Proposals ranked by descending vote weight ([compare v2 v1]). *)
          let ranks =
            Environment.Protocol_hash.Map.bindings props
            |> List.sort (fun (_, v1) (_, v2) -> Int32.(compare v2 v1))
          in
          let print_proposal = function
            | None ->
                cctxt#message "The current proposal has already been cleared."
            (* The proposal is cleared on the last block of adoption period, and
               also on the last block of the exploration and promotion
               periods when the proposal is not approved *)
            | Some proposal ->
                cctxt#message "Current proposal: %a" Protocol_hash.pp proposal
          in
          match info.current_period_kind with
          | Proposal ->
              (* the current proposals are cleared on the last block of the
                 proposal period *)
              if info.remaining <> 0l then
                cctxt#answer
                  "Current proposals:%t"
                  Format.(
                    fun ppf ->
                      pp_print_cut ppf () ;
                      pp_open_vbox ppf 0 ;
                      List.iter
                        (fun (p, w) ->
                          fprintf
                            ppf
                            "* %a %ld (%sknown by the node)@."
                            Protocol_hash.pp
                            p
                            w
                            (if
                             List.mem ~equal:Protocol_hash.equal p known_protos
                            then ""
                            else "not "))
                        ranks ;
                      pp_close_box ppf ())
                >>= fun () -> return_unit
              else
                cctxt#message "The proposals have already been cleared."
                >>= fun () -> return_unit
          | Exploration | Promotion ->
              print_proposal info.current_proposal >>= fun () ->
              (* the ballots are cleared on the last block of these periods *)
              if info.remaining <> 0l then
                get_ballots_info ~chain:cctxt#chain ~block:cctxt#block cctxt
                >>=? fun ballots_info ->
                cctxt#answer
                  "Ballots: %a@,\
                   Current participation %.2f%%, necessary quorum %.2f%%@,\
                   Current in favor %ld, needed supermajority %ld"
                  Data_encoding.Json.pp
                  (Data_encoding.Json.construct
                     Vote.ballots_encoding
                     ballots_info.ballots)
                  (Int32.to_float ballots_info.participation /. 100.)
                  (Int32.to_float ballots_info.current_quorum /. 100.)
                  ballots_info.ballots.yay
                  ballots_info.supermajority
                >>= fun () -> return_unit
              else
                cctxt#message "The ballots have already been cleared."
                >>= fun () -> return_unit
          | Cooldown ->
              print_proposal info.current_proposal >>= fun () -> return_unit
          | Adoption ->
              print_proposal info.current_proposal >>= fun () -> return_unit);
]
| null | https://raw.githubusercontent.com/ocaml-multicore/tezos/e4fd21a1cb02d194b3162ab42d512b7c985ee8a9/src/proto_011_PtHangz2/lib_client_commands/client_proto_context_commands.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
When --force is used we want to inject the transfer even if it fails.
In that case we cannot rely on simulation to compute limits and fees
so we require the corresponding options to be set.
Find period info of the successor, because the operation will
be injected on the next block at the earliest
We should have [Vote.of_string].
Find period info of the successor, because the operation will
be injected on the next block at the earliest
The proposal is cleared on the last block of adoption period, and
also on the last block of the exploration and promotion
periods when the proposal is not approved
the current proposals are cleared on the last block of the
proposal period
the ballots are cleared on the last block of these periods | Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < >
Copyright ( c ) 2019 Nomadic Labs < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
open Protocol
open Alpha_context
open Tezos_micheline
open Client_proto_context
open Client_proto_contracts
open Client_proto_programs
open Client_keys
open Client_proto_args
let encrypted_switch =
Clic.switch ~long:"encrypted" ~doc:"encrypt the key on-disk" ()
let dry_run_switch =
Clic.switch
~long:"dry-run"
~short:'D'
~doc:"don't inject the operation, just display it"
()
let verbose_signing_switch =
Clic.switch
~long:"verbose-signing"
~doc:"display extra information before signing the operation"
()
let simulate_switch =
Clic.switch
~long:"simulation"
~doc:
"Simulate the execution of the command, without needing any signatures."
()
let force_switch =
Clic.switch
~long:"force"
~doc:
"Inject the operation even if the simulation results in a failure. This \
switch requires --gas-limit, --storage-limit, and --fee."
()
let report_michelson_errors ?(no_print_source = false) ~msg
(cctxt : #Client_context.printer) = function
| Error errs ->
cctxt#warning
"%a"
(Michelson_v1_error_reporter.report_errors
~details:(not no_print_source)
~show_source:(not no_print_source)
?parsed:None)
errs
>>= fun () ->
cctxt#error "%s" msg >>= fun () -> Lwt.return_none
| Ok data -> Lwt.return_some data
let json_file_or_text_parameter =
Clic.parameter (fun _ p ->
match String.split ~limit:1 ':' p with
| ["text"; text] -> return (Ezjsonm.from_string text)
| ["file"; path] -> Lwt_utils_unix.Json.read_file path
| _ -> (
if Sys.file_exists p then Lwt_utils_unix.Json.read_file p
else
try return (Ezjsonm.from_string p)
with Ezjsonm.Parse_error _ ->
failwith "Neither an existing file nor valid JSON: '%s'" p))
let non_negative_param =
Clic.parameter (fun _ s ->
match int_of_string_opt s with
| Some i when i >= 0 -> return i
| _ -> failwith "Parameter should be a non-negative integer literal")
let block_hash_param =
Clic.parameter (fun _ s ->
try return (Block_hash.of_b58check_exn s)
with _ -> failwith "Parameter '%s' is an invalid block hash" s)
let group =
{
Clic.name = "context";
title = "Block contextual commands (see option -block)";
}
let alphanet = {Clic.name = "alphanet"; title = "Alphanet only commands"}
let binary_description =
{Clic.name = "description"; title = "Binary Description"}
let transfer_command amount source destination (cctxt : #Client_context.printer)
( fee,
dry_run,
verbose_signing,
simulation,
force,
gas_limit,
storage_limit,
counter,
arg,
no_print_source,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap,
entrypoint ) =
let fee_parameter =
{
Injection.minimal_fees;
minimal_nanotez_per_byte;
minimal_nanotez_per_gas_unit;
force_low_fee;
fee_cap;
burn_cap;
}
in
let check_force_dependency name = function
| None ->
cctxt#error
"When the --force switch is used, the %s option is required."
name
| _ -> Lwt.return_unit
in
(if force then
check_force_dependency "--gas-limit" gas_limit >>= fun () ->
check_force_dependency "--storage-limit" storage_limit >>= fun () ->
check_force_dependency "--fee" fee
else Lwt.return_unit)
>>= fun () ->
(match Contract.is_implicit source with
| None ->
let contract = source in
Managed_contract.get_contract_manager cctxt source >>=? fun source ->
Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
Managed_contract.transfer
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~dry_run
~verbose_signing
~simulation
~force
~fee_parameter
?fee
~contract
~source
~src_pk
~src_sk
~destination
?entrypoint
?arg
~amount
?gas_limit
?storage_limit
?counter
()
| Some source ->
Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
transfer
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~dry_run
~simulation
~force
~verbose_signing
~fee_parameter
~source
?fee
~src_pk
~src_sk
~destination
?entrypoint
?arg
~amount
?gas_limit
?storage_limit
?counter
())
>>= report_michelson_errors
~no_print_source
~msg:"transfer simulation failed"
cctxt
>>= function
| None -> return_unit
| Some (_res, _contracts) -> return_unit
let tez_of_string_exn index field s =
match Tez.of_string s with
| Some t -> return t
| None ->
failwith
"Invalid \xEA\x9C\xA9 notation at entry %i, field \"%s\": %s"
index
field
s
let tez_of_opt_string_exn index field s =
match s with
| None -> return None
| Some s -> tez_of_string_exn index field s >>=? fun s -> return (Some s)
let prepare_batch_operation cctxt ?arg ?fee ?gas_limit ?storage_limit
?entrypoint source index batch =
Client_proto_contracts.ContractAlias.find_destination cctxt batch.destination
>>=? fun (_, destination) ->
tez_of_string_exn index "amount" batch.amount >>=? fun amount ->
tez_of_opt_string_exn index "fee" batch.fee >>=? fun batch_fee ->
let fee = Option.either batch_fee fee in
let arg = Option.either batch.arg arg in
let gas_limit = Option.either batch.gas_limit gas_limit in
let storage_limit = Option.either batch.storage_limit storage_limit in
let entrypoint = Option.either batch.entrypoint entrypoint in
parse_arg_transfer arg >>=? fun parameters ->
(match Contract.is_implicit source with
| None ->
Managed_contract.build_transaction_operation
cctxt
~chain:cctxt#chain
~block:cctxt#block
~contract:source
~destination
?entrypoint
?arg
~amount
?fee
?gas_limit
?storage_limit
()
| Some _ ->
return
(build_transaction_operation
~amount
~parameters
?entrypoint
?fee
?gas_limit
?storage_limit
destination))
>>=? fun operation ->
return (Annotated_manager_operation.Annotated_manager_operation operation)
let commands network () =
let open Clic in
[
command
~group
~desc:"Access the timestamp of the block."
(args1
(switch ~doc:"output time in seconds" ~short:'s' ~long:"seconds" ()))
(fixed ["get"; "timestamp"])
(fun seconds (cctxt : Protocol_client_context.full) ->
Shell_services.Blocks.Header.shell_header
cctxt
~chain:cctxt#chain
~block:cctxt#block
()
>>=? fun {timestamp = v; _} ->
(if seconds then cctxt#message "%Ld" (Time.Protocol.to_seconds v)
else cctxt#message "%s" (Time.Protocol.to_notation v))
>>= fun () -> return_unit);
command
~group
~desc:"Lists all non empty contracts of the block."
no_options
(fixed ["list"; "contracts"])
(fun () (cctxt : Protocol_client_context.full) ->
list_contract_labels cctxt ~chain:cctxt#chain ~block:cctxt#block
>>=? fun contracts ->
List.iter_s
(fun (alias, hash, kind) -> cctxt#message "%s%s%s" hash kind alias)
contracts
>>= fun () -> return_unit);
command
~group
~desc:"Lists cached contracts and their age in LRU ordering."
no_options
(prefixes ["list"; "cached"; "contracts"] @@ stop)
(fun () (cctxt : Protocol_client_context.full) ->
cached_contracts cctxt ~chain:cctxt#chain ~block:cctxt#block
>>=? fun keys ->
List.iter_s
(fun (key, size) ->
cctxt#message "%a %d" Alpha_context.Contract.pp key size)
keys
>>= fun () -> return_unit);
command
~group
~desc:"Get the key rank of a cache key."
no_options
(prefixes ["get"; "cached"; "contract"; "rank"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"contract"
@@ stop)
(fun () (_, contract) (cctxt : Protocol_client_context.full) ->
contract_rank cctxt ~chain:cctxt#chain ~block:cctxt#block contract
>>=? fun rank ->
match rank with
| None ->
cctxt#error
"Invalid contract: %a"
Alpha_context.Contract.pp
contract
>>= fun () -> return_unit
| Some rank -> cctxt#message "%d" rank >>= fun () -> return_unit);
command
~group
~desc:"Get cache contract size."
no_options
(prefixes ["get"; "cache"; "contract"; "size"] @@ stop)
(fun () (cctxt : Protocol_client_context.full) ->
contract_cache_size cctxt ~chain:cctxt#chain ~block:cctxt#block
>>=? fun t ->
cctxt#message "%d" t >>= fun () -> return_unit);
command
~group
~desc:"Get cache contract size limit."
no_options
(prefixes ["get"; "cache"; "contract"; "size"; "limit"] @@ stop)
(fun () (cctxt : Protocol_client_context.full) ->
contract_cache_size_limit cctxt ~chain:cctxt#chain ~block:cctxt#block
>>=? fun t ->
cctxt#message "%d" t >>= fun () -> return_unit);
command
~group
~desc:"Get the balance of a contract."
no_options
(prefixes ["get"; "balance"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun () (_, contract) (cctxt : Protocol_client_context.full) ->
get_balance cctxt ~chain:cctxt#chain ~block:cctxt#block contract
>>=? fun amount ->
cctxt#answer "%a %s" Tez.pp amount Client_proto_args.tez_sym
>>= fun () -> return_unit);
command
~group
~desc:"Get the storage of a contract."
(args1 (unparsing_mode_arg ~default:"Readable"))
(prefixes ["get"; "contract"; "storage"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun unparsing_mode (_, contract) (cctxt : Protocol_client_context.full) ->
get_storage
cctxt
~chain:cctxt#chain
~block:cctxt#block
~unparsing_mode
contract
>>=? function
| None -> cctxt#error "This is not a smart contract."
| Some storage ->
cctxt#answer "%a" Michelson_v1_printer.print_expr_unwrapped storage
>>= fun () -> return_unit);
command
~group
~desc:
"Get the value associated to a key in the big map storage of a \
contract (deprecated)."
no_options
(prefixes ["get"; "big"; "map"; "value"; "for"]
@@ Clic.param ~name:"key" ~desc:"the key to look for" data_parameter
@@ prefixes ["of"; "type"]
@@ Clic.param ~name:"type" ~desc:"type of the key" data_parameter
@@ prefix "in"
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun () key key_type (_, contract) (cctxt : Protocol_client_context.full) ->
get_contract_big_map_value
cctxt
~chain:cctxt#chain
~block:cctxt#block
contract
(key.expanded, key_type.expanded)
>>=? function
| None -> cctxt#error "No value associated to this key."
| Some value ->
cctxt#answer "%a" Michelson_v1_printer.print_expr_unwrapped value
>>= fun () -> return_unit);
command
~group
~desc:"Get a value in a big map."
(args1 (unparsing_mode_arg ~default:"Readable"))
(prefixes ["get"; "element"]
@@ Clic.param
~name:"key"
~desc:"the key to look for"
(Clic.parameter (fun _ s ->
return (Script_expr_hash.of_b58check_exn s)))
@@ prefixes ["of"; "big"; "map"]
@@ Clic.param
~name:"big_map"
~desc:"identifier of the big_map"
int_parameter
@@ stop)
(fun unparsing_mode key id (cctxt : Protocol_client_context.full) ->
get_big_map_value
cctxt
~chain:cctxt#chain
~block:cctxt#block
~unparsing_mode
(Big_map.Id.parse_z (Z.of_int id))
key
>>=? fun value ->
cctxt#answer "%a" Michelson_v1_printer.print_expr_unwrapped value
>>= fun () -> return_unit);
command
~group
~desc:"Get the code of a contract."
(args1 (unparsing_mode_arg ~default:"Readable"))
(prefixes ["get"; "contract"; "code"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun unparsing_mode (_, contract) (cctxt : Protocol_client_context.full) ->
get_script
cctxt
~chain:cctxt#chain
~block:cctxt#block
~unparsing_mode
contract
>>=? function
| None -> cctxt#error "This is not a smart contract."
| Some {code; storage = _} -> (
match Script_repr.force_decode code with
| Error errs ->
cctxt#error "%a" Environment.Error_monad.pp_trace errs
| Ok code ->
let {Michelson_v1_parser.source; _} =
Michelson_v1_printer.unparse_toplevel code
in
cctxt#answer "%s" source >>= return));
command
~group
~desc:"Get the `BLAKE2B` script hash of a contract."
no_options
(prefixes ["get"; "contract"; "script"; "hash"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun () (_, contract) (cctxt : Protocol_client_context.full) ->
get_script_hash cctxt ~chain:cctxt#chain ~block:cctxt#block contract
>>= function
| Error trace -> cctxt#error "%a" pp_print_trace trace
| Ok None -> cctxt#error "This is not a smart contract."
| Ok (Some hash) -> cctxt#answer "%a" Script_expr_hash.pp hash >|= ok);
command
~group
~desc:"Get the type of an entrypoint of a contract."
no_options
(prefixes ["get"; "contract"; "entrypoint"; "type"; "of"]
@@ Clic.string ~name:"entrypoint" ~desc:"the entrypoint to describe"
@@ prefixes ["for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun () entrypoint (_, contract) (cctxt : Protocol_client_context.full) ->
Michelson_v1_entrypoints.contract_entrypoint_type
cctxt
~chain:cctxt#chain
~block:cctxt#block
~contract
~entrypoint
>>= Michelson_v1_entrypoints.print_entrypoint_type
cctxt
~emacs:false
~contract
~entrypoint);
command
~group
~desc:"Get the entrypoint list of a contract."
no_options
(prefixes ["get"; "contract"; "entrypoints"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun () (_, contract) (cctxt : Protocol_client_context.full) ->
Michelson_v1_entrypoints.list_contract_entrypoints
cctxt
~chain:cctxt#chain
~block:cctxt#block
~contract
>>= Michelson_v1_entrypoints.print_entrypoints_list
cctxt
~emacs:false
~contract);
command
~group
~desc:"Get the list of unreachable paths in a contract's parameter type."
no_options
(prefixes ["get"; "contract"; "unreachable"; "paths"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun () (_, contract) (cctxt : Protocol_client_context.full) ->
Michelson_v1_entrypoints.list_contract_unreachables
cctxt
~chain:cctxt#chain
~block:cctxt#block
~contract
>>= Michelson_v1_entrypoints.print_unreachables
cctxt
~emacs:false
~contract);
command
~group
~desc:"Get the delegate of a contract."
no_options
(prefixes ["get"; "delegate"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun () (_, contract) (cctxt : Protocol_client_context.full) ->
Client_proto_contracts.get_delegate
cctxt
~chain:cctxt#chain
~block:cctxt#block
contract
>>=? function
| None -> cctxt#message "none" >>= fun () -> return_unit
| Some delegate ->
Public_key_hash.rev_find cctxt delegate >>=? fun mn ->
Public_key_hash.to_source delegate >>=? fun m ->
cctxt#message
"%s (%s)"
m
(match mn with None -> "unknown" | Some n -> "known as " ^ n)
>>= fun () -> return_unit);
command
~group
~desc:"Set the delegate of a contract."
(args10
fee_arg
dry_run_switch
verbose_signing_switch
simulate_switch
minimal_fees_arg
minimal_nanotez_per_byte_arg
minimal_nanotez_per_gas_unit_arg
force_low_fee_arg
fee_cap_arg
burn_cap_arg)
(prefixes ["set"; "delegate"; "for"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ prefix "to"
@@ Public_key_hash.source_param
~name:"dlgt"
~desc:"new delegate of the contract"
@@ stop)
(fun ( fee,
dry_run,
verbose_signing,
simulation,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap )
(_, contract)
delegate
(cctxt : Protocol_client_context.full) ->
let fee_parameter =
{
Injection.minimal_fees;
minimal_nanotez_per_byte;
minimal_nanotez_per_gas_unit;
force_low_fee;
fee_cap;
burn_cap;
}
in
match Contract.is_implicit contract with
| None ->
Managed_contract.get_contract_manager cctxt contract
>>=? fun source ->
Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
Managed_contract.set_delegate
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~dry_run
~verbose_signing
~simulation
~fee_parameter
?fee
~source
~src_pk
~src_sk
contract
(Some delegate)
>>= fun errors ->
report_michelson_errors
~no_print_source:true
~msg:"Setting delegate through entrypoints failed."
cctxt
errors
>>= fun _ -> return_unit
| Some mgr ->
Client_keys.get_key cctxt mgr >>=? fun (_, src_pk, manager_sk) ->
set_delegate
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~dry_run
~verbose_signing
~simulation
~fee_parameter
?fee
mgr
(Some delegate)
~src_pk
~manager_sk
>>=? fun _ -> return_unit);
command
~group
~desc:"Withdraw the delegate from a contract."
(args9
fee_arg
dry_run_switch
verbose_signing_switch
minimal_fees_arg
minimal_nanotez_per_byte_arg
minimal_nanotez_per_gas_unit_arg
force_low_fee_arg
fee_cap_arg
burn_cap_arg)
(prefixes ["withdraw"; "delegate"; "from"]
@@ ContractAlias.destination_param ~name:"src" ~desc:"source contract"
@@ stop)
(fun ( fee,
dry_run,
verbose_signing,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap )
(_, contract)
(cctxt : Protocol_client_context.full) ->
let fee_parameter =
{
Injection.minimal_fees;
minimal_nanotez_per_byte;
minimal_nanotez_per_gas_unit;
force_low_fee;
fee_cap;
burn_cap;
}
in
match Contract.is_implicit contract with
| None ->
Managed_contract.get_contract_manager cctxt contract
>>=? fun source ->
Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
Managed_contract.set_delegate
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~dry_run
~verbose_signing
~fee_parameter
?fee
~source
~src_pk
~src_sk
contract
None
>>= fun errors ->
report_michelson_errors
~no_print_source:true
~msg:"Withdrawing delegate through entrypoints failed."
cctxt
errors
>>= fun _ -> return_unit
| Some mgr ->
Client_keys.get_key cctxt mgr >>=? fun (_, src_pk, manager_sk) ->
set_delegate
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~dry_run
~verbose_signing
~fee_parameter
mgr
None
?fee
~src_pk
~manager_sk
>>= fun _ -> return_unit);
command
~group
~desc:"Launch a smart contract on the blockchain."
(args15
fee_arg
dry_run_switch
verbose_signing_switch
gas_limit_arg
storage_limit_arg
delegate_arg
(Client_keys.force_switch ())
init_arg
no_print_source_flag
minimal_fees_arg
minimal_nanotez_per_byte_arg
minimal_nanotez_per_gas_unit_arg
force_low_fee_arg
fee_cap_arg
burn_cap_arg)
(prefixes ["originate"; "contract"]
@@ RawContractAlias.fresh_alias_param
~name:"new"
~desc:"name of the new contract"
@@ prefix "transferring"
@@ tez_param ~name:"qty" ~desc:"amount taken from source"
@@ prefix "from"
@@ ContractAlias.destination_param
~name:"src"
~desc:"name of the source contract"
@@ prefix "running"
@@ Program.source_param
~name:"prg"
~desc:
"script of the account\n\
Combine with -init if the storage type is not unit."
@@ stop)
(fun ( fee,
dry_run,
verbose_signing,
gas_limit,
storage_limit,
delegate,
force,
initial_storage,
no_print_source,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap )
alias_name
balance
(_, source)
program
(cctxt : Protocol_client_context.full) ->
RawContractAlias.of_fresh cctxt force alias_name >>=? fun alias_name ->
Lwt.return (Micheline_parser.no_parsing_error program)
>>=? fun {expanded = code; _} ->
match Contract.is_implicit source with
| None ->
failwith
"only implicit accounts can be the source of an origination"
| Some source -> (
Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
let fee_parameter =
{
Injection.minimal_fees;
minimal_nanotez_per_byte;
minimal_nanotez_per_gas_unit;
force_low_fee;
fee_cap;
burn_cap;
}
in
originate_contract
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~dry_run
~verbose_signing
?fee
?gas_limit
?storage_limit
~delegate
~initial_storage
~balance
~source
~src_pk
~src_sk
~code
~fee_parameter
()
>>= fun errors ->
report_michelson_errors
~no_print_source
~msg:"origination simulation failed"
cctxt
errors
>>= function
| None -> return_unit
| Some (_res, contract) ->
if dry_run then return_unit
else
save_contract ~force cctxt alias_name contract >>=? fun () ->
return_unit));
command
~group
~desc:
"Execute multiple transfers from a single source account.\n\
If one of the transfers fails, none of them get executed."
(args16
default_fee_arg
dry_run_switch
verbose_signing_switch
simulate_switch
default_gas_limit_arg
default_storage_limit_arg
counter_arg
default_arg_arg
no_print_source_flag
minimal_fees_arg
minimal_nanotez_per_byte_arg
minimal_nanotez_per_gas_unit_arg
force_low_fee_arg
fee_cap_arg
burn_cap_arg
default_entrypoint_arg)
(prefixes ["multiple"; "transfers"; "from"]
@@ ContractAlias.destination_param
~name:"src"
~desc:"name of the source contract"
@@ prefix "using"
@@ param
~name:"transfers.json"
~desc:
"List of operations originating from the source contract in JSON \
format (from a file or directly inlined). The input JSON must be \
an array of objects of the form: '[ {\"destination\": dst, \
\"amount\": qty (, <field>: <val> ...) } (, ...) ]', where an \
optional <field> can either be \"fee\", \"gas-limit\", \
\"storage-limit\", \"arg\", or \"entrypoint\"."
json_file_or_text_parameter
@@ stop)
(fun ( fee,
dry_run,
verbose_signing,
simulation,
gas_limit,
storage_limit,
counter,
arg,
no_print_source,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap,
entrypoint )
(_, source)
operations_json
cctxt ->
let fee_parameter =
{
Injection.minimal_fees;
minimal_nanotez_per_byte;
minimal_nanotez_per_gas_unit;
force_low_fee;
fee_cap;
burn_cap;
}
in
let prepare i =
prepare_batch_operation
cctxt
?arg
?fee
?gas_limit
?storage_limit
?entrypoint
source
i
in
match
Data_encoding.Json.destruct
(Data_encoding.list
Client_proto_context.batch_transfer_operation_encoding)
operations_json
with
| [] -> failwith "Empty operation list"
| operations ->
(match Contract.is_implicit source with
| None ->
Managed_contract.get_contract_manager cctxt source
>>=? fun source ->
Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
return (source, src_pk, src_sk)
| Some source ->
Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
return (source, src_pk, src_sk))
>>=? fun (source, src_pk, src_sk) ->
List.mapi_ep prepare operations >>=? fun contents ->
let (Manager_list contents) =
Annotated_manager_operation.manager_of_list contents
in
Injection.inject_manager_operation
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~dry_run
~verbose_signing
~simulation
~source
~fee:(Limit.of_option fee)
~gas_limit:(Limit.of_option gas_limit)
~storage_limit:(Limit.of_option storage_limit)
?counter
~src_pk
~src_sk
~fee_parameter
contents
>>= report_michelson_errors
~no_print_source
~msg:"multiple transfers simulation failed"
cctxt
>>= fun _ -> return_unit
| exception (Data_encoding.Json.Cannot_destruct (path, exn2) as exn)
-> (
match (path, operations_json) with
| ([`Index n], `A lj) -> (
match List.nth_opt lj n with
| Some j ->
failwith
"Invalid transfer at index %i: %a %a"
n
(fun ppf -> Data_encoding.Json.print_error ppf)
exn2
Data_encoding.Json.pp
j
| _ ->
failwith
"Invalid transfer at index %i: %a"
n
(fun ppf -> Data_encoding.Json.print_error ppf)
exn2)
| _ ->
failwith
"Invalid transfer file: %a %a"
(fun ppf -> Data_encoding.Json.print_error ppf)
exn
Data_encoding.Json.pp
operations_json));
command
~group
~desc:"Transfer tokens / call a smart contract."
(args17
fee_arg
dry_run_switch
verbose_signing_switch
simulate_switch
force_switch
gas_limit_arg
storage_limit_arg
counter_arg
arg_arg
no_print_source_flag
minimal_fees_arg
minimal_nanotez_per_byte_arg
minimal_nanotez_per_gas_unit_arg
force_low_fee_arg
fee_cap_arg
burn_cap_arg
entrypoint_arg)
(prefixes ["transfer"]
@@ tez_param ~name:"qty" ~desc:"amount taken from source"
@@ prefix "from"
@@ ContractAlias.destination_param
~name:"src"
~desc:"name of the source contract"
@@ prefix "to"
@@ ContractAlias.destination_param
~name:"dst"
~desc:"name/literal of the destination contract"
@@ stop)
(fun ( fee,
dry_run,
verbose_signing,
simulation,
force,
gas_limit,
storage_limit,
counter,
arg,
no_print_source,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap,
entrypoint )
amount
(_, source)
(_, destination)
cctxt ->
transfer_command
amount
source
destination
cctxt
( fee,
dry_run,
verbose_signing,
simulation,
force,
gas_limit,
storage_limit,
counter,
arg,
no_print_source,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap,
entrypoint ));
command
~group
~desc:"Register a global constant"
(args12
fee_arg
dry_run_switch
verbose_signing_switch
simulate_switch
minimal_fees_arg
minimal_nanotez_per_byte_arg
minimal_nanotez_per_gas_unit_arg
storage_limit_arg
counter_arg
force_low_fee_arg
fee_cap_arg
burn_cap_arg)
(prefixes ["register"; "global"; "constant"]
@@ global_constant_param
~name:"expression"
~desc:
"Michelson expression to register. Note the value is not \
typechecked before registration."
@@ prefix "from"
@@ ContractAlias.destination_param
~name:"src"
~desc:"name of the account registering the global constant"
@@ stop)
(fun ( fee,
dry_run,
verbose_signing,
simulation,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
storage_limit,
counter,
force_low_fee,
fee_cap,
burn_cap )
global_constant_str
(_, source)
cctxt ->
match Contract.is_implicit source with
| None ->
failwith "Only implicit accounts can register global constants"
| Some source ->
Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
let fee_parameter =
{
Injection.minimal_fees;
minimal_nanotez_per_byte;
minimal_nanotez_per_gas_unit;
force_low_fee;
fee_cap;
burn_cap;
}
in
register_global_constant
cctxt
~chain:cctxt#chain
~block:cctxt#block
?dry_run:(Some dry_run)
?verbose_signing:(Some verbose_signing)
?fee
?storage_limit
?counter
?confirmations:cctxt#confirmations
~simulation
~source
~src_pk
~src_sk
~fee_parameter
~constant:global_constant_str
()
>>= fun errors ->
report_michelson_errors
~no_print_source:false
~msg:"register global constant simulation failed"
cctxt
errors
>>= fun _ -> return_unit);
command
~group
~desc:"Call a smart contract (same as 'transfer 0')."
(args17
fee_arg
dry_run_switch
verbose_signing_switch
simulate_switch
force_switch
gas_limit_arg
storage_limit_arg
counter_arg
arg_arg
no_print_source_flag
minimal_fees_arg
minimal_nanotez_per_byte_arg
minimal_nanotez_per_gas_unit_arg
force_low_fee_arg
fee_cap_arg
burn_cap_arg
entrypoint_arg)
(prefixes ["call"]
@@ ContractAlias.destination_param
~name:"dst"
~desc:"name/literal of the destination contract"
@@ prefix "from"
@@ ContractAlias.destination_param
~name:"src"
~desc:"name of the source contract"
@@ stop)
(fun ( fee,
dry_run,
verbose_signing,
simulation,
force,
gas_limit,
storage_limit,
counter,
arg,
no_print_source,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap,
entrypoint )
(_, destination)
(_, source)
cctxt ->
let amount = Tez.zero in
transfer_command
amount
source
destination
cctxt
( fee,
dry_run,
verbose_signing,
simulation,
force,
gas_limit,
storage_limit,
counter,
arg,
no_print_source,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap,
entrypoint ));
command
~group
~desc:"Reveal the public key of the contract manager."
(args9
fee_arg
dry_run_switch
verbose_signing_switch
minimal_fees_arg
minimal_nanotez_per_byte_arg
minimal_nanotez_per_gas_unit_arg
force_low_fee_arg
fee_cap_arg
burn_cap_arg)
(prefixes ["reveal"; "key"; "for"]
@@ ContractAlias.alias_param
~name:"src"
~desc:"name of the source contract"
@@ stop)
(fun ( fee,
dry_run,
verbose_signing,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap )
(_, source)
cctxt ->
match Contract.is_implicit source with
| None -> failwith "only implicit accounts can be revealed"
| Some source ->
Client_keys.get_key cctxt source >>=? fun (_, src_pk, src_sk) ->
let fee_parameter =
{
Injection.minimal_fees;
minimal_nanotez_per_byte;
minimal_nanotez_per_gas_unit;
force_low_fee;
fee_cap;
burn_cap;
}
in
reveal
cctxt
~dry_run
~verbose_signing
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~source
?fee
~src_pk
~src_sk
~fee_parameter
()
>>=? fun _res -> return_unit);
command
~group
~desc:"Register the public key hash as a delegate."
(args9
fee_arg
dry_run_switch
verbose_signing_switch
minimal_fees_arg
minimal_nanotez_per_byte_arg
minimal_nanotez_per_gas_unit_arg
force_low_fee_arg
fee_cap_arg
burn_cap_arg)
(prefixes ["register"; "key"]
@@ Public_key_hash.source_param ~name:"mgr" ~desc:"the delegate key"
@@ prefixes ["as"; "delegate"]
@@ stop)
(fun ( fee,
dry_run,
verbose_signing,
minimal_fees,
minimal_nanotez_per_byte,
minimal_nanotez_per_gas_unit,
force_low_fee,
fee_cap,
burn_cap )
src_pkh
cctxt ->
Client_keys.get_key cctxt src_pkh >>=? fun (_, src_pk, src_sk) ->
let fee_parameter =
{
Injection.minimal_fees;
minimal_nanotez_per_byte;
minimal_nanotez_per_gas_unit;
force_low_fee;
fee_cap;
burn_cap;
}
in
register_as_delegate
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~dry_run
~fee_parameter
~verbose_signing
?fee
~manager_sk:src_sk
src_pk
>>= function
| Ok _ -> return_unit
| Error [Environment.Ecoproto_error Delegate_storage.Active_delegate] ->
cctxt#message "Delegate already activated." >>= fun () ->
return_unit
| Error el -> Lwt.return_error el);
]
@ (match network with
| Some `Mainnet -> []
| Some `Testnet | None ->
[
command
~group
~desc:"Register and activate an Alphanet/Zeronet faucet account."
(args2 (Secret_key.force_switch ()) encrypted_switch)
(prefixes ["activate"; "account"]
@@ Secret_key.fresh_alias_param
@@ prefixes ["with"]
@@ param
~name:"activation_key"
~desc:
"Activate an Alphanet/Zeronet faucet account from the JSON \
(file or directly inlined)."
json_file_or_text_parameter
@@ stop)
(fun (force, encrypted) name activation_json cctxt ->
Secret_key.of_fresh cctxt force name >>=? fun name ->
match
Data_encoding.Json.destruct
Client_proto_context.activation_key_encoding
activation_json
with
| exception (Data_encoding.Json.Cannot_destruct _ as exn) ->
Format.kasprintf
(fun s -> failwith "%s" s)
"Invalid activation file: %a %a"
(fun ppf -> Data_encoding.Json.print_error ppf)
exn
Data_encoding.Json.pp
activation_json
| key ->
activate_account
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~encrypted
~force
key
name
>>=? fun _res -> return_unit);
])
@ (match network with
| Some `Testnet | None -> []
| Some `Mainnet ->
[
command
~group
~desc:"Activate a fundraiser account."
(args1 dry_run_switch)
(prefixes ["activate"; "fundraiser"; "account"]
@@ Public_key_hash.alias_param
@@ prefixes ["with"]
@@ param
~name:"code"
(Clic.parameter (fun _ctx code ->
protect (fun () ->
return
(Blinded_public_key_hash.activation_code_of_hex
code))))
~desc:"Activation code obtained from the Tezos foundation."
@@ stop)
(fun dry_run (name, _pkh) code cctxt ->
activate_existing_account
cctxt
~chain:cctxt#chain
~block:cctxt#block
?confirmations:cctxt#confirmations
~dry_run
name
code
>>=? fun _res -> return_unit);
])
@ [
command
~desc:"Wait until an operation is included in a block"
(args3
(default_arg
~long:"confirmations"
~placeholder:"num_blocks"
~doc:
"wait until 'N' additional blocks after the operation appears \
in the considered chain"
~default:"0"
non_negative_param)
(default_arg
~long:"check-previous"
~placeholder:"num_blocks"
~doc:"number of previous blocks to check"
~default:"10"
non_negative_param)
(arg
~long:"branch"
~placeholder:"block_hash"
~doc:
"hash of the oldest block where we should look for the \
operation"
block_hash_param))
(prefixes ["wait"; "for"]
@@ param
~name:"operation"
~desc:"Operation to be included"
(parameter (fun _ x ->
match Operation_hash.of_b58check_opt x with
| None ->
Error_monad.failwith "Invalid operation hash: '%s'" x
| Some hash -> return hash))
@@ prefixes ["to"; "be"; "included"]
@@ stop)
(fun (confirmations, predecessors, branch)
operation_hash
(ctxt : Protocol_client_context.full) ->
Client_confirmations.wait_for_operation_inclusion
ctxt
~chain:ctxt#chain
~confirmations
~predecessors
?branch
operation_hash
>>=? fun _ -> return_unit);
command
~desc:"Get receipt for past operation"
(args1
(default_arg
~long:"check-previous"
~placeholder:"num_blocks"
~doc:"number of previous blocks to check"
~default:"10"
non_negative_param))
(prefixes ["get"; "receipt"; "for"]
@@ param
~name:"operation"
~desc:"Operation to be looked up"
(parameter (fun _ x ->
match Operation_hash.of_b58check_opt x with
| None ->
Error_monad.failwith "Invalid operation hash: '%s'" x
| Some hash -> return hash))
@@ stop)
(fun predecessors operation_hash (ctxt : Protocol_client_context.full) ->
display_receipt_for_operation
ctxt
~chain:ctxt#chain
~predecessors
operation_hash
>>=? fun _ -> return_unit);
command
~group:binary_description
~desc:"Describe unsigned block header"
no_options
(fixed ["describe"; "unsigned"; "block"; "header"])
(fun () (cctxt : Protocol_client_context.full) ->
cctxt#message
"%a"
Data_encoding.Binary_schema.pp
(Data_encoding.Binary.describe
Alpha_context.Block_header.unsigned_encoding)
>>= fun () -> return_unit);
command
~group:binary_description
~desc:"Describe unsigned operation"
no_options
(fixed ["describe"; "unsigned"; "operation"])
(fun () (cctxt : Protocol_client_context.full) ->
cctxt#message
"%a"
Data_encoding.Binary_schema.pp
(Data_encoding.Binary.describe
Alpha_context.Operation.unsigned_encoding)
>>= fun () -> return_unit);
command
~group
~desc:"Submit protocol proposals"
(args3
dry_run_switch
verbose_signing_switch
(switch
~doc:
"Do not fail when the checks that try to prevent the user from \
shooting themselves in the foot do."
~long:"force"
()))
(prefixes ["submit"; "proposals"; "for"]
@@ ContractAlias.destination_param
~name:"delegate"
~desc:"the delegate who makes the proposal"
@@ seq_of_param
(param
~name:"proposal"
~desc:"the protocol hash proposal to be submitted"
(parameter (fun _ x ->
match Protocol_hash.of_b58check_opt x with
| None ->
Error_monad.failwith "Invalid proposal hash: '%s'" x
| Some hash -> return hash))))
(fun (dry_run, verbose_signing, force)
(_name, source)
proposals
(cctxt : Protocol_client_context.full) ->
match Contract.is_implicit source with
| None -> failwith "only implicit accounts can submit proposals"
| Some src_pkh -> (
Client_keys.get_key cctxt src_pkh
>>=? fun (src_name, _src_pk, src_sk) ->
get_period_info
~successor:true
~chain:cctxt#chain
~block:cctxt#block
cctxt
>>=? fun info ->
(match info.current_period_kind with
| Proposal -> return_unit
| _ -> cctxt#error "Not in a proposal period")
>>=? fun () ->
Shell_services.Protocol.list cctxt >>=? fun known_protos ->
get_proposals ~chain:cctxt#chain ~block:cctxt#block cctxt
>>=? fun known_proposals ->
Alpha_services.Voting.listings cctxt (cctxt#chain, cctxt#block)
>>=? fun listings ->
          (* for a proposal to be valid it must be either a protocol that was
             already proposed by somebody else or a protocol known by the node,
             because the user is the first proposer and just injected it with
             tezos-admin-client *)
let check_proposals proposals : bool tzresult Lwt.t =
let n = List.length proposals in
let errors = ref [] in
let error ppf =
Format.kasprintf (fun s -> errors := s :: !errors) ppf
in
if n = 0 then error "Empty proposal list." ;
if n > Constants.max_proposals_per_delegate then
error
"Too many proposals: %d > %d."
n
Constants.max_proposals_per_delegate ;
(match
Base.List.find_all_dups
~compare:Protocol_hash.compare
proposals
with
| [] -> ()
| dups ->
error
"There %s: %a."
(if Compare.List_length_with.(dups = 1) then
"is a duplicate proposal"
else "are duplicate proposals")
Format.(
pp_print_list
~pp_sep:(fun ppf () -> pp_print_string ppf ", ")
Protocol_hash.pp)
dups) ;
List.iter
(fun (p : Protocol_hash.t) ->
if
List.mem ~equal:Protocol_hash.equal p known_protos
|| Environment.Protocol_hash.Map.mem p known_proposals
then ()
else
error
"Protocol %a is not a known proposal."
Protocol_hash.pp
p)
proposals ;
if
not
(List.exists
(fun (pkh, _) ->
Signature.Public_key_hash.equal pkh src_pkh)
listings)
then
error
"Public-key-hash `%a` from account `%s` does not appear to \
have voting rights."
Signature.Public_key_hash.pp
src_pkh
src_name ;
if !errors <> [] then
cctxt#message
"There %s with the submission:%t"
(if Compare.List_length_with.(!errors = 1) then
"is an issue"
else "are issues")
Format.(
fun ppf ->
pp_print_cut ppf () ;
pp_open_vbox ppf 0 ;
List.iter
(fun msg ->
pp_open_hovbox ppf 2 ;
pp_print_string ppf "* " ;
pp_print_text ppf msg ;
pp_close_box ppf () ;
pp_print_cut ppf ())
!errors ;
pp_close_box ppf ())
>>= fun () -> return_false
else return_true
in
check_proposals proposals >>=? fun all_valid ->
(if all_valid then cctxt#message "All proposals are valid."
else if force then
cctxt#message
"Some proposals are not valid, but `--force` was used."
else cctxt#error "Submission failed because of invalid proposals.")
>>= fun () ->
submit_proposals
~dry_run
~verbose_signing
cctxt
~chain:cctxt#chain
~block:cctxt#block
~src_sk
src_pkh
proposals
>>= function
| Ok _res -> return_unit
| Error errs ->
(match errs with
| [
Unregistered_error
(`O [("kind", `String "generic"); ("error", `String msg)]);
] ->
cctxt#message
"Error:@[<hov>@.%a@]"
Format.pp_print_text
(String.split_on_char ' ' msg
|> List.filter (function
| "" | "\n" -> false
| _ -> true)
|> String.concat " "
|> String.map (function '\n' | '\t' -> ' ' | c -> c))
| trace -> cctxt#message "Error:@ %a" pp_print_trace trace)
>>= fun () -> failwith "Failed to submit proposals"));
command
~group
~desc:"Submit a ballot"
(args2 verbose_signing_switch dry_run_switch)
(prefixes ["submit"; "ballot"; "for"]
@@ ContractAlias.destination_param
~name:"delegate"
~desc:"the delegate who votes"
@@ param
~name:"proposal"
~desc:"the protocol hash proposal to vote for"
(parameter (fun _ x ->
match Protocol_hash.of_b58check_opt x with
| None -> failwith "Invalid proposal hash: '%s'" x
| Some hash -> return hash))
@@ param
~name:"ballot"
~desc:"the ballot value (yea/yay, nay, or pass)"
(parameter
~autocomplete:(fun _ -> return ["yea"; "nay"; "pass"])
(fun _ s ->
match String.lowercase_ascii s with
| "yay" | "yea" -> return Vote.Yay
| "nay" -> return Vote.Nay
| "pass" -> return Vote.Pass
| s -> failwith "Invalid ballot: '%s'" s))
@@ stop)
(fun (verbose_signing, dry_run)
(_name, source)
proposal
ballot
(cctxt : Protocol_client_context.full) ->
match Contract.is_implicit source with
| None -> failwith "only implicit accounts can submit ballot"
| Some src_pkh ->
Client_keys.get_key cctxt src_pkh
>>=? fun (_src_name, _src_pk, src_sk) ->
get_period_info
~successor:true
~chain:cctxt#chain
~block:cctxt#block
cctxt
>>=? fun info ->
(match info.current_period_kind with
| Exploration | Promotion -> return_unit
| _ -> cctxt#error "Not in Exploration or Promotion period")
>>=? fun () ->
submit_ballot
cctxt
~chain:cctxt#chain
~block:cctxt#block
~src_sk
src_pkh
~verbose_signing
~dry_run
proposal
ballot
>>=? fun _res -> return_unit);
command
~group
~desc:"Summarize the current voting period"
no_options
(fixed ["show"; "voting"; "period"])
(fun () (cctxt : Protocol_client_context.full) ->
get_period_info ~chain:cctxt#chain ~block:cctxt#block cctxt
>>=? fun info ->
cctxt#message
"Current period: %a\nBlocks remaining until end of period: %ld"
Data_encoding.Json.pp
(Data_encoding.Json.construct
Alpha_context.Voting_period.kind_encoding
info.current_period_kind)
info.remaining
>>= fun () ->
Shell_services.Protocol.list cctxt >>=? fun known_protos ->
get_proposals ~chain:cctxt#chain ~block:cctxt#block cctxt
>>=? fun props ->
let ranks =
Environment.Protocol_hash.Map.bindings props
|> List.sort (fun (_, v1) (_, v2) -> Int32.(compare v2 v1))
in
let print_proposal = function
| None ->
cctxt#message "The current proposal has already been cleared."
| Some proposal ->
cctxt#message "Current proposal: %a" Protocol_hash.pp proposal
in
match info.current_period_kind with
| Proposal ->
if info.remaining <> 0l then
cctxt#answer
"Current proposals:%t"
Format.(
fun ppf ->
pp_print_cut ppf () ;
pp_open_vbox ppf 0 ;
List.iter
(fun (p, w) ->
fprintf
ppf
"* %a %ld (%sknown by the node)@."
Protocol_hash.pp
p
w
(if
List.mem ~equal:Protocol_hash.equal p known_protos
then ""
else "not "))
ranks ;
pp_close_box ppf ())
>>= fun () -> return_unit
else
cctxt#message "The proposals have already been cleared."
>>= fun () -> return_unit
| Exploration | Promotion ->
print_proposal info.current_proposal >>= fun () ->
if info.remaining <> 0l then
get_ballots_info ~chain:cctxt#chain ~block:cctxt#block cctxt
>>=? fun ballots_info ->
cctxt#answer
"Ballots: %a@,\
Current participation %.2f%%, necessary quorum %.2f%%@,\
Current in favor %ld, needed supermajority %ld"
Data_encoding.Json.pp
(Data_encoding.Json.construct
Vote.ballots_encoding
ballots_info.ballots)
(Int32.to_float ballots_info.participation /. 100.)
(Int32.to_float ballots_info.current_quorum /. 100.)
ballots_info.ballots.yay
ballots_info.supermajority
>>= fun () -> return_unit
else
cctxt#message "The ballots have already been cleared."
>>= fun () -> return_unit
| Cooldown ->
print_proposal info.current_proposal >>= fun () -> return_unit
| Adoption ->
print_proposal info.current_proposal >>= fun () -> return_unit);
]
|
33725ce7294868d00c1ca2cf53ceca67ea01c1b7dd6819c4d9cee82263aa0491 | fulcro-legacy/fulcro3 | normalize.cljc | (ns com.fulcrologic.fulcro.algorithms.normalize
"Functions for dealing with normalizing Fulcro databases. In particular `tree->db`."
(:require
[com.fulcrologic.fulcro.algorithms.do-not-use :as util]
[edn-query-language.core :as eql]
[taoensso.timbre :as log]
[com.fulcrologic.fulcro.components :refer [has-ident? ident get-ident get-query]]))
(defn- normalize*
  "Normalize the tree `data` guided by `query`.

  Walks the query, replacing joined entities in `data` with idents and
  merging the entity data itself into the `refs` atom (a map of
  table -> id -> entity). `union-seen` carries the enclosing union query so
  recursive joins can resolve, and `transform` (when non-nil) is applied to
  each non-vector node before it is normalized. Returns the normalized
  version of `data`."
  [query data refs union-seen transform]
  (let [data (if (and transform (not (vector? data)))
               (transform query data)
               data)]
    (cond
      (= '[*] query) data

      ;; union case
      (map? query)
      (let [class (-> query meta :component)
            ident (get-ident class data)]
        (if-not (nil? ident)
          (vary-meta (normalize* (get query (first ident)) data refs union-seen transform)
            assoc ::tag (first ident)) ; FIXME: What is tag for?
          (throw (ex-info "Union components must have an ident" {}))))

      (vector? data) data ;; already normalized

      :else
      (loop [q (seq query) ret data]
        (if-not (nil? q)
          (let [expr (first q)]
            (if (util/join? expr)
              (let [[k sel] (util/join-entry expr)
                    recursive? (util/recursion? sel)
                    union-entry (if (util/union? expr) sel union-seen)
                    sel (if recursive?
                          (if-not (nil? union-seen)
                            union-seen
                            query)
                          sel)
                    class (-> sel meta :component)
                    v (get data k)]
                (cond
                  ;; graph loop: db->tree leaves ident in place
                  (and recursive? (eql/ident? v)) (recur (next q) ret)

                  ;; normalize one
                  (map? v)
                  (let [x (normalize* sel v refs union-entry transform)]
                    (if-not (or (nil? class) (not (has-ident? class)))
                      (let [i (get-ident class x)]
                        (swap! refs update-in [(first i) (second i)] merge x)
                        (recur (next q) (assoc ret k i)))
                      (recur (next q) (assoc ret k x))))

                  ;; normalize many
                  (and (vector? v) (not (eql/ident? v)) (not (eql/ident? (first v))))
                  (let [xs (into [] (map #(normalize* sel % refs union-entry transform)) v)]
                    (if-not (or (nil? class) (not (has-ident? class)))
                      (let [is (into [] (map #(get-ident class %)) xs)]
                        (if (vector? sel)
                          (when-not (empty? is)
                            (swap! refs
                              (fn [refs]
                                (reduce (fn [m [i x]]
                                          (update-in m i merge x))
                                  refs (zipmap is xs)))))
                          ;; union case
                          (swap! refs
                            (fn [refs']
                              (reduce
                                (fn [ret [i x]]
                                  (update-in ret i merge x))
                                refs' (map vector is xs)))))
                        (recur (next q) (assoc ret k is)))
                      (recur (next q) (assoc ret k xs))))

                  ;; missing key
                  (nil? v)
                  (recur (next q) ret)

                  ;; can't handle
                  :else (recur (next q) (assoc ret k v))))
              (let [k (if (seq? expr) (first expr) expr)
                    v (get data k)]
                (if (nil? v)
                  (recur (next q) ret)
                  (recur (next q) (assoc ret k v))))))
          ret)))))
(defn tree->db
  "Given a component class or instance and a tree of data, use the component's
   query to transform the tree into the default database format. All nodes that
   can be mapped via Ident implementations will be replaced with ident links.
   The original node data will be moved into tables indexed by ident. If
   merge-idents option is true, will return these tables in the result instead
   of as metadata."
  ([x data]
   (tree->db x data false))
  ([x data #?(:clj merge-idents :cljs ^boolean merge-idents)]
   (tree->db x data merge-idents nil))
  ([x data #?(:clj merge-idents :cljs ^boolean merge-idents) transform]
   ;; Accept either an explicit EQL query vector or something with a query.
   (let [query      (if (vector? x) x (get-query x data))
         tables     (atom {})
         normalized (normalize* query data tables nil transform)]
     (if merge-idents
       (merge normalized @tables)
       (with-meta normalized @tables)))))
| null | https://raw.githubusercontent.com/fulcro-legacy/fulcro3/5d1b9ab274406afd68b792192d5a783f29963314/src/main/com/fulcrologic/fulcro/algorithms/normalize.cljc | clojure | union case
FIXME: What is tag for?
already normalized
normalize many
union case
missing key
can't handle | (ns com.fulcrologic.fulcro.algorithms.normalize
"Functions for dealing with normalizing Fulcro databases. In particular `tree->db`."
(:require
[com.fulcrologic.fulcro.algorithms.do-not-use :as util]
[edn-query-language.core :as eql]
[taoensso.timbre :as log]
[com.fulcrologic.fulcro.components :refer [has-ident? ident get-ident get-query]]))
(defn- normalize* [query data refs union-seen transform]
(let [data (if (and transform (not (vector? data)))
(transform query data)
data)]
(cond
(= '[*] query) data
(map? query)
(let [class (-> query meta :component)
ident (get-ident class data)]
(if-not (nil? ident)
(vary-meta (normalize* (get query (first ident)) data refs union-seen transform)
(throw (ex-info "Union components must have an ident" {}))))
:else
(loop [q (seq query) ret data]
(if-not (nil? q)
(let [expr (first q)]
(if (util/join? expr)
(let [[k sel] (util/join-entry expr)
recursive? (util/recursion? sel)
union-entry (if (util/union? expr) sel union-seen)
sel (if recursive?
(if-not (nil? union-seen)
union-seen
query)
sel)
class (-> sel meta :component)
v (get data k)]
(cond
                     ;; graph loop: db->tree leaves ident in place
(and recursive? (eql/ident? v)) (recur (next q) ret)
                     ;; normalize one
(map? v)
(let [x (normalize* sel v refs union-entry transform)]
(if-not (or (nil? class) (not (has-ident? class)))
(let [i (get-ident class x)]
(swap! refs update-in [(first i) (second i)] merge x)
(recur (next q) (assoc ret k i)))
(recur (next q) (assoc ret k x))))
(and (vector? v) (not (eql/ident? v)) (not (eql/ident? (first v))))
(let [xs (into [] (map #(normalize* sel % refs union-entry transform)) v)]
(if-not (or (nil? class) (not (has-ident? class)))
(let [is (into [] (map #(get-ident class %)) xs)]
(if (vector? sel)
(when-not (empty? is)
(swap! refs
(fn [refs]
(reduce (fn [m [i x]]
(update-in m i merge x))
refs (zipmap is xs)))))
(swap! refs
(fn [refs']
(reduce
(fn [ret [i x]]
(update-in ret i merge x))
refs' (map vector is xs)))))
(recur (next q) (assoc ret k is)))
(recur (next q) (assoc ret k xs))))
(nil? v)
(recur (next q) ret)
:else (recur (next q) (assoc ret k v))))
(let [k (if (seq? expr) (first expr) expr)
v (get data k)]
(if (nil? v)
(recur (next q) ret)
(recur (next q) (assoc ret k v))))))
ret)))))
(defn tree->db
  "Normalize `data` (a denormalized tree) into Fulcro's default database
  format, driven by the query of the component class/instance (or literal
  query vector) `x`. Nodes that can be mapped via Ident implementations are
  replaced with ident links, and their data is collected into tables keyed
  by ident. When `merge-idents` is true those tables are merged into the
  returned map; otherwise they are attached as metadata. The optional
  `transform` is applied to each node before normalization."
  ([x data]
   (tree->db x data false))
  ([x data #?(:clj merge-idents :cljs ^boolean merge-idents)]
   (tree->db x data merge-idents nil))
  ([x data #?(:clj merge-idents :cljs ^boolean merge-idents) transform]
   (let [tables (atom {})
         query  (if (vector? x) x (get-query x data))
         root   (normalize* query data tables nil transform)]
     (if merge-idents
       (merge root @tables)
       (with-meta root @tables)))))
|
86d0b064fced5fa00f45f00f4ece3558a19628fd3f8bedde8cf2b3dda807a704 | camfort/camfort | InferenceBackend.hs |
{-
   Copyright 2016, Dominic Orchard, Andrew Rice, Mistral Contrastin, Matthew Danish

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
module Camfort.Specification.Stencils.InferenceBackend
( coalesce
, containedWithin
, inferFromIndicesWithoutLinearity
, inferMinimalVectorRegions
, spansToApproxSpatial
, Span
) where
import Data.List
import qualified Data.List.NonEmpty as NE
import Data.Maybe
import Algebra.Lattice (joins1)
import Camfort.Specification.Stencils.Model
import Camfort.Specification.Stencils.DenotationalSemantics
import qualified Camfort.Helpers.Vec as V
import Camfort.Specification.Stencils.Syntax
{- Spans are a pair of a lower and upper bound -}
type Span a = (a, a)
-- | Convert a set of relative-index spans (one interval per dimension) into
-- an approximate spatial region specification.  absoluteRep sentinel bounds
-- are mapped to infinite intervals before the interval-to-region translation.
spansToApproxSpatial :: [ Span (V.Vec ('V.S n) Int) ]
                     -> Either String (Approximation Spatial)
spansToApproxSpatial spans = sequence . fmap intervalsToRegions $ approxUnion
  where
    approxVecs =
      toApprox . map (fmap absRepToInf . transposeVecInterval) $ spans
    -- Union of the per-span regions, simplified via lattice join + optimise.
    approxUnion = fmap (optimise . joins1 . NE.fromList . map return) approxVecs

    -- Partition exact vs. elongated (approximated) interval vectors and
    -- build the corresponding Exact / Bound approximation.
    toApprox :: [ V.Vec n (Interval 'Arbitrary) ]
             -> Approximation [ V.Vec n (Interval 'Standard) ]
    toApprox vs
      | parts <- (elongatedPartitions . map approxVec) vs =
        case parts of
          (orgs, []) -> Exact . map fromExact $ orgs
          ([], elongs) -> Bound Nothing (Just $ map upperBound elongs)
          (orgs, elongs) -> Bound (Just . map upperBound $ orgs)
                                  (Just . map upperBound $ orgs ++ elongs)
    elongatedPartitions =
      partition $ \case { Exact{} -> True; Bound{} -> False }

    -- TODO: DELETE AS SOON AS POSSIBLE
    -- Translate the absoluteRep sentinel into a genuinely infinite interval.
    absRepToInf :: Interval 'Arbitrary -> Interval 'Arbitrary
    absRepToInf interv@(IntervArbitrary a b)
      | fromIntegral a == absoluteRep = IntervInfiniteArbitrary
      | fromIntegral b == absoluteRep = IntervInfiniteArbitrary
      | otherwise = interv
    absRepToInf interv = interv

    -- Pair of bound vectors -> vector of (lower, upper) intervals.
    transposeVecInterval :: Span (V.Vec n Int) -> V.Vec n (Interval 'Arbitrary)
    transposeVecInterval (us, vs) = V.zipWith IntervArbitrary us vs
-- | Lift a single relative-index vector to a degenerate (point) span.
-- Components equal to the 'absoluteRep' sentinel expand to the whole
-- (-absoluteRep, absoluteRep) range in that dimension.
mkTrivialSpan :: V.Vec n Int -> Span (V.Vec n Int)
mkTrivialSpan V.Nil = (V.Nil, V.Nil)
mkTrivialSpan (V.Cons x xs) =
  if x == absoluteRep
  then (V.Cons (-absoluteRep) ys, V.Cons absoluteRep zs)
  else (V.Cons x ys, V.Cons x zs)
  where
    (ys, zs) = mkTrivialSpan xs
{-| From a list of vectors of integers, representing relative offsets,
    generate a specification (but does not do any linearity checking)
    (defaults to Mult). Instead the front-end does
    the linearity check first as an optimisation.
    Also defaults to the specification being for a stencil -}
inferFromIndicesWithoutLinearity :: V.VecList Int -> Specification
inferFromIndicesWithoutLinearity (V.VL ixs) =
    -- Multiplicity defaults to Mult; True marks this as a stencil spec.
    Specification (Mult . inferCore $ ixs) True

-- | Core inference: minimal vector regions -> approximate spatial spec.
-- Calls 'error' on empty input or a failed translation, so callers must
-- supply at least one non-empty index vector.
inferCore :: [V.Vec n Int] -> Approximation Spatial
inferCore subs =
  case V.proveNonEmpty . head $ subs of
    Just (V.ExistsEqT V.ReflEq) ->
      case spansToApproxSpatial . inferMinimalVectorRegions $ subs of
        Right a -> a
        Left msg -> error msg
    Nothing -> error "Input vectors are empty!"
{-| |inferMinimalVectorRegions| a key part of the algorithm, from a list of
    n-dimensional relative indices it infers a list of (possibly overlapping)
    1-dimensional spans (vectors) within the n-dimensional space.
    Built from |minimalise| and |allRegionPermutations| -}
inferMinimalVectorRegions :: [V.Vec n Int] -> [Span (V.Vec n Int)]
inferMinimalVectorRegions = fixCoalesce . map mkTrivialSpan
  where fixCoalesce spans =
          -- One coalesce + minimalise pass; recurse until the span set is
          -- stable (a fixed point is reached).
          let spans' = minimaliseRegions . coalesceContiguous $ spans
          in if spans' == spans then spans' else fixCoalesce spans'
-- An alternative that is simpler and possibly quicker
-- Merge every contiguous pair of spans that can be fused.  Works head-first:
-- when the head coalesces with at least one later span, the merged spans
-- replace the head (the later spans themselves are kept; subsequent
-- minimalisation removes subsumed ones); otherwise the head is emitted
-- unchanged and the rest is processed recursively.
coalesceContiguous :: [Span (V.Vec n Int)] -> [Span (V.Vec n Int)]
coalesceContiguous spans =
  case spans of
    []     -> []
    [s]    -> [s]
    [s, t] -> maybe [s, t] (\merged -> [merged]) (coalesce s t)
    s : rest ->
      case sequenceMaybes (map (coalesce s) rest) of
        Nothing     -> s : coalesceContiguous rest
        Just merged -> coalesceContiguous (merged ++ rest)
-- | Collapse a list of 'Maybe's: 'Nothing' when every element is 'Nothing'
-- (including the empty list), otherwise 'Just' the list of present values.
-- Uses the 'isNothing' predicate instead of comparing against 'Nothing'
-- with '(==)'; the 'Eq' constraint is retained only so the exported
-- signature stays unchanged for existing callers.
sequenceMaybes :: Eq a => [Maybe a] -> Maybe [a]
sequenceMaybes xs
  | all isNothing xs = Nothing
  | otherwise        = Just (catMaybes xs)
{-| Coalesce two intervals of vectors into one, if they are contiguous -}
coalesce :: Span (V.Vec n Int) -> Span (V.Vec n Int) -> Maybe (Span (V.Vec n Int))
coalesce (V.Nil, V.Nil) (V.Nil, V.Nil) = Just (V.Nil, V.Nil)
-- If two well-defined intervals are equal, then they cannot be coalesced
coalesce x y | x == y = Nothing
-- Otherwise
coalesce (V.Cons l1 ls1, V.Cons u1 us1) (V.Cons l2 ls2, V.Cons u2 us2)
  -- Identical bounds in this dimension: recurse on the remaining dimensions.
  | l1 == l2 && u1 == u2
  = case coalesce (ls1, us1) (ls2, us2) of
      Just (l, u) -> Just (V.Cons l1 l, V.Cons u1 u)
      Nothing -> Nothing
  -- Adjacent in this dimension and equal everywhere else: fuse the ranges.
  | (u1 + 1 == l2) && (us1 == us2) && (ls1 == ls2)
  = Just (V.Cons l1 ls1, V.Cons u2 us2)
  | (u2 + 1 == l1) && (us1 == us2) && (ls1 == ls2)
  = Just (V.Cons l2 ls2, V.Cons u1 us1)
-- Fall through (also catches cases where the initial size pre-condition
-- has been violated in a use of `Helpers.Vec.fromLists`)
coalesce _ _
  = Nothing
{-| Collapses the regions into a small set by looking for potential overlaps
    and eliminating those that overlap -}
minimaliseRegions :: [Span (V.Vec n Int)] -> [Span (V.Vec n Int)]
minimaliseRegions [] = []
minimaliseRegions xss = nub . minimalise $ xss
        -- For each span x, keep instead every *other* span that contains x;
        -- when no other span contains x, x itself survives (see filter').
  where localMin x ys = filter' x (\y -> containedWithin x y && (x /= y)) xss ++ ys
        minimalise = foldr localMin []
        -- If nothing is caught by the filter, i.e. no overlaps then return
        -- the original regions r
        filter' r f xs = case filter f xs of
          [] -> [r]
          ys -> ys
{-| Binary predicate on whether the first region is contained within the second -}
containedWithin :: Span (V.Vec n Int) -> Span (V.Vec n Int) -> Bool
containedWithin (V.Nil, V.Nil) (V.Nil, V.Nil)
  = True
-- Contained iff, in every dimension, the first interval lies inside the
-- second (lower bound no smaller, upper bound no larger).
containedWithin (V.Cons l1 ls1, V.Cons u1 us1) (V.Cons l2 ls2, V.Cons u2 us2)
  = (l2 <= l1 && u1 <= u2) && containedWithin (ls1, us1) (ls2, us2)
-- Local variables:
-- mode: haskell
-- haskell-program-name: "cabal repl"
-- End:
| null | https://raw.githubusercontent.com/camfort/camfort/3421e85f6fbbcaa6503a266b3fae029a09d2ff24/src/Camfort/Specification/Stencils/InferenceBackend.hs | haskell | # LANGUAGE GADTs #
Spans are a pair of a lower and upper bound
TODO: DELETE AS SOON AS POSSIBLE
An alternative that is simpler and possibly quicker
Otherwise
has been violated in a use of `Helpers.Vec.fromLists`
| Collapses the regions into a small set by looking for potential overlaps
and eliminating those that overlap
If nothing is caught by the filter, i.e. no overlaps then return
the original regions r
Local variables:
mode: haskell
haskell-program-name: "cabal repl"
End: |
{-
   Copyright 2016, Dominic Orchard, Andrew Rice, Mistral Contrastin, Matthew Danish

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
module Camfort.Specification.Stencils.InferenceBackend
( coalesce
, containedWithin
, inferFromIndicesWithoutLinearity
, inferMinimalVectorRegions
, spansToApproxSpatial
, Span
) where
import Data.List
import qualified Data.List.NonEmpty as NE
import Data.Maybe
import Algebra.Lattice (joins1)
import Camfort.Specification.Stencils.Model
import Camfort.Specification.Stencils.DenotationalSemantics
import qualified Camfort.Helpers.Vec as V
import Camfort.Specification.Stencils.Syntax
type Span a = (a, a)
spansToApproxSpatial :: [ Span (V.Vec ('V.S n) Int) ]
-> Either String (Approximation Spatial)
spansToApproxSpatial spans = sequence . fmap intervalsToRegions $ approxUnion
where
approxVecs =
toApprox . map (fmap absRepToInf . transposeVecInterval) $ spans
approxUnion = fmap (optimise . joins1 . NE.fromList . map return) approxVecs
toApprox :: [ V.Vec n (Interval 'Arbitrary) ]
-> Approximation [ V.Vec n (Interval 'Standard) ]
toApprox vs
| parts <- (elongatedPartitions . map approxVec) vs =
case parts of
(orgs, []) -> Exact . map fromExact $ orgs
([], elongs) -> Bound Nothing (Just $ map upperBound elongs)
(orgs, elongs) -> Bound (Just . map upperBound $ orgs)
(Just . map upperBound $ orgs ++ elongs)
elongatedPartitions =
partition $ \case { Exact{} -> True; Bound{} -> False }
absRepToInf :: Interval 'Arbitrary -> Interval 'Arbitrary
absRepToInf interv@(IntervArbitrary a b)
| fromIntegral a == absoluteRep = IntervInfiniteArbitrary
| fromIntegral b == absoluteRep = IntervInfiniteArbitrary
| otherwise = interv
absRepToInf interv = interv
transposeVecInterval :: Span (V.Vec n Int) -> V.Vec n (Interval 'Arbitrary)
transposeVecInterval (us, vs) = V.zipWith IntervArbitrary us vs
mkTrivialSpan :: V.Vec n Int -> Span (V.Vec n Int)
mkTrivialSpan V.Nil = (V.Nil, V.Nil)
mkTrivialSpan (V.Cons x xs) =
if x == absoluteRep
then (V.Cons (-absoluteRep) ys, V.Cons absoluteRep zs)
else (V.Cons x ys, V.Cons x zs)
where
(ys, zs) = mkTrivialSpan xs
{-| From a list of vectors of integers, representing relative offsets,
    generate a specification (but does not do any linearity checking)
    (defaults to Mult). Instead the front-end does
    the linearity check first as an optimisation.
    Also defaults to the specification being for a stencil -}
inferFromIndicesWithoutLinearity :: V.VecList Int -> Specification
inferFromIndicesWithoutLinearity (V.VL ixs) =
Specification (Mult . inferCore $ ixs) True
inferCore :: [V.Vec n Int] -> Approximation Spatial
inferCore subs =
case V.proveNonEmpty . head $ subs of
Just (V.ExistsEqT V.ReflEq) ->
case spansToApproxSpatial . inferMinimalVectorRegions $ subs of
Right a -> a
Left msg -> error msg
Nothing -> error "Input vectors are empty!"
{-| |inferMinimalVectorRegions| a key part of the algorithm, from a list of
    n-dimensional relative indices it infers a list of (possibly overlapping)
    1-dimensional spans (vectors) within the n-dimensional space.
    Built from |minimalise| and |allRegionPermutations| -}
inferMinimalVectorRegions :: [V.Vec n Int] -> [Span (V.Vec n Int)]
inferMinimalVectorRegions = fixCoalesce . map mkTrivialSpan
where fixCoalesce spans =
let spans' = minimaliseRegions . coalesceContiguous $ spans
in if spans' == spans then spans' else fixCoalesce spans'
coalesceContiguous :: [Span (V.Vec n Int)] -> [Span (V.Vec n Int)]
coalesceContiguous [] = []
coalesceContiguous [x] = [x]
coalesceContiguous [x, y] =
case coalesce x y of
Nothing -> [x, y]
Just c -> [c]
coalesceContiguous (x:xs) =
case sequenceMaybes (map (coalesce x) xs) of
Nothing -> x : coalesceContiguous xs
Just cs -> coalesceContiguous (cs ++ xs)
sequenceMaybes :: Eq a => [Maybe a] -> Maybe [a]
sequenceMaybes xs | all (== Nothing) xs = Nothing
| otherwise = Just (catMaybes xs)
{-| Coalesce two intervals of vectors into one, if they are contiguous -}
coalesce :: Span (V.Vec n Int) -> Span (V.Vec n Int) -> Maybe (Span (V.Vec n Int))
coalesce (V.Nil, V.Nil) (V.Nil, V.Nil) = Just (V.Nil, V.Nil)
-- If two well-defined intervals are equal, then they cannot be coalesced
coalesce x y | x == y = Nothing
coalesce (V.Cons l1 ls1, V.Cons u1 us1) (V.Cons l2 ls2, V.Cons u2 us2)
| l1 == l2 && u1 == u2
= case coalesce (ls1, us1) (ls2, us2) of
Just (l, u) -> Just (V.Cons l1 l, V.Cons u1 u)
Nothing -> Nothing
| (u1 + 1 == l2) && (us1 == us2) && (ls1 == ls2)
= Just (V.Cons l1 ls1, V.Cons u2 us2)
| (u2 + 1 == l1) && (us1 == us2) && (ls1 == ls2)
= Just (V.Cons l2 ls2, V.Cons u1 us1)
-- Fall through (also catches cases where the initial size pre-condition
-- has been violated in a use of `Helpers.Vec.fromLists`)
coalesce _ _
= Nothing
minimaliseRegions :: [Span (V.Vec n Int)] -> [Span (V.Vec n Int)]
minimaliseRegions [] = []
minimaliseRegions xss = nub . minimalise $ xss
where localMin x ys = filter' x (\y -> containedWithin x y && (x /= y)) xss ++ ys
minimalise = foldr localMin []
filter' r f xs = case filter f xs of
[] -> [r]
ys -> ys
{-| Binary predicate on whether the first region is contained within the second -}
containedWithin :: Span (V.Vec n Int) -> Span (V.Vec n Int) -> Bool
containedWithin (V.Nil, V.Nil) (V.Nil, V.Nil)
= True
containedWithin (V.Cons l1 ls1, V.Cons u1 us1) (V.Cons l2 ls2, V.Cons u2 us2)
= (l2 <= l1 && u1 <= u2) && containedWithin (ls1, us1) (ls2, us2)
|
d2b3a35604dd6218ea3273b160831adb408bc77f94ef0dc83dca6b82fb2b0a9a | alesaccoia/festival_flinger | mrpa_allophones.scm | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
Centre for Speech Technology Research ; ;
University of Edinburgh , UK ; ;
;; Copyright (c) 1996,1997 ;;
All Rights Reserved . ; ;
;; ;;
;; Permission is hereby granted, free of charge, to use and distribute ;;
;; this software and its documentation without restriction, including ;;
;; without limitation the rights to use, copy, modify, merge, publish, ;;
;; distribute, sublicense, and/or sell copies of this work, and to ;;
;; permit persons to whom this work is furnished to do so, subject to ;;
;; the following conditions: ;;
;; 1. The code must retain the above copyright notice, this list of ;;
;; conditions and the following disclaimer. ;;
;; 2. Any modifications must be clearly marked as such. ;;
3 . Original authors ' names are not deleted . ; ;
;; 4. The authors' names are not used to endorse or promote products ;;
;; derived from this software without specific prior written ;;
;; permission. ;;
;; ;;
;; THE UNIVERSITY OF EDINBURGH AND THE CONTRIBUTORS TO THIS WORK ;;
;; DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ;;
;; ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT ;;
;; SHALL THE UNIVERSITY OF EDINBURGH NOR THE CONTRIBUTORS BE LIABLE ;;
;; FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES ;;
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , IN ; ;
;; AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ;;
;; ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF ;;
;; THIS SOFTWARE. ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
A definition of the extended mrpa phone set used for some diphone sets
;;
(defPhoneSet
mrpa_allophones
;;; Phone Features
(;; vowel or consonant
(vc + -)
vowel length : short long dipthong schwa
(vlng s l d a 0)
;; vowel height: high mid low
(vheight 1 2 3 -)
;; vowel frontness: front mid back
(vfront 1 2 3 -)
;; lip rounding
(vrnd + -)
;; consonant type: stop fricative affricative nasal liquid
(ctype s f a n l 0)
;; place of articulation: labial alveolar palatal labio-dental
;; dental velar
(cplace l a p b d v 0)
;; consonant voicing
(cvox + -)
)
;; Phone set members
(
(uh + s 2 3 - 0 0 +)
(e + s 2 1 - 0 0 +)
(a + s 3 1 - 0 0 +)
(o + s 3 3 - 0 0 +)
(i + s 1 1 - 0 0 +)
(u + s 1 3 + 0 0 +)
(ii + l 1 1 - 0 0 +)
(uu + l 2 3 + 0 0 +)
(oo + l 3 2 - 0 0 +)
(aa + l 3 1 - 0 0 +)
(@@ + l 2 2 - 0 0 +)
(ai + d 3 1 - 0 0 +)
(ei + d 2 1 - 0 0 +)
(oi + d 3 3 - 0 0 +)
(au + d 3 3 + 0 0 +)
(ou + d 3 3 + 0 0 +)
(e@ + d 2 1 - 0 0 +)
(i@ + d 1 1 - 0 0 +)
(u@ + d 3 1 - 0 0 +)
(@ + a - - - 0 0 +)
(p - 0 - - + s l -)
(t - 0 - - + s a -)
(k - 0 - - + s p -)
(b - 0 - - + s l +)
(d - 0 - - + s a +)
(g - 0 - - + s p +)
(s - 0 - - + f a -)
(z - 0 - - + f a +)
(sh - 0 - - + f p -)
(zh - 0 - - + f p +)
(f - 0 - - + f b -)
(v - 0 - - + f b +)
(th - 0 - - + f d -)
(dh - 0 - - + f d +)
(ch - 0 - - + a a -)
(jh - 0 - - + a a +)
(h - 0 - - + a v -)
(m - 0 - - + n l +)
(n - 0 - - + n d +)
(ng - 0 - - + n v +)
(l - 0 - - + l d +)
(ll - 0 - - + l d +)
(y - 0 - - + l a +)
(r - 0 - - + l p +)
(w - 0 - - + l l +)
(# - 0 - - - 0 0 -)
)
)
(PhoneSet.silences '(#))
(provide 'mrpa_allophones)
| null | https://raw.githubusercontent.com/alesaccoia/festival_flinger/87345aad3a3230751a8ff479f74ba1676217accd/lib/mrpa_allophones.scm | scheme |
;;
;
;
Copyright (c) 1996,1997 ;;
;
;;
Permission is hereby granted, free of charge, to use and distribute ;;
this software and its documentation without restriction, including ;;
without limitation the rights to use, copy, modify, merge, publish, ;;
distribute, sublicense, and/or sell copies of this work, and to ;;
permit persons to whom this work is furnished to do so, subject to ;;
the following conditions: ;;
1. The code must retain the above copyright notice, this list of ;;
conditions and the following disclaimer. ;;
2. Any modifications must be clearly marked as such. ;;
;
4. The authors' names are not used to endorse or promote products ;;
derived from this software without specific prior written ;;
permission. ;;
;;
THE UNIVERSITY OF EDINBURGH AND THE CONTRIBUTORS TO THIS WORK ;;
DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ;;
ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT ;;
SHALL THE UNIVERSITY OF EDINBURGH NOR THE CONTRIBUTORS BE LIABLE ;;
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES ;;
;
AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ;;
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF ;;
THIS SOFTWARE. ;;
;;
Phone Features
vowel or consonant
vowel height: high mid low
vowel frontness: front mid back
lip rounding
consonant type: stop fricative affricative nasal liquid
place of articulation: labial alveolar palatal labio-dental
dental velar
consonant voicing
Phone set members | A definition of the extended mrpa phone set used for some diphone sets
(defPhoneSet
mrpa_allophones
(vc + -)
vowel length : short long dipthong schwa
(vlng s l d a 0)
(vheight 1 2 3 -)
(vfront 1 2 3 -)
(vrnd + -)
(ctype s f a n l 0)
(cplace l a p b d v 0)
(cvox + -)
)
(
(uh + s 2 3 - 0 0 +)
(e + s 2 1 - 0 0 +)
(a + s 3 1 - 0 0 +)
(o + s 3 3 - 0 0 +)
(i + s 1 1 - 0 0 +)
(u + s 1 3 + 0 0 +)
(ii + l 1 1 - 0 0 +)
(uu + l 2 3 + 0 0 +)
(oo + l 3 2 - 0 0 +)
(aa + l 3 1 - 0 0 +)
(@@ + l 2 2 - 0 0 +)
(ai + d 3 1 - 0 0 +)
(ei + d 2 1 - 0 0 +)
(oi + d 3 3 - 0 0 +)
(au + d 3 3 + 0 0 +)
(ou + d 3 3 + 0 0 +)
(e@ + d 2 1 - 0 0 +)
(i@ + d 1 1 - 0 0 +)
(u@ + d 3 1 - 0 0 +)
(@ + a - - - 0 0 +)
(p - 0 - - + s l -)
(t - 0 - - + s a -)
(k - 0 - - + s p -)
(b - 0 - - + s l +)
(d - 0 - - + s a +)
(g - 0 - - + s p +)
(s - 0 - - + f a -)
(z - 0 - - + f a +)
(sh - 0 - - + f p -)
(zh - 0 - - + f p +)
(f - 0 - - + f b -)
(v - 0 - - + f b +)
(th - 0 - - + f d -)
(dh - 0 - - + f d +)
(ch - 0 - - + a a -)
(jh - 0 - - + a a +)
(h - 0 - - + a v -)
(m - 0 - - + n l +)
(n - 0 - - + n d +)
(ng - 0 - - + n v +)
(l - 0 - - + l d +)
(ll - 0 - - + l d +)
(y - 0 - - + l a +)
(r - 0 - - + l p +)
(w - 0 - - + l l +)
(# - 0 - - - 0 0 -)
)
)
(PhoneSet.silences '(#))
(provide 'mrpa_allophones)
|
be1ec3ee42779964dc66fbbe1f7c3bc62d1d6c5bb10ca13db054b666e26ed37a | GaloisInc/surveyor | Panic.hs | # LANGUAGE TemplateHaskell #
module Surveyor.Brick.Panic (
Component(..),
panic
) where
import qualified Panic as P
-- | Identifies this package ("surveyor-brick") in panic reports.
data Component = Brick
  deriving (Show)

-- Metadata the panic library embeds in a crash message.
instance P.PanicComponent Component where
  panicComponentName _ = "surveyor-brick"
  -- Restored issue-tracker URL (the string had been stripped to "" during
  -- extraction); NOTE(review): confirm this is the canonical tracker URL.
  panicComponentIssues _ = "https://github.com/GaloisInc/surveyor/issues"
  panicComponentRevision = $(P.useGitRevision)

-- | Abort with an internal-error report attributed to surveyor-brick.
-- @panic location details@ never returns.
panic :: (P.HasCallStack) => String -> [String] -> b
panic = P.panic Brick
| null | https://raw.githubusercontent.com/GaloisInc/surveyor/96b6748d811bc2ab9ef330307a324bd00e04819f/surveyor-brick/src/Surveyor/Brick/Panic.hs | haskell | # LANGUAGE TemplateHaskell #
module Surveyor.Brick.Panic (
Component(..),
panic
) where
import qualified Panic as P
data Component = Brick
deriving (Show)
instance P.PanicComponent Component where
panicComponentName _ = "surveyor-brick"
panicComponentIssues _ = ""
panicComponentRevision = $(P.useGitRevision)
panic :: (P.HasCallStack) => String -> [String] -> b
panic = P.panic Brick
| |
2557b353a6290b6b988ea9fda2b772be66bd526255cc91cc026313dcf9b5b70b | simonmar/parconc-examples | findpar5.hs | # LANGUAGE GeneralizedNewtypeDeriving #
import System.Directory
import Control.Applicative
import Control.Concurrent
import Control.Monad
import System.FilePath
import System.Environment
import Data.List hiding (find)
import GHC.Conc (getNumCapabilities)
import Text.Printf
import qualified Control.Monad.Par.Class as P hiding (runParIO)
import Control.Monad.Par.IO
import Control.Monad.IO.Class
import Control.Exception
-- | Entry point: @findpar5 NAME DIR@ searches DIR recursively for NAME and
-- prints the result.  The pattern match on getArgs fails (throwing) unless
-- exactly two arguments are supplied.
main = do
  [s,d] <- getArgs
  -- NOTE(review): 'n' is bound but never used; presumably left over from a
  -- variant that reported the capability count — confirm before removing.
  n <- getNumCapabilities
  runParIO (unE (find s d)) >>= print
-- <<find
find :: String -> FilePath -> EParIO (Maybe FilePath)
find s d = do
fs <- liftIO $ getDirectoryContents d
let fs' = sort $ filter (`notElem` [".",".."]) fs
if any (== s) fs'
then return (Just (d </> s))
else do
< 1 >
< 2 >
where
dowait as = loop (reverse as) -- <3>
loop [] = return Nothing
< 4 >
< 5 >
case r of
< 6 >
< 7 >
-- >>
-- <<subfind
-- | Search for file name @s@ under path @p@ in the background.  If @p@ is a
-- directory, fork a Par computation running @find s p@ whose outcome (value
-- or exception) is delivered through a fresh 'EVar', and pass that EVar,
-- consed onto @asyncs@, to the continuation @inner@.  If @p@ is not a
-- directory, just continue with the EVars accumulated so far.
subfind :: String -> FilePath
        -> ([EVar (Maybe FilePath)] -> EParIO (Maybe FilePath))
        -> [EVar (Maybe FilePath)] -> EParIO (Maybe FilePath)
subfind s p inner asyncs = do
  isdir <- liftIO $ doesDirectoryExist p
  if not isdir
     then inner asyncs
     else do r <- new
             liftPar $ P.fork (putResult (find s p) r)
             inner (r : asyncs)
-- >>
-- An exception-handling version of the ParIO monad.  Exceptions from
-- IO computations are caught in liftIO, and propagated in the EParIO
-- monad.  An EVar is like an IVar, but can also contain an exception,
-- which is propagated by 'get'.  Instead of 'put' we have
-- 'putResult', which runs an EParIO and puts the result (or an
-- exception) into an EVar.
--
-- ParIO computation carrying either a caught exception or a value.
newtype EParIO a = E { unE :: ParIO (Either SomeException a) }

-- Functor and Applicative are defined via the Monad instance below.
instance Functor EParIO where
  fmap f e = e >>= return . f

instance Applicative EParIO where
  pure = return
  (<*>) = ap

instance Monad EParIO where
  return a = E (return (Right a))
  -- Short-circuit: once a Left (exception) appears it propagates untouched.
  E m >>= k = E $ do
    r <- m
    case r of
      Left e -> return (Left e)
      Right a -> unE (k a)

instance MonadIO EParIO where
  -- Catch synchronous IO exceptions and reflect them into the monad.
  liftIO io = E $ liftIO (try io)

-- | Embed a ParIO action that cannot fail.
liftPar :: ParIO a -> EParIO a
liftPar p = E $ p >>= return . Right

-- | An IVar that can also carry an exception.
type EVar a = IVar (Either SomeException a)

new :: EParIO (EVar a)
new = liftPar P.new

-- | Read an EVar; a stored exception resurfaces as a Left inside EParIO
-- and is then propagated by (>>=).
get :: EVar a -> EParIO a
get evar = E $ P.get evar

-- | Run an EParIO computation and store its outcome (value or exception)
-- in the given EVar.
putResult :: EParIO a -> EVar a -> ParIO ()
putResult (E e) var = do e >>= P.put_ var
| null | https://raw.githubusercontent.com/simonmar/parconc-examples/840a3f508f9bb6e03961e1b90311a1edd945adba/findpar5.hs | haskell | <<find
<3>
>>
<<subfind
>>
which is propagated by 'get'. Instead of 'put' we have
| # LANGUAGE GeneralizedNewtypeDeriving #
import System.Directory
import Control.Applicative
import Control.Concurrent
import Control.Monad
import System.FilePath
import System.Environment
import Data.List hiding (find)
import GHC.Conc (getNumCapabilities)
import Text.Printf
import qualified Control.Monad.Par.Class as P hiding (runParIO)
import Control.Monad.Par.IO
import Control.Monad.IO.Class
import Control.Exception
main = do
[s,d] <- getArgs
n <- getNumCapabilities
runParIO (unE (find s d)) >>= print
find :: String -> FilePath -> EParIO (Maybe FilePath)
find s d = do
fs <- liftIO $ getDirectoryContents d
let fs' = sort $ filter (`notElem` [".",".."]) fs
if any (== s) fs'
then return (Just (d </> s))
else do
< 1 >
< 2 >
where
loop [] = return Nothing
< 4 >
< 5 >
case r of
< 6 >
< 7 >
subfind :: String -> FilePath
-> ([EVar (Maybe FilePath)] -> EParIO (Maybe FilePath))
-> [EVar (Maybe FilePath)] -> EParIO (Maybe FilePath)
subfind s p inner asyncs = do
isdir <- liftIO $ doesDirectoryExist p
if not isdir
then inner asyncs
else do r <- new
liftPar $ P.fork (putResult (find s p) r)
inner (r : asyncs)
An exception - handling version of the ParIO monad . Exceptions from
IO computations are caught in liftIO , and propagated in the EParIO
monad . An EVar is like an IVar , but can also contain an exception ,
' ' , which runs an EParIO and puts the result ( or an
exception ) into an EVar .
newtype EParIO a = E { unE :: ParIO (Either SomeException a) }
instance Functor EParIO where
fmap f e = e >>= return . f
instance Applicative EParIO where
pure = return
(<*>) = ap
instance Monad EParIO where
return a = E (return (Right a))
E m >>= k = E $ do
r <- m
case r of
Left e -> return (Left e)
Right a -> unE (k a)
instance MonadIO EParIO where
liftIO io = E $ liftIO (try io)
liftPar :: ParIO a -> EParIO a
liftPar p = E $ p >>= return . Right
type EVar a = IVar (Either SomeException a)
new :: EParIO (EVar a)
new = liftPar P.new
get :: EVar a -> EParIO a
get evar = E $ P.get evar
putResult :: EParIO a -> EVar a -> ParIO ()
putResult (E e) var = do e >>= P.put_ var
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.