_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
e5b54b4cffe4f5b419946349c84f78257a09b3a581bca2cf1db219174a7a0ad4 | metaocaml/ber-metaocaml | w59.ml | (* TEST
flags = "-w A"
compile_only = "true"
* setup-ocamlc.byte-build-env
** ocamlc.byte
*** check-ocamlc.byte-output
* no-flambda
** setup-ocamlopt.byte-build-env
*** ocamlopt.byte
**** check-ocamlopt.byte-output
* flambda
compiler_reference = "${test_source_directory}/w59.flambda.reference"
** setup-ocamlopt.byte-build-env
*** ocamlopt.byte
**** check-ocamlopt.byte-output
*)
Check that the warning 59 ( assignment to immutable value ) does not
trigger on those examples
trigger on those examples *)
let a = Lazy.force (lazy "a")
let b = Lazy.force (lazy 1)
let c = Lazy.force (lazy 3.14)
let d = Lazy.force (lazy 'a')
let e = Lazy.force (lazy (fun x -> x+1))
let rec f (x:int) : int = g x and g x = f x
let h = Lazy.force (lazy f)
let i = Lazy.force (lazy g)
let j = Lazy.force (lazy 1L)
let k = Lazy.force (lazy (1,2))
let l = Lazy.force (lazy [|3.14|])
let m = Lazy.force (lazy (Sys.opaque_identity 3.14))
let n = Lazy.force (lazy None)
(* Check that obviously wrong code is reported *)
let o = (1,2)
let p = fun x -> x
let q = 3.14
let r = 1
let () =
Obj.set_field (Obj.repr o) 0 (Obj.repr 3);
Obj.set_field (Obj.repr p) 0 (Obj.repr 3);
Obj.set_field (Obj.repr q) 0 (Obj.repr 3);
Obj.set_field (Obj.repr r) 0 (Obj.repr 3)
let set v =
Obj.set_field (Obj.repr v) 0 (Obj.repr 3)
[@@inline]
let () =
set o
(* Sys.opaque_identity hides all information and shouldn't warn *)
let opaque = Sys.opaque_identity (1,2)
let set_opaque =
Obj.set_field
(Obj.repr opaque)
0
(Obj.repr 3)
| null | https://raw.githubusercontent.com/metaocaml/ber-metaocaml/4992d1f87fc08ccb958817926cf9d1d739caf3a2/testsuite/tests/warnings/w59.ml | ocaml | TEST
flags = "-w A"
compile_only = "true"
* setup-ocamlc.byte-build-env
** ocamlc.byte
*** check-ocamlc.byte-output
* no-flambda
** setup-ocamlopt.byte-build-env
*** ocamlopt.byte
**** check-ocamlopt.byte-output
* flambda
compiler_reference = "${test_source_directory}/w59.flambda.reference"
** setup-ocamlopt.byte-build-env
*** ocamlopt.byte
**** check-ocamlopt.byte-output
Check that obviously wrong code is reported
Sys.opaque_identity hides all information and shouldn't warn |
Check that the warning 59 ( assignment to immutable value ) does not
trigger on those examples
trigger on those examples *)
let a = Lazy.force (lazy "a")
let b = Lazy.force (lazy 1)
let c = Lazy.force (lazy 3.14)
let d = Lazy.force (lazy 'a')
let e = Lazy.force (lazy (fun x -> x+1))
let rec f (x:int) : int = g x and g x = f x
let h = Lazy.force (lazy f)
let i = Lazy.force (lazy g)
let j = Lazy.force (lazy 1L)
let k = Lazy.force (lazy (1,2))
let l = Lazy.force (lazy [|3.14|])
let m = Lazy.force (lazy (Sys.opaque_identity 3.14))
let n = Lazy.force (lazy None)
let o = (1,2)
let p = fun x -> x
let q = 3.14
let r = 1
let () =
Obj.set_field (Obj.repr o) 0 (Obj.repr 3);
Obj.set_field (Obj.repr p) 0 (Obj.repr 3);
Obj.set_field (Obj.repr q) 0 (Obj.repr 3);
Obj.set_field (Obj.repr r) 0 (Obj.repr 3)
let set v =
Obj.set_field (Obj.repr v) 0 (Obj.repr 3)
[@@inline]
let () =
set o
let opaque = Sys.opaque_identity (1,2)
let set_opaque =
Obj.set_field
(Obj.repr opaque)
0
(Obj.repr 3)
|
b5b49e782e6589e9362540cb5b79b83a1e037a9cffbc9965561e3ae13146a367 | nokia/web-assembly-self-certifying-compilation-framework | state_types.ml | *
Copyright 2020 Nokia
Licensed under the BSD 3 - Clause License .
SPDX - License - Identifier : BSD-3 - Clause
Copyright 2020 Nokia
Licensed under the BSD 3-Clause License.
SPDX-License-Identifier: BSD-3-Clause
*)
open Wasm
open Wasm_utils
open Instr_graph
open Func_types
open Script_types
open Source
open Debug
module G = Digraph
module String = String_utils
This is the types that are used to represent states for WASM programs .
* All ids here are typed as strings , but must also be SMTLIB - valid ids .
* We make this a separate file from proof_types.ml because this
* is about state - specific manipulation and data structures over the
* func_ir that do not need formulas ; which the proof_types.ml defines
* All ids here are typed as strings, but must also be SMTLIB-valid ids.
* We make this a separate file from proof_types.ml because this
* is about state-specific manipulation and data structures over the
* func_ir that do not need formulas; which the proof_types.ml defines *)
A Walue :)
type wasm_value = Ast.literal
type smtid = string
(* We need to handle symbolic pointers, and so we go an indirect route;
* The types need to be general enough to encode interleaving of:
* value stack; local variables; global variables; and linear memory *)
type pointer =
| IntPtr of int
| Int32Ptr of int32
| IdPtr of smtid (* pointer identified by an id *)
| VarPtr of Ast.var
| OffsetPtr of pointer * int32
| IntOffsetPtr of pointer * int
| CastedPtr of value
| NullPtr
The different forms in which a value is accessed in WASM
and value =
| IntVal of int
| Int32Val of int32
| WasmVal of wasm_value
| TestOpVal of Ast.testop * value
| RelOpVal of value * Ast.relop * value
| UnOpVal of Ast.unop * value
| BinOpVal of value * Ast.binop * value
| CvtOpVal of Ast.cvtop * value
| SelectVal of array_ * pointer
UFVal < name > < values > represents the result of uninterpreted function < name > on a list of values .
| ArrayVal of array_
(* Arrays -- as in the theory of arrays
* This is how we will model the
* value stack; local variables; global variables; memory *)
and array_ =
| IdArr of string (* Array identified by an id *)
| StoreArr of array_ * pointer * value
(* Some value definitions *)
let zero_int32_val : value = Int32Val 0l
The state of a program during execution as determined by the
* control flow graph ( the instrs_graph ) .
* A state occurs between edges , and we identify a state with the
* execution immediately AFTER the branch instruction that sits on the edge .
*
* | instr m |
* v0 | ....... |
* ----------- < - vertex v0
* |
* v0v1_branch
* | < - state { ( v0 , v1 ) ; v0v1_branch ; step = Active 0 }
* -----------
* v1 | instr 1 |
* | | < - state { ( v0 , v1 ) ; v0v1_branch ; step = 1 }
* | instr 2 |
* | | < - state { ( v0 , v1 ) ; v0v1_branch ; step = 2 }
* | ....... |
* | instr n |
* ----------- < - state { ( v0 , v1 ) ; v0v1_branch ; step = n }
* |
* v1v2_branch
* | < - state { ( v1 , v2 ) ; v1v2_branch ; step = n + 1 }
* -----------
* v2 | instr 1 |
* | ....... |
* control flow graph (the instrs_graph).
* A state occurs between edges, and we identify a state with the
* execution immediately AFTER the branch instruction that sits on the edge.
*
* | instr m |
* v0 | ....... |
* ----------- <- vertex v0
* |
* v0v1_branch
* | <- state { (v0, v1); v0v1_branch; step = Active 0 }
* -----------
* v1 | instr 1 |
* | | <- state { (v0, v1); v0v1_branch; step = 1 }
* | instr 2 |
* | | <- state { (v0, v1); v0v1_branch; step = 2 }
* | ....... |
* | instr n |
* ----------- <- state { (v0, v1); v0v1_branch; step = n }
* |
* v1v2_branch
* | <- state { (v1, v2); v1v2_branch; step = n + 1 }
* -----------
* v2 | instr 1 |
* | ....... |
*)
Status defines whether a state is active or a final state ;
* A state is uniquely specified by the CFG edge and this status
* ( plus maybe slightly more additional information )
* because what specifying from the CFG can tell us is limited
* to little more than pure graph data
* A state is uniquely specified by the CFG edge and this status
* (plus maybe slightly more additional information)
* because what specifying from the CFG can tell us is limited
* to little more than pure graph data *)
type step_status =
| Active of int
| Stopped
type state =
{ this_edge : G.basic_edge;
step_status : step_status;
tag : string;
}
let empty_state : state =
{ this_edge = (G.null_vertex, G.null_vertex);
step_status = Active 0;
tag = "TAG";
}
let source_tag : string = "SRC"
let target_tag : string = "TGT"
Initialize the state given vertices and stuff
let init_state0 : G.basic_edge -> string -> state =
fun this_e tag ->
{ empty_state with
this_edge = this_e;
step_status = Active 0;
tag = tag;
}
let init_statef : G.basic_edge -> string -> state =
fun this_e tag ->
{ empty_state with
this_edge = this_e;
step_status = Stopped;
tag = tag
}
(* Tag a state with a particular string - ie update the tag *)
let tag_state : state -> string -> state =
fun state tag ->
{ state with tag = tag }
(* Action labels *)
type action =
| BasicAct of Ast.instr
| JumpAct of branch * G.vertex (* vertex we are branching to *)
| PhiAct of G.vertex * G.vertex (* (old_v, new_v) *)
| CallAct of Ast.var
| CallIndirectAct of Ast.var
| StopAct
(* Calculates the next state based on the action *)
let next_state : state -> action -> state =
fun state0 act ->
match (state0.step_status, act) with
| (Active n, BasicAct _) ->
{ state0 with
step_status = Active (n + 1)
}
| (Active n, JumpAct (_, next_v)) ->
let (base_v, this_v) = state0.this_edge in
{ state0 with
this_edge = (this_v, next_v);
step_status = Active (n + 1)
}
| (Active n, PhiAct _) ->
{ state0 with
step_status = Active (n + 1)
}
| (Active n, CallAct x) ->
{ state0 with
step_status = Active (n + 1)
}
| (Active n, CallIndirectAct _) ->
{ state0 with
step_status = Active (n + 1)
}
| (Active _, StopAct) ->
{ state0 with step_status = Stopped }
| (Stopped, _) ->
(prerr_debug ("next_state: cannot advance final state"); empty_state)
Some stuff for generating strings ( smtids ) that are acceptable
* as identifiers in SMTLIB format
* as identifiers in SMTLIB format *)
(* From the constituents of a state we can extract a lot of string id's *)
let vertex_smtid : G.vertex -> smtid =
fun v -> "v" ^ string_of_int v
let branch_smtid : branch -> smtid = function
| Jump-> "br"
| JumpIf true -> "if_true"
| JumpIf false -> "if_false"
| JumpBrIf true -> "brif_true"
| JumpBrIf false -> "brif_false"
| JumpIndex ind -> "branchindex_" ^ Int32.to_string ind
| JumpDefault size -> "branchdefault_" ^ Int32.to_string size
A pair of edges on a graph along with the status ( Active / Stopped )
* uniquely determines a state ( to the SMT solver )
* The rational behind this design is to make it easier for annotating
* the CFG , where little more than raw graph information can be given .
* uniquely determines a state (to the SMT solver)
* The rational behind this design is to make it easier for annotating
* the CFG, where little more than raw graph information can be given. *)
let step_status_smtid : step_status -> smtid = function
| Active n -> "active_" ^ string_of_int n
| Stopped -> "final"
let state_smtid : state -> smtid =
fun state ->
state.tag
^ "_" ^ (vertex_smtid (fst state.this_edge))
^ "" ^ (vertex_smtid (snd state.this_edge))
^ "_s" ^ step_status_smtid (state.step_status)
let values_smtid : state -> smtid =
fun state ->
"values_" ^ state_smtid state
let stack_pointer_smtid : state -> smtid =
fun state ->
"pointer_" ^ state_smtid state
let locals_smtid : state -> smtid =
fun state ->
"locals_" ^ state_smtid state
let globals_smtid : state -> smtid =
fun state ->
"globals_" ^ state_smtid state
let memory_smtid : state -> smtid =
fun state ->
"memory_" ^ state_smtid state
(* Easy way to actually access the array_ types *)
let values : state -> array_ =
fun state ->
IdArr (values_smtid state)
let stack_pointer : state -> pointer =
fun state ->
IdPtr (stack_pointer_smtid state)
let locals : state -> array_ =
fun state ->
IdArr (locals_smtid state)
let globals : state -> array_ =
fun state ->
IdArr (globals_smtid state)
let full_memory : state -> array_ =
fun state ->
IdArr (memory_smtid state)
let uf_memory : state -> value =
fun state ->
UFVal (memory_smtid state, [])
(* Pointer arithmetics *)
let rec offset_pointer : pointer -> int32 -> pointer =
fun ptr n ->
match ptr with
| OffsetPtr (p, x) -> OffsetPtr (p, I32.add x n)
| _ -> OffsetPtr (ptr, n)
let succ_pointer : pointer -> pointer =
fun ptr ->
offset_pointer ptr 1l
let prev_pointer : pointer -> pointer =
fun ptr ->
offset_pointer ptr (Int32.neg 1l)
The first edge of a path
let path_start_edge : G.basic_path -> G.basic_edge =
fun path ->
match path with
| (v0 :: v1 :: _) -> (v0, v1)
| _ -> (prerr_debug ("path_start_edge: bad"); G.null_basic_edge)
(* The final edge of a path *)
let path_final_edge : G.basic_path -> G.basic_edge =
fun path ->
match List.rev path with
| (vn :: vm :: _) -> (vm, vn) (* note the flip *)
| _ -> (prerr_debug ("path_final_edge: bad"); G.null_basic_edge)
(* The initial state *)
let path_start_state : G.basic_path -> string -> state =
fun path tag ->
init_state0 (path_start_edge path) tag
let path_final_state : G.basic_path -> string -> state =
fun path tag ->
init_statef (path_final_edge path) tag
(* Actions of a vertex *)
let vertex_actions : G.vertex -> ufunc_ir -> action list =
fun v func_ir ->
match G.vertex_label v func_ir.body_graph with
(* The case where label does not exist *)
| None -> (prerr_debug "vertex_actions: None"; [])
(* The case where we have a single call instruction *)
| Some ({ instrs = [{ it = Ast.Call x }] }, _) ->
(match G.vertex_succs v func_ir.body_graph with
| (sv, (Jump, ())) :: [] -> [CallAct x]
| _ ->
(prerr_debug
("vertex_actions: Call at "
^ G.string_of_vertex v
^ " not followed by single successor");
[]))
(* The case where every instruction is basic *)
| Some (block, _) ->
List.map
(fun i -> match i.it with
| Ast.Call x -> CallAct x
| Ast.CallIndirect x -> CallIndirectAct x
| _ when (is_basic_instr i) -> BasicAct i
| _ ->
(prerr_debug ("vertex_action: non-basic " ^ string_of_instr_inline i);
BasicAct i))
block.instrs
Actions of an edge , does * not * include the phi vertex
let edge_actions : G.basic_edge -> ufunc_ir -> action list =
fun (src_v, dst_v) func_ir ->
match G.edge_label (src_v, dst_v) func_ir.body_graph with
| None ->
(prerr_debug
("edge_actions: None @ " ^ G.string_of_basic_edge (src_v, dst_v));
[])
| Some (br, _) -> [JumpAct (br, dst_v)]
Unlike path_actions , gets the action of every thing in this path .
* When we say paths here , we mean paths in the sense of what is used
* for a proof : for these the first and last edges are special markers
* When we say paths here, we mean paths in the sense of what is used
* for a proof: for these the first and last edges are special markers *)
let rec path_actions_raw : G.basic_path -> ufunc_ir -> action list =
fun path func_ir ->
match path with
| [] -> []
| v :: [] -> vertex_actions v func_ir
| v0 :: (v1 :: path_tl) ->
(vertex_actions v0 func_ir)
@ (edge_actions (v0, v1) func_ir)
@ [PhiAct (v0, v1)]
@ path_actions_raw (v1 :: path_tl) func_ir
The type of path actions that we want to suit how we specify proofs .
*
* [ v0 ] -- > [ * v1 ] -- > ... - > [ vm ] -- > [ * vn ]
* ^ ^
* Starred ( * ) points are where actions begin and end :
* ( v0 , v1 , ind_0 ) is the first state and action begins here
* ( vm , vn , ind_0 ) is the penultimate state ; we act over the ( vm , vn ) edge
*
* The last action along a path is the ,
* which serves to mark the penultimate state 's final flag to true .
* The reason for this indirection is to make it easier to declare
* the final state along a path -- which is useful when encoding proofs .
*
* [v0] --> [*v1] --> ... -> [vm] --> [*vn]
* ^ ^
* Starred ( * ) points are where actions begin and end:
* (v0, v1, ind_0) is the first state and action begins here
* (vm, vn, ind_0) is the penultimate state; we act over the (vm, vn) edge
*
* The last action along a path is the StopAct,
* which serves to mark the penultimate state's final flag to true.
* The reason for this indirection is to make it easier to declare
* the final state along a path -- which is useful when encoding proofs.
*)
let rec path_actions : G.basic_path -> ufunc_ir -> action list =
fun path func_ir ->
match path with
(* Completely empty path *)
| [] -> []
| [_] -> []
| (v0 :: v1 :: path_tl) ->
let main_vs = v1 :: path_tl in
match List.rev main_vs with
| (vn :: vm :: mid_rev) ->
let act_vs = List.rev (vm :: mid_rev) in
(PhiAct (v0, v1))
:: (path_actions_raw act_vs func_ir)
@ (edge_actions (vm, vn) func_ir)
@ [StopAct]
| _ -> []
(* All states along a path, as a function of the path_actions *)
let path_states : G.basic_path -> ufunc_ir -> string -> state list =
fun path func_ir tag ->
let state0 = path_start_state path tag in
let acts = path_actions path func_ir in
let (_, states_rev) =
List.fold_left
(fun (s0, accs) act ->
let s1 = next_state s0 act in (s1, s1 :: accs))
(state0, [state0]) acts in
List.rev states_rev
Generate a phi state for each entry of the phi map
let phi_state : string -> (G.vertex * int32) -> (G.vertex * int32) -> state =
fun tag (old_v, old_x) (new_v, new_x) ->
(* Printf.printf "\n Debug tag = %s" tag;*)
let state_tag = tag ^ "_Phi_" ^ Int32.to_string old_x ^ "_" ^ Int32.to_string new_x in
init_state0 (old_v, new_v) state_tag
let phi_states : ('a, 'b) func_ir -> string -> state list =
fun func_ir tag ->
G.VMap.fold
(fun this_v this_map acc_states ->
let this_states =
Int32Map.fold
(fun _ (new_x, phi_entry_for_x) accs ->
let xstates =
List.map
(fun (prev_v, old_x) -> phi_state tag (prev_v, old_x) (this_v, new_x))
phi_entry_for_x in
xstates @ accs)
this_map
[] in
this_states @ acc_states)
func_ir.phi_map
[]
(* Printing things *)
let string_of_wasm_value : wasm_value -> string =
fun wal -> Values.string_of_value wal.it
let rec string_of_pointer : pointer -> string = function
| IntPtr i -> Printf.sprintf "IntPtr %d" i
| Int32Ptr i -> "Int32Ptr " ^ Int32.to_string i
| IdPtr pid -> "IdPtr (" ^ pid ^ ")"
| VarPtr x -> "VarPtr (" ^ string_of_var x ^ ")"
| OffsetPtr (ptr, x) ->
"OffsetPtr (" ^ string_of_pointer ptr ^ ", " ^ Int32.to_string x ^ ")"
| IntOffsetPtr (ptr, x) ->
"IntOffsetPtr (" ^ string_of_pointer ptr ^ ", " ^ (Printf.sprintf "%d" x) ^ ")"
| CastedPtr wal -> "CastedPtr (" ^ string_of_value wal ^ ")"
| NullPtr -> "NullPtr"
and string_of_value : value -> string = function
| IntVal i -> Printf.sprintf "%d" i
| Int32Val i -> Int32.to_string i
| WasmVal wal -> string_of_wasm_value wal
| TestOpVal (testop, wal) ->
"TestOpVal ("
^ string_of_testop testop ^ ", "
^ string_of_value wal ^ ")"
| RelOpVal (wal0, relop, wal1) ->
"RelOpVal ("
^ string_of_value wal0 ^ ", "
^ string_of_relop relop ^ ", "
^ string_of_value wal1 ^ ")"
| UnOpVal (unop, wal) ->
"UnOpVal ("
^ string_of_unop unop ^ ", "
^ string_of_value wal ^ ")"
| BinOpVal (wal0, binop, wal1) ->
"BinOpVal ("
^ string_of_value wal0 ^ ", "
^ string_of_binop binop ^ ", "
^ string_of_value wal1 ^ ")"
| CvtOpVal (cvtop, wal) ->
"CvtOpVal ("
^ string_of_cvtop cvtop ^ ", "
^ string_of_value wal ^ ")"
| SelectVal (arr, ptr) ->
"SelectVal ("
^ string_of_array arr ^ ", "
^ string_of_pointer ptr ^ ")"
| UFVal (funcid, args) ->
"UFVal ("
^ funcid
^ (List.fold_left
(fun r a -> r ^ "," ^ string_of_value a)
""
args)
^ ")"
| ArrayVal (arr) ->
"ArrayVal ("
^ string_of_array arr ^ ") "
and string_of_array : array_ -> string = function
| IdArr aid -> "IdArr (" ^ aid ^ ")"
| StoreArr (arr, ptr, wal) ->
"StoreArr ("
^ string_of_array arr ^ ", "
^ string_of_pointer ptr ^ ", "
^ string_of_value wal ^ ")"
let string_of_step_status : step_status -> string = function
| Active n -> "Active " ^ string_of_int n
| Stopped -> "Stopped"
let string_of_state : state -> string =
fun state ->
"{ state with "
^ "this_edge = " ^ (G.string_of_basic_edge state.this_edge) ^ "; "
^ "step_status = " ^ string_of_step_status state.step_status ^ "; "
^ "tag = " ^ state.tag ^ " }"
let string_of_action : action -> string = function
| BasicAct instr -> "BasicAct (" ^ string_of_instr_inline instr ^ ")"
| JumpAct (br, v) ->
"JumpAct ("
^ string_of_branch br ^ ", " ^ G.string_of_vertex v ^ ")"
| PhiAct (prev_v, this_v) ->
"PhiAct (" ^ G.string_of_vertex prev_v ^ "," ^ G.string_of_vertex this_v ^ ")"
| CallAct x -> "CallAct " ^ string_of_var x
| CallIndirectAct x -> "CallIndirectAct " ^ string_of_var x
| StopAct -> "StopAct"
let string_of_actions : action list -> string =
fun acts ->
String.string_of_list_inline acts string_of_action
let rec convert_to_int_pointer: pointer -> pointer =
used for the uninterpreted sym eval
fun p ->
match p with
| Int32Ptr k -> IntPtr(Int32.to_int k)
| OffsetPtr(q,k) -> IntOffsetPtr(convert_to_int_pointer q, Int32.to_int k)
| IntOffsetPtr(q,k) -> IntOffsetPtr(convert_to_int_pointer q, k)
| _ -> p
| null | https://raw.githubusercontent.com/nokia/web-assembly-self-certifying-compilation-framework/8c31df0bd7d2d94cfbc22089944f5cabb3a61158/src/validator/proofs/state_types.ml | ocaml | We need to handle symbolic pointers, and so we go an indirect route;
* The types need to be general enough to encode interleaving of:
* value stack; local variables; global variables; and linear memory
pointer identified by an id
Arrays -- as in the theory of arrays
* This is how we will model the
* value stack; local variables; global variables; memory
Array identified by an id
Some value definitions
Tag a state with a particular string - ie update the tag
Action labels
vertex we are branching to
(old_v, new_v)
Calculates the next state based on the action
From the constituents of a state we can extract a lot of string id's
Easy way to actually access the array_ types
Pointer arithmetics
The final edge of a path
note the flip
The initial state
Actions of a vertex
The case where label does not exist
The case where we have a single call instruction
The case where every instruction is basic
Completely empty path
All states along a path, as a function of the path_actions
Printf.printf "\n Debug tag = %s" tag;
Printing things | *
Copyright 2020 Nokia
Licensed under the BSD 3 - Clause License .
SPDX - License - Identifier : BSD-3 - Clause
Copyright 2020 Nokia
Licensed under the BSD 3-Clause License.
SPDX-License-Identifier: BSD-3-Clause
*)
open Wasm
open Wasm_utils
open Instr_graph
open Func_types
open Script_types
open Source
open Debug
module G = Digraph
module String = String_utils
This is the types that are used to represent states for WASM programs .
* All ids here are typed as strings , but must also be SMTLIB - valid ids .
* We make this a separate file from proof_types.ml because this
* is about state - specific manipulation and data structures over the
* func_ir that do not need formulas ; which the proof_types.ml defines
* All ids here are typed as strings, but must also be SMTLIB-valid ids.
* We make this a separate file from proof_types.ml because this
* is about state-specific manipulation and data structures over the
* func_ir that do not need formulas; which the proof_types.ml defines *)
A Walue :)
type wasm_value = Ast.literal
type smtid = string
type pointer =
| IntPtr of int
| Int32Ptr of int32
| VarPtr of Ast.var
| OffsetPtr of pointer * int32
| IntOffsetPtr of pointer * int
| CastedPtr of value
| NullPtr
The different forms in which a value is accessed in WASM
and value =
| IntVal of int
| Int32Val of int32
| WasmVal of wasm_value
| TestOpVal of Ast.testop * value
| RelOpVal of value * Ast.relop * value
| UnOpVal of Ast.unop * value
| BinOpVal of value * Ast.binop * value
| CvtOpVal of Ast.cvtop * value
| SelectVal of array_ * pointer
UFVal < name > < values > represents the result of uninterpreted function < name > on a list of values .
| ArrayVal of array_
and array_ =
| StoreArr of array_ * pointer * value
let zero_int32_val : value = Int32Val 0l
The state of a program during execution as determined by the
* control flow graph ( the instrs_graph ) .
* A state occurs between edges , and we identify a state with the
* execution immediately AFTER the branch instruction that sits on the edge .
*
* | instr m |
* v0 | ....... |
* ----------- < - vertex v0
* |
* v0v1_branch
* | < - state { ( v0 , v1 ) ; v0v1_branch ; step = Active 0 }
* -----------
* v1 | instr 1 |
* | | < - state { ( v0 , v1 ) ; v0v1_branch ; step = 1 }
* | instr 2 |
* | | < - state { ( v0 , v1 ) ; v0v1_branch ; step = 2 }
* | ....... |
* | instr n |
* ----------- < - state { ( v0 , v1 ) ; v0v1_branch ; step = n }
* |
* v1v2_branch
* | < - state { ( v1 , v2 ) ; v1v2_branch ; step = n + 1 }
* -----------
* v2 | instr 1 |
* | ....... |
* control flow graph (the instrs_graph).
* A state occurs between edges, and we identify a state with the
* execution immediately AFTER the branch instruction that sits on the edge.
*
* | instr m |
* v0 | ....... |
* ----------- <- vertex v0
* |
* v0v1_branch
* | <- state { (v0, v1); v0v1_branch; step = Active 0 }
* -----------
* v1 | instr 1 |
* | | <- state { (v0, v1); v0v1_branch; step = 1 }
* | instr 2 |
* | | <- state { (v0, v1); v0v1_branch; step = 2 }
* | ....... |
* | instr n |
* ----------- <- state { (v0, v1); v0v1_branch; step = n }
* |
* v1v2_branch
* | <- state { (v1, v2); v1v2_branch; step = n + 1 }
* -----------
* v2 | instr 1 |
* | ....... |
*)
Status defines whether a state is active or a final state ;
* A state is uniquely specified by the CFG edge and this status
* ( plus maybe slightly more additional information )
* because what specifying from the CFG can tell us is limited
* to little more than pure graph data
* A state is uniquely specified by the CFG edge and this status
* (plus maybe slightly more additional information)
* because what specifying from the CFG can tell us is limited
* to little more than pure graph data *)
type step_status =
| Active of int
| Stopped
type state =
{ this_edge : G.basic_edge;
step_status : step_status;
tag : string;
}
let empty_state : state =
{ this_edge = (G.null_vertex, G.null_vertex);
step_status = Active 0;
tag = "TAG";
}
let source_tag : string = "SRC"
let target_tag : string = "TGT"
Initialize the state given vertices and stuff
let init_state0 : G.basic_edge -> string -> state =
fun this_e tag ->
{ empty_state with
this_edge = this_e;
step_status = Active 0;
tag = tag;
}
let init_statef : G.basic_edge -> string -> state =
fun this_e tag ->
{ empty_state with
this_edge = this_e;
step_status = Stopped;
tag = tag
}
let tag_state : state -> string -> state =
fun state tag ->
{ state with tag = tag }
type action =
| BasicAct of Ast.instr
| CallAct of Ast.var
| CallIndirectAct of Ast.var
| StopAct
let next_state : state -> action -> state =
fun state0 act ->
match (state0.step_status, act) with
| (Active n, BasicAct _) ->
{ state0 with
step_status = Active (n + 1)
}
| (Active n, JumpAct (_, next_v)) ->
let (base_v, this_v) = state0.this_edge in
{ state0 with
this_edge = (this_v, next_v);
step_status = Active (n + 1)
}
| (Active n, PhiAct _) ->
{ state0 with
step_status = Active (n + 1)
}
| (Active n, CallAct x) ->
{ state0 with
step_status = Active (n + 1)
}
| (Active n, CallIndirectAct _) ->
{ state0 with
step_status = Active (n + 1)
}
| (Active _, StopAct) ->
{ state0 with step_status = Stopped }
| (Stopped, _) ->
(prerr_debug ("next_state: cannot advance final state"); empty_state)
Some stuff for generating strings ( smtids ) that are acceptable
* as identifiers in SMTLIB format
* as identifiers in SMTLIB format *)
let vertex_smtid : G.vertex -> smtid =
fun v -> "v" ^ string_of_int v
let branch_smtid : branch -> smtid = function
| Jump-> "br"
| JumpIf true -> "if_true"
| JumpIf false -> "if_false"
| JumpBrIf true -> "brif_true"
| JumpBrIf false -> "brif_false"
| JumpIndex ind -> "branchindex_" ^ Int32.to_string ind
| JumpDefault size -> "branchdefault_" ^ Int32.to_string size
A pair of edges on a graph along with the status ( Active / Stopped )
* uniquely determines a state ( to the SMT solver )
* The rational behind this design is to make it easier for annotating
* the CFG , where little more than raw graph information can be given .
* uniquely determines a state (to the SMT solver)
* The rational behind this design is to make it easier for annotating
* the CFG, where little more than raw graph information can be given. *)
let step_status_smtid : step_status -> smtid = function
| Active n -> "active_" ^ string_of_int n
| Stopped -> "final"
let state_smtid : state -> smtid =
fun state ->
state.tag
^ "_" ^ (vertex_smtid (fst state.this_edge))
^ "" ^ (vertex_smtid (snd state.this_edge))
^ "_s" ^ step_status_smtid (state.step_status)
let values_smtid : state -> smtid =
fun state ->
"values_" ^ state_smtid state
let stack_pointer_smtid : state -> smtid =
fun state ->
"pointer_" ^ state_smtid state
let locals_smtid : state -> smtid =
fun state ->
"locals_" ^ state_smtid state
let globals_smtid : state -> smtid =
fun state ->
"globals_" ^ state_smtid state
let memory_smtid : state -> smtid =
fun state ->
"memory_" ^ state_smtid state
let values : state -> array_ =
fun state ->
IdArr (values_smtid state)
let stack_pointer : state -> pointer =
fun state ->
IdPtr (stack_pointer_smtid state)
let locals : state -> array_ =
fun state ->
IdArr (locals_smtid state)
let globals : state -> array_ =
fun state ->
IdArr (globals_smtid state)
let full_memory : state -> array_ =
fun state ->
IdArr (memory_smtid state)
let uf_memory : state -> value =
fun state ->
UFVal (memory_smtid state, [])
let rec offset_pointer : pointer -> int32 -> pointer =
fun ptr n ->
match ptr with
| OffsetPtr (p, x) -> OffsetPtr (p, I32.add x n)
| _ -> OffsetPtr (ptr, n)
let succ_pointer : pointer -> pointer =
fun ptr ->
offset_pointer ptr 1l
let prev_pointer : pointer -> pointer =
fun ptr ->
offset_pointer ptr (Int32.neg 1l)
The first edge of a path
let path_start_edge : G.basic_path -> G.basic_edge =
fun path ->
match path with
| (v0 :: v1 :: _) -> (v0, v1)
| _ -> (prerr_debug ("path_start_edge: bad"); G.null_basic_edge)
let path_final_edge : G.basic_path -> G.basic_edge =
fun path ->
match List.rev path with
| _ -> (prerr_debug ("path_final_edge: bad"); G.null_basic_edge)
let path_start_state : G.basic_path -> string -> state =
fun path tag ->
init_state0 (path_start_edge path) tag
let path_final_state : G.basic_path -> string -> state =
fun path tag ->
init_statef (path_final_edge path) tag
let vertex_actions : G.vertex -> ufunc_ir -> action list =
fun v func_ir ->
match G.vertex_label v func_ir.body_graph with
| None -> (prerr_debug "vertex_actions: None"; [])
| Some ({ instrs = [{ it = Ast.Call x }] }, _) ->
(match G.vertex_succs v func_ir.body_graph with
| (sv, (Jump, ())) :: [] -> [CallAct x]
| _ ->
(prerr_debug
("vertex_actions: Call at "
^ G.string_of_vertex v
^ " not followed by single successor");
[]))
| Some (block, _) ->
List.map
(fun i -> match i.it with
| Ast.Call x -> CallAct x
| Ast.CallIndirect x -> CallIndirectAct x
| _ when (is_basic_instr i) -> BasicAct i
| _ ->
(prerr_debug ("vertex_action: non-basic " ^ string_of_instr_inline i);
BasicAct i))
block.instrs
Actions of an edge , does * not * include the phi vertex
let edge_actions : G.basic_edge -> ufunc_ir -> action list =
fun (src_v, dst_v) func_ir ->
match G.edge_label (src_v, dst_v) func_ir.body_graph with
| None ->
(prerr_debug
("edge_actions: None @ " ^ G.string_of_basic_edge (src_v, dst_v));
[])
| Some (br, _) -> [JumpAct (br, dst_v)]
Unlike path_actions , gets the action of every thing in this path .
* When we say paths here , we mean paths in the sense of what is used
* for a proof : for these the first and last edges are special markers
* When we say paths here, we mean paths in the sense of what is used
* for a proof: for these the first and last edges are special markers *)
let rec path_actions_raw : G.basic_path -> ufunc_ir -> action list =
fun path func_ir ->
match path with
| [] -> []
| v :: [] -> vertex_actions v func_ir
| v0 :: (v1 :: path_tl) ->
(vertex_actions v0 func_ir)
@ (edge_actions (v0, v1) func_ir)
@ [PhiAct (v0, v1)]
@ path_actions_raw (v1 :: path_tl) func_ir
The type of path actions that we want to suit how we specify proofs .
*
* [ v0 ] -- > [ * v1 ] -- > ... - > [ vm ] -- > [ * vn ]
* ^ ^
* Starred ( * ) points are where actions begin and end :
* ( v0 , v1 , ind_0 ) is the first state and action begins here
* ( vm , vn , ind_0 ) is the penultimate state ; we act over the ( vm , vn ) edge
*
* The last action along a path is the ,
* which serves to mark the penultimate state 's final flag to true .
* The reason for this indirection is to make it easier to declare
* the final state along a path -- which is useful when encoding proofs .
*
* [v0] --> [*v1] --> ... -> [vm] --> [*vn]
* ^ ^
* Starred ( * ) points are where actions begin and end:
* (v0, v1, ind_0) is the first state and action begins here
* (vm, vn, ind_0) is the penultimate state; we act over the (vm, vn) edge
*
* The last action along a path is the StopAct,
* which serves to mark the penultimate state's final flag to true.
* The reason for this indirection is to make it easier to declare
* the final state along a path -- which is useful when encoding proofs.
*)
let rec path_actions : G.basic_path -> ufunc_ir -> action list =
fun path func_ir ->
match path with
| [] -> []
| [_] -> []
| (v0 :: v1 :: path_tl) ->
let main_vs = v1 :: path_tl in
match List.rev main_vs with
| (vn :: vm :: mid_rev) ->
let act_vs = List.rev (vm :: mid_rev) in
(PhiAct (v0, v1))
:: (path_actions_raw act_vs func_ir)
@ (edge_actions (vm, vn) func_ir)
@ [StopAct]
| _ -> []
let path_states : G.basic_path -> ufunc_ir -> string -> state list =
fun path func_ir tag ->
let state0 = path_start_state path tag in
let acts = path_actions path func_ir in
let (_, states_rev) =
List.fold_left
(fun (s0, accs) act ->
let s1 = next_state s0 act in (s1, s1 :: accs))
(state0, [state0]) acts in
List.rev states_rev
Generate a phi state for each entry of the phi map
let phi_state : string -> (G.vertex * int32) -> (G.vertex * int32) -> state =
fun tag (old_v, old_x) (new_v, new_x) ->
let state_tag = tag ^ "_Phi_" ^ Int32.to_string old_x ^ "_" ^ Int32.to_string new_x in
init_state0 (old_v, new_v) state_tag
let phi_states : ('a, 'b) func_ir -> string -> state list =
fun func_ir tag ->
G.VMap.fold
(fun this_v this_map acc_states ->
let this_states =
Int32Map.fold
(fun _ (new_x, phi_entry_for_x) accs ->
let xstates =
List.map
(fun (prev_v, old_x) -> phi_state tag (prev_v, old_x) (this_v, new_x))
phi_entry_for_x in
xstates @ accs)
this_map
[] in
this_states @ acc_states)
func_ir.phi_map
[]
let string_of_wasm_value : wasm_value -> string =
fun wal -> Values.string_of_value wal.it
let rec string_of_pointer : pointer -> string = function
| IntPtr i -> Printf.sprintf "IntPtr %d" i
| Int32Ptr i -> "Int32Ptr " ^ Int32.to_string i
| IdPtr pid -> "IdPtr (" ^ pid ^ ")"
| VarPtr x -> "VarPtr (" ^ string_of_var x ^ ")"
| OffsetPtr (ptr, x) ->
"OffsetPtr (" ^ string_of_pointer ptr ^ ", " ^ Int32.to_string x ^ ")"
| IntOffsetPtr (ptr, x) ->
"IntOffsetPtr (" ^ string_of_pointer ptr ^ ", " ^ (Printf.sprintf "%d" x) ^ ")"
| CastedPtr wal -> "CastedPtr (" ^ string_of_value wal ^ ")"
| NullPtr -> "NullPtr"
and string_of_value : value -> string = function
| IntVal i -> Printf.sprintf "%d" i
| Int32Val i -> Int32.to_string i
| WasmVal wal -> string_of_wasm_value wal
| TestOpVal (testop, wal) ->
"TestOpVal ("
^ string_of_testop testop ^ ", "
^ string_of_value wal ^ ")"
| RelOpVal (wal0, relop, wal1) ->
"RelOpVal ("
^ string_of_value wal0 ^ ", "
^ string_of_relop relop ^ ", "
^ string_of_value wal1 ^ ")"
| UnOpVal (unop, wal) ->
"UnOpVal ("
^ string_of_unop unop ^ ", "
^ string_of_value wal ^ ")"
| BinOpVal (wal0, binop, wal1) ->
"BinOpVal ("
^ string_of_value wal0 ^ ", "
^ string_of_binop binop ^ ", "
^ string_of_value wal1 ^ ")"
| CvtOpVal (cvtop, wal) ->
"CvtOpVal ("
^ string_of_cvtop cvtop ^ ", "
^ string_of_value wal ^ ")"
| SelectVal (arr, ptr) ->
"SelectVal ("
^ string_of_array arr ^ ", "
^ string_of_pointer ptr ^ ")"
| UFVal (funcid, args) ->
"UFVal ("
^ funcid
^ (List.fold_left
(fun r a -> r ^ "," ^ string_of_value a)
""
args)
^ ")"
| ArrayVal (arr) ->
"ArrayVal ("
^ string_of_array arr ^ ") "
and string_of_array : array_ -> string = function
| IdArr aid -> "IdArr (" ^ aid ^ ")"
| StoreArr (arr, ptr, wal) ->
"StoreArr ("
^ string_of_array arr ^ ", "
^ string_of_pointer ptr ^ ", "
^ string_of_value wal ^ ")"
let string_of_step_status : step_status -> string = function
| Active n -> "Active " ^ string_of_int n
| Stopped -> "Stopped"
let string_of_state : state -> string =
fun state ->
"{ state with "
^ "this_edge = " ^ (G.string_of_basic_edge state.this_edge) ^ "; "
^ "step_status = " ^ string_of_step_status state.step_status ^ "; "
^ "tag = " ^ state.tag ^ " }"
let string_of_action : action -> string = function
| BasicAct instr -> "BasicAct (" ^ string_of_instr_inline instr ^ ")"
| JumpAct (br, v) ->
"JumpAct ("
^ string_of_branch br ^ ", " ^ G.string_of_vertex v ^ ")"
| PhiAct (prev_v, this_v) ->
"PhiAct (" ^ G.string_of_vertex prev_v ^ "," ^ G.string_of_vertex this_v ^ ")"
| CallAct x -> "CallAct " ^ string_of_var x
| CallIndirectAct x -> "CallIndirectAct " ^ string_of_var x
| StopAct -> "StopAct"
let string_of_actions : action list -> string =
fun acts ->
String.string_of_list_inline acts string_of_action
let rec convert_to_int_pointer: pointer -> pointer =
used for the uninterpreted sym eval
fun p ->
match p with
| Int32Ptr k -> IntPtr(Int32.to_int k)
| OffsetPtr(q,k) -> IntOffsetPtr(convert_to_int_pointer q, Int32.to_int k)
| IntOffsetPtr(q,k) -> IntOffsetPtr(convert_to_int_pointer q, k)
| _ -> p
|
e6bcad4f5d92f8af99a9fa7feb4e013597004dae2fa99f03e23fc72a87cb2471 | penpot/penpot | undo.cljs | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
;;
;; Copyright (c) KALEIDOS INC
(ns app.main.data.workspace.path.undo
(:require
[app.common.data :as d]
[app.common.data.undo-stack :as u]
[app.common.uuid :as uuid]
[app.main.data.workspace.path.changes :as changes]
[app.main.data.workspace.path.state :as st]
[app.main.store :as store]
[beicon.core :as rx]
[okulary.core :as l]
[potok.core :as ptk]))
(defn undo-event?
[event]
(= :app.main.data.workspace.common/undo (ptk/type event)))
(defn redo-event?
[event]
(= :app.main.data.workspace.common/redo (ptk/type event)))
(defn- make-entry [state]
(let [id (st/get-path-id state)
shape (st/get-path state)]
{:content (:content shape)
:selrect (:selrect shape)
:points (:points shape)
:preview (get-in state [:workspace-local :edit-path id :preview])
:last-point (get-in state [:workspace-local :edit-path id :last-point])
:prev-handler (get-in state [:workspace-local :edit-path id :prev-handler])}))
(defn- load-entry [state {:keys [content selrect points preview last-point prev-handler]}]
(let [id (st/get-path-id state)
old-content (st/get-path state :content)]
(-> state
(d/assoc-in-when (st/get-path-location state :content) content)
(d/assoc-in-when (st/get-path-location state :selrect) selrect)
(d/assoc-in-when (st/get-path-location state :points) points)
(d/update-in-when
[:workspace-local :edit-path id]
assoc
:preview preview
:last-point last-point
:prev-handler prev-handler
:old-content old-content))))
(defn undo-path []
(ptk/reify ::undo-path
ptk/UpdateEvent
(update [_ state]
(let [id (st/get-path-id state)
undo-stack (-> (get-in state [:workspace-local :edit-path id :undo-stack])
(u/undo))
entry (u/peek undo-stack)]
(cond-> state
(some? entry)
(-> (load-entry entry)
(d/assoc-in-when
[:workspace-local :edit-path id :undo-stack]
undo-stack)))))
ptk/WatchEvent
(watch [_ _ _]
(rx/of (changes/save-path-content {:preserve-move-to true})))))
(defn redo-path []
(ptk/reify ::redo-path
ptk/UpdateEvent
(update [_ state]
(let [id (st/get-path-id state)
undo-stack (-> (get-in state [:workspace-local :edit-path id :undo-stack])
(u/redo))
entry (u/peek undo-stack)]
(-> state
(load-entry entry)
(d/assoc-in-when
[:workspace-local :edit-path id :undo-stack]
undo-stack))))
ptk/WatchEvent
(watch [_ _ _]
(rx/of (changes/save-path-content)))))
(defn merge-head
"Joins the head with the previous undo in one. This is done so when the user changes a
node handlers after adding it the undo merges both in one operation only"
[]
(ptk/reify ::merge-head
ptk/UpdateEvent
(update [_ state]
(let [id (st/get-path-id state)
stack (get-in state [:workspace-local :edit-path id :undo-stack])
head (u/peek stack)
stack (-> stack (u/undo) (u/fixup head))]
(-> state
(d/assoc-in-when
[:workspace-local :edit-path id :undo-stack]
stack))))))
(defn add-undo-entry []
(ptk/reify ::add-undo-entry
ptk/UpdateEvent
(update [_ state]
(let [id (st/get-path-id state)
entry (make-entry state)]
(-> state
(d/update-in-when
[:workspace-local :edit-path id :undo-stack]
u/append entry))))))
(defn end-path-undo
[]
(ptk/reify ::end-path-undo
ptk/UpdateEvent
(update [_ state]
(-> state
(d/update-in-when
[:workspace-local :edit-path (st/get-path-id state)]
dissoc :undo-lock :undo-stack)))))
(defn- stop-undo? [event]
(or (= :app.main.data.workspace.common/clear-edition-mode (ptk/type event))
(= :app.main.data.workspace/finalize-page (ptk/type event))))
(def path-content-ref
(letfn [(selector [state]
(st/get-path state :content))]
(l/derived selector store/state)))
(defn start-path-undo
[]
(let [lock (uuid/next)]
(ptk/reify ::start-path-undo
ptk/UpdateEvent
(update [_ state]
(let [undo-lock (get-in state [:workspace-local :edit-path (st/get-path-id state) :undo-lock])]
(cond-> state
(not undo-lock)
(update-in [:workspace-local :edit-path (st/get-path-id state)]
assoc
:undo-lock lock
:undo-stack (u/make-stack)))))
ptk/WatchEvent
(watch [_ state stream]
(let [undo-lock (get-in state [:workspace-local :edit-path (st/get-path-id state) :undo-lock])]
(when (= undo-lock lock)
(let [stop-undo-stream (->> stream
(rx/filter stop-undo?)
(rx/take 1))]
(rx/concat
(->> (rx/from-atom path-content-ref {:emit-current-value? true})
(rx/take-until stop-undo-stream)
(rx/filter (comp not nil?))
(rx/map #(add-undo-entry)))
(rx/of (end-path-undo))))))))))
| null | https://raw.githubusercontent.com/penpot/penpot/cc18f84d620e37d8efafc5bed1bcdbe70ec23c1e/frontend/src/app/main/data/workspace/path/undo.cljs | clojure |
Copyright (c) KALEIDOS INC | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(ns app.main.data.workspace.path.undo
(:require
[app.common.data :as d]
[app.common.data.undo-stack :as u]
[app.common.uuid :as uuid]
[app.main.data.workspace.path.changes :as changes]
[app.main.data.workspace.path.state :as st]
[app.main.store :as store]
[beicon.core :as rx]
[okulary.core :as l]
[potok.core :as ptk]))
(defn undo-event?
[event]
(= :app.main.data.workspace.common/undo (ptk/type event)))
(defn redo-event?
[event]
(= :app.main.data.workspace.common/redo (ptk/type event)))
(defn- make-entry [state]
(let [id (st/get-path-id state)
shape (st/get-path state)]
{:content (:content shape)
:selrect (:selrect shape)
:points (:points shape)
:preview (get-in state [:workspace-local :edit-path id :preview])
:last-point (get-in state [:workspace-local :edit-path id :last-point])
:prev-handler (get-in state [:workspace-local :edit-path id :prev-handler])}))
(defn- load-entry [state {:keys [content selrect points preview last-point prev-handler]}]
(let [id (st/get-path-id state)
old-content (st/get-path state :content)]
(-> state
(d/assoc-in-when (st/get-path-location state :content) content)
(d/assoc-in-when (st/get-path-location state :selrect) selrect)
(d/assoc-in-when (st/get-path-location state :points) points)
(d/update-in-when
[:workspace-local :edit-path id]
assoc
:preview preview
:last-point last-point
:prev-handler prev-handler
:old-content old-content))))
(defn undo-path []
(ptk/reify ::undo-path
ptk/UpdateEvent
(update [_ state]
(let [id (st/get-path-id state)
undo-stack (-> (get-in state [:workspace-local :edit-path id :undo-stack])
(u/undo))
entry (u/peek undo-stack)]
(cond-> state
(some? entry)
(-> (load-entry entry)
(d/assoc-in-when
[:workspace-local :edit-path id :undo-stack]
undo-stack)))))
ptk/WatchEvent
(watch [_ _ _]
(rx/of (changes/save-path-content {:preserve-move-to true})))))
(defn redo-path []
(ptk/reify ::redo-path
ptk/UpdateEvent
(update [_ state]
(let [id (st/get-path-id state)
undo-stack (-> (get-in state [:workspace-local :edit-path id :undo-stack])
(u/redo))
entry (u/peek undo-stack)]
(-> state
(load-entry entry)
(d/assoc-in-when
[:workspace-local :edit-path id :undo-stack]
undo-stack))))
ptk/WatchEvent
(watch [_ _ _]
(rx/of (changes/save-path-content)))))
(defn merge-head
"Joins the head with the previous undo in one. This is done so when the user changes a
node handlers after adding it the undo merges both in one operation only"
[]
(ptk/reify ::merge-head
ptk/UpdateEvent
(update [_ state]
(let [id (st/get-path-id state)
stack (get-in state [:workspace-local :edit-path id :undo-stack])
head (u/peek stack)
stack (-> stack (u/undo) (u/fixup head))]
(-> state
(d/assoc-in-when
[:workspace-local :edit-path id :undo-stack]
stack))))))
(defn add-undo-entry []
(ptk/reify ::add-undo-entry
ptk/UpdateEvent
(update [_ state]
(let [id (st/get-path-id state)
entry (make-entry state)]
(-> state
(d/update-in-when
[:workspace-local :edit-path id :undo-stack]
u/append entry))))))
(defn end-path-undo
[]
(ptk/reify ::end-path-undo
ptk/UpdateEvent
(update [_ state]
(-> state
(d/update-in-when
[:workspace-local :edit-path (st/get-path-id state)]
dissoc :undo-lock :undo-stack)))))
(defn- stop-undo? [event]
(or (= :app.main.data.workspace.common/clear-edition-mode (ptk/type event))
(= :app.main.data.workspace/finalize-page (ptk/type event))))
(def path-content-ref
(letfn [(selector [state]
(st/get-path state :content))]
(l/derived selector store/state)))
(defn start-path-undo
[]
(let [lock (uuid/next)]
(ptk/reify ::start-path-undo
ptk/UpdateEvent
(update [_ state]
(let [undo-lock (get-in state [:workspace-local :edit-path (st/get-path-id state) :undo-lock])]
(cond-> state
(not undo-lock)
(update-in [:workspace-local :edit-path (st/get-path-id state)]
assoc
:undo-lock lock
:undo-stack (u/make-stack)))))
ptk/WatchEvent
(watch [_ state stream]
(let [undo-lock (get-in state [:workspace-local :edit-path (st/get-path-id state) :undo-lock])]
(when (= undo-lock lock)
(let [stop-undo-stream (->> stream
(rx/filter stop-undo?)
(rx/take 1))]
(rx/concat
(->> (rx/from-atom path-content-ref {:emit-current-value? true})
(rx/take-until stop-undo-stream)
(rx/filter (comp not nil?))
(rx/map #(add-undo-entry)))
(rx/of (end-path-undo))))))))))
|
e93cb6966defd781211dea192e815eebbd8590faba40215047d2e84d61dc636b | ndmitchell/build-shootout | Main.hs | #!/usr/bin/env runhaskell
-- | A test script to check those build systems claiming to implement a test
-- do in fact do so.
module Main(main) where
import Util
main :: IO ()
main = do
test "basic" basic
test "parallel" parallel
test "include" include
test "wildcard" wildcard
test "spaces" spaces
test "monad1" monad1
test "monad2" monad2
test "monad3" monad3
test "unchanged" unchanged
test "multiple" multiple
test "system1" system1
test "system2" system2
test "pool" pool
test "digest" digest
test "nofileout" nofileout
test "noleftover" noleftover
test "secondary" secondary
test "intermediate" intermediate
basic :: ([Opt] -> IO ()) -> IO ()
basic run = do
writeFile "input" "xyz"
run [Contents "output" "xyz"]
run [NoChange]
writeFile "input" "abc"
run [Contents "output" "abc"]
run [NoChange]
parallel :: ([Opt] -> IO ()) -> IO ()
parallel run = do
writeFile "input1" "xyz"
writeFile "input2" "abc"
run [Parallel 2, Log "start start end end"]
run [NoChange]
include :: ([Opt] -> IO ()) -> IO ()
include run = do
run [Change "main.o"]
run [NoChange]
appendFile "include-2.h" "\n/* comment */"
run [Change "main.o"]
run [NoChange]
wildcard :: ([Opt] -> IO ()) -> IO ()
wildcard run = do
x <- randomRIO (1::Int,1000000)
let name = "name" ++ show x
writeFile (name ++ ".in") "abc"
run [Target $ name ++ ".out", Contents (name ++ ".out") "abc"]
run [Target $ name ++ ".out", NoChange]
writeFile (name ++ ".in") "xyz"
run [Target $ name ++ ".out", Contents (name ++ ".out") "xyz"]
run [Target $ name ++ ".out", NoChange]
spaces :: ([Opt] -> IO ()) -> IO ()
spaces run = do
writeFile "input file" "abc"
run [Target "output file", Contents "output file" "abc"]
run [Target "output file", NoChange]
writeFile "input file" "xyz"
run [Target "output file", Contents "output file" "xyz"]
run [Target "output file", NoChange]
monad1 :: ([Opt] -> IO ()) -> IO ()
monad1 run = do
writeBinary "list" "input1\ninput2\n"
writeFile "input1" "test"
writeFile "input2" "again"
run [Target "output", Contents "output" "testagain"]
run [Target "output", NoChange]
writeFile "input1" "more"
run [Target "output", Contents "output" "moreagain"]
run [Target "output", NoChange]
writeBinary "list" "input1\n"
run [Target "output", Contents "output" "more"]
run [Target "output", NoChange]
writeFile "input2" "x"
run [Target "output", NoChange]
monad2 :: ([Opt] -> IO ()) -> IO ()
monad2 run = do
writeBinary "source" "output1\noutput2\n"
writeFile "input1" "test"
writeFile "input2" "again"
run [Target "output", Contents "output" "testagain", Log "run"]
run [Target "output", NoChange, Log "run"]
writeFile "input1" "more"
run [Target "output", Contents "output" "moreagain"]
run [Target "output", NoChange]
writeBinary "source" "output1\n"
run [Target "output", Contents "output" "more", Log "run run"]
run [Target "output", NoChange]
writeFile "input2" "x"
run [Target "output", NoChange, Log "run run"]
monad3 :: ([Opt] -> IO ()) -> IO ()
monad3 run = do
writeBinary "source" "output1\noutput2\n"
writeFile "input1" "test"
writeFile "input2" "again"
run [Target "output", Contents "output" "testagain", Log "run"]
run [Target "output", NoChange, Log "run", Missing "gen"]
writeBinary "source" "gen\noutput2\n"
run [Target "output", Contents "output" "Generated\nagain"]
run [Target "output", NoChange]
unchanged :: ([Opt] -> IO ()) -> IO ()
unchanged run = do
writeFile "input" "foo is in here"
run [Target "output", Contents "source" "foo is out here", Contents "output" "foo xs out here", Log "run"]
run [Target "output", NoChange]
writeFile "input" "bar is in here"
run [Target "output", Contents "source" "bar is out here", Contents "output" "bar xs out here", Log "run run"]
run [Target "output", NoChange]
writeFile "input" "bar is out here"
run [Target "output", Contents "source" "bar is out here", Contents "output" "bar xs out here", Log "run run"]
run [Target "output", NoChange]
multiple :: ([Opt] -> IO ()) -> IO ()
multiple run = do
writeFile "input" "abbc"
run [Target "output1", Target "output2", Contents "output1" "AbbC", Contents "output2" "aBBC", Log "run run"]
run [Target "output1", Target "output2", NoChange]
writeFile "input" "aBBc"
run [Target "output1", Target "output2", Contents "output1" "ABBC", Contents "output2" "aBBC", Log "run run run"]
run [Target "output1", NoChange]
writeFile "input" "ab"
run [Target "output1", Contents "output1" "Ab", Contents "output2" "aBBC"]
run [Target "output2", Contents "output1" "Ab", Contents "output2" "aB"]
run [Target "output1", Target "output2", NoChange]
system1 :: ([Opt] -> IO ()) -> IO ()
system1 run = do
writeFile "system1-data" "foo"
writeFile "source" "none"
run [Target "output", Contents "output" "foo", Log "gen run"]
run [Target "output", Contents "output" "foo", Log "gen run gen"]
writeFile "system1-data" "bar"
run [Target "output", Contents "output" "bar", Log "gen run gen gen run"]
system2 :: ([Opt] -> IO ()) -> IO ()
system2 run = do
let varName = "SYSTEM2_DATA"
run [Contents "output" "", Log "run"]
run [NoChange]
run [Contents "output" "foo", Log "run run", Env varName "foo"]
run [NoChange, Env varName "foo"]
run [Contents "output" "bar", Log "run run run", Env varName "bar"]
run [NoChange, Env varName "bar"]
run [Contents "output" "", Log "run run run run"]
run [NoChange]
pool :: ([Opt] -> IO ()) -> IO ()
pool run = do
writeFile "input1" "xyz"
writeFile "input2" "abc"
writeFile "input3" "def"
run [Parallel 8, Log "start start end start end end"]
run [NoChange]
digest :: ([Opt] -> IO ()) -> IO ()
digest run = do
writeFile "input" "xyz"
run [Contents "output" "xyz"]
run [NoChange]
writeFile "input" "abc"
run [Contents "output" "abc"]
run [NoChange]
writeFile "input" "abc"
run [NoChange]
nofileout :: ([Opt] -> IO ()) -> IO ()
nofileout run = do
writeFile "input" "xyz"
run [Log "xyz"]
run [NoChange]
writeFile "input" "abc"
run [Log "xyzabc"]
run [NoChange]
noleftover :: ([Opt] -> IO ()) -> IO ()
noleftover run = do
writeFile "foo.in" "foo"
writeFile "bar.in" "bar"
run [Contents "foo.out" "foo", Contents "bar.out" "bar"]
run [NoChange]
removeFile "bar.in"
writeFile "baz.in" "baz"
run [Contents "foo.out" "foo", Contents "baz.out" "baz", Missing "bar.out"]
run [NoChange]
secondary :: ([Opt] -> IO ()) -> IO ()
secondary run = do
writeFile "input" "xyz"
run [Contents "output" "xyz * *", Contents "secondary" "xyz *", Log "run run"]
run [NoChange]
removeFile "secondary"
run [Contents "output" "xyz * *", Missing "secondary", Log "run run"]
run [NoChange]
writeFile "input" "abc"
run [Contents "output" "abc * *", Contents "secondary" "abc *", Log "run run run run"]
run [NoChange]
intermediate :: ([Opt] -> IO ()) -> IO ()
intermediate run = do
writeFile "input" "xyz"
run [Contents "output" "xyz * *", Missing "intermediate", Log "run run"]
run [NoChange]
writeFile "input" "abc"
run [Contents "output" "abc * *", Missing "intermediate", Log "run run run run"]
run [NoChange]
| null | https://raw.githubusercontent.com/ndmitchell/build-shootout/40b2fd3804d89f8c7cf0a37b3a9d45a863be04ce/Main.hs | haskell | | A test script to check those build systems claiming to implement a test
do in fact do so. | #!/usr/bin/env runhaskell
module Main(main) where
import Util
main :: IO ()
main = do
test "basic" basic
test "parallel" parallel
test "include" include
test "wildcard" wildcard
test "spaces" spaces
test "monad1" monad1
test "monad2" monad2
test "monad3" monad3
test "unchanged" unchanged
test "multiple" multiple
test "system1" system1
test "system2" system2
test "pool" pool
test "digest" digest
test "nofileout" nofileout
test "noleftover" noleftover
test "secondary" secondary
test "intermediate" intermediate
basic :: ([Opt] -> IO ()) -> IO ()
basic run = do
writeFile "input" "xyz"
run [Contents "output" "xyz"]
run [NoChange]
writeFile "input" "abc"
run [Contents "output" "abc"]
run [NoChange]
parallel :: ([Opt] -> IO ()) -> IO ()
parallel run = do
writeFile "input1" "xyz"
writeFile "input2" "abc"
run [Parallel 2, Log "start start end end"]
run [NoChange]
include :: ([Opt] -> IO ()) -> IO ()
include run = do
run [Change "main.o"]
run [NoChange]
appendFile "include-2.h" "\n/* comment */"
run [Change "main.o"]
run [NoChange]
wildcard :: ([Opt] -> IO ()) -> IO ()
wildcard run = do
x <- randomRIO (1::Int,1000000)
let name = "name" ++ show x
writeFile (name ++ ".in") "abc"
run [Target $ name ++ ".out", Contents (name ++ ".out") "abc"]
run [Target $ name ++ ".out", NoChange]
writeFile (name ++ ".in") "xyz"
run [Target $ name ++ ".out", Contents (name ++ ".out") "xyz"]
run [Target $ name ++ ".out", NoChange]
spaces :: ([Opt] -> IO ()) -> IO ()
spaces run = do
writeFile "input file" "abc"
run [Target "output file", Contents "output file" "abc"]
run [Target "output file", NoChange]
writeFile "input file" "xyz"
run [Target "output file", Contents "output file" "xyz"]
run [Target "output file", NoChange]
monad1 :: ([Opt] -> IO ()) -> IO ()
monad1 run = do
writeBinary "list" "input1\ninput2\n"
writeFile "input1" "test"
writeFile "input2" "again"
run [Target "output", Contents "output" "testagain"]
run [Target "output", NoChange]
writeFile "input1" "more"
run [Target "output", Contents "output" "moreagain"]
run [Target "output", NoChange]
writeBinary "list" "input1\n"
run [Target "output", Contents "output" "more"]
run [Target "output", NoChange]
writeFile "input2" "x"
run [Target "output", NoChange]
monad2 :: ([Opt] -> IO ()) -> IO ()
monad2 run = do
writeBinary "source" "output1\noutput2\n"
writeFile "input1" "test"
writeFile "input2" "again"
run [Target "output", Contents "output" "testagain", Log "run"]
run [Target "output", NoChange, Log "run"]
writeFile "input1" "more"
run [Target "output", Contents "output" "moreagain"]
run [Target "output", NoChange]
writeBinary "source" "output1\n"
run [Target "output", Contents "output" "more", Log "run run"]
run [Target "output", NoChange]
writeFile "input2" "x"
run [Target "output", NoChange, Log "run run"]
monad3 :: ([Opt] -> IO ()) -> IO ()
monad3 run = do
writeBinary "source" "output1\noutput2\n"
writeFile "input1" "test"
writeFile "input2" "again"
run [Target "output", Contents "output" "testagain", Log "run"]
run [Target "output", NoChange, Log "run", Missing "gen"]
writeBinary "source" "gen\noutput2\n"
run [Target "output", Contents "output" "Generated\nagain"]
run [Target "output", NoChange]
unchanged :: ([Opt] -> IO ()) -> IO ()
unchanged run = do
writeFile "input" "foo is in here"
run [Target "output", Contents "source" "foo is out here", Contents "output" "foo xs out here", Log "run"]
run [Target "output", NoChange]
writeFile "input" "bar is in here"
run [Target "output", Contents "source" "bar is out here", Contents "output" "bar xs out here", Log "run run"]
run [Target "output", NoChange]
writeFile "input" "bar is out here"
run [Target "output", Contents "source" "bar is out here", Contents "output" "bar xs out here", Log "run run"]
run [Target "output", NoChange]
multiple :: ([Opt] -> IO ()) -> IO ()
multiple run = do
writeFile "input" "abbc"
run [Target "output1", Target "output2", Contents "output1" "AbbC", Contents "output2" "aBBC", Log "run run"]
run [Target "output1", Target "output2", NoChange]
writeFile "input" "aBBc"
run [Target "output1", Target "output2", Contents "output1" "ABBC", Contents "output2" "aBBC", Log "run run run"]
run [Target "output1", NoChange]
writeFile "input" "ab"
run [Target "output1", Contents "output1" "Ab", Contents "output2" "aBBC"]
run [Target "output2", Contents "output1" "Ab", Contents "output2" "aB"]
run [Target "output1", Target "output2", NoChange]
system1 :: ([Opt] -> IO ()) -> IO ()
system1 run = do
writeFile "system1-data" "foo"
writeFile "source" "none"
run [Target "output", Contents "output" "foo", Log "gen run"]
run [Target "output", Contents "output" "foo", Log "gen run gen"]
writeFile "system1-data" "bar"
run [Target "output", Contents "output" "bar", Log "gen run gen gen run"]
system2 :: ([Opt] -> IO ()) -> IO ()
system2 run = do
let varName = "SYSTEM2_DATA"
run [Contents "output" "", Log "run"]
run [NoChange]
run [Contents "output" "foo", Log "run run", Env varName "foo"]
run [NoChange, Env varName "foo"]
run [Contents "output" "bar", Log "run run run", Env varName "bar"]
run [NoChange, Env varName "bar"]
run [Contents "output" "", Log "run run run run"]
run [NoChange]
pool :: ([Opt] -> IO ()) -> IO ()
pool run = do
writeFile "input1" "xyz"
writeFile "input2" "abc"
writeFile "input3" "def"
run [Parallel 8, Log "start start end start end end"]
run [NoChange]
digest :: ([Opt] -> IO ()) -> IO ()
digest run = do
writeFile "input" "xyz"
run [Contents "output" "xyz"]
run [NoChange]
writeFile "input" "abc"
run [Contents "output" "abc"]
run [NoChange]
writeFile "input" "abc"
run [NoChange]
nofileout :: ([Opt] -> IO ()) -> IO ()
nofileout run = do
writeFile "input" "xyz"
run [Log "xyz"]
run [NoChange]
writeFile "input" "abc"
run [Log "xyzabc"]
run [NoChange]
noleftover :: ([Opt] -> IO ()) -> IO ()
noleftover run = do
writeFile "foo.in" "foo"
writeFile "bar.in" "bar"
run [Contents "foo.out" "foo", Contents "bar.out" "bar"]
run [NoChange]
removeFile "bar.in"
writeFile "baz.in" "baz"
run [Contents "foo.out" "foo", Contents "baz.out" "baz", Missing "bar.out"]
run [NoChange]
secondary :: ([Opt] -> IO ()) -> IO ()
secondary run = do
writeFile "input" "xyz"
run [Contents "output" "xyz * *", Contents "secondary" "xyz *", Log "run run"]
run [NoChange]
removeFile "secondary"
run [Contents "output" "xyz * *", Missing "secondary", Log "run run"]
run [NoChange]
writeFile "input" "abc"
run [Contents "output" "abc * *", Contents "secondary" "abc *", Log "run run run run"]
run [NoChange]
intermediate :: ([Opt] -> IO ()) -> IO ()
intermediate run = do
writeFile "input" "xyz"
run [Contents "output" "xyz * *", Missing "intermediate", Log "run run"]
run [NoChange]
writeFile "input" "abc"
run [Contents "output" "abc * *", Missing "intermediate", Log "run run run run"]
run [NoChange]
|
979b5e03110759eb65e30ac4941d602fffa73d016ae2e773eb35218e1ae0dc5e | ocaml-flambda/ocaml-jst | extensions.mli | * Syntax for our custom ocaml - jst language extensions . This module provides
two things :
1 . First - class ASTs for all syntax introduced by our language extensions ,
one for each OCaml AST we extend , divided up into one extension per
module and all available at once through modules named after the
syntactic category ( [ Expression.t ] , etc . ) .
2 . A way to interpret these values as terms of the coresponding OCaml ASTs ,
and to match on terms of those OCaml ASTs to see if they 're language
extension terms .
We keep our language extensions separate so that we can avoid having to
modify the existing AST , as this would break compatibility with every
existing ppx .
For details on the rationale behind this approach ( and for some of the gory
details ) , see [ Extensions_parsing ] .
two things:
1. First-class ASTs for all syntax introduced by our language extensions,
one for each OCaml AST we extend, divided up into one extension per
module and all available at once through modules named after the
syntactic category ([Expression.t], etc.).
2. A way to interpret these values as terms of the coresponding OCaml ASTs,
and to match on terms of those OCaml ASTs to see if they're language
extension terms.
We keep our language extensions separate so that we can avoid having to
modify the existing AST, as this would break compatibility with every
existing ppx.
For details on the rationale behind this approach (and for some of the gory
details), see [Extensions_parsing]. *)
(** The ASTs for list and array comprehensions *)
module Comprehensions : sig
type iterator =
| Range of { start : Parsetree.expression
; stop : Parsetree.expression
; direction : Asttypes.direction_flag }
* " = START to STOP " ( direction = Upto )
" = START downto STOP " ( direction = )
"= START downto STOP" (direction = Downto) *)
| In of Parsetree.expression
(** "in EXPR" *)
In , the [ pattern ] moves into the [ iterator ] .
type clause_binding =
{ pattern : Parsetree.pattern
; iterator : iterator
; attributes : Parsetree.attribute list }
(** PAT (in/=) ... [@...] *)
type clause =
| For of clause_binding list
(** "for PAT (in/=) ... and PAT (in/=) ... and ..."; must be nonempty *)
| When of Parsetree.expression
(** "when EXPR" *)
type comprehension =
{ body : Parsetree.expression
(** The body/generator of the comprehension *)
; clauses : clause list
(** The clauses of the comprehension; must be nonempty *) }
type expression =
| Cexp_list_comprehension of comprehension
(** [BODY ...CLAUSES...] *)
| Cexp_array_comprehension of Asttypes.mutable_flag * comprehension
(** [|BODY ...CLAUSES...|] (flag = Mutable)
[:BODY ...CLAUSES...:] (flag = Immutable)
(only allowed with [-extension immutable_arrays]) *)
val expr_of : loc:Location.t -> expression -> Parsetree.expression
end
(** The ASTs for immutable arrays. When we merge this upstream, we'll merge
these into the existing [P{exp,pat}_array] constructors by adding a
[mutable_flag] argument (just as we did with [T{exp,pat}_array]). *)
module Immutable_arrays : sig
type expression =
| Iaexp_immutable_array of Parsetree.expression list
(** [: E1; ...; En :] *)
(* CR aspectorzabusky: Or [Iaexp_iarray]? *)
type pattern =
| Iapat_immutable_array of Parsetree.pattern list
* [: P1 ; ... ; Pn :] *
(* CR aspectorzabusky: Or [Iapat_iarray]? *)
val expr_of : loc:Location.t -> expression -> Parsetree.expression
val pat_of : loc:Location.t -> pattern -> Parsetree.pattern
end
(** The module type of language extension ASTs, instantiated once for each
syntactic category. We tend to call the pattern-matching functions here
with unusual indentation, not indenting the [None] branch further so as to
avoid merge conflicts with upstream. *)
module type AST = sig
* The AST for all our ocaml - jst language extensions ; one constructor per
language extension that extends the expression language . Some extensions
are handled separately and thus are not listed here .
language extension that extends the expression language. Some extensions
are handled separately and thus are not listed here. *)
type t
(** The corresponding OCaml AST *)
type ast
* Given an OCaml AST node , check to see if it corresponds to a language
extension term . If it is , and the extension is enabled , then return it ;
if it 's not a language extension term , return [ None ] ; if it 's a disabled
language extension term , raise an error .
AN IMPORTANT NOTE : We indent calls to this function * very * strangely : we
* do not change the indentation level * when we match on its result !
E.g. from [ type_expect _ ] in [ typecore.ml ] :
{ [
match Extensions . Expression.of_ast sexp with
| Some eexp - >
type_expect_extension
~loc ~env ~expected_mode ~ty_expected ~explanation eexp
| None - > match sexp.pexp_desc with
| Pexp_ident lid - >
let path , mode , desc , kind = type_ident env lid in
( * ...
extension term. If it is, and the extension is enabled, then return it;
if it's not a language extension term, return [None]; if it's a disabled
language extension term, raise an error.
AN IMPORTANT NOTE: We indent calls to this function *very* strangely: we
*do not change the indentation level* when we match on its result!
E.g. from [type_expect_] in [typecore.ml]:
{[
match Extensions.Expression.of_ast sexp with
| Some eexp ->
type_expect_extension
~loc ~env ~expected_mode ~ty_expected ~explanation eexp
| None -> match sexp.pexp_desc with
| Pexp_ident lid ->
let path, mode, desc, kind = type_ident env ~recarg lid in
(* ... *)
| Pexp_constant(Pconst_string (str, _, _) as cst) ->
register_allocation expected_mode;
(* ... *)
| (* ... *)
| Pexp_unreachable ->
re { exp_desc = Texp_unreachable;
exp_loc = loc; exp_extra = [];
exp_type = instance ty_expected;
exp_mode = expected_mode.mode;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
]}
Note that we match on the result of this function, forward to
[type_expect_extension] if we get something, and otherwise do the real
match on [sexp.pexp_desc] *without going up an indentation level*. This
is important to reduce the number of merge conflicts with upstream by
avoiding changing the body of every single important function in the type
checker to add pointless indentation. *)
val of_ast : ast -> t option
end
(** Language extensions in expressions *)
module Expression : sig
type t =
| Eexp_comprehension of Comprehensions.expression
| Eexp_immutable_array of Immutable_arrays.expression
include AST with type t := t and type ast := Parsetree.expression
end
(** Language extensions in patterns *)
module Pattern : sig
type t =
| Epat_immutable_array of Immutable_arrays.pattern
include AST with type t := t and type ast := Parsetree.pattern
end
| null | https://raw.githubusercontent.com/ocaml-flambda/ocaml-jst/b1f0cf9f9114128db609bdd5a1edfda1e3144a30/parsing/extensions.mli | ocaml | * The ASTs for list and array comprehensions
* "in EXPR"
* PAT (in/=) ... [@...]
* "for PAT (in/=) ... and PAT (in/=) ... and ..."; must be nonempty
* "when EXPR"
* The body/generator of the comprehension
* The clauses of the comprehension; must be nonempty
* [BODY ...CLAUSES...]
* [|BODY ...CLAUSES...|] (flag = Mutable)
[:BODY ...CLAUSES...:] (flag = Immutable)
(only allowed with [-extension immutable_arrays])
* The ASTs for immutable arrays. When we merge this upstream, we'll merge
these into the existing [P{exp,pat}_array] constructors by adding a
[mutable_flag] argument (just as we did with [T{exp,pat}_array]).
* [: E1; ...; En :]
CR aspectorzabusky: Or [Iaexp_iarray]?
CR aspectorzabusky: Or [Iapat_iarray]?
* The module type of language extension ASTs, instantiated once for each
syntactic category. We tend to call the pattern-matching functions here
with unusual indentation, not indenting the [None] branch further so as to
avoid merge conflicts with upstream.
* The corresponding OCaml AST
...
...
...
* Language extensions in expressions
* Language extensions in patterns | * Syntax for our custom ocaml - jst language extensions . This module provides
two things :
1 . First - class ASTs for all syntax introduced by our language extensions ,
one for each OCaml AST we extend , divided up into one extension per
module and all available at once through modules named after the
syntactic category ( [ Expression.t ] , etc . ) .
2 . A way to interpret these values as terms of the coresponding OCaml ASTs ,
and to match on terms of those OCaml ASTs to see if they 're language
extension terms .
We keep our language extensions separate so that we can avoid having to
modify the existing AST , as this would break compatibility with every
existing ppx .
For details on the rationale behind this approach ( and for some of the gory
details ) , see [ Extensions_parsing ] .
two things:
1. First-class ASTs for all syntax introduced by our language extensions,
one for each OCaml AST we extend, divided up into one extension per
module and all available at once through modules named after the
syntactic category ([Expression.t], etc.).
2. A way to interpret these values as terms of the coresponding OCaml ASTs,
and to match on terms of those OCaml ASTs to see if they're language
extension terms.
We keep our language extensions separate so that we can avoid having to
modify the existing AST, as this would break compatibility with every
existing ppx.
For details on the rationale behind this approach (and for some of the gory
details), see [Extensions_parsing]. *)
module Comprehensions : sig
type iterator =
| Range of { start : Parsetree.expression
; stop : Parsetree.expression
; direction : Asttypes.direction_flag }
* " = START to STOP " ( direction = Upto )
" = START downto STOP " ( direction = )
"= START downto STOP" (direction = Downto) *)
| In of Parsetree.expression
In , the [ pattern ] moves into the [ iterator ] .
type clause_binding =
{ pattern : Parsetree.pattern
; iterator : iterator
; attributes : Parsetree.attribute list }
type clause =
| For of clause_binding list
| When of Parsetree.expression
type comprehension =
{ body : Parsetree.expression
; clauses : clause list
type expression =
| Cexp_list_comprehension of comprehension
| Cexp_array_comprehension of Asttypes.mutable_flag * comprehension
val expr_of : loc:Location.t -> expression -> Parsetree.expression
end
module Immutable_arrays : sig
type expression =
| Iaexp_immutable_array of Parsetree.expression list
type pattern =
| Iapat_immutable_array of Parsetree.pattern list
* [: P1 ; ... ; Pn :] *
val expr_of : loc:Location.t -> expression -> Parsetree.expression
val pat_of : loc:Location.t -> pattern -> Parsetree.pattern
end
module type AST = sig
* The AST for all our ocaml - jst language extensions ; one constructor per
language extension that extends the expression language . Some extensions
are handled separately and thus are not listed here .
language extension that extends the expression language. Some extensions
are handled separately and thus are not listed here. *)
type t
type ast
* Given an OCaml AST node , check to see if it corresponds to a language
extension term . If it is , and the extension is enabled , then return it ;
if it 's not a language extension term , return [ None ] ; if it 's a disabled
language extension term , raise an error .
AN IMPORTANT NOTE : We indent calls to this function * very * strangely : we
* do not change the indentation level * when we match on its result !
E.g. from [ type_expect _ ] in [ typecore.ml ] :
{ [
match Extensions . Expression.of_ast sexp with
| Some eexp - >
type_expect_extension
~loc ~env ~expected_mode ~ty_expected ~explanation eexp
| None - > match sexp.pexp_desc with
| Pexp_ident lid - >
let path , mode , desc , kind = type_ident env lid in
( * ...
extension term. If it is, and the extension is enabled, then return it;
if it's not a language extension term, return [None]; if it's a disabled
language extension term, raise an error.
AN IMPORTANT NOTE: We indent calls to this function *very* strangely: we
*do not change the indentation level* when we match on its result!
E.g. from [type_expect_] in [typecore.ml]:
{[
match Extensions.Expression.of_ast sexp with
| Some eexp ->
type_expect_extension
~loc ~env ~expected_mode ~ty_expected ~explanation eexp
| None -> match sexp.pexp_desc with
| Pexp_ident lid ->
let path, mode, desc, kind = type_ident env ~recarg lid in
| Pexp_constant(Pconst_string (str, _, _) as cst) ->
register_allocation expected_mode;
| Pexp_unreachable ->
re { exp_desc = Texp_unreachable;
exp_loc = loc; exp_extra = [];
exp_type = instance ty_expected;
exp_mode = expected_mode.mode;
exp_attributes = sexp.pexp_attributes;
exp_env = env }
]}
Note that we match on the result of this function, forward to
[type_expect_extension] if we get something, and otherwise do the real
match on [sexp.pexp_desc] *without going up an indentation level*. This
is important to reduce the number of merge conflicts with upstream by
avoiding changing the body of every single important function in the type
checker to add pointless indentation. *)
val of_ast : ast -> t option
end
module Expression : sig
type t =
| Eexp_comprehension of Comprehensions.expression
| Eexp_immutable_array of Immutable_arrays.expression
include AST with type t := t and type ast := Parsetree.expression
end
module Pattern : sig
type t =
| Epat_immutable_array of Immutable_arrays.pattern
include AST with type t := t and type ast := Parsetree.pattern
end
|
be297e5b8d425b33fcf51d02f9c542ed5d5a1b8378f12b9a945dbe2ea33c67bd | fulcrologic/fulcro-inspect | main.cljs | (ns fulcro.inspect.chrome.content-script.main
(:require [goog.object :as gobj]
[cljs.core.async :as async :refer [go go-loop chan <! >! put!]]
[fulcro.inspect.remote.transit :as encode]))
(defonce active-messages* (atom {}))
(defn ack-message [msg]
(go
(let [id (gobj/get msg "__fulcro-insect-msg-id")]
(if-let [res (some-> (get @active-messages* id) (<!))]
(do
(swap! active-messages* dissoc id)
res)
nil))))
(defn envelope-ack [data]
(let [id (str (random-uuid))]
(gobj/set data "__fulcro-insect-msg-id" id)
(swap! active-messages* assoc id (async/promise-chan))
data))
(defn setup-new-port []
(let [port (js/chrome.runtime.connect #js {:name "fulcro-inspect-remote"})]
(.addListener (gobj/get port "onMessage")
(fn [msg]
(cond
(gobj/getValueByKeys msg "fulcro-inspect-devtool-message")
(.postMessage js/window msg "*")
:else
(when-let [ch (some->> (gobj/getValueByKeys msg "__fulcro-insect-msg-id")
(get @active-messages*))]
(put! ch msg)))))
port))
(defn event-loop []
(when (js/document.documentElement.getAttribute "__fulcro-inspect-remote-installed__")
(let [content-script->background-chan (chan (async/sliding-buffer 50000))
port* (atom (setup-new-port))]
; set browser icon
(.postMessage @port* #js {:fulcro-inspect-fulcro-detected true})
; clear inspector
(put! content-script->background-chan
(envelope-ack
#js {:fulcro-inspect-remote-message
(encode/write
{:type :fulcro.inspect.client/reset
:data {}})}))
(.addEventListener js/window "message"
(fn [event]
(when (and (identical? (.-source event) js/window)
(gobj/getValueByKeys event "data" "fulcro-inspect-remote-message"))
(put! content-script->background-chan (envelope-ack (gobj/get event "data"))))))
(.postMessage js/window #js {:fulcro-inspect-start-consume true} "*")
(go-loop []
(when-let [data (<! content-script->background-chan)]
; keep trying to send
(loop []
(.postMessage @port* data)
(let [timer (async/timeout 1000)
acker (ack-message data)
[_ c] (async/alts! [acker timer] :priority true)]
; restart the port in case of a timeout
(when (= c timer)
(reset! port* (setup-new-port))
(recur))))
(recur)))))
:ready)
(defonce start (event-loop))
| null | https://raw.githubusercontent.com/fulcrologic/fulcro-inspect/a03b61cbd95384c0f03aa936368bcf5cf573fa32/src/chrome/fulcro/inspect/chrome/content_script/main.cljs | clojure | set browser icon
clear inspector
keep trying to send
restart the port in case of a timeout | (ns fulcro.inspect.chrome.content-script.main
(:require [goog.object :as gobj]
[cljs.core.async :as async :refer [go go-loop chan <! >! put!]]
[fulcro.inspect.remote.transit :as encode]))
(defonce active-messages* (atom {}))
(defn ack-message [msg]
(go
(let [id (gobj/get msg "__fulcro-insect-msg-id")]
(if-let [res (some-> (get @active-messages* id) (<!))]
(do
(swap! active-messages* dissoc id)
res)
nil))))
(defn envelope-ack [data]
(let [id (str (random-uuid))]
(gobj/set data "__fulcro-insect-msg-id" id)
(swap! active-messages* assoc id (async/promise-chan))
data))
(defn setup-new-port []
(let [port (js/chrome.runtime.connect #js {:name "fulcro-inspect-remote"})]
(.addListener (gobj/get port "onMessage")
(fn [msg]
(cond
(gobj/getValueByKeys msg "fulcro-inspect-devtool-message")
(.postMessage js/window msg "*")
:else
(when-let [ch (some->> (gobj/getValueByKeys msg "__fulcro-insect-msg-id")
(get @active-messages*))]
(put! ch msg)))))
port))
(defn event-loop []
(when (js/document.documentElement.getAttribute "__fulcro-inspect-remote-installed__")
(let [content-script->background-chan (chan (async/sliding-buffer 50000))
port* (atom (setup-new-port))]
(.postMessage @port* #js {:fulcro-inspect-fulcro-detected true})
(put! content-script->background-chan
(envelope-ack
#js {:fulcro-inspect-remote-message
(encode/write
{:type :fulcro.inspect.client/reset
:data {}})}))
(.addEventListener js/window "message"
(fn [event]
(when (and (identical? (.-source event) js/window)
(gobj/getValueByKeys event "data" "fulcro-inspect-remote-message"))
(put! content-script->background-chan (envelope-ack (gobj/get event "data"))))))
(.postMessage js/window #js {:fulcro-inspect-start-consume true} "*")
(go-loop []
(when-let [data (<! content-script->background-chan)]
(loop []
(.postMessage @port* data)
(let [timer (async/timeout 1000)
acker (ack-message data)
[_ c] (async/alts! [acker timer] :priority true)]
(when (= c timer)
(reset! port* (setup-new-port))
(recur))))
(recur)))))
:ready)
(defonce start (event-loop))
|
2a87bb3741a167cec068a65f8fee596b05b054c572806ae3b75593378bd86325 | alaq/learning-clojure-in-public | project.clj | (defproject scratch "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.0"]
[cheshire "5.3.1"]])
| null | https://raw.githubusercontent.com/alaq/learning-clojure-in-public/27d11c1d8cad289b3fb9278082812a019d42b8d8/code/scratch/project.clj | clojure | (defproject scratch "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.0"]
[cheshire "5.3.1"]])
| |
10a9d1e28c593215f8aa7a80f4b7542ed2cb97f5d1855d1596b42a422ac015e3 | arichiardi/sicp-clojure | 1_2_exercises_1_21_1_28.clj | (ns sicp-clojure.1-2-exercises-1-21-1-28
(:require [clojure.test :as t]
[clojure.math.numeric-tower :as m]
[sicp-clojure.utils :as u]
[sicp-clojure.1-2-samples :as s]))
Exercise 1.21
;; Use the smallest-divisor procedure to find the smallest divisor of each of the following numbers:
199 , 1999 , 19999 .
; See tests at the bottom.
Exercise 1.22
(defn- search-helper [start end n primes]
(cond (= n 0) primes
(= start end) primes
(even? start) (search-helper (+ start 1) end n primes)
(s/fast-prime? start 2) (search-helper (+ start 1) end (- n 1) (conj primes start))
:else (search-helper (+ start 1) end n primes)))
(defn search-first-n-primes-in-range [start end n]
"Searches for the first *n* primes in the range (both ends inclusive). Returns a vector."
(search-helper start end n []))
(defn- report-prime [elapsed-time]
(print " *** ")
(print elapsed-time))
(defn- start-prime-test [n start-time]
(if (s/prime? n)
(report-prime (- (System/nanoTime) start-time))))
(defn timed-prime-test [n]
(print "\n" n)
(start-prime-test n (System/nanoTime)))
(defn search-for-primes [start end]
(cond (>= start end) (println "\nDone!")
(even? start) (search-for-primes (+ start 1) end)
:else (do (timed-prime-test start)
(search-for-primes (+ start 2) end))))
( search - first - n - primes - in - range 15 9999 3 )
;;(search-first-n-primes-in-range 10000 99999 3)
;;(search-first-n-primes-in-range 100000 999999 3)
( search - first - n - primes - in - range 1000000 9999999 3 )
( search - first - n - primes - in - range 10000000 99999999 3 )
( search - for - primes 1008 1020 ) ; = > ~ 2800
( search - for - primes 10006 10038 ) ; = > ~ 8500
( search - for - primes 100002 100044 ) ; = > ~ 17000
( search - for - primes 1000002 1000038 ) ; = > ~ 55000
;; Even if the recursion causes stack overflow most of the times, the average timing (notice that
;; the time unit is nanoseconds) of each search kind of shows an increase of around (sqrt 10),
especially between > 100000 and > 10000000 .
In any case , these kind of micro benchmarks are really difficult on the JVM in general ,
see .
( u / microbench 100 ( prime ? 1009 ) ) ; Average : 0.014561444444444441
( u / microbench 100 ( prime ? 1013 ) ) ;
( u / microbench 100 ( prime ? ) ) ;
( u / microbench 100 ( prime ? 10007 ) ) ;
( u / microbench 100 ( prime ? 10009 ) ) ;
( u / microbench 100 ( prime ? 10037 ) ) ; Average : 0.015071555555555553
( u / microbench 100 ( prime ? 100003 ) ) ;
( u / microbench 100 ( prime ? 100019 ) ) ;
( u / microbench 100 ( prime ? 100043 ) ) ; Average : 0.032170000000000004
( u / microbench 100 ( prime ? 1000003 ) ) ;
( u / microbench 100 ( prime ? 1000033 ) ) ; Average : 0.07697216666666665
( u / microbench 100 ( prime ? 1000037 ) ) ;
;; Trying with another micro benchmark tool changes a little bit the result. The average time doesn't
really show a ( sqrt 10 ) increase from one step to another .
Exercise 1.23
(defn- next* [n]
(if (= n 2) 3 (+ n 2)))
(defn- find-divisor* [n test-divisor]
(cond (> (u/square test-divisor) n) n
(s/divides? test-divisor n) test-divisor
:else (find-divisor* n (next* test-divisor))))
(defn smallest-divisor* [n]
(find-divisor* n 2))
(defn prime*? [n]
(= (smallest-divisor* n) n))
(defn- start-prime-test* [n start-time]
(if (prime*? n)
(report-prime (- (System/nanoTime) start-time))))
(defn timed-prime-test* [n]
(print "\n" n)
(start-prime-test* n (System/nanoTime)))
(defn search-for-primes* [start end]
(cond (>= start end) (println "\nDone!")
(even? start) (search-for-primes* (+ start 1) end)
:else (do (timed-prime-test* start)
(search-for-primes* (+ start 2) end))))
( search - for - primes * 1008 1020 ) ; = > ~ 2800
( search - for - primes * 10006 10038 ) ; = > ~ 8500
( search - for - primes * 100002 100044 ) ; = > ~ 17000
( search - for - primes * 1000002 1000038 ) ; = > ~ 55000
( u / microbench 100 ( prime * ? 1009 ) ) ; Average : 0.0106535
( u / microbench 100 ( prime * ? 1013 ) ) ;
( u / microbench 100 ( prime * ? ) ) ;
( u / microbench 100 ( prime * ? 10007 ) ) ;
( u / microbench 100 ( prime * ? 10009 ) ) ;
( u / microbench 100 ( prime * ? 10037 ) ) ; Average : 0.012052333333333335
( u / microbench 100 ( prime * ? 100003 ) ) ;
( u / microbench 100 ( prime * ? 100019 ) ) ;
( u / microbench 100 ( prime * ? 100043 ) ) ; Average : 0.02210261111111111
( u / microbench 100 ( prime * ? 1000003 ) ) ;
( u / microbench 100 ( prime * ? 1000033 ) ) ; Average : 0.05496777777777779
( u / microbench 100 ( prime * ? 1000037 ) ) ;
The difference in average time between prime ? and prime * ? ( using next * ) is not exactly 2 .
;; This can be explained noticing that next* introduces branching (if) that can have some
;; performance hit.
Exercise 1.24
(defn- start-prime-test** [n start-time]
(if (s/fast-prime? n 100)
(report-prime (- (System/nanoTime) start-time))))
(defn timed-prime-test** [n]
(print "\n" n)
(start-prime-test** n (System/nanoTime)))
(defn search-for-primes** [start end]
(cond (>= start end) (println "\nDone!")
(even? start) (search-for-primes** (+ start 1) end)
:else (do (timed-prime-test** start)
(search-for-primes** (+ start 2) end))))
( search - for - primes * * 1008 1020 )
;;(search-for-primes** 10006 10038)
( search - for - primes * * 100002 100044 )
;;(search-for-primes** 1000002 1000038)
;; There is no noticeable difference in the benchmarks of fast-prime?, even using some more powerful
tools . The numbers are probably too small or some JVM hocus - pocus is acting weird behind the scene .
Furthermore , some Clojure 's internal function produces stack overflow when using big numbers .
Exercise 1.25
(defn expmod-alyssa [base exp m]
(rem (s/fast-expt base exp) m))
As note 46 says , there is a huge difference in the expmod implementation of the book and the naive
version wrote by . The book 's implementation arrives at the result by applying mod m to
;; each partial value returned. This means that the values handled will not grow much more than m.
's version will easily produce big numbers that will of course slow down the execution
( Clojure will convert to BigInt , which allows for arbitrary precision but is slower than Long / Integer ) .
Exercise 1.26
By replacing ( square ... ) with ( * ... ) , has forced the applicative - order interpreter to
;; evaluate (expmod ...) twice per iteration, as soon as (remainder (* ...) ...) needs evaluation.
Therefore , now there are two recursive calls to expmod , which will unfold in exactly the same way ,
duplicating espressions like in the tree - recursive implementation .
Exercise 1.27
Demonstrate that the numbers listed in footnote 47 really do fool the Fermat test .
In order to avoid stack overflows we need to use Clojure 's optimized recur construct .
(defn- f-helper [n a]
(cond (= n a) true
(= (s/expmod a n n) a) (recur n (inc a))
:else false))
(defn fermat-test-check [n]
(f-helper n 2))
Exercise 1.28
Modify the expmod procedure to signal if it discovers a nontrivial square root of 1 , and use
this to implement the - Rabin test with a procedure analogous to fermat - test .
(defn- check-and-square [a m]
(def square-modulo (rem (u/square a) m))
(if (or (= a 1) (= a (- m 1)) (not (= square-modulo 1)))
square-modulo
0))
(defn- expmod-mr [base exp m]
(cond (= exp 0) 1
(even? exp) (check-and-square (expmod-mr base (quot exp 2) m) m)
:else (rem (*' base (expmod-mr base (- exp 1) m)) m)))
(defn miller-rabin-test [n]
(defn try-it [a n]
(= (expmod-mr a (- n 1) n) 1))
(try-it (+ 1 (m/round (m/floor (rand (- n 1))))) n))
(defn fast-prime*? [n times]
(cond (= times 0) true
(miller-rabin-test n) (fast-prime*? n (- times 1))
:else false))
;; Note that some of the tests below are probabilistic and they can produce a different
;; results (although with very low probability).
(t/deftest tests
(t/is (= 199 (s/smallest-divisor 199)))
(t/is (= 1999 (s/smallest-divisor 1999)))
(t/is (= 7 (s/smallest-divisor 19999)))
(t/is (= true (fermat-test-check 31)))
(t/is (= false (fermat-test-check 32)))
Charmichael number
Charmichael number
Charmichael number
Charmichael number
Charmichael number
Charmichael number
(t/is (= true (fast-prime*? 31 50)))
(t/is (= false (fast-prime*? 32 50)))
Charmichael number
Charmichael number
Charmichael number
Charmichael number
Charmichael number
Charmichael number
(t/run-tests)
| null | https://raw.githubusercontent.com/arichiardi/sicp-clojure/2dc128726406b12de3eaf38fea58dc469e3a60a6/src/sicp_clojure/1_2_exercises_1_21_1_28.clj | clojure | Use the smallest-divisor procedure to find the smallest divisor of each of the following numbers:
See tests at the bottom.
(search-first-n-primes-in-range 10000 99999 3)
(search-first-n-primes-in-range 100000 999999 3)
= > ~ 2800
= > ~ 8500
= > ~ 17000
= > ~ 55000
Even if the recursion causes stack overflow most of the times, the average timing (notice that
the time unit is nanoseconds) of each search kind of shows an increase of around (sqrt 10),
Average : 0.014561444444444441
Average : 0.015071555555555553
Average : 0.032170000000000004
Average : 0.07697216666666665
Trying with another micro benchmark tool changes a little bit the result. The average time doesn't
= > ~ 2800
= > ~ 8500
= > ~ 17000
= > ~ 55000
Average : 0.0106535
Average : 0.012052333333333335
Average : 0.02210261111111111
Average : 0.05496777777777779
This can be explained noticing that next* introduces branching (if) that can have some
performance hit.
(search-for-primes** 10006 10038)
(search-for-primes** 1000002 1000038)
There is no noticeable difference in the benchmarks of fast-prime?, even using some more powerful
each partial value returned. This means that the values handled will not grow much more than m.
evaluate (expmod ...) twice per iteration, as soon as (remainder (* ...) ...) needs evaluation.
Note that some of the tests below are probabilistic and they can produce a different
results (although with very low probability). | (ns sicp-clojure.1-2-exercises-1-21-1-28
(:require [clojure.test :as t]
[clojure.math.numeric-tower :as m]
[sicp-clojure.utils :as u]
[sicp-clojure.1-2-samples :as s]))
Exercise 1.21
199 , 1999 , 19999 .
Exercise 1.22
(defn- search-helper [start end n primes]
(cond (= n 0) primes
(= start end) primes
(even? start) (search-helper (+ start 1) end n primes)
(s/fast-prime? start 2) (search-helper (+ start 1) end (- n 1) (conj primes start))
:else (search-helper (+ start 1) end n primes)))
(defn search-first-n-primes-in-range [start end n]
"Searches for the first *n* primes in the range (both ends inclusive). Returns a vector."
(search-helper start end n []))
(defn- report-prime [elapsed-time]
(print " *** ")
(print elapsed-time))
(defn- start-prime-test [n start-time]
(if (s/prime? n)
(report-prime (- (System/nanoTime) start-time))))
(defn timed-prime-test [n]
(print "\n" n)
(start-prime-test n (System/nanoTime)))
(defn search-for-primes [start end]
(cond (>= start end) (println "\nDone!")
(even? start) (search-for-primes (+ start 1) end)
:else (do (timed-prime-test start)
(search-for-primes (+ start 2) end))))
( search - first - n - primes - in - range 15 9999 3 )
( search - first - n - primes - in - range 1000000 9999999 3 )
( search - first - n - primes - in - range 10000000 99999999 3 )
especially between > 100000 and > 10000000 .
In any case , these kind of micro benchmarks are really difficult on the JVM in general ,
see .
really show a ( sqrt 10 ) increase from one step to another .
Exercise 1.23
(defn- next* [n]
(if (= n 2) 3 (+ n 2)))
(defn- find-divisor* [n test-divisor]
(cond (> (u/square test-divisor) n) n
(s/divides? test-divisor n) test-divisor
:else (find-divisor* n (next* test-divisor))))
(defn smallest-divisor* [n]
(find-divisor* n 2))
(defn prime*? [n]
(= (smallest-divisor* n) n))
(defn- start-prime-test* [n start-time]
(if (prime*? n)
(report-prime (- (System/nanoTime) start-time))))
(defn timed-prime-test* [n]
(print "\n" n)
(start-prime-test* n (System/nanoTime)))
(defn search-for-primes* [start end]
(cond (>= start end) (println "\nDone!")
(even? start) (search-for-primes* (+ start 1) end)
:else (do (timed-prime-test* start)
(search-for-primes* (+ start 2) end))))
The difference in average time between prime ? and prime * ? ( using next * ) is not exactly 2 .
Exercise 1.24
(defn- start-prime-test** [n start-time]
(if (s/fast-prime? n 100)
(report-prime (- (System/nanoTime) start-time))))
(defn timed-prime-test** [n]
(print "\n" n)
(start-prime-test** n (System/nanoTime)))
(defn search-for-primes** [start end]
(cond (>= start end) (println "\nDone!")
(even? start) (search-for-primes** (+ start 1) end)
:else (do (timed-prime-test** start)
(search-for-primes** (+ start 2) end))))
( search - for - primes * * 1008 1020 )
( search - for - primes * * 100002 100044 )
tools . The numbers are probably too small or some JVM hocus - pocus is acting weird behind the scene .
Furthermore , some Clojure 's internal function produces stack overflow when using big numbers .
Exercise 1.25
(defn expmod-alyssa [base exp m]
(rem (s/fast-expt base exp) m))
As note 46 says , there is a huge difference in the expmod implementation of the book and the naive
version wrote by . The book 's implementation arrives at the result by applying mod m to
's version will easily produce big numbers that will of course slow down the execution
( Clojure will convert to BigInt , which allows for arbitrary precision but is slower than Long / Integer ) .
Exercise 1.26
By replacing ( square ... ) with ( * ... ) , has forced the applicative - order interpreter to
Therefore , now there are two recursive calls to expmod , which will unfold in exactly the same way ,
duplicating espressions like in the tree - recursive implementation .
Exercise 1.27
Demonstrate that the numbers listed in footnote 47 really do fool the Fermat test .
In order to avoid stack overflows we need to use Clojure 's optimized recur construct .
(defn- f-helper [n a]
(cond (= n a) true
(= (s/expmod a n n) a) (recur n (inc a))
:else false))
(defn fermat-test-check [n]
(f-helper n 2))
Exercise 1.28
Modify the expmod procedure to signal if it discovers a nontrivial square root of 1 , and use
this to implement the - Rabin test with a procedure analogous to fermat - test .
(defn- check-and-square [a m]
(def square-modulo (rem (u/square a) m))
(if (or (= a 1) (= a (- m 1)) (not (= square-modulo 1)))
square-modulo
0))
(defn- expmod-mr [base exp m]
(cond (= exp 0) 1
(even? exp) (check-and-square (expmod-mr base (quot exp 2) m) m)
:else (rem (*' base (expmod-mr base (- exp 1) m)) m)))
(defn miller-rabin-test [n]
(defn try-it [a n]
(= (expmod-mr a (- n 1) n) 1))
(try-it (+ 1 (m/round (m/floor (rand (- n 1))))) n))
(defn fast-prime*? [n times]
(cond (= times 0) true
(miller-rabin-test n) (fast-prime*? n (- times 1))
:else false))
(t/deftest tests
(t/is (= 199 (s/smallest-divisor 199)))
(t/is (= 1999 (s/smallest-divisor 1999)))
(t/is (= 7 (s/smallest-divisor 19999)))
(t/is (= true (fermat-test-check 31)))
(t/is (= false (fermat-test-check 32)))
Charmichael number
Charmichael number
Charmichael number
Charmichael number
Charmichael number
Charmichael number
(t/is (= true (fast-prime*? 31 50)))
(t/is (= false (fast-prime*? 32 50)))
Charmichael number
Charmichael number
Charmichael number
Charmichael number
Charmichael number
Charmichael number
(t/run-tests)
|
255148730b46280d3f72bdd899e5bb81c58c0dd018a8aeaa84dbda50c318f222 | vert-x/mod-lang-clojure | buffer.clj | Copyright 2013 the original author or authors .
;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; -2.0
;;
;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns vertx.buffer
"Functions for operating on Vert.x Buffers.
A Buffer represents a sequence of zero or more bytes that can be
written to or read from, and which expands as necessary to
accommodate any bytes written to it.
append!, set! and as-buffer take several different types of data
that can be written to a buffer these types are referred to
collectively as \"bufferable\", and are:
* Buffer
* Byte
* byte[]
* Double
* BigDecimal (coerced to a Double)
* Ratio (coerced to a Double)
* Float
* Integer
* Long
* BigInt (coerced to a Long)
* Short
* String"
(:require [clojure.string :as str]
[vertx.core :as core]
[vertx.utils :as u])
(:import org.vertx.java.core.buffer.Buffer
[org.vertx.java.core.parsetools RecordParser]
[clojure.lang BigInt Ratio]
java.math.BigDecimal
java.nio.ByteBuffer))
(defn buffer
"Creates a new Buffer instance.
arg can be:
* a String, which will be written to the buffer as UTF-8
* a byte[], which will be written to the buffer
* an int, which specifies an initial size hint
You can also provide a String along with a second argument
specifying the encoding."
([]
(Buffer.))
([arg]
(condp instance? arg
u/byte-arr-class (Buffer. ^bytes arg)
String (Buffer. ^String arg)
(Buffer. (int arg))))
([str enc]
(Buffer. str enc)))
(defn append!
"Appends bufferable data to the end of a buffer.
If data is a byte-array or Buffer, you can also specify an offset
to start copying from in data, and len of bytes to copy. Returns
the mutated buffer instance."
([^Buffer buf data]
(condp instance? data
Buffer (.appendBuffer buf data)
Byte (.appendByte buf data)
u/byte-arr-class (.appendBytes buf data)
Double (.appendDouble buf data)
BigDecimal (.appendDouble buf (double data))
Ratio (.appendDouble buf (double data))
Float (.appendFloat buf data)
Integer (.appendInt buf data)
Long (.appendLong buf data)
BigInt (.appendLong buf (long data))
Short (.appendShort buf data)
String (.appendString buf data)
(throw (IllegalArgumentException.
(str "Can't append data of class " (class data))))))
([^Buffer buf data-string encoding]
(.appendString buf data-string encoding))
([^Buffer buf data offset len]
(condp instance? data
Buffer (.appendBuffer buf data offset len)
u/byte-arr-class (.appendBytes buf data offset len)
(throw (IllegalArgumentException.
(str "Can't append data of class " (class data) " with offset"))))))
(defn set!
"Sets bufferable data in a buffer.
The data is set at the offset specified by loc. If data is a
byte-array or Buffer, you can also specify an offset to start
copying from in data, and len of bytes to copy. Returns
the mutated buffer instance."
([^Buffer buf ^Integer loc data]
(condp instance? data
Buffer (.setBuffer buf loc data)
Byte (.setByte buf loc data)
u/byte-arr-class (.setBytes buf loc ^bytes data)
ByteBuffer (.setBytes buf loc ^ByteBuffer data)
Double (.setDouble buf loc data)
BigDecimal (.setDouble buf loc (double data))
Ratio (.setDouble buf loc (double data))
Float (.setFloat buf loc data)
Integer (.setInt buf loc data)
Long (.setLong buf loc data)
BigInt (.setLong buf loc (long data))
Short (.setShort buf loc data)
String (.setString buf loc data)
(throw (IllegalArgumentException.
(str "Can't set data of class " (class data))))))
([^Buffer buf loc data-string encoding]
(.setString buf loc data-string encoding))
([^Buffer buf loc data offset len]
(condp instance? data
Buffer (.setBuffer buf loc data offset len)
u/byte-arr-class (.setBytes buf loc data offset len)
(throw (IllegalArgumentException.
(str "Can't set data of class " (class data) " with offset"))))))
(defn ^Buffer as-buffer
"Wraps bufferable data in a buffer unless it is already one."
[data]
(if (or (nil? data) (instance? Buffer data))
data
(doto (buffer)
(append! data))))
(defn get-buffer
"Returns a copy of a sub-sequence of buf as a Buffer starting
at start and ending at end - 1."
[^Buffer buf start end]
(.getBuffer buf start end))
(defmacro ^:private def-get [name len]
(let [fname (symbol (str "get-" name))
doc (format "Returns the %s at position pos in buf.\n Throws IndexOutOfBoundsException if the specified pos is less than 0\n or pos + %s is greater than the length of buf." name len)
method (symbol (str ".get" (clojure.string/capitalize name)))
buf (with-meta 'buf {:tag 'Buffer})]
`(defn ~fname ~doc [~buf ~'pos] (~method ~'buf ~'pos))))
(def-get byte 1)
(def-get int 4)
(def-get long 8)
(def-get double 8)
(def-get float 4)
(def-get short 2)
(defn get-bytes
"Returns a copy of all or a portion of buf as a java byte array.
If start and end are provided, it returns a copy of a sub-sequnce
starting at start and ending at end - 1, otherwise it returns a
copy of the entire buf."
([^Buffer buf]
(.getBytes buf))
([^Buffer buf start end]
(.getBytes buf start end)))
(defn get-string
"Returns a copy of a sub-sequence the buf as a String starting at
start and ending at end - 1 in the given encoding (defaulting to
UTF-8)."
([^Buffer buf start end]
(.getString buf start end))
([^Buffer buf start end encoding]
(.getString buf start end encoding)))
(defn ^RecordParser fixed-parser
"Creates a fixed-size RecordParser.
A fixed-size parser can be used to parse protocol data that may be
delivered across many buffers. For example, a fixed-size parser
with a size of 4 would take:
buffer1:1234567
buffer2:8
buffer3:90123456
and invoke the handler four times with:
buffer1:1234
buffer2:5678
buffer3:9012
buffer4:3456
handler can either be a single-arity fn or a Handler instance that
will be passed the Buffer for each parsed fragment. See
org.vertx.java.core.parsetools.RecordParser for more details."
[size handler]
(RecordParser/newFixed size (core/as-handler handler)))
(defn ^RecordParser delimited-parser
"Creates a delimited RecordParser.
A delimited parser can be used to parse protocol data that may be
delivered across many buffers. For example, a delimited parser
with a delimiter of \"\\n\" would take:
buffer1:Hello\\nHow are y
buffer2:ou?\\nI am
buffer3:fine.
buffer4:\\n
and invoke the handler three times with:
buffer1:Hello
buffer2:How are you?
buffer3:I am fine.
handler can either be a single-arity fn or a Handler instance that
will be passed the Buffer for each parsed fragment. See
org.vertx.java.core.parsetools.RecordParser for more details."
[^String delim handler]
(RecordParser/newDelimited (.getBytes delim)
(core/as-handler handler)))
(defn parse-buffer
"Parses buf with parser."
[^RecordParser parser ^Buffer buf]
(.handle parser buf))
(defn parse-fixed
"Convience function that creates a fixed-size parser and uses it to parse buf."
[buf size handler]
(-> (fixed-parser size handler) (parse-buffer buf)))
(defn parse-delimited
"Convience function that creates a delimited parser and uses it to parse buf."
[^Buffer buf delim handler]
(-> (delimited-parser delim handler) (.handle buf)))
| null | https://raw.githubusercontent.com/vert-x/mod-lang-clojure/dcf713460b8f46c08d0db6e7bf8537f1dd91f297/api/src/main/clojure/vertx/buffer.clj | clojure |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | Copyright 2013 the original author or authors .
distributed under the License is distributed on an " AS IS " BASIS ,
(ns vertx.buffer
"Functions for operating on Vert.x Buffers.
A Buffer represents a sequence of zero or more bytes that can be
written to or read from, and which expands as necessary to
accommodate any bytes written to it.
append!, set! and as-buffer take several different types of data
that can be written to a buffer these types are referred to
collectively as \"bufferable\", and are:
* Buffer
* Byte
* byte[]
* Double
* BigDecimal (coerced to a Double)
* Ratio (coerced to a Double)
* Float
* Integer
* Long
* BigInt (coerced to a Long)
* Short
* String"
(:require [clojure.string :as str]
[vertx.core :as core]
[vertx.utils :as u])
(:import org.vertx.java.core.buffer.Buffer
[org.vertx.java.core.parsetools RecordParser]
[clojure.lang BigInt Ratio]
java.math.BigDecimal
java.nio.ByteBuffer))
(defn buffer
"Creates a new Buffer instance.
arg can be:
* a String, which will be written to the buffer as UTF-8
* a byte[], which will be written to the buffer
* an int, which specifies an initial size hint
You can also provide a String along with a second argument
specifying the encoding."
([]
(Buffer.))
([arg]
(condp instance? arg
u/byte-arr-class (Buffer. ^bytes arg)
String (Buffer. ^String arg)
(Buffer. (int arg))))
([str enc]
(Buffer. str enc)))
(defn append!
"Appends bufferable data to the end of a buffer.
If data is a byte-array or Buffer, you can also specify an offset
to start copying from in data, and len of bytes to copy. Returns
the mutated buffer instance."
([^Buffer buf data]
(condp instance? data
Buffer (.appendBuffer buf data)
Byte (.appendByte buf data)
u/byte-arr-class (.appendBytes buf data)
Double (.appendDouble buf data)
BigDecimal (.appendDouble buf (double data))
Ratio (.appendDouble buf (double data))
Float (.appendFloat buf data)
Integer (.appendInt buf data)
Long (.appendLong buf data)
BigInt (.appendLong buf (long data))
Short (.appendShort buf data)
String (.appendString buf data)
(throw (IllegalArgumentException.
(str "Can't append data of class " (class data))))))
([^Buffer buf data-string encoding]
(.appendString buf data-string encoding))
([^Buffer buf data offset len]
(condp instance? data
Buffer (.appendBuffer buf data offset len)
u/byte-arr-class (.appendBytes buf data offset len)
(throw (IllegalArgumentException.
(str "Can't append data of class " (class data) " with offset"))))))
(defn set!
"Sets bufferable data in a buffer.
The data is set at the offset specified by loc. If data is a
byte-array or Buffer, you can also specify an offset to start
copying from in data, and len of bytes to copy. Returns
the mutated buffer instance."
([^Buffer buf ^Integer loc data]
(condp instance? data
Buffer (.setBuffer buf loc data)
Byte (.setByte buf loc data)
u/byte-arr-class (.setBytes buf loc ^bytes data)
ByteBuffer (.setBytes buf loc ^ByteBuffer data)
Double (.setDouble buf loc data)
BigDecimal (.setDouble buf loc (double data))
Ratio (.setDouble buf loc (double data))
Float (.setFloat buf loc data)
Integer (.setInt buf loc data)
Long (.setLong buf loc data)
BigInt (.setLong buf loc (long data))
Short (.setShort buf loc data)
String (.setString buf loc data)
(throw (IllegalArgumentException.
(str "Can't set data of class " (class data))))))
([^Buffer buf loc data-string encoding]
(.setString buf loc data-string encoding))
([^Buffer buf loc data offset len]
(condp instance? data
Buffer (.setBuffer buf loc data offset len)
u/byte-arr-class (.setBytes buf loc data offset len)
(throw (IllegalArgumentException.
(str "Can't set data of class " (class data) " with offset"))))))
(defn ^Buffer as-buffer
"Wraps bufferable data in a buffer unless it is already one."
[data]
(if (or (nil? data) (instance? Buffer data))
data
(doto (buffer)
(append! data))))
(defn get-buffer
"Returns a copy of a sub-sequence of buf as a Buffer starting
at start and ending at end - 1."
[^Buffer buf start end]
(.getBuffer buf start end))
(defmacro ^:private def-get [name len]
(let [fname (symbol (str "get-" name))
doc (format "Returns the %s at position pos in buf.\n Throws IndexOutOfBoundsException if the specified pos is less than 0\n or pos + %s is greater than the length of buf." name len)
method (symbol (str ".get" (clojure.string/capitalize name)))
buf (with-meta 'buf {:tag 'Buffer})]
`(defn ~fname ~doc [~buf ~'pos] (~method ~'buf ~'pos))))
(def-get byte 1)
(def-get int 4)
(def-get long 8)
(def-get double 8)
(def-get float 4)
(def-get short 2)
(defn get-bytes
"Returns a copy of all or a portion of buf as a java byte array.
If start and end are provided, it returns a copy of a sub-sequnce
starting at start and ending at end - 1, otherwise it returns a
copy of the entire buf."
([^Buffer buf]
(.getBytes buf))
([^Buffer buf start end]
(.getBytes buf start end)))
(defn get-string
"Returns a copy of a sub-sequence the buf as a String starting at
start and ending at end - 1 in the given encoding (defaulting to
UTF-8)."
([^Buffer buf start end]
(.getString buf start end))
([^Buffer buf start end encoding]
(.getString buf start end encoding)))
(defn ^RecordParser fixed-parser
"Creates a fixed-size RecordParser.
A fixed-size parser can be used to parse protocol data that may be
delivered across many buffers. For example, a fixed-size parser
with a size of 4 would take:
buffer1:1234567
buffer2:8
buffer3:90123456
and invoke the handler four times with:
buffer1:1234
buffer2:5678
buffer3:9012
buffer4:3456
handler can either be a single-arity fn or a Handler instance that
will be passed the Buffer for each parsed fragment. See
org.vertx.java.core.parsetools.RecordParser for more details."
[size handler]
(RecordParser/newFixed size (core/as-handler handler)))
(defn ^RecordParser delimited-parser
"Creates a delimited RecordParser.
A delimited parser can be used to parse protocol data that may be
delivered across many buffers. For example, a delimited parser
with a delimiter of \"\\n\" would take:
buffer1:Hello\\nHow are y
buffer2:ou?\\nI am
buffer3:fine.
buffer4:\\n
and invoke the handler three times with:
buffer1:Hello
buffer2:How are you?
buffer3:I am fine.
handler can either be a single-arity fn or a Handler instance that
will be passed the Buffer for each parsed fragment. See
org.vertx.java.core.parsetools.RecordParser for more details."
[^String delim handler]
(RecordParser/newDelimited (.getBytes delim)
(core/as-handler handler)))
(defn parse-buffer
"Parses buf with parser."
[^RecordParser parser ^Buffer buf]
(.handle parser buf))
(defn parse-fixed
"Convience function that creates a fixed-size parser and uses it to parse buf."
[buf size handler]
(-> (fixed-parser size handler) (parse-buffer buf)))
(defn parse-delimited
"Convience function that creates a delimited parser and uses it to parse buf."
[^Buffer buf delim handler]
(-> (delimited-parser delim handler) (.handle buf)))
|
f0bfe9cce0105a424c76ff2fc55927a0b84aa0232bb181b2aa1ea73593a13bf6 | dmiller/clr.core.logic | nominal.clj | Copyright ( c ) , , contributors . All rights reserved .
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (-1.0.php)
; which can be found in the file epl-v10.html at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
(ns clojure.core.logic.nominal
(:refer-clojure :exclude [== hash])
(:use [clojure.core.logic.protocols]
[clojure.core.logic :exclude [fresh] :as l])
(:require [clojure.pprint :as pp])
(:import [System.IO TextWriter] ;;; [java.io Writer]
[clojure.core.logic LVar LCons]
[clojure.core.logic.protocols IBindable ITreeTerm]))
;; =============================================================================
;; Nominal unification with fresh, hash and tie.
;;
;; Some references / inspiration:
;; alphaKanren - /~webyrd/alphamk/alphamk.pdf
;; Nominal Unification - /~amp12/papers/nomu/nomu-jv.pdf
;; -research/blob/master/lib/minikanren/nominal.sls
;; =============================================================================
;; Nominal unification protocols
(defprotocol INomSwap
(swap-noms [t swap s]))
(defn nom-swap [a swap]
(cond
(= a (first swap)) (second swap)
(= a (second swap)) (first swap)
:else a))
(declare suspc)
(extend-protocol INomSwap
nil
(swap-noms [t swap s] [t s])
Object
(swap-noms [t swap s] [t s])
LVar
(swap-noms [t swap s]
(let [t (walk s t)]
(if (lvar? t)
(let [v (with-meta (lvar) (meta t))
rt (root-val s t)
s (-> (if (subst-val? rt) (ext-no-check s v rt) s)
(entangle t v)
((suspc v t swap)))]
[v s])
(swap-noms t swap s))))
LCons
(swap-noms [t swap s]
(let [[tfirst s] (swap-noms (lfirst t) swap s)
[tnext s] (swap-noms (lnext t) swap s)]
[(with-meta (lcons tfirst tnext) (meta t))
s]))
clojure.lang.IPersistentCollection
(swap-noms [t swap s]
(if (seq t)
(let [[tfirst s] (swap-noms (first t) swap s)
[tnext s] (swap-noms (next t) swap s)]
[(with-meta (cons tfirst tnext) (meta t)) s])
[t s]))
clojure.lang.IPersistentVector
(swap-noms [t swap s]
(let [[ts s] (swap-noms (seq t) swap s)]
[(vec ts) s]))
clojure.lang.IPersistentMap
(swap-noms [t swap s]
(let [[tkvs s] (swap-noms (seq t) swap s)]
[(into {} tkvs) s])))
;; =============================================================================
Nom
(declare nom)
(deftype Nom [lvar]
IBindable
Object
(ToString [_] ;;; toString
(str "<nom:" (:name lvar) ">"))
(GetHashCode [_] ;;; hashCode
(.GetHashCode lvar)) ;;; .hashCode
(Equals [this o] ;;; equals
(and (.. this GetType (IsInstanceOfType o)) ;;; getClass isInstance
(= lvar (:lvar o))))
clojure.lang.IObj
(withMeta [this new-meta]
(nom (with-meta lvar new-meta)))
(meta [this]
(meta lvar))
clojure.lang.ILookup
(valAt [this k]
(.valAt this k nil))
(valAt [_ k not-found]
(case k
:lvar lvar
:name (:name lvar)
:oname (:oname lvar)
not-found))
IReifyTerm
(reify-term [v s]
(ext s v (symbol (str (if (-> s meta (:reify-noms true)) "a" (:oname v)) "_" (count s)))))
INomSwap
(swap-noms [t swap s]
[(nom-swap t swap) s]))
(defn nom [lvar]
(Nom. lvar))
(defn nom? [x]
(instance? clojure.core.logic.nominal.Nom x))
(defn- nom-bind [sym]
((juxt identity
(fn [s] `(nom (lvar '~s)))) sym))
(defn- nom-binds [syms]
(mapcat nom-bind syms))
(defmacro fresh
"Creates fresh noms. Goals occuring within form a logical conjunction."
[[& noms] & goals]
`(fn [a#]
(-inc
(let [~@(nom-binds noms)]
(bind* a# ~@goals)))))
(defmethod print-method Nom [x ^TextWriter writer] ;;; ^Writer
.write
;; =============================================================================
;; hash: ensure a nom is free in a term
(declare tie? hash)
(defn- -hash [a x]
(reify
Object
(ToString [_] ;;; toString
(str a "#" x))
IConstraintStep
(-step [this s]
(let [a (walk s a)
x (walk s x)]
(reify
clojure.lang.IFn
(invoke [_ s]
((composeg*
(remcg this)
(fn [s]
(cond
(and (lvar? a) (lvar? x) (= x a)) nil
(and (nom? a) (nom? x) (= x a)) nil
(and (not (lvar? a)) (not (nom? a))) nil
(and (nom? a) (tie? x) (= (:binding-nom x) a)) s
(and (tree-term? x)
(or (not (tie? x)) (nom? a)))
((constrain-tree x
(fn [t s] ((hash a t) s))) s)
:else s))) s))
IRunnable
(-runnable? [_]
(if (lvar? a)
(or (and (lvar? x) (= x a))
(and (tree-term? x) (not (tie? x))))
(or (not (nom? a))
(not (lvar? x))))))))
IConstraintOp
(-rator [_] `hash)
(-rands [_] [a x])
IReifiableConstraint
(-reifyc [_ v r s]
(let [x (walk* r (walk* s x))
a (walk* r (walk* s a))]
;; Filter constraints unrelated to reified variables.
(when (and (symbol? a) (empty? (->> (list x) flatten (filter lvar?))))
(symbol (str a "#" x)))))
IConstraintWatchedStores
(-watched-stores [this] #{::l/subst})))
(defn hash [a t]
(cgoal (-hash a t)))
;; =============================================================================
;; Suspensions as constraints
(defn- -do-suspc [t1 t2 swap a]
(let [x (loop [vs #{t2} seen #{}]
(let [vs (clojure.set/difference vs seen)]
(cond
(empty? vs) true
(some #(occurs-check a % t1) vs) false
:else (recur
(reduce
(fn [s0 s1]
(clojure.set/union s0 (:eset (root-val a s1))))
#{} vs)
(clojure.set/union vs seen)))))]
(when x
(let [[t1 a] (swap-noms t1 swap a)]
((== t1 t2) a)))))
(defn -suspc [v1 v2 swap]
(reify
Object
(ToString [_] ;;; toString
(str "suspc" v1 v2 swap))
IConstraintStep
(-step [this a]
(let [t1 (walk a v1)
t2 (walk a v2)]
(reify
clojure.lang.IFn
(invoke [_ a]
((composeg*
(remcg this)
(fn [a]
(cond
(not (lvar? t1)) (-do-suspc t1 t2 swap a)
(not (lvar? t2)) (-do-suspc t2 t1 swap a)
:else ;; (= t1 t2)
(loop [a* swap a a]
(if (empty? a*) a
(recur (rest a*) ((hash (first a*) t2) a))))))) a))
IRunnable
(-runnable? [_]
(or (not (lvar? t1)) (not (lvar? t2)) (= t1 t2))))))
IConstraintOp
(-rator [_] `suspc)
(-rands [_] [v1 v2])
IReifiableConstraint
(-reifyc [c v r a]
(let [t1 (walk* r (walk* a v1))
t2 (walk* r (walk* a v2))
swap (walk* r swap)]
(when (and
(not (lvar? t1))
(not (lvar? t2))
(symbol? (first swap))
(symbol? (second swap)))
`(~'swap ~swap ~t1 ~t2))))
IConstraintWatchedStores
(-watched-stores [this] #{::l/subst})))
(defn suspc [v1 v2 swap]
(cgoal (-suspc v1 v2 swap)))
;; =============================================================================
;; tie: bind a nom in a term
(declare tie)
(defrecord Tie [binding-nom body]
ITreeTerm
IUnifyTerms
(unify-terms [v u s]
(cond
(tie? u)
(if (= (:binding-nom v) (:binding-nom u))
(unify s (:body v) (:body u))
(let [[t s] (swap-noms (:body v) [(:binding-nom v) (:binding-nom u)] s)]
((composeg*
(hash (:binding-nom u) (:body v))
(== t (:body u))) s)))
:else nil))
IReifyTerm
(reify-term [v s]
(let [s (-reify* s binding-nom)]
(let [s (-reify* s body)]
s)))
IWalkTerm
(walk-term [v f]
(with-meta
(tie (walk-term (:binding-nom v) f)
(walk-term (:body v) f))
(meta v)))
IOccursCheckTerm
(occurs-check-term [v x s]
(occurs-check s x (:body v)))
IConstrainTree
(-constrain-tree [t fc s]
(fc (:body t) s))
IForceAnswerTerm
(-force-ans [v x]
(force-ans (:body v)))
INomSwap
(swap-noms [t swap s]
(let [[tbody s] (swap-noms (:body t) swap s)]
[(with-meta (tie (nom-swap (:binding-nom t) swap) tbody) (meta t)) s])))
(defn tie [binding-nom body]
(Tie. binding-nom body))
(defn tie? [x]
(instance? clojure.core.logic.nominal.Tie x))
(defmethod print-method Tie [x ^TextWriter writer] ;;; ^Writer
.write
(print-method (:binding-nom x) writer)
.write
(print-method (:body x) writer))
(defn- pprint-tie [x]
(pp/pprint-logical-block
(.Write ^TextWriter *out* "[") ;;; ^Writer .write
(pp/write-out (:binding-nom x))
(.Write ^TextWriter *out* "] ") ;;; ^Writer .write
(pp/write-out (:body x))))
(. ^clojure.lang.MultiFn pp/simple-dispatch addMethod Tie pprint-tie) ;;; Added type hint | null | https://raw.githubusercontent.com/dmiller/clr.core.logic/385e105b887e769ea934fcd127e7f520c0e101dd/src/main/clojure/clojure/core/logic/nominal.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
[java.io Writer]
=============================================================================
Nominal unification with fresh, hash and tie.
Some references / inspiration:
alphaKanren - /~webyrd/alphamk/alphamk.pdf
Nominal Unification - /~amp12/papers/nomu/nomu-jv.pdf
-research/blob/master/lib/minikanren/nominal.sls
=============================================================================
Nominal unification protocols
=============================================================================
toString
hashCode
.hashCode
equals
getClass isInstance
^Writer
=============================================================================
hash: ensure a nom is free in a term
toString
Filter constraints unrelated to reified variables.
=============================================================================
Suspensions as constraints
toString
(= t1 t2)
=============================================================================
tie: bind a nom in a term
^Writer
^Writer .write
^Writer .write
Added type hint | Copyright ( c ) , , contributors . All rights reserved .
(ns clojure.core.logic.nominal
(:refer-clojure :exclude [== hash])
(:use [clojure.core.logic.protocols]
[clojure.core.logic :exclude [fresh] :as l])
(:require [clojure.pprint :as pp])
[clojure.core.logic LVar LCons]
[clojure.core.logic.protocols IBindable ITreeTerm]))
(defprotocol INomSwap
(swap-noms [t swap s]))
(defn nom-swap [a swap]
(cond
(= a (first swap)) (second swap)
(= a (second swap)) (first swap)
:else a))
(declare suspc)
(extend-protocol INomSwap
nil
(swap-noms [t swap s] [t s])
Object
(swap-noms [t swap s] [t s])
LVar
(swap-noms [t swap s]
(let [t (walk s t)]
(if (lvar? t)
(let [v (with-meta (lvar) (meta t))
rt (root-val s t)
s (-> (if (subst-val? rt) (ext-no-check s v rt) s)
(entangle t v)
((suspc v t swap)))]
[v s])
(swap-noms t swap s))))
LCons
(swap-noms [t swap s]
(let [[tfirst s] (swap-noms (lfirst t) swap s)
[tnext s] (swap-noms (lnext t) swap s)]
[(with-meta (lcons tfirst tnext) (meta t))
s]))
clojure.lang.IPersistentCollection
(swap-noms [t swap s]
(if (seq t)
(let [[tfirst s] (swap-noms (first t) swap s)
[tnext s] (swap-noms (next t) swap s)]
[(with-meta (cons tfirst tnext) (meta t)) s])
[t s]))
clojure.lang.IPersistentVector
(swap-noms [t swap s]
(let [[ts s] (swap-noms (seq t) swap s)]
[(vec ts) s]))
clojure.lang.IPersistentMap
(swap-noms [t swap s]
(let [[tkvs s] (swap-noms (seq t) swap s)]
[(into {} tkvs) s])))
Nom
(declare nom)
(deftype Nom [lvar]
IBindable
Object
(str "<nom:" (:name lvar) ">"))
(= lvar (:lvar o))))
clojure.lang.IObj
(withMeta [this new-meta]
(nom (with-meta lvar new-meta)))
(meta [this]
(meta lvar))
clojure.lang.ILookup
(valAt [this k]
(.valAt this k nil))
(valAt [_ k not-found]
(case k
:lvar lvar
:name (:name lvar)
:oname (:oname lvar)
not-found))
IReifyTerm
(reify-term [v s]
(ext s v (symbol (str (if (-> s meta (:reify-noms true)) "a" (:oname v)) "_" (count s)))))
INomSwap
(swap-noms [t swap s]
[(nom-swap t swap) s]))
(defn nom [lvar]
(Nom. lvar))
(defn nom? [x]
(instance? clojure.core.logic.nominal.Nom x))
(defn- nom-bind [sym]
((juxt identity
(fn [s] `(nom (lvar '~s)))) sym))
(defn- nom-binds [syms]
(mapcat nom-bind syms))
(defmacro fresh
"Creates fresh noms. Goals occuring within form a logical conjunction."
[[& noms] & goals]
`(fn [a#]
(-inc
(let [~@(nom-binds noms)]
(bind* a# ~@goals)))))
.write
(declare tie? hash)
(defn- -hash [a x]
(reify
Object
(str a "#" x))
IConstraintStep
(-step [this s]
(let [a (walk s a)
x (walk s x)]
(reify
clojure.lang.IFn
(invoke [_ s]
((composeg*
(remcg this)
(fn [s]
(cond
(and (lvar? a) (lvar? x) (= x a)) nil
(and (nom? a) (nom? x) (= x a)) nil
(and (not (lvar? a)) (not (nom? a))) nil
(and (nom? a) (tie? x) (= (:binding-nom x) a)) s
(and (tree-term? x)
(or (not (tie? x)) (nom? a)))
((constrain-tree x
(fn [t s] ((hash a t) s))) s)
:else s))) s))
IRunnable
(-runnable? [_]
(if (lvar? a)
(or (and (lvar? x) (= x a))
(and (tree-term? x) (not (tie? x))))
(or (not (nom? a))
(not (lvar? x))))))))
IConstraintOp
(-rator [_] `hash)
(-rands [_] [a x])
IReifiableConstraint
(-reifyc [_ v r s]
(let [x (walk* r (walk* s x))
a (walk* r (walk* s a))]
(when (and (symbol? a) (empty? (->> (list x) flatten (filter lvar?))))
(symbol (str a "#" x)))))
IConstraintWatchedStores
(-watched-stores [this] #{::l/subst})))
(defn hash [a t]
(cgoal (-hash a t)))
(defn- -do-suspc [t1 t2 swap a]
(let [x (loop [vs #{t2} seen #{}]
(let [vs (clojure.set/difference vs seen)]
(cond
(empty? vs) true
(some #(occurs-check a % t1) vs) false
:else (recur
(reduce
(fn [s0 s1]
(clojure.set/union s0 (:eset (root-val a s1))))
#{} vs)
(clojure.set/union vs seen)))))]
(when x
(let [[t1 a] (swap-noms t1 swap a)]
((== t1 t2) a)))))
(defn -suspc [v1 v2 swap]
(reify
Object
(str "suspc" v1 v2 swap))
IConstraintStep
(-step [this a]
(let [t1 (walk a v1)
t2 (walk a v2)]
(reify
clojure.lang.IFn
(invoke [_ a]
((composeg*
(remcg this)
(fn [a]
(cond
(not (lvar? t1)) (-do-suspc t1 t2 swap a)
(not (lvar? t2)) (-do-suspc t2 t1 swap a)
(loop [a* swap a a]
(if (empty? a*) a
(recur (rest a*) ((hash (first a*) t2) a))))))) a))
IRunnable
(-runnable? [_]
(or (not (lvar? t1)) (not (lvar? t2)) (= t1 t2))))))
IConstraintOp
(-rator [_] `suspc)
(-rands [_] [v1 v2])
IReifiableConstraint
(-reifyc [c v r a]
(let [t1 (walk* r (walk* a v1))
t2 (walk* r (walk* a v2))
swap (walk* r swap)]
(when (and
(not (lvar? t1))
(not (lvar? t2))
(symbol? (first swap))
(symbol? (second swap)))
`(~'swap ~swap ~t1 ~t2))))
IConstraintWatchedStores
(-watched-stores [this] #{::l/subst})))
(defn suspc [v1 v2 swap]
(cgoal (-suspc v1 v2 swap)))
(declare tie)
(defrecord Tie [binding-nom body]
ITreeTerm
IUnifyTerms
(unify-terms [v u s]
(cond
(tie? u)
(if (= (:binding-nom v) (:binding-nom u))
(unify s (:body v) (:body u))
(let [[t s] (swap-noms (:body v) [(:binding-nom v) (:binding-nom u)] s)]
((composeg*
(hash (:binding-nom u) (:body v))
(== t (:body u))) s)))
:else nil))
IReifyTerm
(reify-term [v s]
(let [s (-reify* s binding-nom)]
(let [s (-reify* s body)]
s)))
IWalkTerm
(walk-term [v f]
(with-meta
(tie (walk-term (:binding-nom v) f)
(walk-term (:body v) f))
(meta v)))
IOccursCheckTerm
(occurs-check-term [v x s]
(occurs-check s x (:body v)))
IConstrainTree
(-constrain-tree [t fc s]
(fc (:body t) s))
IForceAnswerTerm
(-force-ans [v x]
(force-ans (:body v)))
INomSwap
(swap-noms [t swap s]
(let [[tbody s] (swap-noms (:body t) swap s)]
[(with-meta (tie (nom-swap (:binding-nom t) swap) tbody) (meta t)) s])))
(defn tie [binding-nom body]
(Tie. binding-nom body))
(defn tie? [x]
(instance? clojure.core.logic.nominal.Tie x))
.write
(print-method (:binding-nom x) writer)
.write
(print-method (:body x) writer))
(defn- pprint-tie [x]
(pp/pprint-logical-block
(pp/write-out (:binding-nom x))
(pp/write-out (:body x))))
|
300da33bc38592a37bfcfdbe288722efe03e7171322f826d3aa2914cd734c9b9 | SumitPadhiyar/confuzz | lwt_process.ml | This file is part of Lwt , released under the MIT license . See LICENSE.md for
details , or visit .
details, or visit . *)
open Lwt.Infix
type command = string * string array
let shell =
if Sys.win32 then
fun cmd -> ("", [|"cmd.exe"; "/c"; "\000" ^ cmd|])
else
fun cmd -> ("", [|"/bin/sh"; "-c"; cmd|])
type redirection =
[ `Keep
| `Dev_null
| `Close
| `FD_copy of Unix.file_descr
| `FD_move of Unix.file_descr ]
(* +-----------------------------------------------------------------+
| OS-depentent command spawning |
+-----------------------------------------------------------------+ *)
type proc = {
id : int;
(* The process id. *)
fd : Unix.file_descr;
(* A handle on windows, and a dummy value of Unix. *)
}
let win32_get_fd fd redirection =
match redirection with
| `Keep ->
Some fd
| `Dev_null ->
Some (Unix.openfile "nul" [Unix.O_RDWR] 0o666)
| `Close ->
None
| `FD_copy fd' ->
Some fd'
| `FD_move fd' ->
Some fd'
external win32_create_process :
string option -> string -> string option ->
(Unix.file_descr option * Unix.file_descr option * Unix.file_descr option) ->
proc = "lwt_process_create_process"
let win32_quote arg =
if String.length arg > 0 && arg.[0] = '\000' then
String.sub arg 1 (String.length arg - 1)
else
Filename.quote arg
let win32_spawn
(prog, args) env
?(stdin:redirection=`Keep)
?(stdout:redirection=`Keep)
?(stderr:redirection=`Keep)
toclose =
let cmdline = String.concat " " (List.map win32_quote (Array.to_list args)) in
let env =
match env with
| None ->
None
| Some env ->
let len =
Array.fold_left (fun len str -> String.length str + len + 1) 1 env in
let res = Bytes.create len in
let ofs =
Array.fold_left
(fun ofs str ->
let len = String.length str in
String.blit str 0 res ofs len;
Bytes.set res (ofs + len) '\000';
ofs + len + 1)
0 env
in
Bytes.set res ofs '\000';
Some (Bytes.unsafe_to_string res)
in
List.iter Unix.set_close_on_exec toclose;
let stdin_fd = win32_get_fd Unix.stdin stdin
and stdout_fd = win32_get_fd Unix.stdout stdout
and stderr_fd = win32_get_fd Unix.stderr stderr in
let proc =
win32_create_process
(if prog = "" then None else Some prog) cmdline env
(stdin_fd, stdout_fd, stderr_fd)
in
let close = function
| `FD_move fd ->
Unix.close fd
| _ ->
()
in
close stdin;
close stdout;
close stderr;
proc
external win32_wait_job : Unix.file_descr -> int Lwt_unix.job =
"lwt_process_wait_job"
let win32_waitproc proc =
Lwt_unix.run_job (win32_wait_job proc.fd) >>= fun code ->
Lwt.return
(proc.id,
Lwt_unix.WEXITED code,
{Lwt_unix.ru_utime = 0.; Lwt_unix.ru_stime = 0.})
external win32_terminate_process : Unix.file_descr -> int -> unit =
"lwt_process_terminate_process"
let win32_terminate proc =
win32_terminate_process proc.fd 1
let unix_redirect fd redirection = match redirection with
| `Keep ->
()
| `Dev_null ->
Unix.close fd;
let dev_null = Unix.openfile "/dev/null" [Unix.O_RDWR] 0o666 in
if fd <> dev_null then begin
Unix.dup2 dev_null fd;
Unix.close dev_null
end
| `Close ->
Unix.close fd
| `FD_copy fd' ->
Unix.dup2 fd' fd
| `FD_move fd' ->
Unix.dup2 fd' fd;
Unix.close fd'
external sys_exit : int -> 'a = "caml_sys_exit"
let unix_spawn
(prog, args) env
?(stdin:redirection=`Keep)
?(stdout:redirection=`Keep)
?(stderr:redirection=`Keep)
toclose =
let prog = if prog = "" && Array.length args > 0 then args.(0) else prog in
match Lwt_unix.fork () with
| 0 ->
unix_redirect Unix.stdin stdin;
unix_redirect Unix.stdout stdout;
unix_redirect Unix.stderr stderr;
List.iter Unix.close toclose;
begin
try
match env with
| None ->
Unix.execvp prog args
| Some env ->
Unix.execvpe prog args env
with _ ->
(* Do not run at_exit hooks *)
sys_exit 127
end
| id ->
let close = function
| `FD_move fd ->
Unix.close fd
| _ ->
()
in
close stdin;
close stdout;
close stderr;
{id; fd = Unix.stdin}
let unix_waitproc proc = Lwt_unix.wait4 [] proc.id
let unix_terminate proc =
Unix.kill proc.id Sys.sigkill
let spawn = if Sys.win32 then win32_spawn else unix_spawn
let waitproc = if Sys.win32 then win32_waitproc else unix_waitproc
let terminate = if Sys.win32 then win32_terminate else unix_terminate
(* +-----------------------------------------------------------------+
| Objects |
+-----------------------------------------------------------------+ *)
type state =
| Running
| Exited of Unix.process_status
let status (_pid, status, _rusage) = status
let rusage (_pid, _status, rusage) = rusage
external cast_chan : 'a Lwt_io.channel -> unit Lwt_io.channel = "%identity"
Transform a channel into a channel that only support closing .
let ignore_close chan = ignore (Lwt_io.close chan)
class virtual common timeout proc channels =
let wait = waitproc proc in
object(self)
val mutable closed = false
method pid = proc.id
method state =
match Lwt.poll wait with
| None -> Running
| Some (_pid, status, _rusage) -> Exited status
method kill signum =
if Lwt.state wait = Lwt.Sleep then
Unix.kill proc.id signum
method terminate =
if Lwt.state wait = Lwt.Sleep then
terminate proc
method close =
if closed then self#status
else (
closed <- true;
Lwt.protected (Lwt.join (List.map Lwt_io.close channels))
>>= fun () -> self#status
)
method status = Lwt.protected wait >|= status
method rusage = Lwt.protected wait >|= rusage
initializer
(* Ensure channels are closed when no longer used. *)
List.iter (Gc.finalise ignore_close) channels;
(* Handle timeout. *)
match timeout with
| None ->
()
| Some dt ->
ignore (
(* Ignore errors since they can be obtained by
self#close. *)
Lwt.try_bind
(fun () ->
Lwt.choose [(Lwt_unix.sleep dt >>= fun () -> Lwt.return_false);
(wait >>= fun _ -> Lwt.return_true)])
(function
| true ->
Lwt.return_unit
| false ->
self#terminate;
self#close >>= fun _ -> Lwt.return_unit)
(fun _ ->
(* The exception is dropped because it can be
obtained with self#close. *)
Lwt.return_unit)
)
end
class process_none ?timeout ?env ?stdin ?stdout ?stderr cmd =
let proc = spawn cmd env ?stdin ?stdout ?stderr [] in
object
inherit common timeout proc []
end
class process_in ?timeout ?env ?stdin ?stderr cmd =
let stdout_r, stdout_w = Unix.pipe () in
let proc =
spawn cmd env ?stdin ~stdout:(`FD_move stdout_w) ?stderr [stdout_r] in
let stdout = Lwt_io.of_unix_fd ~mode:Lwt_io.input stdout_r in
object
inherit common timeout proc [cast_chan stdout]
method stdout = stdout
end
class process_out ?timeout ?env ?stdout ?stderr cmd =
let stdin_r, stdin_w = Unix.pipe () in
let proc =
spawn cmd env ~stdin:(`FD_move stdin_r) ?stdout ?stderr [stdin_w] in
let stdin = Lwt_io.of_unix_fd ~mode:Lwt_io.output stdin_w in
object
inherit common timeout proc [cast_chan stdin]
method stdin = stdin
end
class process ?timeout ?env ?stderr cmd =
let stdin_r, stdin_w = Unix.pipe ()
and stdout_r, stdout_w = Unix.pipe () in
let proc =
spawn
cmd env ~stdin:(`FD_move stdin_r) ~stdout:(`FD_move stdout_w) ?stderr
[stdin_w; stdout_r]
in
let stdin = Lwt_io.of_unix_fd ~mode:Lwt_io.output stdin_w
and stdout = Lwt_io.of_unix_fd ~mode:Lwt_io.input stdout_r in
object
inherit common timeout proc [cast_chan stdin; cast_chan stdout]
method stdin = stdin
method stdout = stdout
end
class process_full ?timeout ?env cmd =
let stdin_r, stdin_w = Unix.pipe ()
and stdout_r, stdout_w = Unix.pipe ()
and stderr_r, stderr_w = Unix.pipe () in
let proc =
spawn
cmd env
~stdin:(`FD_move stdin_r)
~stdout:(`FD_move stdout_w)
~stderr:(`FD_move stderr_w)
[stdin_w; stdout_r; stderr_r]
in
let stdin = Lwt_io.of_unix_fd ~mode:Lwt_io.output stdin_w
and stdout = Lwt_io.of_unix_fd ~mode:Lwt_io.input stdout_r
and stderr = Lwt_io.of_unix_fd ~mode:Lwt_io.input stderr_r in
object
inherit
common timeout proc [cast_chan stdin; cast_chan stdout; cast_chan stderr]
method stdin = stdin
method stdout = stdout
method stderr = stderr
end
let open_process_none ?timeout ?env ?stdin ?stdout ?stderr cmd =
new process_none ?timeout ?env ?stdin ?stdout ?stderr cmd
let open_process_in ?timeout ?env ?stdin ?stderr cmd =
new process_in ?timeout ?env ?stdin ?stderr cmd
let open_process_out ?timeout ?env ?stdout ?stderr cmd =
new process_out ?timeout ?env ?stdout ?stderr cmd
let open_process ?timeout ?env ?stderr cmd =
new process ?timeout ?env ?stderr cmd
let open_process_full ?timeout ?env cmd =
new process_full ?timeout ?env cmd
let make_with backend ?timeout ?env cmd f =
let process = backend ?timeout ?env cmd in
Lwt.finalize
(fun () -> f process)
(fun () ->
process#close >>= fun _ ->
Lwt.return_unit)
let with_process_none ?timeout ?env ?stdin ?stdout ?stderr cmd f =
make_with (open_process_none ?stdin ?stdout ?stderr) ?timeout ?env cmd f
let with_process_in ?timeout ?env ?stdin ?stderr cmd f =
make_with (open_process_in ?stdin ?stderr) ?timeout ?env cmd f
let with_process_out ?timeout ?env ?stdout ?stderr cmd f =
make_with (open_process_out ?stdout ?stderr) ?timeout ?env cmd f
let with_process ?timeout ?env ?stderr cmd f =
make_with (open_process ?stderr) ?timeout ?env cmd f
let with_process_full ?timeout ?env cmd f =
make_with open_process_full ?timeout ?env cmd f
(* +-----------------------------------------------------------------+
| High-level functions |
+-----------------------------------------------------------------+ *)
let exec ?timeout ?env ?stdin ?stdout ?stderr cmd =
(open_process_none ?timeout ?env ?stdin ?stdout ?stderr cmd)#close
let ignore_close ch =
ignore (Lwt_io.close ch)
let read_opt read ic =
Lwt.catch
(fun () -> read ic >|= fun x -> Some x)
(function
| Unix.Unix_error (Unix.EPIPE, _, _) | End_of_file ->
Lwt.return_none
| exn -> Lwt.fail exn) [@ocaml.warning "-4"]
let recv_chars pr =
let ic = pr#stdout in
Gc.finalise ignore_close ic;
Lwt_stream.from (fun _ ->
read_opt Lwt_io.read_char ic >>= fun x ->
if x = None then begin
Lwt_io.close ic >>= fun () ->
Lwt.return x
end else
Lwt.return x)
let recv_lines pr =
let ic = pr#stdout in
Gc.finalise ignore_close ic;
Lwt_stream.from (fun _ ->
read_opt Lwt_io.read_line ic >>= fun x ->
if x = None then begin
Lwt_io.close ic >>= fun () ->
Lwt.return x
end else
Lwt.return x)
let recv pr =
let ic = pr#stdout in
Lwt.finalize
(fun () -> Lwt_io.read ic)
(fun () -> Lwt_io.close ic)
let recv_line pr =
let ic = pr#stdout in
Lwt.finalize
(fun () -> Lwt_io.read_line ic)
(fun () -> Lwt_io.close ic)
let send f pr data =
let oc = pr#stdin in
Lwt.finalize
(fun () -> f oc data)
(fun () -> Lwt_io.close oc)
(* Receiving *)
let pread ?timeout ?env ?stdin ?stderr cmd =
recv (open_process_in ?timeout ?env ?stdin ?stderr cmd)
let pread_chars ?timeout ?env ?stdin ?stderr cmd =
recv_chars (open_process_in ?timeout ?env ?stdin ?stderr cmd)
let pread_line ?timeout ?env ?stdin ?stderr cmd =
recv_line (open_process_in ?timeout ?env ?stdin ?stderr cmd)
let pread_lines ?timeout ?env ?stdin ?stderr cmd =
recv_lines (open_process_in ?timeout ?env ?stdin ?stderr cmd)
(* Sending *)
let pwrite ?timeout ?env ?stdout ?stderr cmd text =
send Lwt_io.write (open_process_out ?timeout ?env ?stdout ?stderr cmd) text
let pwrite_chars ?timeout ?env ?stdout ?stderr cmd chars =
send
Lwt_io.write_chars
(open_process_out ?timeout ?env ?stdout ?stderr cmd)
chars
let pwrite_line ?timeout ?env ?stdout ?stderr cmd line =
send
Lwt_io.write_line
(open_process_out ?timeout ?env ?stdout ?stderr cmd)
line
let pwrite_lines ?timeout ?env ?stdout ?stderr cmd lines =
send
Lwt_io.write_lines
(open_process_out ?timeout ?env ?stdout ?stderr cmd)
lines
(* Mapping *)
type 'a map_state =
| Init
| Save of 'a option Lwt.t
| Done
(* Monitor the thread [sender] in the stream [st] so write errors are
reported. *)
let monitor sender st =
let sender = sender >|= fun () -> None in
let state = ref Init in
Lwt_stream.from
(fun () ->
match !state with
| Init ->
let getter = Lwt.apply Lwt_stream.get st in
let result _ =
match Lwt.state sender with
| Lwt.Sleep ->
(* The sender is still sleeping, behave as the
getter. *)
getter
| Lwt.Return _ ->
(* The sender terminated successfully, we are
done monitoring it. *)
state := Done;
getter
| Lwt.Fail _ ->
(* The sender failed, behave as the sender for
this element and save current getter. *)
state := Save getter;
sender
in
Lwt.try_bind (fun () -> Lwt.choose [sender; getter]) result result
| Save t ->
state := Done;
t
| Done ->
Lwt_stream.get st)
let pmap ?timeout ?env ?stderr cmd text =
let pr = open_process ?timeout ?env ?stderr cmd in
(* Start the sender and getter at the same time. *)
let sender = send Lwt_io.write pr text in
let getter = recv pr in
Lwt.catch
(fun () ->
(* Wait for both to terminate, returning the result of the
getter. *)
sender >>= fun () -> getter)
(function
| Lwt.Canceled as exn ->
(* Cancel the getter if the sender was canceled. *)
Lwt.cancel getter;
Lwt.fail exn
| exn -> Lwt.fail exn)
let pmap_chars ?timeout ?env ?stderr cmd chars =
let pr = open_process ?timeout ?env ?stderr cmd in
let sender = send Lwt_io.write_chars pr chars in
monitor sender (recv_chars pr)
let pmap_line ?timeout ?env ?stderr cmd line =
let pr = open_process ?timeout ?env ?stderr cmd in
(* Start the sender and getter at the same time. *)
let sender = send Lwt_io.write_line pr line in
let getter = recv_line pr in
Lwt.catch
(fun () ->
(* Wait for both to terminate, returning the result of the
getter. *)
sender >>= fun () -> getter)
(function
| Lwt.Canceled as exn ->
(* Cancel the getter if the sender was canceled. *)
Lwt.cancel getter;
Lwt.fail exn
| exn -> Lwt.fail exn)
let pmap_lines ?timeout ?env ?stderr cmd lines =
let pr = open_process ?timeout ?env ?stderr cmd in
let sender = send Lwt_io.write_lines pr lines in
monitor sender (recv_lines pr)
| null | https://raw.githubusercontent.com/SumitPadhiyar/confuzz/7d6b2af51d7135025f9ed1e013a9ae0940f3663e/src/unix/lwt_process.ml | ocaml | +-----------------------------------------------------------------+
| OS-depentent command spawning |
+-----------------------------------------------------------------+
The process id.
A handle on windows, and a dummy value of Unix.
Do not run at_exit hooks
+-----------------------------------------------------------------+
| Objects |
+-----------------------------------------------------------------+
Ensure channels are closed when no longer used.
Handle timeout.
Ignore errors since they can be obtained by
self#close.
The exception is dropped because it can be
obtained with self#close.
+-----------------------------------------------------------------+
| High-level functions |
+-----------------------------------------------------------------+
Receiving
Sending
Mapping
Monitor the thread [sender] in the stream [st] so write errors are
reported.
The sender is still sleeping, behave as the
getter.
The sender terminated successfully, we are
done monitoring it.
The sender failed, behave as the sender for
this element and save current getter.
Start the sender and getter at the same time.
Wait for both to terminate, returning the result of the
getter.
Cancel the getter if the sender was canceled.
Start the sender and getter at the same time.
Wait for both to terminate, returning the result of the
getter.
Cancel the getter if the sender was canceled. | This file is part of Lwt , released under the MIT license . See LICENSE.md for
details , or visit .
details, or visit . *)
open Lwt.Infix
type command = string * string array
let shell =
if Sys.win32 then
fun cmd -> ("", [|"cmd.exe"; "/c"; "\000" ^ cmd|])
else
fun cmd -> ("", [|"/bin/sh"; "-c"; cmd|])
type redirection =
[ `Keep
| `Dev_null
| `Close
| `FD_copy of Unix.file_descr
| `FD_move of Unix.file_descr ]
type proc = {
id : int;
fd : Unix.file_descr;
}
let win32_get_fd fd redirection =
match redirection with
| `Keep ->
Some fd
| `Dev_null ->
Some (Unix.openfile "nul" [Unix.O_RDWR] 0o666)
| `Close ->
None
| `FD_copy fd' ->
Some fd'
| `FD_move fd' ->
Some fd'
external win32_create_process :
string option -> string -> string option ->
(Unix.file_descr option * Unix.file_descr option * Unix.file_descr option) ->
proc = "lwt_process_create_process"
let win32_quote arg =
if String.length arg > 0 && arg.[0] = '\000' then
String.sub arg 1 (String.length arg - 1)
else
Filename.quote arg
let win32_spawn
(prog, args) env
?(stdin:redirection=`Keep)
?(stdout:redirection=`Keep)
?(stderr:redirection=`Keep)
toclose =
let cmdline = String.concat " " (List.map win32_quote (Array.to_list args)) in
let env =
match env with
| None ->
None
| Some env ->
let len =
Array.fold_left (fun len str -> String.length str + len + 1) 1 env in
let res = Bytes.create len in
let ofs =
Array.fold_left
(fun ofs str ->
let len = String.length str in
String.blit str 0 res ofs len;
Bytes.set res (ofs + len) '\000';
ofs + len + 1)
0 env
in
Bytes.set res ofs '\000';
Some (Bytes.unsafe_to_string res)
in
List.iter Unix.set_close_on_exec toclose;
let stdin_fd = win32_get_fd Unix.stdin stdin
and stdout_fd = win32_get_fd Unix.stdout stdout
and stderr_fd = win32_get_fd Unix.stderr stderr in
let proc =
win32_create_process
(if prog = "" then None else Some prog) cmdline env
(stdin_fd, stdout_fd, stderr_fd)
in
let close = function
| `FD_move fd ->
Unix.close fd
| _ ->
()
in
close stdin;
close stdout;
close stderr;
proc
external win32_wait_job : Unix.file_descr -> int Lwt_unix.job =
"lwt_process_wait_job"
let win32_waitproc proc =
Lwt_unix.run_job (win32_wait_job proc.fd) >>= fun code ->
Lwt.return
(proc.id,
Lwt_unix.WEXITED code,
{Lwt_unix.ru_utime = 0.; Lwt_unix.ru_stime = 0.})
external win32_terminate_process : Unix.file_descr -> int -> unit =
"lwt_process_terminate_process"
let win32_terminate proc =
win32_terminate_process proc.fd 1
let unix_redirect fd redirection = match redirection with
| `Keep ->
()
| `Dev_null ->
Unix.close fd;
let dev_null = Unix.openfile "/dev/null" [Unix.O_RDWR] 0o666 in
if fd <> dev_null then begin
Unix.dup2 dev_null fd;
Unix.close dev_null
end
| `Close ->
Unix.close fd
| `FD_copy fd' ->
Unix.dup2 fd' fd
| `FD_move fd' ->
Unix.dup2 fd' fd;
Unix.close fd'
external sys_exit : int -> 'a = "caml_sys_exit"
let unix_spawn
(prog, args) env
?(stdin:redirection=`Keep)
?(stdout:redirection=`Keep)
?(stderr:redirection=`Keep)
toclose =
let prog = if prog = "" && Array.length args > 0 then args.(0) else prog in
match Lwt_unix.fork () with
| 0 ->
unix_redirect Unix.stdin stdin;
unix_redirect Unix.stdout stdout;
unix_redirect Unix.stderr stderr;
List.iter Unix.close toclose;
begin
try
match env with
| None ->
Unix.execvp prog args
| Some env ->
Unix.execvpe prog args env
with _ ->
sys_exit 127
end
| id ->
let close = function
| `FD_move fd ->
Unix.close fd
| _ ->
()
in
close stdin;
close stdout;
close stderr;
{id; fd = Unix.stdin}
let unix_waitproc proc = Lwt_unix.wait4 [] proc.id
let unix_terminate proc =
Unix.kill proc.id Sys.sigkill
let spawn = if Sys.win32 then win32_spawn else unix_spawn
let waitproc = if Sys.win32 then win32_waitproc else unix_waitproc
let terminate = if Sys.win32 then win32_terminate else unix_terminate
type state =
| Running
| Exited of Unix.process_status
let status (_pid, status, _rusage) = status
let rusage (_pid, _status, rusage) = rusage
external cast_chan : 'a Lwt_io.channel -> unit Lwt_io.channel = "%identity"
Transform a channel into a channel that only support closing .
let ignore_close chan = ignore (Lwt_io.close chan)
class virtual common timeout proc channels =
let wait = waitproc proc in
object(self)
val mutable closed = false
method pid = proc.id
method state =
match Lwt.poll wait with
| None -> Running
| Some (_pid, status, _rusage) -> Exited status
method kill signum =
if Lwt.state wait = Lwt.Sleep then
Unix.kill proc.id signum
method terminate =
if Lwt.state wait = Lwt.Sleep then
terminate proc
method close =
if closed then self#status
else (
closed <- true;
Lwt.protected (Lwt.join (List.map Lwt_io.close channels))
>>= fun () -> self#status
)
method status = Lwt.protected wait >|= status
method rusage = Lwt.protected wait >|= rusage
initializer
List.iter (Gc.finalise ignore_close) channels;
match timeout with
| None ->
()
| Some dt ->
ignore (
Lwt.try_bind
(fun () ->
Lwt.choose [(Lwt_unix.sleep dt >>= fun () -> Lwt.return_false);
(wait >>= fun _ -> Lwt.return_true)])
(function
| true ->
Lwt.return_unit
| false ->
self#terminate;
self#close >>= fun _ -> Lwt.return_unit)
(fun _ ->
Lwt.return_unit)
)
end
class process_none ?timeout ?env ?stdin ?stdout ?stderr cmd =
let proc = spawn cmd env ?stdin ?stdout ?stderr [] in
object
inherit common timeout proc []
end
class process_in ?timeout ?env ?stdin ?stderr cmd =
let stdout_r, stdout_w = Unix.pipe () in
let proc =
spawn cmd env ?stdin ~stdout:(`FD_move stdout_w) ?stderr [stdout_r] in
let stdout = Lwt_io.of_unix_fd ~mode:Lwt_io.input stdout_r in
object
inherit common timeout proc [cast_chan stdout]
method stdout = stdout
end
class process_out ?timeout ?env ?stdout ?stderr cmd =
let stdin_r, stdin_w = Unix.pipe () in
let proc =
spawn cmd env ~stdin:(`FD_move stdin_r) ?stdout ?stderr [stdin_w] in
let stdin = Lwt_io.of_unix_fd ~mode:Lwt_io.output stdin_w in
object
inherit common timeout proc [cast_chan stdin]
method stdin = stdin
end
class process ?timeout ?env ?stderr cmd =
let stdin_r, stdin_w = Unix.pipe ()
and stdout_r, stdout_w = Unix.pipe () in
let proc =
spawn
cmd env ~stdin:(`FD_move stdin_r) ~stdout:(`FD_move stdout_w) ?stderr
[stdin_w; stdout_r]
in
let stdin = Lwt_io.of_unix_fd ~mode:Lwt_io.output stdin_w
and stdout = Lwt_io.of_unix_fd ~mode:Lwt_io.input stdout_r in
object
inherit common timeout proc [cast_chan stdin; cast_chan stdout]
method stdin = stdin
method stdout = stdout
end
class process_full ?timeout ?env cmd =
let stdin_r, stdin_w = Unix.pipe ()
and stdout_r, stdout_w = Unix.pipe ()
and stderr_r, stderr_w = Unix.pipe () in
let proc =
spawn
cmd env
~stdin:(`FD_move stdin_r)
~stdout:(`FD_move stdout_w)
~stderr:(`FD_move stderr_w)
[stdin_w; stdout_r; stderr_r]
in
let stdin = Lwt_io.of_unix_fd ~mode:Lwt_io.output stdin_w
and stdout = Lwt_io.of_unix_fd ~mode:Lwt_io.input stdout_r
and stderr = Lwt_io.of_unix_fd ~mode:Lwt_io.input stderr_r in
object
inherit
common timeout proc [cast_chan stdin; cast_chan stdout; cast_chan stderr]
method stdin = stdin
method stdout = stdout
method stderr = stderr
end
let open_process_none ?timeout ?env ?stdin ?stdout ?stderr cmd =
new process_none ?timeout ?env ?stdin ?stdout ?stderr cmd
let open_process_in ?timeout ?env ?stdin ?stderr cmd =
new process_in ?timeout ?env ?stdin ?stderr cmd
let open_process_out ?timeout ?env ?stdout ?stderr cmd =
new process_out ?timeout ?env ?stdout ?stderr cmd
let open_process ?timeout ?env ?stderr cmd =
new process ?timeout ?env ?stderr cmd
let open_process_full ?timeout ?env cmd =
new process_full ?timeout ?env cmd
let make_with backend ?timeout ?env cmd f =
let process = backend ?timeout ?env cmd in
Lwt.finalize
(fun () -> f process)
(fun () ->
process#close >>= fun _ ->
Lwt.return_unit)
let with_process_none ?timeout ?env ?stdin ?stdout ?stderr cmd f =
make_with (open_process_none ?stdin ?stdout ?stderr) ?timeout ?env cmd f
let with_process_in ?timeout ?env ?stdin ?stderr cmd f =
make_with (open_process_in ?stdin ?stderr) ?timeout ?env cmd f
let with_process_out ?timeout ?env ?stdout ?stderr cmd f =
make_with (open_process_out ?stdout ?stderr) ?timeout ?env cmd f
let with_process ?timeout ?env ?stderr cmd f =
make_with (open_process ?stderr) ?timeout ?env cmd f
let with_process_full ?timeout ?env cmd f =
make_with open_process_full ?timeout ?env cmd f
let exec ?timeout ?env ?stdin ?stdout ?stderr cmd =
(open_process_none ?timeout ?env ?stdin ?stdout ?stderr cmd)#close
let ignore_close ch =
ignore (Lwt_io.close ch)
let read_opt read ic =
Lwt.catch
(fun () -> read ic >|= fun x -> Some x)
(function
| Unix.Unix_error (Unix.EPIPE, _, _) | End_of_file ->
Lwt.return_none
| exn -> Lwt.fail exn) [@ocaml.warning "-4"]
let recv_chars pr =
let ic = pr#stdout in
Gc.finalise ignore_close ic;
Lwt_stream.from (fun _ ->
read_opt Lwt_io.read_char ic >>= fun x ->
if x = None then begin
Lwt_io.close ic >>= fun () ->
Lwt.return x
end else
Lwt.return x)
let recv_lines pr =
let ic = pr#stdout in
Gc.finalise ignore_close ic;
Lwt_stream.from (fun _ ->
read_opt Lwt_io.read_line ic >>= fun x ->
if x = None then begin
Lwt_io.close ic >>= fun () ->
Lwt.return x
end else
Lwt.return x)
let recv pr =
let ic = pr#stdout in
Lwt.finalize
(fun () -> Lwt_io.read ic)
(fun () -> Lwt_io.close ic)
let recv_line pr =
let ic = pr#stdout in
Lwt.finalize
(fun () -> Lwt_io.read_line ic)
(fun () -> Lwt_io.close ic)
let send f pr data =
let oc = pr#stdin in
Lwt.finalize
(fun () -> f oc data)
(fun () -> Lwt_io.close oc)
let pread ?timeout ?env ?stdin ?stderr cmd =
recv (open_process_in ?timeout ?env ?stdin ?stderr cmd)
let pread_chars ?timeout ?env ?stdin ?stderr cmd =
recv_chars (open_process_in ?timeout ?env ?stdin ?stderr cmd)
let pread_line ?timeout ?env ?stdin ?stderr cmd =
recv_line (open_process_in ?timeout ?env ?stdin ?stderr cmd)
let pread_lines ?timeout ?env ?stdin ?stderr cmd =
recv_lines (open_process_in ?timeout ?env ?stdin ?stderr cmd)
let pwrite ?timeout ?env ?stdout ?stderr cmd text =
send Lwt_io.write (open_process_out ?timeout ?env ?stdout ?stderr cmd) text
let pwrite_chars ?timeout ?env ?stdout ?stderr cmd chars =
send
Lwt_io.write_chars
(open_process_out ?timeout ?env ?stdout ?stderr cmd)
chars
let pwrite_line ?timeout ?env ?stdout ?stderr cmd line =
send
Lwt_io.write_line
(open_process_out ?timeout ?env ?stdout ?stderr cmd)
line
let pwrite_lines ?timeout ?env ?stdout ?stderr cmd lines =
send
Lwt_io.write_lines
(open_process_out ?timeout ?env ?stdout ?stderr cmd)
lines
type 'a map_state =
| Init
| Save of 'a option Lwt.t
| Done
let monitor sender st =
let sender = sender >|= fun () -> None in
let state = ref Init in
Lwt_stream.from
(fun () ->
match !state with
| Init ->
let getter = Lwt.apply Lwt_stream.get st in
let result _ =
match Lwt.state sender with
| Lwt.Sleep ->
getter
| Lwt.Return _ ->
state := Done;
getter
| Lwt.Fail _ ->
state := Save getter;
sender
in
Lwt.try_bind (fun () -> Lwt.choose [sender; getter]) result result
| Save t ->
state := Done;
t
| Done ->
Lwt_stream.get st)
let pmap ?timeout ?env ?stderr cmd text =
let pr = open_process ?timeout ?env ?stderr cmd in
let sender = send Lwt_io.write pr text in
let getter = recv pr in
Lwt.catch
(fun () ->
sender >>= fun () -> getter)
(function
| Lwt.Canceled as exn ->
Lwt.cancel getter;
Lwt.fail exn
| exn -> Lwt.fail exn)
let pmap_chars ?timeout ?env ?stderr cmd chars =
let pr = open_process ?timeout ?env ?stderr cmd in
let sender = send Lwt_io.write_chars pr chars in
monitor sender (recv_chars pr)
let pmap_line ?timeout ?env ?stderr cmd line =
let pr = open_process ?timeout ?env ?stderr cmd in
let sender = send Lwt_io.write_line pr line in
let getter = recv_line pr in
Lwt.catch
(fun () ->
sender >>= fun () -> getter)
(function
| Lwt.Canceled as exn ->
Lwt.cancel getter;
Lwt.fail exn
| exn -> Lwt.fail exn)
let pmap_lines ?timeout ?env ?stderr cmd lines =
let pr = open_process ?timeout ?env ?stderr cmd in
let sender = send Lwt_io.write_lines pr lines in
monitor sender (recv_lines pr)
|
1d1492788b541e320c7a6cee5c9c231d0247de4bf188e60505c1297bfb3e8e50 | typedclojure/typedclojure | check_form.clj | Copyright ( c ) , contributors .
;; The use and distribution terms for this software are covered by the
;; Eclipse Public License 1.0 (-1.0.php)
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns typed.cljs.checker.check-form
(:require [cljs.compiler :as comp]
[cljs.env :as env]
[clojure.core.typed.ast-utils :as ast-u]
[clojure.core.typed.current-impl :as impl]
[typed.cljc.checker.check-form :as chk-form2]
TODO untested
[typed.cljs.analyzer :as ana]
[typed.cljs.checker.check :as chk-cljs]
[typed.cljs.checker.util :as ucljs]))
(defn config-map2 []
{:impl impl/clojurescript
:check-top-level chk-cljs/check-top-level
:unparse-ns (ucljs/cljs-ns)
;:runtime-check-expr rt-chk/runtime-check-expr
;:runtime-infer-expr (fn [& args]
; (apply @runtime-infer-expr args))
:eval-out-ast (fn eval-out-ast
([ast] (eval-out-ast ast {}))
([ast opts] (assert nil "TODO eval cljs") nil #_(ana-clj/eval-ast ast opts)))
:custom-expansions? true
:emit-form ast-u/emit-form-fn
:check-form-info chk-form2/check-form-info
:check-form* chk-form2/check-form*
})
(defn maybe-with-analyzer-bindings [{:keys [skip-cljs-analyzer-bindings] :as _opt} f]
(if skip-cljs-analyzer-bindings
(f)
(ucljs/with-analyzer-bindings* (ucljs/cljs-ns) "NO_FILE" f)))
(defn check-form-info
[form & {:as opt}]
(with-bindings (ana/default-thread-bindings)
(maybe-with-analyzer-bindings opt
(fn []
(chk-form2/check-form-info-with-config
(config-map2) form opt)))))
(defn check-form
"Check a single form with an optional expected type.
Intended to be called from Clojure. For evaluation at the ClojureScript
REPL see cf."
[form expected expected-provided? opt]
(with-bindings (ana/default-thread-bindings)
(maybe-with-analyzer-bindings opt
(fn []
(ucljs/with-cljs-typed-env
(comp/with-core-cljs
nil
#(chk-form2/check-form*-with-config
(config-map2) form expected expected-provided? opt)))))))
| null | https://raw.githubusercontent.com/typedclojure/typedclojure/b67795220d68f4f8282485dfa6fb3f6444d7b1ae/typed/cljs.checker/src/typed/cljs/checker/check_form.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
:runtime-check-expr rt-chk/runtime-check-expr
:runtime-infer-expr (fn [& args]
(apply @runtime-infer-expr args)) | Copyright ( c ) , contributors .
(ns typed.cljs.checker.check-form
(:require [cljs.compiler :as comp]
[cljs.env :as env]
[clojure.core.typed.ast-utils :as ast-u]
[clojure.core.typed.current-impl :as impl]
[typed.cljc.checker.check-form :as chk-form2]
TODO untested
[typed.cljs.analyzer :as ana]
[typed.cljs.checker.check :as chk-cljs]
[typed.cljs.checker.util :as ucljs]))
(defn config-map2 []
{:impl impl/clojurescript
:check-top-level chk-cljs/check-top-level
:unparse-ns (ucljs/cljs-ns)
:eval-out-ast (fn eval-out-ast
([ast] (eval-out-ast ast {}))
([ast opts] (assert nil "TODO eval cljs") nil #_(ana-clj/eval-ast ast opts)))
:custom-expansions? true
:emit-form ast-u/emit-form-fn
:check-form-info chk-form2/check-form-info
:check-form* chk-form2/check-form*
})
(defn maybe-with-analyzer-bindings [{:keys [skip-cljs-analyzer-bindings] :as _opt} f]
(if skip-cljs-analyzer-bindings
(f)
(ucljs/with-analyzer-bindings* (ucljs/cljs-ns) "NO_FILE" f)))
(defn check-form-info
[form & {:as opt}]
(with-bindings (ana/default-thread-bindings)
(maybe-with-analyzer-bindings opt
(fn []
(chk-form2/check-form-info-with-config
(config-map2) form opt)))))
(defn check-form
"Check a single form with an optional expected type.
Intended to be called from Clojure. For evaluation at the ClojureScript
REPL see cf."
[form expected expected-provided? opt]
(with-bindings (ana/default-thread-bindings)
(maybe-with-analyzer-bindings opt
(fn []
(ucljs/with-cljs-typed-env
(comp/with-core-cljs
nil
#(chk-form2/check-form*-with-config
(config-map2) form expected expected-provided? opt)))))))
|
f078203127fe2490a97d117141519a18ae7cf04fb85827eae7c2fe70b1694feb | jrh13/hol-light | gcdrecurrence.ml | (* ========================================================================= *)
(* Some divisibility properties of certain linear integer recurrences. *)
(* ========================================================================= *)
needs "Library/prime.ml";;
needs "Library/integer.ml";;
prioritize_int();;
(* ------------------------------------------------------------------------- *)
(* A customized induction principle. *)
(* ------------------------------------------------------------------------- *)
let INDUCT_SPECIAL = prove
(`!P. (!n. P 0 n) /\
(!m n. P m n <=> P n m) /\
(!m n. P m n ==> P n (m + n))
==> !m n. P m n`,
GEN_TAC THEN STRIP_TAC THEN
REPEAT GEN_TAC THEN WF_INDUCT_TAC `m + n:num` THEN
ASM_CASES_TAC `m = 0` THENL [ASM_MESON_TAC[]; ALL_TAC] THEN
ASM_CASES_TAC `n = 0` THENL [ASM_MESON_TAC[]; ALL_TAC] THEN
DISJ_CASES_THEN MP_TAC (ARITH_RULE `m <= n:num \/ n <= m`) THEN
REWRITE_TAC[LE_EXISTS] THEN ONCE_REWRITE_TAC[ADD_SYM] THEN
DISCH_THEN(X_CHOOSE_THEN `p:num` SUBST_ALL_TAC) THENL
[ALL_TAC; ASM (GEN_REWRITE_TAC I) []] THEN
MATCH_MP_TAC(ASSUME `!m n:num. P m n ==> P n (m + n)`) THEN
FIRST_X_ASSUM MATCH_MP_TAC THEN ASM_ARITH_TAC);;
(* ------------------------------------------------------------------------- *)
(* The main results; to literally apply integer gcd we need nonnegativity. *)
(* ------------------------------------------------------------------------- *)
let INT_DIVISORS_RECURRENCE = prove
(`!G a b. G(0) = &0 /\ G(1) = &1 /\
coprime(a,b) /\ (!n. G(n + 2) = a * G(n + 1) + b * G(n))
==> !d m n. d divides (G m) /\ d divides (G n) <=>
d divides G(gcd(m,n))`,
REPEAT GEN_TAC THEN STRIP_TAC THEN
SUBGOAL_THEN `!n. coprime(G(n + 1),b)` ASSUME_TAC THENL
[INDUCT_TAC THEN ASM_REWRITE_TAC[ARITH; ARITH_RULE `SUC n + 1 = n + 2`] THEN
REPEAT(POP_ASSUM MP_TAC) THEN NUMBER_TAC;
ALL_TAC] THEN
SUBGOAL_THEN `!n. coprime(G(n + 1),G n)` ASSUME_TAC THENL
[INDUCT_TAC THENL [ASM_REWRITE_TAC[ARITH] THEN NUMBER_TAC; ALL_TAC] THEN
REPEAT(FIRST_X_ASSUM(ASSUME_TAC o SPEC `n:num`)) THEN
ASM_REWRITE_TAC[ADD1; ARITH_RULE `(n + 1) + 1 = n + 2`] THEN
REPEAT(POP_ASSUM MP_TAC) THEN INTEGER_TAC;
ALL_TAC] THEN
SUBGOAL_THEN `!m p. G(m + 1 + p) = G(m + 1) * G(p + 1) + b * G(m) * G(p)`
ASSUME_TAC THENL
[INDUCT_TAC THENL
[ASM_REWRITE_TAC[ADD_CLAUSES; ADD_AC] THEN INTEGER_TAC; ALL_TAC] THEN
ASM_REWRITE_TAC[ARITH_RULE `SUC m + 1 + p = (m + p) + 2`] THEN
ASM_REWRITE_TAC[ARITH_RULE `SUC m + 1 = m + 2`] THEN
ASM_REWRITE_TAC[ARITH_RULE `(m + p) + 1 = m + 1 + p`] THEN
INDUCT_TAC THEN ASM_REWRITE_TAC[ARITH; ADD_CLAUSES] THEN
ASM_REWRITE_TAC[ARITH_RULE `SUC(m + p) = m + 1 + p`] THEN
ASM_REWRITE_TAC[ARITH_RULE `SUC(m + 1) = m + 2`; ARITH] THEN
REWRITE_TAC[ADD1] THEN ARITH_TAC;
ALL_TAC] THEN
SUBGOAL_THEN `!m p:num. gcd(G(m + p),G m) = gcd(G m,G p)` ASSUME_TAC THENL
[INDUCT_TAC THEN
REWRITE_TAC[ADD_CLAUSES; EQT_INTRO(SPEC_ALL INT_GCD_SYM)] THEN
ASM_REWRITE_TAC[ADD1; ARITH_RULE `(m + p) + 1 = m + 1 + p`] THEN
GEN_TAC THEN SIMP_TAC[INT_GCD_POS; GSYM INT_DIVIDES_ANTISYM_POS] THEN
MP_TAC(SPEC `m:num` (ASSUME `!n. coprime(G(n + 1),b)`)) THEN
MP_TAC(SPEC `m:num` (ASSUME `!n. coprime(G(n + 1),G n)`)) THEN
INTEGER_TAC;
ALL_TAC] THEN
GEN_TAC THEN MATCH_MP_TAC INDUCT_SPECIAL THEN REPEAT CONJ_TAC THENL
[ASM_REWRITE_TAC[GCD_0; INT_DIVIDES_0]; MESON_TAC[GCD_SYM]; ALL_TAC] THEN
ASM_MESON_TAC[GCD_ADD; INT_DIVIDES_GCD; INT_GCD_SYM; ADD_SYM; GCD_SYM]);;
let INT_GCD_RECURRENCE = prove
(`!G a b. G(0) = &0 /\ G(1) = &1 /\
coprime(a,b) /\ (!n. G(n + 2) = a * G(n + 1) + b * G(n)) /\
(!n. &0 <= G n)
==> !m n. gcd(G m,G n) = G(gcd(m,n))`,
REPEAT GEN_TAC THEN DISCH_TAC THEN
ASM_SIMP_TAC[GSYM INT_DIVIDES_ANTISYM_POS; INT_GCD_POS] THEN
REWRITE_TAC[INT_DIVIDES_ANTISYM_DIVISORS; INT_DIVIDES_GCD] THEN
ASM_MESON_TAC[INT_DIVISORS_RECURRENCE]);;
(* ------------------------------------------------------------------------- *)
(* Natural number variants of the same results. *)
(* ------------------------------------------------------------------------- *)
(* Natural-number version of INT_GCD_RECURRENCE, obtained by injecting
   the recurrence into the integers via `&` and transferring back. *)
let GCD_RECURRENCE = prove
(`!G a b. G(0) = 0 /\ G(1) = 1 /\
coprime(a,b) /\ (!n. G(n + 2) = a * G(n + 1) + b * G(n))
==> !m n. gcd(G m,G n) = G(gcd(m,n))`,
REPEAT STRIP_TAC THEN
MP_TAC(SPECL [`& o (G:num->num)`; `&a:int`; `&b:int`]
INT_GCD_RECURRENCE) THEN
ASM_REWRITE_TAC[o_THM; GSYM INT_OF_NUM_ADD; GSYM INT_OF_NUM_MUL] THEN
ASM_SIMP_TAC[GSYM num_coprime; INT_POS; GSYM NUM_GCD; INT_OF_NUM_EQ]);;
(* Common-divisor form over num: d divides both G m and G n iff it
   divides G(gcd(m,n)); immediate from GCD_RECURRENCE. *)
let DIVISORS_RECURRENCE = prove
(`!G a b. G(0) = 0 /\ G(1) = 1 /\
coprime(a,b) /\ (!n. G(n + 2) = a * G(n + 1) + b * G(n))
==> !d m n. d divides (G m) /\ d divides (G n) <=>
d divides G(gcd(m,n))`,
REWRITE_TAC[GSYM DIVIDES_GCD] THEN MESON_TAC[DIVISORS_EQ; GCD_RECURRENCE]);;
(* ------------------------------------------------------------------------- *)
(* Application 1: Mersenne numbers. *)
(* ------------------------------------------------------------------------- *)
(* Mersenne numbers satisfy the recurrence M(n+2) = 3*M(n+1) - 2*M(n)
   with coprime coefficients, so GCD_RECURRENCE applies:
   gcd(2^m - 1,2^n - 1) = 2^gcd(m,n) - 1. *)
let GCD_MERSENNE = prove
(`!m n. gcd(2 EXP m - 1,2 EXP n - 1) = 2 EXP (gcd(m,n)) - 1`,
SIMP_TAC[GSYM INT_OF_NUM_EQ; NUM_GCD; GSYM INT_OF_NUM_SUB;
GSYM INT_OF_NUM_POW; EXP_LT_0; ARITH;
ARITH_RULE `1 <= n <=> 0 < n`] THEN
MATCH_MP_TAC INT_GCD_RECURRENCE THEN
MAP_EVERY EXISTS_TAC [`&3`; `-- &2`] THEN
REWRITE_TAC[INT_POW_ADD; INT_LE_SUB_LADD] THEN
CONV_TAC INT_REDUCE_CONV THEN REPEAT CONJ_TAC THENL
[REWRITE_TAC[GSYM(INT_REDUCE_CONV `&2 * &2 - &1`)] THEN
SPEC_TAC(`&2`,`t:int`) THEN INTEGER_TAC;
INT_ARITH_TAC;
GEN_TAC THEN MATCH_MP_TAC INT_POW_LE_1 THEN INT_ARITH_TAC]);;
(* Divisibility of Mersenne numbers mirrors divisibility of exponents:
   (2^m - 1) divides (2^n - 1) <=> m divides n.  From GCD_MERSENNE. *)
let DIVIDES_MERSENNE = prove
(`!m n. (2 EXP m - 1) divides (2 EXP n - 1) <=> m divides n`,
REPEAT GEN_TAC THEN
REWRITE_TAC[DIVIDES_GCD_LEFT; GCD_MERSENNE] THEN
SIMP_TAC[EXP_EQ_0; EQ_EXP; ARITH_EQ; ARITH_RULE
`~(x = 0) /\ ~(y = 0) ==> (x - 1 = y - 1 <=> x = y)`]);;
(* ------------------------------------------------------------------------- *)
(* Application 2: the Fibonacci series. *)
(* ------------------------------------------------------------------------- *)
(* The Fibonacci sequence: fib 0 = 0, fib 1 = 1, fib(n+2) = fib(n+1) + fib n. *)
let fib = define
`fib 0 = 0 /\ fib 1 = 1 /\ !n. fib(n + 2) = fib(n + 1) + fib(n)`;;
(* Classic identity gcd(fib m,fib n) = fib(gcd(m,n)): fib is the
   recurrence with a = b = 1, and coprime(1,1) holds. *)
let GCD_FIB = prove
(`!m n. gcd(fib m,fib n) = fib(gcd(m,n))`,
MATCH_MP_TAC GCD_RECURRENCE THEN
REPEAT(EXISTS_TAC `1`) THEN REWRITE_TAC[fib; COPRIME_1] THEN ARITH_TAC);;
(* fib n = 0 only at n = 0; proved by a double case split on the first
   two constructors so the recurrence clause applies. *)
let FIB_EQ_0 = prove
(`!n. fib n = 0 <=> n = 0`,
MATCH_MP_TAC num_INDUCTION THEN REWRITE_TAC[fib] THEN
MATCH_MP_TAC num_INDUCTION THEN
REWRITE_TAC[fib; ARITH_RULE `SUC(SUC n) = n + 2`; ADD_EQ_0] THEN
SIMP_TAC[ADD1; ADD_EQ_0; ARITH_EQ] THEN
CONV_TAC NUM_REDUCE_CONV THEN REWRITE_TAC[fib; ARITH_EQ]);;
(* Monotonicity: m <= n ==> fib m <= fib n, via stepwise transitivity
   (it suffices to show fib n <= fib(SUC n)). *)
let FIB_INCREASES_LE = prove
(`!m n. m <= n ==> fib m <= fib n`,
MATCH_MP_TAC TRANSITIVE_STEPWISE_LE THEN
REWRITE_TAC[LE_REFL; LE_TRANS] THEN
MATCH_MP_TAC num_INDUCTION THEN REWRITE_TAC[fib; ARITH] THEN
REWRITE_TAC[ADD1; fib; ARITH_RULE `(n + 1) + 1 = n + 2`] THEN
ARITH_TAC);;
(* Strict monotonicity from index 2 onwards: 2 <= m /\ m < n ==>
   fib m < fib n (fails below 2 since fib 1 = fib 2 = 1). *)
let FIB_INCREASES_LT = prove
(`!m n. 2 <= m /\ m < n ==> fib m < fib n`,
INDUCT_TAC THEN REWRITE_TAC[ARITH] THEN
REPEAT STRIP_TAC THEN TRANS_TAC LTE_TRANS `fib(m + 2)` THEN
ASM_SIMP_TAC[FIB_INCREASES_LE; ARITH_RULE `m + 2 <= n <=> SUC m < n`] THEN
REWRITE_TAC[fib; ADD1; ARITH_RULE `m < m + n <=> ~(n = 0)`; FIB_EQ_0] THEN
ASM_ARITH_TAC);;
(* fib n = 1 exactly at n = 1 and n = 2. *)
let FIB_EQ_1 = prove
(`!n. fib n = 1 <=> n = 1 \/ n = 2`,
MATCH_MP_TAC num_INDUCTION THEN REWRITE_TAC[fib; ARITH] THEN
MATCH_MP_TAC num_INDUCTION THEN REWRITE_TAC[fib; ARITH] THEN
REWRITE_TAC[fib; ARITH_RULE `SUC(SUC n) = n + 2`] THEN
REWRITE_TAC[FIB_EQ_0; ADD_EQ_0; ARITH; ARITH_RULE
`m + n = 1 <=> m = 0 /\ n = 1 \/ m = 1 /\ n = 0`] THEN
ARITH_TAC);;
(* Exact divisibility law: fib m divides fib n iff m divides n, except
   for the degenerate cases n = 0 (fib 0 = 0 is divisible by anything)
   and m = 2 (fib 2 = 1 divides everything).  Uses GCD_FIB plus the
   monotonicity facts to rule out gcd(m,n) < m. *)
let DIVIDES_FIB = prove
(`!m n. (fib m) divides (fib n) <=> m divides n \/ n = 0 \/ m = 2`,
REPEAT GEN_TAC THEN REWRITE_TAC[DIVIDES_GCD_LEFT; GCD_FIB] THEN
MP_TAC(SPECL [`gcd(m:num,n)`; `m:num`] DIVIDES_LE) THEN REWRITE_TAC[GCD] THEN
ASM_CASES_TAC `m = 0` THEN ASM_REWRITE_TAC[GCD_0; fib; FIB_EQ_0; ARITH] THEN
ASM_CASES_TAC `n = 0` THEN ASM_REWRITE_TAC[GCD_0] THEN
ASM_CASES_TAC `gcd(m:num,n) = m` THEN ASM_REWRITE_TAC[LE_LT] THEN
ASM_CASES_TAC `gcd(m:num,n) = 0` THENL
[ASM_MESON_TAC[GCD_ZERO]; ALL_TAC] THEN
ASM_CASES_TAC `m:num = n` THEN ASM_REWRITE_TAC[GCD_REFL; LT_REFL] THEN
ASM_CASES_TAC `2 <= gcd(m,n)` THENL
[MP_TAC(SPECL [`gcd(m:num,n)`; `m:num`] FIB_INCREASES_LT) THEN
ASM_ARITH_TAC;
ASM_CASES_TAC `gcd(m,n) = 1` THENL [ASM_REWRITE_TAC[]; ASM_ARITH_TAC] THEN
DISCH_TAC THEN CONV_TAC(LAND_CONV SYM_CONV) THEN
REWRITE_TAC[FIB_EQ_1; fib] THEN ASM_ARITH_TAC]);;
(* ------------------------------------------------------------------------- *)
(* Application 3: solutions of the Pell equation x^2 = (a^2 - 1) y^2 + 1.    *)
(* All solutions are of the form (pellx a n,pelly a n); see Examples/pell.ml *)
(* ------------------------------------------------------------------------- *)
(* x-component of the Pell solution sequence:
   pellx a 0 = 1, pellx a 1 = a, then the usual 2a/-1 recurrence. *)
let pellx = define
`(!a. pellx a 0 = 1) /\
(!a. pellx a 1 = a) /\
(!a n. pellx a (n + 2) = 2 * a * pellx a (n + 1) - pellx a n)`;;
(* y-component of the Pell solution sequence:
   pelly a 0 = 0, pelly a 1 = 1, same recurrence as pellx. *)
let pelly = define
`(!a. pelly a 0 = 0) /\
(!a. pelly a 1 = 1) /\
(!a n. pelly a (n + 2) = 2 * a * pelly a (n + 1) - pelly a (n))`;;
(* For a <> 0, pelly a is (weakly) increasing; needed so the natural-number
   subtraction in the recurrence never truncates. *)
let PELLY_INCREASES = prove
(`!a n. ~(a = 0) ==> pelly a n <= pelly a (n + 1)`,
GEN_TAC THEN REWRITE_TAC[RIGHT_FORALL_IMP_THM] THEN DISCH_TAC THEN
INDUCT_TAC THEN
ASM_SIMP_TAC[pelly; ARITH; LE_1; ADD1; ARITH_RULE `(n + 1) + 1 = n + 2`] THEN
TRANS_TAC LE_TRANS `2 * pelly a (n + 1) - pelly a n` THEN
CONJ_TAC THENL [ASM_ARITH_TAC; ALL_TAC] THEN
MATCH_MP_TAC(ARITH_RULE `a:num <= b ==> a - c <= b - c`) THEN
REWRITE_TAC[MULT_ASSOC; LE_MULT_RCANCEL] THEN ASM_ARITH_TAC);;
(* The gcd law also holds for the Pell y-sequence: over the integers it
   is the recurrence with coefficients 2a and -1, which are coprime, so
   INT_GCD_RECURRENCE applies after the usual num/int transfer. *)
let GCD_PELLY = prove
(`!a m n. ~(a = 0) ==> gcd(pelly a m,pelly a n) = pelly a (gcd(m,n))`,
GEN_TAC THEN REWRITE_TAC[RIGHT_FORALL_IMP_THM] THEN DISCH_TAC THEN
REWRITE_TAC[GSYM INT_OF_NUM_EQ; NUM_GCD] THEN
MATCH_MP_TAC INT_GCD_RECURRENCE THEN
MAP_EVERY EXISTS_TAC [`&2 * &a:int`; `-- &1:int`] THEN
REWRITE_TAC[pelly; INT_POS; INT_COPRIME_NEG; INT_COPRIME_1] THEN
GEN_TAC THEN REWRITE_TAC[INT_OF_NUM_MUL; MULT_ASSOC] THEN
REWRITE_TAC[INT_ARITH `a + -- &1 * b:int = a - b`] THEN
MATCH_MP_TAC(GSYM INT_OF_NUM_SUB) THEN
TRANS_TAC LE_TRANS `1 * pelly a (n + 1)` THEN
REWRITE_TAC[LE_MULT_RCANCEL] THEN
ASM_SIMP_TAC[MULT_CLAUSES; PELLY_INCREASES] THEN ASM_ARITH_TAC);;
| null | https://raw.githubusercontent.com/jrh13/hol-light/ea44a4cacd238d7fa5a397f043f3e3321eb66543/Examples/gcdrecurrence.ml | ocaml | =========================================================================
Some divisibility properties of certain linear integer recurrences.
=========================================================================
-------------------------------------------------------------------------
A customized induction principle.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
The main results; to literally apply integer gcd we need nonnegativity.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Natural number variants of the same results.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Application 2: the Fibonacci series.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
------------------------------------------------------------------------- |
needs "Library/prime.ml";;
needs "Library/integer.ml";;
prioritize_int();;
(* Euclidean-style induction principle: to prove P m n for all pairs it
   suffices that P holds when the first argument is 0, that P is
   symmetric, and that P is preserved by replacing (m,n) with (n,m+n).
   Proved by well-founded induction on m + n. *)
let INDUCT_SPECIAL = prove
(`!P. (!n. P 0 n) /\
(!m n. P m n <=> P n m) /\
(!m n. P m n ==> P n (m + n))
==> !m n. P m n`,
GEN_TAC THEN STRIP_TAC THEN
REPEAT GEN_TAC THEN WF_INDUCT_TAC `m + n:num` THEN
ASM_CASES_TAC `m = 0` THENL [ASM_MESON_TAC[]; ALL_TAC] THEN
ASM_CASES_TAC `n = 0` THENL [ASM_MESON_TAC[]; ALL_TAC] THEN
DISJ_CASES_THEN MP_TAC (ARITH_RULE `m <= n:num \/ n <= m`) THEN
REWRITE_TAC[LE_EXISTS] THEN ONCE_REWRITE_TAC[ADD_SYM] THEN
DISCH_THEN(X_CHOOSE_THEN `p:num` SUBST_ALL_TAC) THENL
[ALL_TAC; ASM (GEN_REWRITE_TAC I) []] THEN
MATCH_MP_TAC(ASSUME `!m n:num. P m n ==> P n (m + n)`) THEN
FIRST_X_ASSUM MATCH_MP_TAC THEN ASM_ARITH_TAC);;
(* Core lemma: for an integer recurrence G with coprime coefficients,
   the common divisors of G m and G n are exactly the divisors of
   G(gcd(m,n)).  The proof establishes four intermediate facts and then
   closes with the Euclidean induction principle INDUCT_SPECIAL. *)
let INT_DIVISORS_RECURRENCE = prove
(`!G a b. G(0) = &0 /\ G(1) = &1 /\
coprime(a,b) /\ (!n. G(n + 2) = a * G(n + 1) + b * G(n))
==> !d m n. d divides (G m) /\ d divides (G n) <=>
d divides G(gcd(m,n))`,
REPEAT GEN_TAC THEN STRIP_TAC THEN
(* Step 1: successive terms are coprime to the coefficient b. *)
SUBGOAL_THEN `!n. coprime(G(n + 1),b)` ASSUME_TAC THENL
[INDUCT_TAC THEN ASM_REWRITE_TAC[ARITH; ARITH_RULE `SUC n + 1 = n + 2`] THEN
REPEAT(POP_ASSUM MP_TAC) THEN NUMBER_TAC;
ALL_TAC] THEN
(* Step 2: consecutive terms of the sequence are coprime. *)
SUBGOAL_THEN `!n. coprime(G(n + 1),G n)` ASSUME_TAC THENL
[INDUCT_TAC THENL [ASM_REWRITE_TAC[ARITH] THEN NUMBER_TAC; ALL_TAC] THEN
REPEAT(FIRST_X_ASSUM(ASSUME_TAC o SPEC `n:num`)) THEN
ASM_REWRITE_TAC[ADD1; ARITH_RULE `(n + 1) + 1 = n + 2`] THEN
REPEAT(POP_ASSUM MP_TAC) THEN INTEGER_TAC;
ALL_TAC] THEN
(* Step 3: the addition formula for the recurrence. *)
SUBGOAL_THEN `!m p. G(m + 1 + p) = G(m + 1) * G(p + 1) + b * G(m) * G(p)`
ASSUME_TAC THENL
[INDUCT_TAC THENL
[ASM_REWRITE_TAC[ADD_CLAUSES; ADD_AC] THEN INTEGER_TAC; ALL_TAC] THEN
ASM_REWRITE_TAC[ARITH_RULE `SUC m + 1 + p = (m + p) + 2`] THEN
ASM_REWRITE_TAC[ARITH_RULE `SUC m + 1 = m + 2`] THEN
ASM_REWRITE_TAC[ARITH_RULE `(m + p) + 1 = m + 1 + p`] THEN
INDUCT_TAC THEN ASM_REWRITE_TAC[ARITH; ADD_CLAUSES] THEN
ASM_REWRITE_TAC[ARITH_RULE `SUC(m + p) = m + 1 + p`] THEN
ASM_REWRITE_TAC[ARITH_RULE `SUC(m + 1) = m + 2`; ARITH] THEN
REWRITE_TAC[ADD1] THEN ARITH_TAC;
ALL_TAC] THEN
(* Step 4: the gcd absorbs an additive shift of the index. *)
SUBGOAL_THEN `!m p:num. gcd(G(m + p),G m) = gcd(G m,G p)` ASSUME_TAC THENL
[INDUCT_TAC THEN
REWRITE_TAC[ADD_CLAUSES; EQT_INTRO(SPEC_ALL INT_GCD_SYM)] THEN
ASM_REWRITE_TAC[ADD1; ARITH_RULE `(m + p) + 1 = m + 1 + p`] THEN
GEN_TAC THEN SIMP_TAC[INT_GCD_POS; GSYM INT_DIVIDES_ANTISYM_POS] THEN
MP_TAC(SPEC `m:num` (ASSUME `!n. coprime(G(n + 1),b)`)) THEN
MP_TAC(SPEC `m:num` (ASSUME `!n. coprime(G(n + 1),G n)`)) THEN
INTEGER_TAC;
ALL_TAC] THEN
(* Conclude by Euclidean induction on the index pair. *)
GEN_TAC THEN MATCH_MP_TAC INDUCT_SPECIAL THEN REPEAT CONJ_TAC THENL
[ASM_REWRITE_TAC[GCD_0; INT_DIVIDES_0]; MESON_TAC[GCD_SYM]; ALL_TAC] THEN
ASM_MESON_TAC[GCD_ADD; INT_DIVIDES_GCD; INT_GCD_SYM; ADD_SYM; GCD_SYM]);;
let INT_GCD_RECURRENCE = prove
(`!G a b. G(0) = &0 /\ G(1) = &1 /\
coprime(a,b) /\ (!n. G(n + 2) = a * G(n + 1) + b * G(n)) /\
(!n. &0 <= G n)
==> !m n. gcd(G m,G n) = G(gcd(m,n))`,
REPEAT GEN_TAC THEN DISCH_TAC THEN
ASM_SIMP_TAC[GSYM INT_DIVIDES_ANTISYM_POS; INT_GCD_POS] THEN
REWRITE_TAC[INT_DIVIDES_ANTISYM_DIVISORS; INT_DIVIDES_GCD] THEN
ASM_MESON_TAC[INT_DIVISORS_RECURRENCE]);;
let GCD_RECURRENCE = prove
(`!G a b. G(0) = 0 /\ G(1) = 1 /\
coprime(a,b) /\ (!n. G(n + 2) = a * G(n + 1) + b * G(n))
==> !m n. gcd(G m,G n) = G(gcd(m,n))`,
REPEAT STRIP_TAC THEN
MP_TAC(SPECL [`& o (G:num->num)`; `&a:int`; `&b:int`]
INT_GCD_RECURRENCE) THEN
ASM_REWRITE_TAC[o_THM; GSYM INT_OF_NUM_ADD; GSYM INT_OF_NUM_MUL] THEN
ASM_SIMP_TAC[GSYM num_coprime; INT_POS; GSYM NUM_GCD; INT_OF_NUM_EQ]);;
let DIVISORS_RECURRENCE = prove
(`!G a b. G(0) = 0 /\ G(1) = 1 /\
coprime(a,b) /\ (!n. G(n + 2) = a * G(n + 1) + b * G(n))
==> !d m n. d divides (G m) /\ d divides (G n) <=>
d divides G(gcd(m,n))`,
REWRITE_TAC[GSYM DIVIDES_GCD] THEN MESON_TAC[DIVISORS_EQ; GCD_RECURRENCE]);;
(* Application 1: Mersenne numbers. *)
let GCD_MERSENNE = prove
(`!m n. gcd(2 EXP m - 1,2 EXP n - 1) = 2 EXP (gcd(m,n)) - 1`,
SIMP_TAC[GSYM INT_OF_NUM_EQ; NUM_GCD; GSYM INT_OF_NUM_SUB;
GSYM INT_OF_NUM_POW; EXP_LT_0; ARITH;
ARITH_RULE `1 <= n <=> 0 < n`] THEN
MATCH_MP_TAC INT_GCD_RECURRENCE THEN
MAP_EVERY EXISTS_TAC [`&3`; `-- &2`] THEN
REWRITE_TAC[INT_POW_ADD; INT_LE_SUB_LADD] THEN
CONV_TAC INT_REDUCE_CONV THEN REPEAT CONJ_TAC THENL
[REWRITE_TAC[GSYM(INT_REDUCE_CONV `&2 * &2 - &1`)] THEN
SPEC_TAC(`&2`,`t:int`) THEN INTEGER_TAC;
INT_ARITH_TAC;
GEN_TAC THEN MATCH_MP_TAC INT_POW_LE_1 THEN INT_ARITH_TAC]);;
let DIVIDES_MERSENNE = prove
(`!m n. (2 EXP m - 1) divides (2 EXP n - 1) <=> m divides n`,
REPEAT GEN_TAC THEN
REWRITE_TAC[DIVIDES_GCD_LEFT; GCD_MERSENNE] THEN
SIMP_TAC[EXP_EQ_0; EQ_EXP; ARITH_EQ; ARITH_RULE
`~(x = 0) /\ ~(y = 0) ==> (x - 1 = y - 1 <=> x = y)`]);;
let fib = define
`fib 0 = 0 /\ fib 1 = 1 /\ !n. fib(n + 2) = fib(n + 1) + fib(n)`;;
let GCD_FIB = prove
(`!m n. gcd(fib m,fib n) = fib(gcd(m,n))`,
MATCH_MP_TAC GCD_RECURRENCE THEN
REPEAT(EXISTS_TAC `1`) THEN REWRITE_TAC[fib; COPRIME_1] THEN ARITH_TAC);;
let FIB_EQ_0 = prove
(`!n. fib n = 0 <=> n = 0`,
MATCH_MP_TAC num_INDUCTION THEN REWRITE_TAC[fib] THEN
MATCH_MP_TAC num_INDUCTION THEN
REWRITE_TAC[fib; ARITH_RULE `SUC(SUC n) = n + 2`; ADD_EQ_0] THEN
SIMP_TAC[ADD1; ADD_EQ_0; ARITH_EQ] THEN
CONV_TAC NUM_REDUCE_CONV THEN REWRITE_TAC[fib; ARITH_EQ]);;
let FIB_INCREASES_LE = prove
(`!m n. m <= n ==> fib m <= fib n`,
MATCH_MP_TAC TRANSITIVE_STEPWISE_LE THEN
REWRITE_TAC[LE_REFL; LE_TRANS] THEN
MATCH_MP_TAC num_INDUCTION THEN REWRITE_TAC[fib; ARITH] THEN
REWRITE_TAC[ADD1; fib; ARITH_RULE `(n + 1) + 1 = n + 2`] THEN
ARITH_TAC);;
let FIB_INCREASES_LT = prove
(`!m n. 2 <= m /\ m < n ==> fib m < fib n`,
INDUCT_TAC THEN REWRITE_TAC[ARITH] THEN
REPEAT STRIP_TAC THEN TRANS_TAC LTE_TRANS `fib(m + 2)` THEN
ASM_SIMP_TAC[FIB_INCREASES_LE; ARITH_RULE `m + 2 <= n <=> SUC m < n`] THEN
REWRITE_TAC[fib; ADD1; ARITH_RULE `m < m + n <=> ~(n = 0)`; FIB_EQ_0] THEN
ASM_ARITH_TAC);;
let FIB_EQ_1 = prove
(`!n. fib n = 1 <=> n = 1 \/ n = 2`,
MATCH_MP_TAC num_INDUCTION THEN REWRITE_TAC[fib; ARITH] THEN
MATCH_MP_TAC num_INDUCTION THEN REWRITE_TAC[fib; ARITH] THEN
REWRITE_TAC[fib; ARITH_RULE `SUC(SUC n) = n + 2`] THEN
REWRITE_TAC[FIB_EQ_0; ADD_EQ_0; ARITH; ARITH_RULE
`m + n = 1 <=> m = 0 /\ n = 1 \/ m = 1 /\ n = 0`] THEN
ARITH_TAC);;
let DIVIDES_FIB = prove
(`!m n. (fib m) divides (fib n) <=> m divides n \/ n = 0 \/ m = 2`,
REPEAT GEN_TAC THEN REWRITE_TAC[DIVIDES_GCD_LEFT; GCD_FIB] THEN
MP_TAC(SPECL [`gcd(m:num,n)`; `m:num`] DIVIDES_LE) THEN REWRITE_TAC[GCD] THEN
ASM_CASES_TAC `m = 0` THEN ASM_REWRITE_TAC[GCD_0; fib; FIB_EQ_0; ARITH] THEN
ASM_CASES_TAC `n = 0` THEN ASM_REWRITE_TAC[GCD_0] THEN
ASM_CASES_TAC `gcd(m:num,n) = m` THEN ASM_REWRITE_TAC[LE_LT] THEN
ASM_CASES_TAC `gcd(m:num,n) = 0` THENL
[ASM_MESON_TAC[GCD_ZERO]; ALL_TAC] THEN
ASM_CASES_TAC `m:num = n` THEN ASM_REWRITE_TAC[GCD_REFL; LT_REFL] THEN
ASM_CASES_TAC `2 <= gcd(m,n)` THENL
[MP_TAC(SPECL [`gcd(m:num,n)`; `m:num`] FIB_INCREASES_LT) THEN
ASM_ARITH_TAC;
ASM_CASES_TAC `gcd(m,n) = 1` THENL [ASM_REWRITE_TAC[]; ASM_ARITH_TAC] THEN
DISCH_TAC THEN CONV_TAC(LAND_CONV SYM_CONV) THEN
REWRITE_TAC[FIB_EQ_1; fib] THEN ASM_ARITH_TAC]);;
(* Application 3: solutions of the Pell equation x^2 = (a^2 - 1) y^2 + 1.    *)
(* All solutions are of the form (pellx a n,pelly a n); see Examples/pell.ml *)
let pellx = define
`(!a. pellx a 0 = 1) /\
(!a. pellx a 1 = a) /\
(!a n. pellx a (n + 2) = 2 * a * pellx a (n + 1) - pellx a n)`;;
let pelly = define
`(!a. pelly a 0 = 0) /\
(!a. pelly a 1 = 1) /\
(!a n. pelly a (n + 2) = 2 * a * pelly a (n + 1) - pelly a (n))`;;
let PELLY_INCREASES = prove
(`!a n. ~(a = 0) ==> pelly a n <= pelly a (n + 1)`,
GEN_TAC THEN REWRITE_TAC[RIGHT_FORALL_IMP_THM] THEN DISCH_TAC THEN
INDUCT_TAC THEN
ASM_SIMP_TAC[pelly; ARITH; LE_1; ADD1; ARITH_RULE `(n + 1) + 1 = n + 2`] THEN
TRANS_TAC LE_TRANS `2 * pelly a (n + 1) - pelly a n` THEN
CONJ_TAC THENL [ASM_ARITH_TAC; ALL_TAC] THEN
MATCH_MP_TAC(ARITH_RULE `a:num <= b ==> a - c <= b - c`) THEN
REWRITE_TAC[MULT_ASSOC; LE_MULT_RCANCEL] THEN ASM_ARITH_TAC);;
let GCD_PELLY = prove
(`!a m n. ~(a = 0) ==> gcd(pelly a m,pelly a n) = pelly a (gcd(m,n))`,
GEN_TAC THEN REWRITE_TAC[RIGHT_FORALL_IMP_THM] THEN DISCH_TAC THEN
REWRITE_TAC[GSYM INT_OF_NUM_EQ; NUM_GCD] THEN
MATCH_MP_TAC INT_GCD_RECURRENCE THEN
MAP_EVERY EXISTS_TAC [`&2 * &a:int`; `-- &1:int`] THEN
REWRITE_TAC[pelly; INT_POS; INT_COPRIME_NEG; INT_COPRIME_1] THEN
GEN_TAC THEN REWRITE_TAC[INT_OF_NUM_MUL; MULT_ASSOC] THEN
REWRITE_TAC[INT_ARITH `a + -- &1 * b:int = a - b`] THEN
MATCH_MP_TAC(GSYM INT_OF_NUM_SUB) THEN
TRANS_TAC LE_TRANS `1 * pelly a (n + 1)` THEN
REWRITE_TAC[LE_MULT_RCANCEL] THEN
ASM_SIMP_TAC[MULT_CLAUSES; PELLY_INCREASES] THEN ASM_ARITH_TAC);;
|
bc7235d8c296ed13c38b0c6d026ca84c5f5bcb2124f6e35e67831e118e144198 | liquidz/antq | git.clj | (ns antq.util.git
(:require
[antq.constant :as const]
[antq.log :as log]
[antq.util.async :as u.async]
[clojure.java.shell :as sh]
[clojure.string :as str]))
(defn- extract-tags
  "Parses a `git ls-remote` shell response into a seq of plain tag names.
  Keeps only refs under refs/tags, strips the refs/tags/ prefix and drops
  the dereferenced annotated-tag entries (suffix `^{}`).  Returns nil when
  the response has no :out."
  [ls-remote-resp]
  (when-let [out (:out ls-remote-resp)]
    (for [line (str/split-lines out)
          :let [ref-name (second (str/split line #"\t" 2))]
          :when (and ref-name
                     (str/starts-with? ref-name "refs/tags")
                     ;; annotated tags appear twice; skip the `^{}` form
                     (not (str/ends-with? ref-name "^{}")))]
      (str/replace ref-name #"^refs/tags/" ""))))
(defn- same-tag?
  "True when `ref-name` denotes the same tag as `s`: either the exact ref
  or the dereferenced annotated-tag form `s^{}`."
  [ref-name s]
  (contains? #{s (str s "^{}")} ref-name))
(defn- extract-sha-by-ref-name
  "Finds the commit SHA bound to `target-ref-name` in a `git ls-remote`
  response.  When both a lightweight and an annotated (`^{}`) entry match,
  the annotated one wins because it sorts last by ref name.  Returns nil
  when there is no :out or no matching ref."
  [ls-remote-resp target-ref-name]
  (when-let [out (:out ls-remote-resp)]
    (let [matching (for [line (str/split-lines out)
                         :let [[sha ref-name] (str/split line #"\t" 2)]
                         :when (same-tag? ref-name target-ref-name)]
                     [sha ref-name])]
      (when (seq matching)
        (->> matching
             (sort-by second)
             (last)
             (first))))))
(defn- ls-remote*
  "Runs `git ls-remote url`, retrying up to const/retry-limit times on
  'Operation timed out' failures.  Returns the full clojure.java.shell
  response map on success, nil on a non-timeout failure or when the retry
  budget is exhausted."
  [url]
  (loop [i 0]
    (when (< i const/retry-limit)
      (let [{:keys [exit err] :as res} (sh/sh "git" "ls-remote" url)]
        (cond
          ;; success: hand back the whole response map
          (= 0 exit)
          res

          ;; hard failure (not a timeout): warn and give up with nil
          (and (< 0 exit) (not (str/includes? err "Operation timed out")))
          (do (log/warning (str "git ls-remote failed on: " url))
              nil)

          ;; timeout: warn and retry within the retry budget
          :else
          (do (log/warning "git ls-remote timed out, retrying")
              (recur (inc i))))))))
;; ls-remote* wrapped with an overall wall-clock timeout.
(def ^:private ls-remote*-with-timeout
  (u.async/fn-with-timeout
   ls-remote*
   const/ls-remote-timeout-msec))

;; Memoized entry point: remote refs are fetched at most once per URL
;; for the lifetime of the process.
(def ^:private ls-remote
  (memoize ls-remote*-with-timeout))
;; Fetches and parses the tag names of a remote repository.
(defn- tags-by-ls-remote*
  [url]
  (-> (ls-remote url)
      (extract-tags)))

;; Memoized: tag listings are stable within one run.
(def tags-by-ls-remote
  (memoize tags-by-ls-remote*))
;; SHA of the remote HEAD ref, or nil when it cannot be resolved.
(defn- head-sha-by-ls-remote*
  [url]
  (-> (ls-remote url)
      (extract-sha-by-ref-name "HEAD")))

;; SHA of a specific remote tag (annotated form preferred), or nil.
(defn- tag-sha-by-ls-remote*
  [url tag-name]
  (-> (ls-remote url)
      (extract-sha-by-ref-name (str "refs/tags/" tag-name))))

;; Memoized public entry points.
(def head-sha-by-ls-remote
  (memoize head-sha-by-ls-remote*))

(def tag-sha-by-ls-remote
  (memoize tag-sha-by-ls-remote*))
(defn find-tag
  "Looks up a tag name on the remote at `url` that matches `s`.
  An exact match is preferred; otherwise the first tag containing `s` as
  a substring is returned.  Returns nil when either argument is blank or
  nothing matches."
  [url s]
  (when (and (seq url) (seq s))
    (let [tags (tags-by-ls-remote url)
          exact-match (first (filter #(= % s) tags))
          partial-match (first (filter #(str/includes? % s) tags))]
      (or exact-match partial-match))))
| null | https://raw.githubusercontent.com/liquidz/antq/ca8472b28702f5e568492001bc476fb09e5b2e6b/src/antq/util/git.clj | clojure | Remove annotated tags
Annotated tag
NOTE: The annotated tag have priority
If there is an exact match, it is preferred | (ns antq.util.git
(:require
[antq.constant :as const]
[antq.log :as log]
[antq.util.async :as u.async]
[clojure.java.shell :as sh]
[clojure.string :as str]))
(defn- extract-tags
[ls-remote-resp]
(some->> (:out ls-remote-resp)
(str/split-lines)
(keep #(second (str/split % #"\t" 2)))
(filter #(= 0 (.indexOf ^String % "refs/tags")))
(map #(str/replace % #"^refs/tags/" ""))
(remove #(str/ends-with? % "^{}"))))
(defn- same-tag?
[ref-name s]
(or (= ref-name s)
(= ref-name (str s "^{}"))))
(defn- extract-sha-by-ref-name
[ls-remote-resp target-ref-name]
(some->> (:out ls-remote-resp)
(str/split-lines)
(map #(str/split % #"\t" 2))
(filter (fn [[_ ref-name]]
(same-tag? ref-name target-ref-name)))
(seq)
(sort-by second)
(last)
(first)))
(defn- ls-remote*
[url]
(loop [i 0]
(when (< i const/retry-limit)
(let [{:keys [exit err] :as res} (sh/sh "git" "ls-remote" url)]
(cond
(= 0 exit)
res
(and (< 0 exit) (not (str/includes? err "Operation timed out")))
(do (log/warning (str "git ls-remote failed on: " url))
nil)
:else
(do (log/warning "git ls-remote timed out, retrying")
(recur (inc i))))))))
(def ^:private ls-remote*-with-timeout
(u.async/fn-with-timeout
ls-remote*
const/ls-remote-timeout-msec))
(def ^:private ls-remote
(memoize ls-remote*-with-timeout))
(defn- tags-by-ls-remote*
[url]
(-> (ls-remote url)
(extract-tags)))
(def tags-by-ls-remote
(memoize tags-by-ls-remote*))
(defn- head-sha-by-ls-remote*
[url]
(-> (ls-remote url)
(extract-sha-by-ref-name "HEAD")))
(defn- tag-sha-by-ls-remote*
[url tag-name]
(-> (ls-remote url)
(extract-sha-by-ref-name (str "refs/tags/" tag-name))))
(def head-sha-by-ls-remote
(memoize head-sha-by-ls-remote*))
(def tag-sha-by-ls-remote
(memoize tag-sha-by-ls-remote*))
(defn find-tag
[url s]
(when (and (seq url) (seq s))
(let [tags (tags-by-ls-remote url)]
(or
(some #(and (= % s) %) tags)
(some #(and (str/includes? % s) %) tags)))))
|
549e6d2a1c1f97fa7b0d3ba2c586d55a87e3cfad5ebd20336c66a4137cc14175 | 2600hz/kazoo | cb_search.erl | %%%-----------------------------------------------------------------------------
%%% @copyright (C) 2011-2020, 2600Hz
%%% @doc Crossbar API for search.
%%% @author
%%%
%%% This Source Code Form is subject to the terms of the Mozilla Public
%%% License, v. 2.0. If a copy of the MPL was not distributed with this
%%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%%
%%% @end
%%%-----------------------------------------------------------------------------
-module(cb_search).
-export([init/0
,allowed_methods/0 ,allowed_methods/1
,resource_exists/0 ,resource_exists/1
,validate/1 ,validate/2
,authorize/1, authorize/2
]).
-include("crossbar.hrl").
-define(QUERY_TPL, <<"search/search_by_">>).
-define(MULTI, <<"multi">>).
-define(SEARCHABLE, [<<"account">>, <<"user">>, <<"callflow">>, <<"device">>]).
-define(ACCOUNT_QUERY_OPTIONS, [<<"name">>, <<"number">>, <<"name_and_number">>]).
-define(ACCOUNTS_QUERY_OPTIONS, [<<"name">>, <<"realm">>, <<"id">>]).
%%%=============================================================================
%%% API
%%%=============================================================================
%%------------------------------------------------------------------------------
%% @doc Initializes the bindings this module will respond to.
%% @end
%%------------------------------------------------------------------------------
%% Registers this module's callbacks with the crossbar binding server so
%% that requests against /search are routed here.
-spec init() -> 'ok'.
init() ->
    Bindings = [{<<"*.allowed_methods.search">>, 'allowed_methods'}
               ,{<<"*.resource_exists.search">>, 'resource_exists'}
               ,{<<"*.authorize.search">>, 'authorize'}
               ,{<<"*.validate.search">>, 'validate'}
               ],
    lists:foreach(fun({Binding, Callback}) ->
                          crossbar_bindings:bind(Binding, ?MODULE, Callback)
                  end
                 ,Bindings
                 ).
%%------------------------------------------------------------------------------
%% @doc Given the path tokens related to this module, what HTTP methods are
%% going to be responded to.
%% @end
%%------------------------------------------------------------------------------
%% Only GET is supported, both on /search and /search/multi.
-spec allowed_methods() -> http_methods().
allowed_methods() ->
    [?HTTP_GET].

-spec allowed_methods(path_token()) -> http_methods().
allowed_methods(?MULTI) ->
    [?HTTP_GET].
%%------------------------------------------------------------------------------
%% @doc Does the path point to a valid resource.
%% For example:
%%
%% ```
%% /skels => []
%% /skels/foo => [<<"foo">>]
%% /skels/foo/bar => [<<"foo">>, <<"bar">>]
%% '''
%% @end
%%------------------------------------------------------------------------------
%% /search and /search/multi are the only valid paths.
-spec resource_exists() -> 'true'.
resource_exists() -> 'true'.

-spec resource_exists(path_token()) -> 'true'.
resource_exists(?MULTI) -> 'true'.
%%------------------------------------------------------------------------------
%% @doc
%% @end
%%------------------------------------------------------------------------------
%% Any authenticated request (one carrying an auth account id) may search.
-spec authorize(cb_context:context()) -> boolean().
authorize(Context) ->
    cb_context:auth_account_id(Context) =/= 'undefined'.

-spec authorize(cb_context:context(), path_token()) -> boolean().
authorize(Context, ?MULTI) ->
    cb_context:auth_account_id(Context) =/= 'undefined'.
%%------------------------------------------------------------------------------
%% @doc Check the request (request body, query string params, path tokens, etc)
%% and load necessary information.
%% /skels might load a list of skel objects
%% /skels/123 might load the skel object 123
%% Generally, use crossbar_doc to manipulate the cb_context{} record
%% @end
%%------------------------------------------------------------------------------
%% Entry points for request validation.  The `t` query-string parameter
%% selects the document type being searched (see ?SEARCHABLE).
-spec validate(cb_context:context()) -> cb_context:context().
validate(Context) ->
    Type = cb_context:req_value(Context, <<"t">>),
    validate_search(Context, Type).

-spec validate(cb_context:context(), path_token()) -> cb_context:context().
validate(Context, ?MULTI) ->
    Type = cb_context:req_value(Context, <<"t">>),
    validate_multi(Context, Type).
%%%=============================================================================
%%% Internal functions
%%%=============================================================================
%%------------------------------------------------------------------------------
%% @doc
%% @end
%%------------------------------------------------------------------------------
%% Validates the document type.  A missing `t` is a validation error;
%% `account` searches are redirected to the global accounts database.
-spec validate_search(cb_context:context(), kz_term:api_binary()) -> cb_context:context().
validate_search(Context, 'undefined') ->
    Message = kz_json:from_list([{<<"message">>, <<"search needs a document type to search on">>}
                                ,{<<"target">>, ?SEARCHABLE}
                                ]),
    lager:debug("'t' required"),
    cb_context:add_validation_error(<<"t">>, <<"required">>, Message, Context);
validate_search(Context, <<"account">>=Type) ->
    lager:debug("validating search on accounts"),
    %% account documents live in the shared accounts DB, not the
    %% requestor's own account DB
    validate_search(cb_context:set_db_name(Context, ?KZ_ACCOUNTS_DB)
                   ,Type
                   ,cb_context:req_value(Context, <<"q">>)
                   );
validate_search(Context, Type) ->
    validate_search(Context, Type, cb_context:req_value(Context, <<"q">>)).
%% Validates the `q` view name against the views available in the target
%% database, then proceeds to validate the search value `v`.
-spec validate_search(cb_context:context(), kz_term:ne_binary(), kz_term:api_binary()) ->
          cb_context:context().
validate_search(Context, _Type, 'undefined') ->
    lager:debug("'q' required"),
    NeedViewMsg = kz_json:from_list([{<<"message">>, <<"search needs a view to search in">>}
                                    ,{<<"target">>, available_query_options(cb_context:db_name(Context))}
                                    ]),
    cb_context:add_validation_error(<<"q">>, <<"required">>, NeedViewMsg, Context);
validate_search(Context, Type, Query) ->
    Context1 = validate_query(Context, Query),
    case cb_context:resp_status(Context1) of
        'success' ->
            validate_search(Context, Type, Query, cb_context:req_value(Context, <<"v">>));
        _Status ->
            Context1
    end.
%% Validates the search value `v` and runs the search.  A non-binary `v`
%% is coerced: truthy values search with an empty prefix (match all),
%% anything else is treated as missing.
-spec validate_search(cb_context:context(), kz_term:ne_binary(), kz_term:ne_binary(), kz_term:api_binary()) ->
          cb_context:context().
validate_search(Context, _Type, _Query, 'undefined') ->
    Message = kz_json:from_list([{<<"message">>, <<"search needs a value to search for">>}]),
    cb_context:add_validation_error(<<"v">>, <<"required">>, Message, Context);
validate_search(Context, Type, Query, <<_/binary>> = Value) ->
    search(Context, Type, Query, Value, []);
validate_search(Context, Type, Query, Value) ->
    case kz_term:is_true(Value) of
        'true' -> validate_search(Context, Type, Query, <<>>);
        'false' -> validate_search(Context, Type, Query, 'undefined')
    end.
%%------------------------------------------------------------------------------
%% @doc
%% @end
%%------------------------------------------------------------------------------
%% Multi-search counterpart of validate_search/2: the whole query string
%% (its `by_*` keys) describes the set of searches to run.
-spec validate_multi(cb_context:context(), kz_term:api_binary()) -> cb_context:context().
validate_multi(Context, 'undefined') ->
    Message = kz_json:from_list([{<<"message">>, <<"Search needs a document type to search on">>}
                                ,{<<"target">>, ?SEARCHABLE}
                                ]),
    cb_context:add_validation_error(<<"t">>, <<"required">>, Message, Context);
validate_multi(Context, <<"account">>=Type) ->
    lager:debug("validating search on accounts"),
    %% account documents live in the shared accounts DB
    validate_multi(cb_context:set_db_name(Context, ?KZ_ACCOUNTS_DB)
                  ,Type
                  ,kz_json:to_proplist(cb_context:query_string(Context))
                  );
validate_multi(Context, Type) ->
    validate_multi(Context, Type, kz_json:to_proplist(cb_context:query_string(Context))).
%% Validates every `by_*` query-string key, then runs the searches.
-spec validate_multi(cb_context:context(), kz_term:ne_binary(), kz_term:proplist()) -> cb_context:context().
validate_multi(Context, Type, Query) ->
    Context1 = validate_query(Context, Query),
    case cb_context:resp_status(Context1) of
        'success' -> multi_search(Context1, Type, Query);
        _Status -> Context1
    end.
%%------------------------------------------------------------------------------
%% @doc
%% @end
%%------------------------------------------------------------------------------
%% Checks a single view name (binary) or a whole query string (proplist)
%% against the views exposed by the target database's search design doc.
-spec validate_query(cb_context:context(), kz_term:proplist() | kz_term:ne_binary()) -> cb_context:context().
validate_query(Context, Query) ->
    QueryOptions = available_query_options(cb_context:db_name(Context)),
    validate_query(Context, QueryOptions, Query).
%% Proplist form: walks the query string, validating every `by_*` key and
%% ignoring any other key.  At least one valid `by_*` key must have been
%% seen (the context must already be 'success' when the list is empty),
%% otherwise a `multi` validation error is returned.
%% Binary form: accepts the name only if it is one of the available views.
-spec validate_query(cb_context:context(), kz_term:proplist(), kz_term:proplist() | kz_term:ne_binary()) -> cb_context:context().
validate_query(Context, Available, []) ->
    case cb_context:resp_status(Context) of
        'success' -> Context;
        _ ->
            lager:debug("multisearch has ~p available", [Available]),
            Message = kz_json:from_list([{<<"message">>, <<"multi search needs some values to search for">>}
                                        ,{<<"target">>, Available}
                                        ]),
            cb_context:add_validation_error(<<"multi">>, <<"enum">>, Message, Context)
    end;
validate_query(Context, Available, [{<<"by_", Query/binary>>, _}|Props]) ->
    Context1 = validate_query(Context, Available, Query),
    case cb_context:resp_status(Context1) of
        'success' ->
            lager:debug("query ~s is valid", [Query]),
            validate_query(Context1, Available, Props);
        _Status ->
            lager:debug("query ~s is not valid", [Query]),
            Context1
    end;
validate_query(Context, Available, [{Query, _}|Props]) ->
    %% non-by_* keys (paging, auth, etc.) are simply skipped
    lager:debug("ignoring query string ~s", [Query]),
    validate_query(Context, Available, Props);
validate_query(Context, Available, Query) when is_binary(Query) ->
    case lists:member(Query, Available) of
        'true' -> cb_context:set_resp_status(Context, 'success');
        'false' ->
            lager:debug("query ~s not allowed", [Query]),
            Message = kz_json:from_list([{<<"message">>, <<"value not found in enumerated list of values">>}
                                        ,{<<"cause">>, Query}
                                        ]),
            cb_context:add_validation_error(<<"q">>, <<"enum">>, Message, Context)
    end.
%%------------------------------------------------------------------------------
%% @doc
%% @end
%%------------------------------------------------------------------------------
%% Lists the view names defined by the database's search design doc,
%% with the `search_by_` prefix stripped.  Falls back to hard-coded
%% defaults when the design doc cannot be read (the accounts DB and
%% regular account DBs have different defaults).
-spec available_query_options(kz_term:api_ne_binary()) -> kz_term:ne_binaries().
available_query_options(AccountDb) ->
    case kz_datamgr:open_cache_doc(AccountDb, <<"_design/search">>) of
        {'ok', JObj} ->
            lager:debug("got ~s views from ~s", [kz_json:get_keys(<<"views">>, JObj), AccountDb]),
            format_query_options(kz_json:get_keys(<<"views">>, JObj));
        {'error', _E} when AccountDb =:= ?KZ_ACCOUNTS_DB ->
            lager:debug("using default query options"),
            ?ACCOUNTS_QUERY_OPTIONS;
        {'error', _E} ->
            lager:debug("using default query options after error ~p", [_E]),
            ?ACCOUNT_QUERY_OPTIONS
    end.
%%------------------------------------------------------------------------------
%% @doc
%% @end
%%------------------------------------------------------------------------------
%% Strips the `search_by_` prefix from every view name; an empty view
%% list falls back to the accounts-DB defaults.
-spec format_query_options(kz_term:ne_binaries()) -> kz_term:ne_binaries().
format_query_options([]) ->
    lager:debug("no query options found on design doc, using default"),
    ?ACCOUNTS_QUERY_OPTIONS;
format_query_options(Views) ->
    [format_query_option(View) || View <- Views].
%% Strips the `search_by_` prefix from a view name, passing any other
%% name through unchanged.
-spec format_query_option(kz_term:ne_binary()) -> kz_term:ne_binary().
format_query_option(View) ->
    case View of
        <<"search_by_", Name/binary>> -> Name;
        _Other -> View
    end.
%%------------------------------------------------------------------------------
%% @doc Attempt to load a summarized listing of all instances of this
%% resource.
%% @end
%%------------------------------------------------------------------------------
%% Runs one view query: builds the `search/search_by_<Query>` view name,
%% normalizes the search value, and loads the key range that covers every
%% string with the normalized value as a prefix.
-spec search(cb_context:context(), kz_term:ne_binary(), kz_term:ne_binary(), binary(), kz_term:proplist()) -> cb_context:context().
search(Context, Type, Query, Val, Opts) ->
    ViewName = <<?QUERY_TPL/binary, Query/binary>>,
    Value = cb_modules_util:normalize_alphanum_name(Val),
    Options =
        [{'startkey', get_start_key(Context, Type, Value)}
        ,{'endkey', get_end_key(Context, Type, Value)}
        ,{'mapper', crossbar_view:get_value_fun()}
         | Opts
        ],
    crossbar_view:load(Context, ViewName, Options).
%%------------------------------------------------------------------------------
%% @doc Attempt to load a summarized listing of all instances of this
%% resource.
%% @end
%%------------------------------------------------------------------------------
%% Runs every requested `by_*` search and collects the results in one
%% JSON object keyed by query name.  Pagination is disabled because the
%% individual result sets are merged into a single response.
-spec multi_search(cb_context:context(), kz_term:ne_binary(), kz_term:proplist()) -> cb_context:context().
multi_search(Context, Type, Props) ->
    Context1 = cb_context:set_should_paginate(Context, 'false'),
    multi_search(Context1, Type, Props , kz_json:new()).
%% Accumulator loop: one search per `by_*` key; any non-success result
%% aborts the whole multi-search and is returned as-is.  Non-by_* keys
%% are skipped.
-spec multi_search(cb_context:context(), kz_term:ne_binary(), kz_term:proplist(), kz_json:object()) -> cb_context:context().
multi_search(Context, _Type, [], Acc) ->
    cb_context:set_resp_data(Context, Acc);
multi_search(Context, Type, [{<<"by_", Query/binary>>, Val}|Props], Acc) ->
    Context1 = search(Context, Type, Query, Val, [{'unchunkable', 'true'}]),
    case cb_context:resp_status(Context1) of
        'success' ->
            RespData = cb_context:resp_data(Context1),
            Acc1 = kz_json:set_value(Query, RespData, Acc),
            multi_search(Context1, Type, Props, Acc1);
        _ -> Context1
    end;
multi_search(Context, Type, [_|Props], Acc) ->
    multi_search(Context, Type, Props, Acc).
%%------------------------------------------------------------------------------
%% @doc resource.
%% @end
%%------------------------------------------------------------------------------
%% Builds the view start key.  Account searches are additionally scoped
%% by the authenticated account id; an explicit `start_key` request value
%% (for paging) overrides the search value.
-spec get_start_key(cb_context:context(), kz_term:ne_binary(), kz_term:ne_binary()) -> kz_term:ne_binaries().
get_start_key(Context, <<"account">>=Type, Value) ->
    [cb_context:auth_account_id(Context), Type, cb_context:req_value(Context, <<"start_key">>, Value)];
get_start_key(Context, Type, Value) ->
    [Type, cb_context:req_value(Context, <<"start_key">>, Value)].
%%------------------------------------------------------------------------------
%% @doc resource.
%% @end
%%------------------------------------------------------------------------------
%% Builds the view end key: same shape as the start key but with a
%% sentinel appended to the value so the range covers all prefix matches.
-spec get_end_key(cb_context:context(), kz_term:ne_binary(), binary()) -> kz_term:ne_binaries().
get_end_key(Context, <<"account">>=Type, Value) ->
    [cb_context:auth_account_id(Context), Type, next_binary_key(Value)];
get_end_key(_, Type, Value) ->
    [Type, next_binary_key(Value)].
%%------------------------------------------------------------------------------
%% @doc Appends the sentinel character "Z" to a binary to form an upper
%% bound for a prefix range query (an empty value yields just <<"Z">>).
%% NOTE(review): the original doc said "replaces last character with next
%% character", which is not what the code does.  Under CouchDB's ICU view
%% collation "Z" sorts after the other ASCII letters, but values whose
%% suffix collates after "Z" would fall outside the range — presumably
%% acceptable for the normalized alphanumeric values used here; confirm.
%% @end
%%------------------------------------------------------------------------------
-spec next_binary_key(binary()) -> kz_term:ne_binary().
next_binary_key(Bin) ->
    %% <<>> naturally yields <<"Z">>, so no separate clause is needed
    <<Bin/binary, "Z">>.
| null | https://raw.githubusercontent.com/2600hz/kazoo/24519b9af9792caa67f7c09bbb9d27e2418f7ad6/applications/crossbar/src/modules/cb_search.erl | erlang | -----------------------------------------------------------------------------
@doc Crossbar API for search.
@end
-----------------------------------------------------------------------------
=============================================================================
API
=============================================================================
------------------------------------------------------------------------------
@doc Initializes the bindings this module will respond to.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Given the path tokens related to this module, what HTTP methods are
going to be responded to.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Does the path point to a valid resource.
For example:
```
/skels => []
/skels/foo => [<<"foo">>]
/skels/foo/bar => [<<"foo">>, <<"bar">>]
'''
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Check the request (request body, query string params, path tokens, etc)
and load necessary information.
/skels might load a list of skel objects
@end
------------------------------------------------------------------------------
=============================================================================
=============================================================================
------------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Attempt to load a summarized listing of all instances of this
resource.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Attempt to load a summarized listing of all instances of this
resource.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc resource.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc resource.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc replaces last character in binary with next character
@end
------------------------------------------------------------------------------ | ( C ) 2011 - 2020 , 2600Hz
@author
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
-module(cb_search).
-export([init/0
,allowed_methods/0 ,allowed_methods/1
,resource_exists/0 ,resource_exists/1
,validate/1 ,validate/2
,authorize/1, authorize/2
]).
-include("crossbar.hrl").
-define(QUERY_TPL, <<"search/search_by_">>).
-define(MULTI, <<"multi">>).
-define(SEARCHABLE, [<<"account">>, <<"user">>, <<"callflow">>, <<"device">>]).
-define(ACCOUNT_QUERY_OPTIONS, [<<"name">>, <<"number">>, <<"name_and_number">>]).
-define(ACCOUNTS_QUERY_OPTIONS, [<<"name">>, <<"realm">>, <<"id">>]).
-spec init() -> 'ok'.
init() ->
_ = crossbar_bindings:bind(<<"*.allowed_methods.search">>, ?MODULE, 'allowed_methods'),
_ = crossbar_bindings:bind(<<"*.resource_exists.search">>, ?MODULE, 'resource_exists'),
_ = crossbar_bindings:bind(<<"*.authorize.search">>, ?MODULE, 'authorize'),
_ = crossbar_bindings:bind(<<"*.validate.search">>, ?MODULE, 'validate').
-spec allowed_methods() -> http_methods().
allowed_methods() ->
[?HTTP_GET].
-spec allowed_methods(path_token()) -> http_methods().
allowed_methods(?MULTI) ->
[?HTTP_GET].
-spec resource_exists() -> 'true'.
resource_exists() -> 'true'.
-spec resource_exists(path_token()) -> 'true'.
resource_exists(?MULTI) -> 'true'.
-spec authorize(cb_context:context()) -> boolean().
authorize(Context) ->
cb_context:auth_account_id(Context) =/= 'undefined'.
-spec authorize(cb_context:context(), path_token()) -> boolean().
authorize(Context, ?MULTI) ->
cb_context:auth_account_id(Context) =/= 'undefined'.
/skels/123 might load the skel object 123
Generally , use crossbar_doc to manipulate the cb_context { } record
-spec validate(cb_context:context()) -> cb_context:context().
validate(Context) ->
Type = cb_context:req_value(Context, <<"t">>),
validate_search(Context, Type).
-spec validate(cb_context:context(), path_token()) -> cb_context:context().
validate(Context, ?MULTI) ->
Type = cb_context:req_value(Context, <<"t">>),
validate_multi(Context, Type).
Internal functions
-spec validate_search(cb_context:context(), kz_term:api_binary()) -> cb_context:context().
validate_search(Context, 'undefined') ->
Message = kz_json:from_list([{<<"message">>, <<"search needs a document type to search on">>}
,{<<"target">>, ?SEARCHABLE}
]),
lager:debug("'t' required"),
cb_context:add_validation_error(<<"t">>, <<"required">>, Message, Context);
validate_search(Context, <<"account">>=Type) ->
lager:debug("validating search on accounts"),
validate_search(cb_context:set_db_name(Context, ?KZ_ACCOUNTS_DB)
,Type
,cb_context:req_value(Context, <<"q">>)
);
validate_search(Context, Type) ->
validate_search(Context, Type, cb_context:req_value(Context, <<"q">>)).
-spec validate_search(cb_context:context(), kz_term:ne_binary(), kz_term:api_binary()) ->
cb_context:context().
validate_search(Context, _Type, 'undefined') ->
lager:debug("'q' required"),
NeedViewMsg = kz_json:from_list([{<<"message">>, <<"search needs a view to search in">>}
,{<<"target">>, available_query_options(cb_context:db_name(Context))}
]),
cb_context:add_validation_error(<<"q">>, <<"required">>, NeedViewMsg, Context);
validate_search(Context, Type, Query) ->
Context1 = validate_query(Context, Query),
case cb_context:resp_status(Context1) of
'success' ->
validate_search(Context, Type, Query, cb_context:req_value(Context, <<"v">>));
_Status ->
Context1
end.
-spec validate_search(cb_context:context(), kz_term:ne_binary(), kz_term:ne_binary(), kz_term:api_binary()) ->
cb_context:context().
validate_search(Context, _Type, _Query, 'undefined') ->
Message = kz_json:from_list([{<<"message">>, <<"search needs a value to search for">>}]),
cb_context:add_validation_error(<<"v">>, <<"required">>, Message, Context);
validate_search(Context, Type, Query, <<_/binary>> = Value) ->
search(Context, Type, Query, Value, []);
validate_search(Context, Type, Query, Value) ->
case kz_term:is_true(Value) of
'true' -> validate_search(Context, Type, Query, <<>>);
'false' -> validate_search(Context, Type, Query, 'undefined')
end.
-spec validate_multi(cb_context:context(), kz_term:api_binary()) -> cb_context:context().
validate_multi(Context, 'undefined') ->
Message = kz_json:from_list([{<<"message">>, <<"Search needs a document type to search on">>}
,{<<"target">>, ?SEARCHABLE}
]),
cb_context:add_validation_error(<<"t">>, <<"required">>, Message, Context);
validate_multi(Context, <<"account">>=Type) ->
lager:debug("validating search on accounts"),
validate_multi(cb_context:set_db_name(Context, ?KZ_ACCOUNTS_DB)
,Type
,kz_json:to_proplist(cb_context:query_string(Context))
);
validate_multi(Context, Type) ->
validate_multi(Context, Type, kz_json:to_proplist(cb_context:query_string(Context))).
-spec validate_multi(cb_context:context(), kz_term:ne_binary(), kz_term:proplist()) -> cb_context:context().
validate_multi(Context, Type, Query) ->
Context1 = validate_query(Context, Query),
case cb_context:resp_status(Context1) of
'success' -> multi_search(Context1, Type, Query);
_Status -> Context1
end.
-spec validate_query(cb_context:context(), kz_term:proplist() | kz_term:ne_binary()) -> cb_context:context().
validate_query(Context, Query) ->
QueryOptions = available_query_options(cb_context:db_name(Context)),
validate_query(Context, QueryOptions, Query).
-spec validate_query(cb_context:context(), kz_term:proplist(), kz_term:proplist() | kz_term:ne_binary()) -> cb_context:context().
validate_query(Context, Available, []) ->
case cb_context:resp_status(Context) of
'success' -> Context;
_ ->
lager:debug("multisearch has ~p available", [Available]),
Message = kz_json:from_list([{<<"message">>, <<"multi search needs some values to search for">>}
,{<<"target">>, Available}
]),
cb_context:add_validation_error(<<"multi">>, <<"enum">>, Message, Context)
end;
validate_query(Context, Available, [{<<"by_", Query/binary>>, _}|Props]) ->
Context1 = validate_query(Context, Available, Query),
case cb_context:resp_status(Context1) of
'success' ->
lager:debug("query ~s is valid", [Query]),
validate_query(Context1, Available, Props);
_Status ->
lager:debug("query ~s is not valid", [Query]),
Context1
end;
validate_query(Context, Available, [{Query, _}|Props]) ->
lager:debug("ignoring query string ~s", [Query]),
validate_query(Context, Available, Props);
validate_query(Context, Available, Query) when is_binary(Query) ->
case lists:member(Query, Available) of
'true' -> cb_context:set_resp_status(Context, 'success');
'false' ->
lager:debug("query ~s not allowed", [Query]),
Message = kz_json:from_list([{<<"message">>, <<"value not found in enumerated list of values">>}
,{<<"cause">>, Query}
]),
cb_context:add_validation_error(<<"q">>, <<"enum">>, Message, Context)
end.
-spec available_query_options(kz_term:api_ne_binary()) -> kz_term:ne_binaries().
available_query_options(AccountDb) ->
case kz_datamgr:open_cache_doc(AccountDb, <<"_design/search">>) of
{'ok', JObj} ->
lager:debug("got ~s views from ~s", [kz_json:get_keys(<<"views">>, JObj), AccountDb]),
format_query_options(kz_json:get_keys(<<"views">>, JObj));
{'error', _E} when AccountDb =:= ?KZ_ACCOUNTS_DB ->
lager:debug("using default query options"),
?ACCOUNTS_QUERY_OPTIONS;
{'error', _E} ->
lager:debug("using default query options after error ~p", [_E]),
?ACCOUNT_QUERY_OPTIONS
end.
-spec format_query_options(kz_term:ne_binaries()) -> kz_term:ne_binaries().
format_query_options([]) ->
lager:debug("no query options found on design doc, using default"),
?ACCOUNTS_QUERY_OPTIONS;
format_query_options(Views) ->
[format_query_option(View) || View <- Views].
-spec format_query_option(kz_term:ne_binary()) -> kz_term:ne_binary().
format_query_option(<<"search_by_", Name/binary>>) -> Name;
format_query_option(Name) -> Name.
-spec search(cb_context:context(), kz_term:ne_binary(), kz_term:ne_binary(), binary(), kz_term:proplist()) -> cb_context:context().
search(Context, Type, Query, Val, Opts) ->
ViewName = <<?QUERY_TPL/binary, Query/binary>>,
Value = cb_modules_util:normalize_alphanum_name(Val),
Options =
[{'startkey', get_start_key(Context, Type, Value)}
,{'endkey', get_end_key(Context, Type, Value)}
,{'mapper', crossbar_view:get_value_fun()}
| Opts
],
crossbar_view:load(Context, ViewName, Options).
-spec multi_search(cb_context:context(), kz_term:ne_binary(), kz_term:proplist()) -> cb_context:context().
multi_search(Context, Type, Props) ->
Context1 = cb_context:set_should_paginate(Context, 'false'),
multi_search(Context1, Type, Props , kz_json:new()).
-spec multi_search(cb_context:context(), kz_term:ne_binary(), kz_term:proplist(), kz_json:object()) -> cb_context:context().
multi_search(Context, _Type, [], Acc) ->
cb_context:set_resp_data(Context, Acc);
multi_search(Context, Type, [{<<"by_", Query/binary>>, Val}|Props], Acc) ->
Context1 = search(Context, Type, Query, Val, [{'unchunkable', 'true'}]),
case cb_context:resp_status(Context1) of
'success' ->
RespData = cb_context:resp_data(Context1),
Acc1 = kz_json:set_value(Query, RespData, Acc),
multi_search(Context1, Type, Props, Acc1);
_ -> Context1
end;
multi_search(Context, Type, [_|Props], Acc) ->
multi_search(Context, Type, Props, Acc).
-spec get_start_key(cb_context:context(), kz_term:ne_binary(), kz_term:ne_binary()) -> kz_term:ne_binaries().
get_start_key(Context, <<"account">>=Type, Value) ->
[cb_context:auth_account_id(Context), Type, cb_context:req_value(Context, <<"start_key">>, Value)];
get_start_key(Context, Type, Value) ->
[Type, cb_context:req_value(Context, <<"start_key">>, Value)].
-spec get_end_key(cb_context:context(), kz_term:ne_binary(), binary()) -> kz_term:ne_binaries().
get_end_key(Context, <<"account">>=Type, Value) ->
[cb_context:auth_account_id(Context), Type, next_binary_key(Value)];
get_end_key(_, Type, Value) ->
[Type, next_binary_key(Value)].
-spec next_binary_key(binary()) -> kz_term:ne_binary().
next_binary_key(<<>>) ->
<<"Z">>;
next_binary_key(Bin) ->
<<Bin/binary, "Z">>.
|
a148db8dd10108ab9014c84a69c678e3fdc7b10fc004ab81beab571c55a63942 | nilenso/goose | client.clj | (ns goose.client
"Functions for executing job in async, scheduled or periodic manner."
(:require
[goose.broker :as b]
[goose.defaults :as d]
[goose.job :as j]
[goose.retry :as retry]
[goose.utils :as u])
(:import
(java.time Instant)))
(def default-opts
"Map of sample configs for producing jobs.
### Keys
`:broker` : Message broker that transfers message from Producer to Consumer.\\
Given value must implement [[goose.broker/Broker]] protocol.\\
[Message Broker wiki](-Brokers)
`:queue` : Destination where client produces to & worker consumes from.\\
Example : [[d/default-queue]]
`:retry-opts` : Configuration for handling Job failure.\\
Example : [[retry/default-opts]]\\
[Error Handling & Retries wiki](-Handling-&-Retries)"
{:queue d/default-queue
:retry-opts retry/default-opts})
(defn- register-cron-schedule
[{:keys [broker queue retry-opts] :as _opts}
cron-opts
execute-fn-sym
args]
(let [retry-opts (retry/prefix-queue-if-present retry-opts)
ready-queue (d/prefix-queue queue)
job-description (j/description execute-fn-sym args queue ready-queue retry-opts)
cron-entry (b/register-cron broker cron-opts job-description)]
(select-keys cron-entry [:cron-name :cron-schedule :timezone])))
(defn- enqueue
[{:keys [broker queue retry-opts]}
schedule-epoch-ms
execute-fn-sym
args]
(let [retry-opts (retry/prefix-queue-if-present retry-opts)
ready-queue (d/prefix-queue queue)
job (j/new execute-fn-sym args queue ready-queue retry-opts)]
(if schedule-epoch-ms
(b/schedule broker schedule-epoch-ms job)
(b/enqueue broker job))))
(defn perform-async
"Enqueues a function for async execution.
### Args
`client-opts` : Map of `:broker`, `:queue` & `:retry-opts`.\\
Example : [[default-opts]]
`execute-fn-sym` : A fully-qualified function symbol called by worker.\\
Example : ```my-fn`, ```ns-alias/my-fn`, `'fully-qualified-ns/my-fn`
`args` : Variadic values provided in given order when invoking `execute-fn-sym`.\\
Given values must be serializable by `ptaoussanis/nippy`.
### Usage
```Clojure
(perform-async client-opts `send-emails \"subject\" \"body\" [:user-1 :user-2])
```
- [Getting Started wiki](-Started)."
[opts execute-fn-sym & args]
(enqueue opts nil execute-fn-sym args))
(defn perform-at
"Schedules a function for execution at given date & time.
### Args
`client-opts` : Map of `:broker`, `:queue` & `:retry-opts`.\\
Example : [[default-opts]]
`^Instant instant` : `java.time.Instant` at which job should be executed.
`execute-fn-sym` : A fully-qualified function symbol called by worker.\\
Example : ```my-fn`, ```ns-alias/my-fn`, `'fully-qualified-ns/my-fn`
`args` : Variadic values provided in given order when invoking `execute-fn-sym`.\\
Given values must be serializable by `ptaoussanis/nippy`.
### Usage
```Clojure
(let [instant (java.time.Instant/parse \"2022-10-31T18:46:09.00Z\")]
(perform-at client-opts instant `send-emails \"subject\" \"body\" [:user-1 :user-2]))
```
- [Scheduled Jobs wiki](-Jobs)"
[opts ^Instant instant execute-fn-sym & args]
(enqueue opts (u/epoch-time-ms instant) execute-fn-sym args))
(defn perform-in-sec
"Schedules a function for execution with a delay of given seconds.
### Args
`client-opts` : Map of `:broker`, `:queue` & `:retry-opts`.\\
Example : [[default-opts]]
`sec` : Delay of Job execution in seconds.
`execute-fn-sym` : A fully-qualified function symbol called by worker.\\
Example : ```my-fn`, ```ns-alias/my-fn`, `'fully-qualified-ns/my-fn`
`args` : Variadic values provided in given order when invoking `execute-fn-sym`.\\
Given values must be serializable by `ptaoussanis/nippy`.
### Usage
```Clojure
(perform-in-sec default-opts 300 `send-emails \"subject\" \"body\" [:user-1 :user-2])
```
- [Scheduled Jobs wiki](-Jobs)"
[opts sec execute-fn-sym & args]
(enqueue opts (u/sec+current-epoch-ms sec) execute-fn-sym args))
(defn perform-every
"Registers a function for periodic execution in cron-jobs style.\\
`perform-every` is idempotent.\\
If a cron entry already exists with the same name, it will be overwritten with new data.
### Args
`client-opts` : Map of `:broker`, `:queue` & `:retry-opts`.\\
Example : [[default-opts]]
`cron-opts` : Map of `:cron-name`, `:cron-schedule`, `:timezone`
- `:cron-name` (Mandatory)
- Unique identifier of a periodic job
- `:cron-schedule` (Mandatory)
- Unix-style schedule
- `:timezone` (Optional)
- Timezone for executing the Job at schedule
- Acceptable timezones: `(java.time.ZoneId/getAvailableZoneIds)`
- Defaults to system timezone
`execute-fn-sym` : A fully-qualified function symbol called by worker.\\
Example : ```my-fn`, ```ns-alias/my-fn`, `'fully-qualified-ns/my-fn`
`args` : Variadic values provided in given order when invoking `execute-fn-sym`.\\
Given values must be serializable by `ptaoussanis/nippy`.
### Usage
```Clojure
(let [cron-opts {:cron-name \"my-periodic-job\"
:cron-schedule \"0 10 15 * *\"
:timezone \"US/Pacific\"}]
(perform-every client-opts cron-opts `send-emails \"subject\" \"body\" [:user-1 :user-2]))
```
- [Periodic Jobs wiki](-Jobs)"
[opts cron-opts execute-fn-sym & args]
(register-cron-schedule opts cron-opts execute-fn-sym args))
| null | https://raw.githubusercontent.com/nilenso/goose/cf42be9a79f4d0a8c65a93d8cb850bdc666bccb2/src/goose/client.clj | clojure | (ns goose.client
"Functions for executing job in async, scheduled or periodic manner."
(:require
[goose.broker :as b]
[goose.defaults :as d]
[goose.job :as j]
[goose.retry :as retry]
[goose.utils :as u])
(:import
(java.time Instant)))
(def default-opts
"Map of sample configs for producing jobs.
### Keys
`:broker` : Message broker that transfers message from Producer to Consumer.\\
Given value must implement [[goose.broker/Broker]] protocol.\\
[Message Broker wiki](-Brokers)
`:queue` : Destination where client produces to & worker consumes from.\\
Example : [[d/default-queue]]
`:retry-opts` : Configuration for handling Job failure.\\
Example : [[retry/default-opts]]\\
[Error Handling & Retries wiki](-Handling-&-Retries)"
{:queue d/default-queue
:retry-opts retry/default-opts})
(defn- register-cron-schedule
[{:keys [broker queue retry-opts] :as _opts}
cron-opts
execute-fn-sym
args]
(let [retry-opts (retry/prefix-queue-if-present retry-opts)
ready-queue (d/prefix-queue queue)
job-description (j/description execute-fn-sym args queue ready-queue retry-opts)
cron-entry (b/register-cron broker cron-opts job-description)]
(select-keys cron-entry [:cron-name :cron-schedule :timezone])))
(defn- enqueue
[{:keys [broker queue retry-opts]}
schedule-epoch-ms
execute-fn-sym
args]
(let [retry-opts (retry/prefix-queue-if-present retry-opts)
ready-queue (d/prefix-queue queue)
job (j/new execute-fn-sym args queue ready-queue retry-opts)]
(if schedule-epoch-ms
(b/schedule broker schedule-epoch-ms job)
(b/enqueue broker job))))
(defn perform-async
"Enqueues a function for async execution.
### Args
`client-opts` : Map of `:broker`, `:queue` & `:retry-opts`.\\
Example : [[default-opts]]
`execute-fn-sym` : A fully-qualified function symbol called by worker.\\
Example : ```my-fn`, ```ns-alias/my-fn`, `'fully-qualified-ns/my-fn`
`args` : Variadic values provided in given order when invoking `execute-fn-sym`.\\
Given values must be serializable by `ptaoussanis/nippy`.
### Usage
```Clojure
(perform-async client-opts `send-emails \"subject\" \"body\" [:user-1 :user-2])
```
- [Getting Started wiki](-Started)."
[opts execute-fn-sym & args]
(enqueue opts nil execute-fn-sym args))
(defn perform-at
"Schedules a function for execution at given date & time.
### Args
`client-opts` : Map of `:broker`, `:queue` & `:retry-opts`.\\
Example : [[default-opts]]
`^Instant instant` : `java.time.Instant` at which job should be executed.
`execute-fn-sym` : A fully-qualified function symbol called by worker.\\
Example : ```my-fn`, ```ns-alias/my-fn`, `'fully-qualified-ns/my-fn`
`args` : Variadic values provided in given order when invoking `execute-fn-sym`.\\
Given values must be serializable by `ptaoussanis/nippy`.
### Usage
```Clojure
(let [instant (java.time.Instant/parse \"2022-10-31T18:46:09.00Z\")]
(perform-at client-opts instant `send-emails \"subject\" \"body\" [:user-1 :user-2]))
```
- [Scheduled Jobs wiki](-Jobs)"
[opts ^Instant instant execute-fn-sym & args]
(enqueue opts (u/epoch-time-ms instant) execute-fn-sym args))
(defn perform-in-sec
"Schedules a function for execution with a delay of given seconds.
### Args
`client-opts` : Map of `:broker`, `:queue` & `:retry-opts`.\\
Example : [[default-opts]]
`sec` : Delay of Job execution in seconds.
`execute-fn-sym` : A fully-qualified function symbol called by worker.\\
Example : ```my-fn`, ```ns-alias/my-fn`, `'fully-qualified-ns/my-fn`
`args` : Variadic values provided in given order when invoking `execute-fn-sym`.\\
Given values must be serializable by `ptaoussanis/nippy`.
### Usage
```Clojure
(perform-in-sec default-opts 300 `send-emails \"subject\" \"body\" [:user-1 :user-2])
```
- [Scheduled Jobs wiki](-Jobs)"
[opts sec execute-fn-sym & args]
(enqueue opts (u/sec+current-epoch-ms sec) execute-fn-sym args))
(defn perform-every
"Registers a function for periodic execution in cron-jobs style.\\
`perform-every` is idempotent.\\
If a cron entry already exists with the same name, it will be overwritten with new data.
### Args
`client-opts` : Map of `:broker`, `:queue` & `:retry-opts`.\\
Example : [[default-opts]]
`cron-opts` : Map of `:cron-name`, `:cron-schedule`, `:timezone`
- `:cron-name` (Mandatory)
- Unique identifier of a periodic job
- `:cron-schedule` (Mandatory)
- Unix-style schedule
- `:timezone` (Optional)
- Timezone for executing the Job at schedule
- Acceptable timezones: `(java.time.ZoneId/getAvailableZoneIds)`
- Defaults to system timezone
`execute-fn-sym` : A fully-qualified function symbol called by worker.\\
Example : ```my-fn`, ```ns-alias/my-fn`, `'fully-qualified-ns/my-fn`
`args` : Variadic values provided in given order when invoking `execute-fn-sym`.\\
Given values must be serializable by `ptaoussanis/nippy`.
### Usage
```Clojure
(let [cron-opts {:cron-name \"my-periodic-job\"
:cron-schedule \"0 10 15 * *\"
:timezone \"US/Pacific\"}]
(perform-every client-opts cron-opts `send-emails \"subject\" \"body\" [:user-1 :user-2]))
```
- [Periodic Jobs wiki](-Jobs)"
[opts cron-opts execute-fn-sym & args]
(register-cron-schedule opts cron-opts execute-fn-sym args))
| |
3be5dc3d1b66c0677789d075cabd6f648dbbc90296594ea558f61b26052c5cda | dyoo/ffi-tutorial | info.rkt | #lang setup/infotab
(define name "A simple f2c temperature conversion example")
(define pre-install-collection "pre-installer.rkt")
| null | https://raw.githubusercontent.com/dyoo/ffi-tutorial/1efba84503adf4a062c4855225eabe992a06a34f/ffi/tutorial/examples/f2c/info.rkt | racket | #lang setup/infotab
(define name "A simple f2c temperature conversion example")
(define pre-install-collection "pre-installer.rkt")
| |
cf41aa31706997999ee8b1ea3fedbfe1cf45a627673143d1cb79333bd7a6f2ce | clojure-interop/aws-api | project.clj | (defproject clojure-interop/com.amazonaws.services.servicecatalog "1.0.0"
:description "Clojure to Java Interop Bindings for com.amazonaws.services.servicecatalog"
:url "-interop/aws-api"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]]
:source-paths ["src"])
| null | https://raw.githubusercontent.com/clojure-interop/aws-api/59249b43d3bfaff0a79f5f4f8b7bc22518a3bf14/com.amazonaws.services.servicecatalog/project.clj | clojure | (defproject clojure-interop/com.amazonaws.services.servicecatalog "1.0.0"
:description "Clojure to Java Interop Bindings for com.amazonaws.services.servicecatalog"
:url "-interop/aws-api"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]]
:source-paths ["src"])
| |
65e9646ea7d661887a23590dd022c90469fcf6dbedf03891ed5739ff9eaa0cb3 | hellonico/origami-fun | warping.clj | gorilla-repl.fileformat = 1
;; **
;;; # Warping
;; **
;; @@
(ns talented-silence
(:require
[opencv4.core :refer :all]
[opencv4.video :as v]
[opencv4.colors.rgb :as rgb]
[opencv4.utils :as u]
[opencv4.gorilla :as g]))
;; @@
;; =>
;;; {"type":"html","content":"<span class='clj-nil'>nil</span>","value":"nil"}
;; <=
;; **
# # # warping
;; **
;; @@
(defn my-fn[mat]
(-> mat
(put-text! (str (java.util.Date.)) (new-point 100 50) FONT_HERSHEY_PLAIN 1 rgb/white 1)
(apply-color-map! COLORMAP_AUTUMN)))
;; @@
;; =>
;;; {"type":"html","content":"<span class='clj-var'>#'talented-silence/my-fn</span>","value":"#'talented-silence/my-fn"}
;; <=
;; @@
(def mt
(atom nil))
;; @@
;; =>
;;; {"type":"html","content":"<span class='clj-var'>#'talented-silence/mt</span>","value":"#'talented-silence/mt"}
;; <=
;; @@
(def points1
[[100 10]
[200 100]
[28 200]
[389 390]])
(def points2
[[70 10]
[200 140]
[20 200]
[389 390]])
(reset! mt
(get-perspective-transform
(u/matrix-to-matofpoint2f points1)
(u/matrix-to-matofpoint2f points2)))
(dump @mt)
;; @@
;; ->
[ 1.789337561985906 0.3234215275201738 -94.5799621372129 ]
[ 0.7803091692375479 1.293303360247406 -78.45137776386103 ]
[ 0.002543030309135725 -3.045754676722361E-4 1 ]
;;;
;; <-
;; =>
;;; {"type":"html","content":"<span class='clj-nil'>nil</span>","value":"nil"}
;; <=
;; @@
(defn warp! [ buffer ]
(-> buffer
(warp-perspective! @mt (size buffer ))))
;; @@
;; =>
;;; {"type":"html","content":"<span class='clj-var'>#'talented-silence/warp!</span>","value":"#'talented-silence/warp!"}
;; <=
;; @@
(-> "resources/chapter03/ai5.jpg"
imread
(u/resize-by 0.7)
warp!
u/imshow)
;; @@
;; ->
{ : frame { : color 00 , : title image , : width 400 , : height 400 } }
;;;
;; <-
;; =>
{ " type":"html","content":"<span class='clj - unkown'>#object[javax.swing . JPanel 0x77260939 & quot;javax.swing . JPanel[null.contentPane,0,0,400x378,layout = java.awt . FlowLayout , alignmentX=0.0,alignmentY=0.0,border=,flags=16777225,maximumSize=,minimumSize=,preferredSize = java.awt . Dimension[width=400,height=400]]"]</span>","value":"#object[javax.swing . . JPanel[null.contentPane,0,0,400x378,layout = java.awt . FlowLayout , alignmentX=0.0,alignmentY=0.0,border=,flags=16777227,maximumSize=,minimumSize=,preferredSize = java.awt . Dimension[width=400,height=400]]\ " ] " }
;; <=
;; @@
(u/simple-cam-window warp!)
;; @@
;; ->
{ : frame { : color 00 , : title video , : width 400 , : height 400 } , : video { : device 0 , : width 200 , : height 220 } }
;;;
;; <-
;; =>
;;; {"type":"html","content":"<span class='clj-nil'>nil</span>","value":"nil"}
;; <=
;; @@
;; @@
| null | https://raw.githubusercontent.com/hellonico/origami-fun/80117788530d942eaa9a80e2995b37409fa24889/gorillas/chapter04/warping.clj | clojure | **
# Warping
**
@@
@@
=>
{"type":"html","content":"<span class='clj-nil'>nil</span>","value":"nil"}
<=
**
**
@@
@@
=>
{"type":"html","content":"<span class='clj-var'>#'talented-silence/my-fn</span>","value":"#'talented-silence/my-fn"}
<=
@@
@@
=>
{"type":"html","content":"<span class='clj-var'>#'talented-silence/mt</span>","value":"#'talented-silence/mt"}
<=
@@
@@
->
<-
=>
{"type":"html","content":"<span class='clj-nil'>nil</span>","value":"nil"}
<=
@@
@@
=>
{"type":"html","content":"<span class='clj-var'>#'talented-silence/warp!</span>","value":"#'talented-silence/warp!"}
<=
@@
@@
->
<-
=>
<=
@@
@@
->
<-
=>
{"type":"html","content":"<span class='clj-nil'>nil</span>","value":"nil"}
<=
@@
@@ | gorilla-repl.fileformat = 1
(ns talented-silence
(:require
[opencv4.core :refer :all]
[opencv4.video :as v]
[opencv4.colors.rgb :as rgb]
[opencv4.utils :as u]
[opencv4.gorilla :as g]))
# # # warping
(defn my-fn[mat]
(-> mat
(put-text! (str (java.util.Date.)) (new-point 100 50) FONT_HERSHEY_PLAIN 1 rgb/white 1)
(apply-color-map! COLORMAP_AUTUMN)))
(def mt
(atom nil))
(def points1
[[100 10]
[200 100]
[28 200]
[389 390]])
(def points2
[[70 10]
[200 140]
[20 200]
[389 390]])
(reset! mt
(get-perspective-transform
(u/matrix-to-matofpoint2f points1)
(u/matrix-to-matofpoint2f points2)))
(dump @mt)
[ 1.789337561985906 0.3234215275201738 -94.5799621372129 ]
[ 0.7803091692375479 1.293303360247406 -78.45137776386103 ]
[ 0.002543030309135725 -3.045754676722361E-4 1 ]
(defn warp! [ buffer ]
(-> buffer
(warp-perspective! @mt (size buffer ))))
(-> "resources/chapter03/ai5.jpg"
imread
(u/resize-by 0.7)
warp!
u/imshow)
{ : frame { : color 00 , : title image , : width 400 , : height 400 } }
{ " type":"html","content":"<span class='clj - unkown'>#object[javax.swing . JPanel 0x77260939 & quot;javax.swing . JPanel[null.contentPane,0,0,400x378,layout = java.awt . FlowLayout , alignmentX=0.0,alignmentY=0.0,border=,flags=16777225,maximumSize=,minimumSize=,preferredSize = java.awt . Dimension[width=400,height=400]]"]</span>","value":"#object[javax.swing . . JPanel[null.contentPane,0,0,400x378,layout = java.awt . FlowLayout , alignmentX=0.0,alignmentY=0.0,border=,flags=16777227,maximumSize=,minimumSize=,preferredSize = java.awt . Dimension[width=400,height=400]]\ " ] " }
(u/simple-cam-window warp!)
{ : frame { : color 00 , : title video , : width 400 , : height 400 } , : video { : device 0 , : width 200 , : height 220 } }
|
ba0d40f5e79c933e4b3a78eadf3d81788b97ad57dee58182ae143f27c227b2f2 | emillon/ocaml-zeit | test_client.ml | open OUnit2
let cases = ( >::: )
module Cohttp_mock = struct
let call mock meth headers uri ~body =
Mock.call mock (meth, headers, uri, body)
let configure mock ~status ~body =
Mock.configure mock
(Mock.return (Lwt.return (Cohttp.Response.make ~status (), body)))
let equal_meth a b =
String.equal
(Cohttp.Code.string_of_method a)
(Cohttp.Code.string_of_method b)
let pp_meth fmt m =
Format.pp_print_string fmt (Cohttp.Code.string_of_method m)
let equal_cohttp_header a b =
[%eq: (string * string) list] (Cohttp.Header.to_list a)
(Cohttp.Header.to_list b)
let pp_cohttp_header fmt h =
Format.pp_print_string fmt (Cohttp.Header.to_string h)
let pp_uri fmt u = Format.pp_print_string fmt (Uri.to_string u)
let assert_called_once_with ~ctxt ~expected_meth ~expected_headers
~expected_uri ~expected_body mock =
let expected_args =
(expected_meth, expected_headers, expected_uri, expected_body)
in
Mock_ounit.assert_called_once_with ~ctxt
~cmp:[%eq: meth * cohttp_header * Uri.t * string]
~printer:[%show: meth * cohttp_header * uri * string] expected_args mock
end
let case_lwt s l = s >:: fun ctxt -> Lwt_main.run (l ctxt)
let with_client k ~ctxt ~status ~body ~expected_meth ~expected_extra_headers
~expected_uri ~expected_body =
let token = "TOKEN" in
let expected_headers =
Cohttp.Header.of_list
(("Authorization", "Bearer " ^ token) :: expected_extra_headers)
in
let mock = Mock.make ~name:"cohttp_call" in
Cohttp_mock.configure mock ~status ~body;
let cohttp_call = Cohttp_mock.call mock in
let client = Zeit.Client.make ~cohttp_call ~token () in
let result = k client in
Cohttp_mock.assert_called_once_with ~ctxt ~expected_meth ~expected_headers
~expected_uri:(Uri.of_string expected_uri)
~expected_body mock;
result
let test_list_deployments =
let test ?(status = `OK) ?(body = "") ~expected () ctxt =
let body = Cohttp_lwt.Body.of_string body in
with_client ~ctxt ~status ~body ~expected_meth:`GET
~expected_extra_headers:[] ~expected_body:""
~expected_uri:"" (fun client ->
let open Zeit.Let.Lwt in
let%map got = Zeit.Client.list_deployments client in
assert_equal ~ctxt
~cmp:[%eq: (Zeit.Deployment.t list, Zeit.Error.t) result]
~printer:[%show: (Zeit.Deployment.t list, Zeit.Error.t) result]
expected got )
in
cases "list_deployments"
[ case_lwt "HTTP error"
(test ~status:`Unauthorized ~expected:(Error Http_error) ())
; case_lwt "JSON error" (test ~expected:(Error Json_error) ())
; case_lwt "Deserialization error"
(test ~body:"{}" ~expected:(Error Deserialization_error) ())
; case_lwt "OK" (test ~body:"{\"deployments\":[]}" ~expected:(Ok []) ())
]
let test_post_file =
let test ~contents ~expected_size ~expected_sha1 ~expected ctxt =
let expected_extra_headers =
[ ("Content-Type", "application/octet-stream")
; ("Content-Length", expected_size)
; ("x-now-digest", expected_sha1)
; ("x-now-size", expected_size) ]
in
let body = Cohttp_lwt.Body.empty in
with_client ~ctxt ~body ~expected_meth:`POST ~expected_extra_headers
~expected_uri:"" ~expected_body:contents
(fun client ->
let open Zeit.Let.Lwt in
let%map got = Zeit.Client.post_file client contents in
assert_equal ~ctxt ~cmp:[%eq: (string, Zeit.Error.t) result]
~printer:[%show: (string, Zeit.Error.t) result] expected got )
in
let contents = "hello" in
let contents_sha1 = "aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d" in
let expected_size = "5" in
let expected_sha1 = contents_sha1 in
cases "post_file"
[ case_lwt "OK"
(test ~contents ~status:`OK ~expected_size ~expected_sha1
~expected:(Ok contents_sha1))
; case_lwt "HTTP error"
(test ~contents ~status:`Unauthorized ~expected_size ~expected_sha1
~expected:(Error Http_error)) ]
let test_create_deployment =
let test ~name ~files ~body ~expected ~expected_body_json ctxt =
let expected_body = Yojson.Safe.to_string expected_body_json in
with_client ~ctxt ~status:`OK ~body
(fun client ->
let open Zeit.Let.Lwt in
let%map got = Zeit.Client.create_deployment client ~name ~files in
assert_equal ~ctxt
~cmp:
[%eq:
( Zeit.Deployment.Api_responses.create_result
, Zeit.Error.t )
result]
~printer:
[%show:
( Zeit.Deployment.Api_responses.create_result
, Zeit.Error.t )
result] expected got )
~expected_meth:`POST ~expected_extra_headers:[]
~expected_uri:"" ~expected_body
in
let name = "my-instant-deployment" in
let file_name = "index.html" in
let file_sha = "9d8b952309b28f468919f4a585e18b63a14457f2" in
let file_size = 161 in
let file = (file_name, file_sha, file_size) in
let deploymentId = "ID" in
let url = "URL" in
let readyState = "READYSTATE" in
let create_result =
{Zeit.Deployment.Api_responses.deploymentId; url; readyState}
in
let create_result_body =
Cohttp_lwt.Body.of_string
@@ Yojson.Safe.to_string
@@ `Assoc
[ ("deploymentId", `String deploymentId)
; ("url", `String url)
; ("readyState", `String readyState) ]
in
cases "create_deployment"
[ case_lwt "OK"
(test ~name ~files:[file] ~body:create_result_body
~expected:(Ok create_result)
~expected_body_json:
(`Assoc
[ ("name", `String name)
; ("public", `Bool true)
; ("deploymentType", `String "STATIC")
; ( "files"
, `List
[ `Assoc
[ ("file", `String file_name)
; ("sha", `String file_sha)
; ("size", `Int file_size) ] ] ) ])) ]
let suite =
cases "client"
[test_list_deployments; test_post_file; test_create_deployment]
| null | https://raw.githubusercontent.com/emillon/ocaml-zeit/cdcdd0b155d406d1b8c8947e3c620527c3c9ecf7/test/test_client.ml | ocaml | open OUnit2
let cases = ( >::: )
module Cohttp_mock = struct
let call mock meth headers uri ~body =
Mock.call mock (meth, headers, uri, body)
let configure mock ~status ~body =
Mock.configure mock
(Mock.return (Lwt.return (Cohttp.Response.make ~status (), body)))
let equal_meth a b =
String.equal
(Cohttp.Code.string_of_method a)
(Cohttp.Code.string_of_method b)
let pp_meth fmt m =
Format.pp_print_string fmt (Cohttp.Code.string_of_method m)
let equal_cohttp_header a b =
[%eq: (string * string) list] (Cohttp.Header.to_list a)
(Cohttp.Header.to_list b)
let pp_cohttp_header fmt h =
Format.pp_print_string fmt (Cohttp.Header.to_string h)
let pp_uri fmt u = Format.pp_print_string fmt (Uri.to_string u)
let assert_called_once_with ~ctxt ~expected_meth ~expected_headers
~expected_uri ~expected_body mock =
let expected_args =
(expected_meth, expected_headers, expected_uri, expected_body)
in
Mock_ounit.assert_called_once_with ~ctxt
~cmp:[%eq: meth * cohttp_header * Uri.t * string]
~printer:[%show: meth * cohttp_header * uri * string] expected_args mock
end
let case_lwt s l = s >:: fun ctxt -> Lwt_main.run (l ctxt)
let with_client k ~ctxt ~status ~body ~expected_meth ~expected_extra_headers
~expected_uri ~expected_body =
let token = "TOKEN" in
let expected_headers =
Cohttp.Header.of_list
(("Authorization", "Bearer " ^ token) :: expected_extra_headers)
in
let mock = Mock.make ~name:"cohttp_call" in
Cohttp_mock.configure mock ~status ~body;
let cohttp_call = Cohttp_mock.call mock in
let client = Zeit.Client.make ~cohttp_call ~token () in
let result = k client in
Cohttp_mock.assert_called_once_with ~ctxt ~expected_meth ~expected_headers
~expected_uri:(Uri.of_string expected_uri)
~expected_body mock;
result
let test_list_deployments =
let test ?(status = `OK) ?(body = "") ~expected () ctxt =
let body = Cohttp_lwt.Body.of_string body in
with_client ~ctxt ~status ~body ~expected_meth:`GET
~expected_extra_headers:[] ~expected_body:""
~expected_uri:"" (fun client ->
let open Zeit.Let.Lwt in
let%map got = Zeit.Client.list_deployments client in
assert_equal ~ctxt
~cmp:[%eq: (Zeit.Deployment.t list, Zeit.Error.t) result]
~printer:[%show: (Zeit.Deployment.t list, Zeit.Error.t) result]
expected got )
in
cases "list_deployments"
[ case_lwt "HTTP error"
(test ~status:`Unauthorized ~expected:(Error Http_error) ())
; case_lwt "JSON error" (test ~expected:(Error Json_error) ())
; case_lwt "Deserialization error"
(test ~body:"{}" ~expected:(Error Deserialization_error) ())
; case_lwt "OK" (test ~body:"{\"deployments\":[]}" ~expected:(Ok []) ())
]
let test_post_file =
let test ~contents ~expected_size ~expected_sha1 ~expected ctxt =
let expected_extra_headers =
[ ("Content-Type", "application/octet-stream")
; ("Content-Length", expected_size)
; ("x-now-digest", expected_sha1)
; ("x-now-size", expected_size) ]
in
let body = Cohttp_lwt.Body.empty in
with_client ~ctxt ~body ~expected_meth:`POST ~expected_extra_headers
~expected_uri:"" ~expected_body:contents
(fun client ->
let open Zeit.Let.Lwt in
let%map got = Zeit.Client.post_file client contents in
assert_equal ~ctxt ~cmp:[%eq: (string, Zeit.Error.t) result]
~printer:[%show: (string, Zeit.Error.t) result] expected got )
in
let contents = "hello" in
let contents_sha1 = "aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d" in
let expected_size = "5" in
let expected_sha1 = contents_sha1 in
cases "post_file"
[ case_lwt "OK"
(test ~contents ~status:`OK ~expected_size ~expected_sha1
~expected:(Ok contents_sha1))
; case_lwt "HTTP error"
(test ~contents ~status:`Unauthorized ~expected_size ~expected_sha1
~expected:(Error Http_error)) ]
let test_create_deployment =
let test ~name ~files ~body ~expected ~expected_body_json ctxt =
let expected_body = Yojson.Safe.to_string expected_body_json in
with_client ~ctxt ~status:`OK ~body
(fun client ->
let open Zeit.Let.Lwt in
let%map got = Zeit.Client.create_deployment client ~name ~files in
assert_equal ~ctxt
~cmp:
[%eq:
( Zeit.Deployment.Api_responses.create_result
, Zeit.Error.t )
result]
~printer:
[%show:
( Zeit.Deployment.Api_responses.create_result
, Zeit.Error.t )
result] expected got )
~expected_meth:`POST ~expected_extra_headers:[]
~expected_uri:"" ~expected_body
in
let name = "my-instant-deployment" in
let file_name = "index.html" in
let file_sha = "9d8b952309b28f468919f4a585e18b63a14457f2" in
let file_size = 161 in
let file = (file_name, file_sha, file_size) in
let deploymentId = "ID" in
let url = "URL" in
let readyState = "READYSTATE" in
let create_result =
{Zeit.Deployment.Api_responses.deploymentId; url; readyState}
in
let create_result_body =
Cohttp_lwt.Body.of_string
@@ Yojson.Safe.to_string
@@ `Assoc
[ ("deploymentId", `String deploymentId)
; ("url", `String url)
; ("readyState", `String readyState) ]
in
cases "create_deployment"
[ case_lwt "OK"
(test ~name ~files:[file] ~body:create_result_body
~expected:(Ok create_result)
~expected_body_json:
(`Assoc
[ ("name", `String name)
; ("public", `Bool true)
; ("deploymentType", `String "STATIC")
; ( "files"
, `List
[ `Assoc
[ ("file", `String file_name)
; ("sha", `String file_sha)
; ("size", `Int file_size) ] ] ) ])) ]
let suite =
cases "client"
[test_list_deployments; test_post_file; test_create_deployment]
| |
f00d4461dc835bf0e82da2d1bd42712a89b2edc24cadd0a4b7d7b306dd048b1b | aistrate/Okasaki | RedBlackSet_Test.hs | import Test.HUnit
import Test.QuickCheck
import Data.List (sort, nub, group)
import Text.Printf (printf)
import TestHelper
import RedBlackSet -- 6.46s
--import Ex03_10a -- 6.27s
6.07s
main = do printTime $ runTestTT hunitTests
printTime $ mapM_ (\(s,a) -> printf "%-25s: " s >> a) qcheckTests
hunitTests = TestList [
"fromList/sorted input" ~:
[
testSet "" ~? "empty list",
testSet ['a'..'z'] ~? "ascending",
testSet ['z','y'..'a'] ~? "descending",
testSet (['a'..'z'] ++ ['z','y'..'a']) ~? "combined asc/desc",
testSet (['z','y'..'a'] ++ ['a'..'z']) ~? "combined desc/asc",
testSet (replicate 100 'a') ~? "constant",
testSet ([1,5..30000] ++ reverse [2,6..30000] ++
[3,7..30000] ++ reverse [4,8..30000]) ~? "large"
] ]
qcheckTests = [
("fromList/properties", qcheck (testSet::[Int] -> Bool)) ]
quickCheck
verboseCheck
check $ defaultConfig { configMaxTest = 500 }
testSet xs = let expected = nub $ sort xs
in testSet' (fromList xs) expected
testSet' :: Ord a => RedBlackSet a -> [a] -> Bool
testSet' t xs = isCorrect t && toSortedList t == xs
isCorrect :: Ord a => RedBlackSet a -> Bool
isCorrect t = invariant1 t && invariant2 t
-- "No red node has a red child"
invariant1 E = True
invariant1 (T c a _ b) = not (c == R && (color a == R || color b == R)) &&
invariant1 a && invariant1 b
-- "Every path from the root to an empty node
-- contains the same number of black nodes"
invariant2 t = let blacksPerPath = map (length . filter (== B) . map fst) $
allPaths t
in length (group $ sort blacksPerPath) <= 1
allPaths E = []
allPaths (T c E x E) = [[(c, x)]]
allPaths (T c a x b) = map ((c, x) :) (allPaths a ++ allPaths b)
| null | https://raw.githubusercontent.com/aistrate/Okasaki/cc1473c81d053483bb5e327409346da7fda10fb4/MyCode/Ch03/RedBlackSet_Test.hs | haskell | 6.46s
import Ex03_10a -- 6.27s
"No red node has a red child"
"Every path from the root to an empty node
contains the same number of black nodes" | import Test.HUnit
import Test.QuickCheck
import Data.List (sort, nub, group)
import Text.Printf (printf)
import TestHelper
6.07s
main = do printTime $ runTestTT hunitTests
printTime $ mapM_ (\(s,a) -> printf "%-25s: " s >> a) qcheckTests
hunitTests = TestList [
"fromList/sorted input" ~:
[
testSet "" ~? "empty list",
testSet ['a'..'z'] ~? "ascending",
testSet ['z','y'..'a'] ~? "descending",
testSet (['a'..'z'] ++ ['z','y'..'a']) ~? "combined asc/desc",
testSet (['z','y'..'a'] ++ ['a'..'z']) ~? "combined desc/asc",
testSet (replicate 100 'a') ~? "constant",
testSet ([1,5..30000] ++ reverse [2,6..30000] ++
[3,7..30000] ++ reverse [4,8..30000]) ~? "large"
] ]
qcheckTests = [
("fromList/properties", qcheck (testSet::[Int] -> Bool)) ]
quickCheck
verboseCheck
check $ defaultConfig { configMaxTest = 500 }
testSet xs = let expected = nub $ sort xs
in testSet' (fromList xs) expected
testSet' :: Ord a => RedBlackSet a -> [a] -> Bool
testSet' t xs = isCorrect t && toSortedList t == xs
isCorrect :: Ord a => RedBlackSet a -> Bool
isCorrect t = invariant1 t && invariant2 t
invariant1 E = True
invariant1 (T c a _ b) = not (c == R && (color a == R || color b == R)) &&
invariant1 a && invariant1 b
invariant2 t = let blacksPerPath = map (length . filter (== B) . map fst) $
allPaths t
in length (group $ sort blacksPerPath) <= 1
allPaths E = []
allPaths (T c E x E) = [[(c, x)]]
allPaths (T c a x b) = map ((c, x) :) (allPaths a ++ allPaths b)
|
8aac6e5cf6a77dd0ef18c5e17d42fba5d26c7a62c8278165fcbc784f780725ec | VERIMAG-Polyhedra/VPL | Var.ml | type t =
* Last bit : represents value 1 . The end of a path .
* Bit 0 . Take the left branch .
* Bit 1 . Take the right branch .
let toInt: t -> int
= fun p0 ->
let inc i =
let msb = Stdlib.max_int - (Stdlib.max_int lsr 1) in
if i = msb then (* overflow *)
Stdlib.invalid_arg "Var.toInt"
else
i lsl 1
in
let rec fn i msb = function
| XH -> msb + i
| XO p -> fn i (inc msb) p
| XI p -> fn (msb + i) (inc msb) p
in
fn 0 1 p0
let fromInt: int -> t
= fun i0 ->
let rec _fromInt i =
let iMasked = i land 1 in
if i = iMasked then
XH
else
let tail = _fromInt (i lsr 1) in
if iMasked = 1 then
XI tail
else
XO tail
in
if i0 > 0 then
_fromInt i0
else
Stdlib.invalid_arg "Var.fromInt"
let to_string': string -> t -> string
= fun s p -> s ^ (Stdlib.string_of_int (toInt p))
let to_string : t -> string
= to_string' "v"
let plp_print : t -> string
= to_string' ""
let next (bp: t): t =
let rec next_rec (bp': t): t * bool =
match bp' with
XH -> (XH, true)
| XI tail -> (
match next_rec tail with
(tail', true) -> (XO tail', true)
| (tail', false) -> (XI tail', false))
| XO tail -> (
match next_rec tail with
(tail', true) -> (XI tail', false)
| (tail', false) -> (XO tail', false))
in
let (res, overflow) = next_rec bp in
if overflow then XO res else res
let cmp: t -> t -> int
= fun x00 x01 ->
let rec _cmp x0 x1 dec res =
match x0, x1 with
| XO tl0, XO tl1 -> _cmp tl0 tl1 dec res
| XI tl0, XI tl1 -> _cmp tl0 tl1 dec res
| XI tl0, XO tl1 -> _cmp tl0 tl1 true (if dec then res else 1)
| XO tl0, XI tl1 -> _cmp tl0 tl1 true (if dec then res else -1)
| XH, XO _ | XH, XI _ -> -1
| XO _, XH | XI _, XH -> 1
| XH, XH -> res
in
_cmp x00 x01 false 0
let equal x y = cmp x y = 0
module Set
= Set.Make (struct type varT = t type t = varT let compare = cmp end)
let horizon : Set.t -> t
= fun s -> if Set.cardinal s < 1 then XH else next (Set.max_elt s)
let fromLeft : t -> t
= fun x -> XO x
let fromRight : t -> t
= fun x -> XI x
let u = XH
let toPos x = x
let fromPos x = x
let of_string : string -> t
= fun s ->
int_of_string s
|> fromInt
let of_prefixed_string : string -> t
= fun s ->
let s' = String.sub s 1 (String.length s - 1) in
try of_string s'
with Failure _ ->
let e = Printf.sprintf "SxPoly.VariablesInt.of_prefixed_string: s = %s; s' = %s" s s' in
Stdlib.invalid_arg e
let max : t list -> t
= fun l ->
Misc.max cmp l
| null | https://raw.githubusercontent.com/VERIMAG-Polyhedra/VPL/cd78d6e7d120508fd5a694bdb01300477e5646f8/ocaml/datatypes/Var.ml | ocaml | overflow | type t =
* Last bit : represents value 1 . The end of a path .
* Bit 0 . Take the left branch .
* Bit 1 . Take the right branch .
let toInt: t -> int
= fun p0 ->
let inc i =
let msb = Stdlib.max_int - (Stdlib.max_int lsr 1) in
Stdlib.invalid_arg "Var.toInt"
else
i lsl 1
in
let rec fn i msb = function
| XH -> msb + i
| XO p -> fn i (inc msb) p
| XI p -> fn (msb + i) (inc msb) p
in
fn 0 1 p0
let fromInt: int -> t
= fun i0 ->
let rec _fromInt i =
let iMasked = i land 1 in
if i = iMasked then
XH
else
let tail = _fromInt (i lsr 1) in
if iMasked = 1 then
XI tail
else
XO tail
in
if i0 > 0 then
_fromInt i0
else
Stdlib.invalid_arg "Var.fromInt"
let to_string': string -> t -> string
= fun s p -> s ^ (Stdlib.string_of_int (toInt p))
let to_string : t -> string
= to_string' "v"
let plp_print : t -> string
= to_string' ""
let next (bp: t): t =
let rec next_rec (bp': t): t * bool =
match bp' with
XH -> (XH, true)
| XI tail -> (
match next_rec tail with
(tail', true) -> (XO tail', true)
| (tail', false) -> (XI tail', false))
| XO tail -> (
match next_rec tail with
(tail', true) -> (XI tail', false)
| (tail', false) -> (XO tail', false))
in
let (res, overflow) = next_rec bp in
if overflow then XO res else res
let cmp: t -> t -> int
= fun x00 x01 ->
let rec _cmp x0 x1 dec res =
match x0, x1 with
| XO tl0, XO tl1 -> _cmp tl0 tl1 dec res
| XI tl0, XI tl1 -> _cmp tl0 tl1 dec res
| XI tl0, XO tl1 -> _cmp tl0 tl1 true (if dec then res else 1)
| XO tl0, XI tl1 -> _cmp tl0 tl1 true (if dec then res else -1)
| XH, XO _ | XH, XI _ -> -1
| XO _, XH | XI _, XH -> 1
| XH, XH -> res
in
_cmp x00 x01 false 0
let equal x y = cmp x y = 0
module Set
= Set.Make (struct type varT = t type t = varT let compare = cmp end)
let horizon : Set.t -> t
= fun s -> if Set.cardinal s < 1 then XH else next (Set.max_elt s)
let fromLeft : t -> t
= fun x -> XO x
let fromRight : t -> t
= fun x -> XI x
let u = XH
let toPos x = x
let fromPos x = x
let of_string : string -> t
= fun s ->
int_of_string s
|> fromInt
let of_prefixed_string : string -> t
= fun s ->
let s' = String.sub s 1 (String.length s - 1) in
try of_string s'
with Failure _ ->
let e = Printf.sprintf "SxPoly.VariablesInt.of_prefixed_string: s = %s; s' = %s" s s' in
Stdlib.invalid_arg e
let max : t list -> t
= fun l ->
Misc.max cmp l
|
030be4fd3880c63e02611ee2370e60e550f98499705f6dc582fdbd5c59738c55 | INRIA/zelus | lident.ml | (***********************************************************************)
(* *)
(* *)
(* Zelus, a synchronous language for hybrid systems *)
(* *)
( c ) 2020 Paris ( see the file )
(* *)
(* Copyright Institut National de Recherche en Informatique et en *)
Automatique . All rights reserved . This file is distributed under
the terms of the INRIA Non - Commercial License Agreement ( see the
(* LICENSE file). *)
(* *)
(* *********************************************************************)
(* long identifiers *)
type t =
| Name of string
| Modname of qualident
and qualident = { qual: string; id: string }
let qualidname { qual = m; id = id } = m ^ "." ^ id
let modname = function
| Name(n) -> n
| Modname(qualid) -> qualidname qualid
let source = function
| Name(n) -> n
| Modname(qualid) -> qualid.id
let fprint_t ff id = Format.fprintf ff "%s" (modname id)
let compare = compare
| null | https://raw.githubusercontent.com/INRIA/zelus/685428574b0f9100ad5a41bbaa416cd7a2506d5e/compiler/global/lident.ml | ocaml | *********************************************************************
Zelus, a synchronous language for hybrid systems
Copyright Institut National de Recherche en Informatique et en
LICENSE file).
********************************************************************
long identifiers | ( c ) 2020 Paris ( see the file )
Automatique . All rights reserved . This file is distributed under
the terms of the INRIA Non - Commercial License Agreement ( see the
type t =
| Name of string
| Modname of qualident
and qualident = { qual: string; id: string }
let qualidname { qual = m; id = id } = m ^ "." ^ id
let modname = function
| Name(n) -> n
| Modname(qualid) -> qualidname qualid
let source = function
| Name(n) -> n
| Modname(qualid) -> qualid.id
let fprint_t ff id = Format.fprintf ff "%s" (modname id)
let compare = compare
|
37dc49c9340083554613b63bd7d6ab6148b7626261151ae0873b299776c18504 | w3ntao/sicp-solution | 2-36.rkt | #lang racket
(define nil '())
(define (accumulate op initial sequence)
(if (null? sequence)
initial
(op (car sequence)
(accumulate op initial (cdr sequence)))))
(define (accumulate-n op init seqs)
(if (null? (car seqs))
nil
(cons (accumulate op
init
(get-first-row seqs))
(accumulate-n op init (get-remain-row seqs)))))
(define (get-first-row seq)
(if (null? seq)
nil
(cons (car (car seq))
(get-first-row (cdr seq)))))
(define (get-remain-row seq)
(if (null? seq)
nil
(cons (cdr (car seq))
(get-remain-row (cdr seq)))))
(define test-case (list (list 1 2 3) (list 4 5 6) (list 7 8 9) (list 10 11 12)))
;(get-first-row test-case)
;(get-remain-row test-case)
(accumulate-n + 0 test-case)
| null | https://raw.githubusercontent.com/w3ntao/sicp-solution/00be3a7b4da50bb266f8a2db521a24e9f8c156be/chap-2/2-36.rkt | racket | (get-first-row test-case)
(get-remain-row test-case) | #lang racket
(define nil '())
(define (accumulate op initial sequence)
(if (null? sequence)
initial
(op (car sequence)
(accumulate op initial (cdr sequence)))))
(define (accumulate-n op init seqs)
(if (null? (car seqs))
nil
(cons (accumulate op
init
(get-first-row seqs))
(accumulate-n op init (get-remain-row seqs)))))
(define (get-first-row seq)
(if (null? seq)
nil
(cons (car (car seq))
(get-first-row (cdr seq)))))
(define (get-remain-row seq)
(if (null? seq)
nil
(cons (cdr (car seq))
(get-remain-row (cdr seq)))))
(define test-case (list (list 1 2 3) (list 4 5 6) (list 7 8 9) (list 10 11 12)))
(accumulate-n + 0 test-case)
|
9af5d851a57b0b6119f5c8b5048af8174804a35ae5c962d88dd7420af5072bfa | avsm/platform | test.ml | ---------------------------------------------------------------------------
Copyright ( c ) 2014 . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
% % NAME%% % % ---------------------------------------------------------------------------
Copyright (c) 2014 Daniel C. Bünzli. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
%%NAME%% %%VERSION%%
---------------------------------------------------------------------------*)
Tests the properties against the XML Unicode character database .
let str = Format.asprintf
let exec = Filename.basename Sys.executable_name
let log fmt = Format.eprintf (fmt ^^ "%!")
let uchar_dump ppf u = Format.fprintf ppf "U+%04X" (Uchar.to_int u)
UCD loading and access
let load_ucd inf =
try
log "Loading Unicode character database.@\n";
let inf = match inf with None -> "support/ucd.xml" | Some inf -> inf in
let ic = if inf = "-" then stdin else open_in inf in
let d = Uucd.decoder (`Channel ic) in
match Uucd.decode d with
| `Ok db -> db
| `Error e ->
let (l0, c0), (l1, c1) = Uucd.decoded_range d in
log "%s:%d.%d-%d.%d: %s@\n" inf l0 c0 l1 c1 e;
exit 1
with Sys_error e -> log "%s@\n" e; exit 1
let ucd_get p ucd u = match Uucd.cp_prop ucd (Uchar.to_int u) p with
| None -> invalid_arg (str "no property for %a" uchar_dump u)
| Some v -> v
Assert properties
let prop ucd mname fname ucd_get prop =
let do_assert u =
if ucd_get ucd u = prop u then () else
failwith (str "assertion failure on %a" uchar_dump u)
in
log "Asserting %s.%s@\n" mname fname;
for u = 0 to 0xD7FF do do_assert (Uchar.of_int u) done;
for u = 0xE000 to 0x10FFFF do do_assert (Uchar.of_int u) done;
()
Assert modules
let assert_age ucd =
let prop fname ucd_p p = prop ucd "Uucp.Age" fname (ucd_get ucd_p) p in
prop "age" Uucd.age Uucp.Age.age;
()
let assert_alpha ucd =
let prop fname ucd_p p = prop ucd "Uucd.Alpha" fname (ucd_get ucd_p) p in
prop "is_alphabetic" Uucd.alphabetic Uucp.Alpha.is_alphabetic;
()
let assert_block ucd =
let block_prop ucd u = match ucd_get Uucd.block ucd u with
| `High_Surrogates -> assert false
| `Low_Surrogates -> assert false
| `High_PU_Surrogates -> assert false
| #Uucp.Block.t as b ->
try
(* Test Uucp.Block.blocks at the same time *)
let (is, ie) = List.assoc b Uucp.Block.blocks in
if u < is || u > ie then assert false else
b
with Not_found -> assert (b = `NB); b
in
prop ucd "Uucd.Block" "block" block_prop Uucp.Block.block;
()
let assert_break ucd =
let prop fname ucd_p p = prop ucd "Uucp.Break" fname (ucd_get ucd_p) p in
prop "line" Uucd.line_break Uucp.Break.line;
prop "grapheme_cluster" Uucd.grapheme_cluster_break
Uucp.Break.grapheme_cluster;
prop "word" Uucd.word_break Uucp.Break.word;
prop "sentence" Uucd.sentence_break Uucp.Break.sentence;
()
let assert_case ucd =
let map fname ucd_p p =
let assert_map ucd u = match ucd_get ucd_p ucd u with
| `Self -> `Self
| `Cps cps -> `Uchars (List.map Uchar.of_int cps)
in
prop ucd "Uucd.Case" fname assert_map p
in
let prop fname ucd_p p = prop ucd "Uucd.Case" fname (ucd_get ucd_p) p in
prop "is_upper" Uucd.uppercase Uucp.Case.is_upper;
prop "is_lower" Uucd.lowercase Uucp.Case.is_lower;
prop "is_cased" Uucd.cased Uucp.Case.is_cased;
prop "is_case_ignorable" Uucd.case_ignorable Uucp.Case.is_case_ignorable;
map "Map.to_upper" Uucd.uppercase_mapping Uucp.Case.Map.to_upper;
map "Map.to_lower" Uucd.lowercase_mapping Uucp.Case.Map.to_lower;
map "Map.to_title" Uucd.titlecase_mapping Uucp.Case.Map.to_title;
map "Fold.fold" Uucd.case_folding Uucp.Case.Fold.fold;
map "Nfkc_fold.fold" Uucd.nfkc_casefold Uucp.Case.Nfkc_fold.fold;
()
let assert_cjk ucd =
let prop fname ucd_p p = prop ucd "Uucd.Cjk" fname (ucd_get ucd_p) p in
prop "ideographic" Uucd.ideographic Uucp.Cjk.is_ideographic;
prop "ids_bin_op" Uucd.ids_binary_operator Uucp.Cjk.is_ids_bin_op;
prop "ids_tri_op" Uucd.ids_trinary_operator Uucp.Cjk.is_ids_tri_op;
prop "radical" Uucd.radical Uucp.Cjk.is_radical;
prop "unified_ideograph" Uucd.unified_ideograph Uucp.Cjk.is_unified_ideograph;
()
let assert_func ucd =
let prop fname ucd_p p = prop ucd "Uucd.Func" fname (ucd_get ucd_p) p in
prop "is_dash" Uucd.dash Uucp.Func.is_dash;
prop "is_diacritic" Uucd.diacritic Uucp.Func.is_diacritic;
prop "is_extender" Uucd.extender Uucp.Func.is_extender;
prop "is_grapheme_base" Uucd.grapheme_base Uucp.Func.is_grapheme_base;
prop "is_grapheme_extend" Uucd.grapheme_extend Uucp.Func.is_grapheme_extend;
prop "is_math" Uucd.math Uucp.Func.is_math;
prop "is_quotation_mark" Uucd.quotation_mark Uucp.Func.is_quotation_mark;
prop "is_soft_dotted" Uucd.soft_dotted Uucp.Func.is_soft_dotted;
prop "is_terminal_punctuation" Uucd.terminal_punctuation
Uucp.Func.is_terminal_punctuation;
prop "is_regional_indicator" Uucd.regional_indicator
Uucp.Func.is_regional_indicator;
()
let assert_gc ucd =
let prop fname ucd_p p = prop ucd "Uucp.Gc" fname (ucd_get ucd_p) p in
prop "general_category" Uucd.general_category Uucp.Gc.general_category;
()
let assert_gen ucd =
let prop fname ucd_p p = prop ucd "Uucp.Gen" fname (ucd_get ucd_p) p in
prop "is_default_ignorable" Uucd.default_ignorable_code_point
Uucp.Gen.is_default_ignorable;
prop "is_deprecated" Uucd.deprecated Uucp.Gen.is_deprecated ;
prop "is_logical_order_exception" Uucd.logical_order_exception
Uucp.Gen.is_logical_order_exception;
prop "is_non_character" Uucd.noncharacter_code_point
Uucp.Gen.is_non_character;
prop "is_variation_selector" Uucd.variation_selector
Uucp.Gen.is_variation_selector;
()
let assert_hangul ucd =
let prop fname ucd_p p = prop ucd "Uucp.Hangul" fname (ucd_get ucd_p) p in
prop "syllable_type" Uucd.hangul_syllable_type Uucp.Hangul.syllable_type;
()
let assert_id ucd =
let prop fname ucd_p p = prop ucd "Uucp.Id" fname (ucd_get ucd_p) p in
prop "is_id_start" Uucd.id_start Uucp.Id.is_id_start;
prop "is_id_continue" Uucd.id_continue Uucp.Id.is_id_continue;
prop "is_xid_start" Uucd.xid_start Uucp.Id.is_xid_start;
prop "is_xid_continue" Uucd.xid_continue Uucp.Id.is_xid_continue;
prop "is_pattern_syntax" Uucd.pattern_syntax Uucp.Id.is_pattern_syntax;
prop "is_pattern_white_space" Uucd.pattern_white_space
Uucp.Id.is_pattern_white_space;
()
let assert_name ucd =
let buf = Buffer.create 244 in
let name_prop ucd u = match (ucd_get Uucd.name ucd u) with
| `Name n -> n
| `Pattern n ->
Buffer.clear buf;
for i = 0 to String.length n - 1 do
if n.[i] = '#'
then Buffer.add_string buf (str "%04X" (Uchar.to_int u))
else Buffer.add_char buf n.[i]
done;
Buffer.contents buf
in
prop ucd "Uucd.Name" "name" name_prop Uucp.Name.name;
let alias_prop ucd u =
let permute (n, t) = (t, n) in
List.map permute (ucd_get Uucd.name_alias ucd u)
in
prop ucd "Uucd.Name" "name_alias" alias_prop Uucp.Name.name_alias;
()
let assert_num ucd =
let prop fname ucd_p p = prop ucd "Uucp.Num" fname (ucd_get ucd_p) p in
prop "is_ascii_hex_digit" Uucd.ascii_hex_digit Uucp.Num.is_ascii_hex_digit;
prop "is_hex_digit" Uucd.hex_digit Uucp.Num.is_hex_digit;
prop "numeric_type" Uucd.numeric_type Uucp.Num.numeric_type;
prop "numeric_value" Uucd.numeric_value Uucp.Num.numeric_value;
()
let assert_script ucd =
let prop fname ucd_p p = prop ucd "Uucp.Script" fname (ucd_get ucd_p) p in
prop "script" Uucd.script Uucp.Script.script;
prop "script_extensions"
Uucd.script_extensions Uucp.Script.script_extensions;
()
let assert_white ucd =
let prop fname ucd_p p = prop ucd "Uucd.White" fname (ucd_get ucd_p) p in
prop "is_white_space" Uucd.white_space Uucp.White.is_white_space;
()
let test inf mods =
let do_assert m = mods = [] || List.mem m mods in
let ucd = load_ucd inf in
if do_assert `Age then assert_age ucd;
if do_assert `Alpha then assert_alpha ucd;
if do_assert `Block then assert_block ucd;
if do_assert `Break then assert_break ucd;
if do_assert `Case then assert_case ucd;
if do_assert `Cjk then assert_cjk ucd;
if do_assert `Func then assert_func ucd;
if do_assert `Gc then assert_gc ucd;
if do_assert `Gen then assert_gen ucd;
if do_assert `Hangul then assert_hangul ucd;
if do_assert `Id then assert_id ucd;
if do_assert `Name then assert_name ucd;
if do_assert `Num then assert_num ucd;
if do_assert `Script then assert_script ucd;
if do_assert `White then assert_white ucd;
log "Done.@\n";
()
let main () =
let usage = str
"Usage: %s [OPTION]... [DBFILE]\n\
\ Asserts Uucp's data against the Unicode character database DBFILE.\n\
\ DBFILE defaults to support/ucd.xml, without any option asserts all\n\
\ modules.\n\
Options:" exec
in
let inf = ref None in
let set_inf f =
if !inf = None then inf := Some f else
raise (Arg.Bad "only one Unicode character database file can be specified")
in
let mods = ref [] in
let add v = Arg.Unit (fun () -> mods := v :: !mods) in
let options = [
"-age", add `Age, " assert the Age module";
"-alpha", add `Alpha, " assert the Alpha module";
"-block", add `Block, " assert the Block module";
"-break", add `Break, " assert the Break module";
"-case", add `Case, " assert the Case module";
"-cjk", add `Cjk, " assert the CJK module";
"-func", add `Func, " assert the Func module";
"-gc", add `Gc, " assert the Gc module";
"-gen", add `Gen, " assert the Gen module";
"-hangul", add `Hangul, " assert the Hangul module";
"-id", add `Id, " assert the Id module";
"-name", add `Name, " assert the Name module";
"-num", add `Num, " assert the Num module";
"-script", add `Script, " assert the Script module";
"-white", add `White, " assert the White module"; ]
in
Arg.parse (Arg.align options) set_inf usage;
test !inf !mods
let () = main ()
---------------------------------------------------------------------------
Copyright ( c ) 2014
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2014 Daniel C. Bünzli
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/avsm/platform/b254e3c6b60f3c0c09dfdcde92eb1abdc267fa1c/duniverse/uucp.12.0.0%2Bdune/test/test.ml | ocaml | Test Uucp.Block.blocks at the same time | ---------------------------------------------------------------------------
Copyright ( c ) 2014 . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
% % NAME%% % % ---------------------------------------------------------------------------
Copyright (c) 2014 Daniel C. Bünzli. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
%%NAME%% %%VERSION%%
---------------------------------------------------------------------------*)
Tests the properties against the XML Unicode character database .
let str = Format.asprintf
let exec = Filename.basename Sys.executable_name
let log fmt = Format.eprintf (fmt ^^ "%!")
let uchar_dump ppf u = Format.fprintf ppf "U+%04X" (Uchar.to_int u)
UCD loading and access
let load_ucd inf =
try
log "Loading Unicode character database.@\n";
let inf = match inf with None -> "support/ucd.xml" | Some inf -> inf in
let ic = if inf = "-" then stdin else open_in inf in
let d = Uucd.decoder (`Channel ic) in
match Uucd.decode d with
| `Ok db -> db
| `Error e ->
let (l0, c0), (l1, c1) = Uucd.decoded_range d in
log "%s:%d.%d-%d.%d: %s@\n" inf l0 c0 l1 c1 e;
exit 1
with Sys_error e -> log "%s@\n" e; exit 1
let ucd_get p ucd u = match Uucd.cp_prop ucd (Uchar.to_int u) p with
| None -> invalid_arg (str "no property for %a" uchar_dump u)
| Some v -> v
Assert properties
let prop ucd mname fname ucd_get prop =
let do_assert u =
if ucd_get ucd u = prop u then () else
failwith (str "assertion failure on %a" uchar_dump u)
in
log "Asserting %s.%s@\n" mname fname;
for u = 0 to 0xD7FF do do_assert (Uchar.of_int u) done;
for u = 0xE000 to 0x10FFFF do do_assert (Uchar.of_int u) done;
()
Assert modules
let assert_age ucd =
let prop fname ucd_p p = prop ucd "Uucp.Age" fname (ucd_get ucd_p) p in
prop "age" Uucd.age Uucp.Age.age;
()
let assert_alpha ucd =
let prop fname ucd_p p = prop ucd "Uucd.Alpha" fname (ucd_get ucd_p) p in
prop "is_alphabetic" Uucd.alphabetic Uucp.Alpha.is_alphabetic;
()
let assert_block ucd =
let block_prop ucd u = match ucd_get Uucd.block ucd u with
| `High_Surrogates -> assert false
| `Low_Surrogates -> assert false
| `High_PU_Surrogates -> assert false
| #Uucp.Block.t as b ->
try
let (is, ie) = List.assoc b Uucp.Block.blocks in
if u < is || u > ie then assert false else
b
with Not_found -> assert (b = `NB); b
in
prop ucd "Uucd.Block" "block" block_prop Uucp.Block.block;
()
let assert_break ucd =
let prop fname ucd_p p = prop ucd "Uucp.Break" fname (ucd_get ucd_p) p in
prop "line" Uucd.line_break Uucp.Break.line;
prop "grapheme_cluster" Uucd.grapheme_cluster_break
Uucp.Break.grapheme_cluster;
prop "word" Uucd.word_break Uucp.Break.word;
prop "sentence" Uucd.sentence_break Uucp.Break.sentence;
()
let assert_case ucd =
let map fname ucd_p p =
let assert_map ucd u = match ucd_get ucd_p ucd u with
| `Self -> `Self
| `Cps cps -> `Uchars (List.map Uchar.of_int cps)
in
prop ucd "Uucd.Case" fname assert_map p
in
let prop fname ucd_p p = prop ucd "Uucd.Case" fname (ucd_get ucd_p) p in
prop "is_upper" Uucd.uppercase Uucp.Case.is_upper;
prop "is_lower" Uucd.lowercase Uucp.Case.is_lower;
prop "is_cased" Uucd.cased Uucp.Case.is_cased;
prop "is_case_ignorable" Uucd.case_ignorable Uucp.Case.is_case_ignorable;
map "Map.to_upper" Uucd.uppercase_mapping Uucp.Case.Map.to_upper;
map "Map.to_lower" Uucd.lowercase_mapping Uucp.Case.Map.to_lower;
map "Map.to_title" Uucd.titlecase_mapping Uucp.Case.Map.to_title;
map "Fold.fold" Uucd.case_folding Uucp.Case.Fold.fold;
map "Nfkc_fold.fold" Uucd.nfkc_casefold Uucp.Case.Nfkc_fold.fold;
()
let assert_cjk ucd =
let prop fname ucd_p p = prop ucd "Uucd.Cjk" fname (ucd_get ucd_p) p in
prop "ideographic" Uucd.ideographic Uucp.Cjk.is_ideographic;
prop "ids_bin_op" Uucd.ids_binary_operator Uucp.Cjk.is_ids_bin_op;
prop "ids_tri_op" Uucd.ids_trinary_operator Uucp.Cjk.is_ids_tri_op;
prop "radical" Uucd.radical Uucp.Cjk.is_radical;
prop "unified_ideograph" Uucd.unified_ideograph Uucp.Cjk.is_unified_ideograph;
()
let assert_func ucd =
let prop fname ucd_p p = prop ucd "Uucd.Func" fname (ucd_get ucd_p) p in
prop "is_dash" Uucd.dash Uucp.Func.is_dash;
prop "is_diacritic" Uucd.diacritic Uucp.Func.is_diacritic;
prop "is_extender" Uucd.extender Uucp.Func.is_extender;
prop "is_grapheme_base" Uucd.grapheme_base Uucp.Func.is_grapheme_base;
prop "is_grapheme_extend" Uucd.grapheme_extend Uucp.Func.is_grapheme_extend;
prop "is_math" Uucd.math Uucp.Func.is_math;
prop "is_quotation_mark" Uucd.quotation_mark Uucp.Func.is_quotation_mark;
prop "is_soft_dotted" Uucd.soft_dotted Uucp.Func.is_soft_dotted;
prop "is_terminal_punctuation" Uucd.terminal_punctuation
Uucp.Func.is_terminal_punctuation;
prop "is_regional_indicator" Uucd.regional_indicator
Uucp.Func.is_regional_indicator;
()
let assert_gc ucd =
let prop fname ucd_p p = prop ucd "Uucp.Gc" fname (ucd_get ucd_p) p in
prop "general_category" Uucd.general_category Uucp.Gc.general_category;
()
let assert_gen ucd =
let prop fname ucd_p p = prop ucd "Uucp.Gen" fname (ucd_get ucd_p) p in
prop "is_default_ignorable" Uucd.default_ignorable_code_point
Uucp.Gen.is_default_ignorable;
prop "is_deprecated" Uucd.deprecated Uucp.Gen.is_deprecated ;
prop "is_logical_order_exception" Uucd.logical_order_exception
Uucp.Gen.is_logical_order_exception;
prop "is_non_character" Uucd.noncharacter_code_point
Uucp.Gen.is_non_character;
prop "is_variation_selector" Uucd.variation_selector
Uucp.Gen.is_variation_selector;
()
let assert_hangul ucd =
let prop fname ucd_p p = prop ucd "Uucp.Hangul" fname (ucd_get ucd_p) p in
prop "syllable_type" Uucd.hangul_syllable_type Uucp.Hangul.syllable_type;
()
let assert_id ucd =
let prop fname ucd_p p = prop ucd "Uucp.Id" fname (ucd_get ucd_p) p in
prop "is_id_start" Uucd.id_start Uucp.Id.is_id_start;
prop "is_id_continue" Uucd.id_continue Uucp.Id.is_id_continue;
prop "is_xid_start" Uucd.xid_start Uucp.Id.is_xid_start;
prop "is_xid_continue" Uucd.xid_continue Uucp.Id.is_xid_continue;
prop "is_pattern_syntax" Uucd.pattern_syntax Uucp.Id.is_pattern_syntax;
prop "is_pattern_white_space" Uucd.pattern_white_space
Uucp.Id.is_pattern_white_space;
()
let assert_name ucd =
let buf = Buffer.create 244 in
let name_prop ucd u = match (ucd_get Uucd.name ucd u) with
| `Name n -> n
| `Pattern n ->
Buffer.clear buf;
for i = 0 to String.length n - 1 do
if n.[i] = '#'
then Buffer.add_string buf (str "%04X" (Uchar.to_int u))
else Buffer.add_char buf n.[i]
done;
Buffer.contents buf
in
prop ucd "Uucd.Name" "name" name_prop Uucp.Name.name;
let alias_prop ucd u =
let permute (n, t) = (t, n) in
List.map permute (ucd_get Uucd.name_alias ucd u)
in
prop ucd "Uucd.Name" "name_alias" alias_prop Uucp.Name.name_alias;
()
let assert_num ucd =
let prop fname ucd_p p = prop ucd "Uucp.Num" fname (ucd_get ucd_p) p in
prop "is_ascii_hex_digit" Uucd.ascii_hex_digit Uucp.Num.is_ascii_hex_digit;
prop "is_hex_digit" Uucd.hex_digit Uucp.Num.is_hex_digit;
prop "numeric_type" Uucd.numeric_type Uucp.Num.numeric_type;
prop "numeric_value" Uucd.numeric_value Uucp.Num.numeric_value;
()
let assert_script ucd =
let prop fname ucd_p p = prop ucd "Uucp.Script" fname (ucd_get ucd_p) p in
prop "script" Uucd.script Uucp.Script.script;
prop "script_extensions"
Uucd.script_extensions Uucp.Script.script_extensions;
()
let assert_white ucd =
let prop fname ucd_p p = prop ucd "Uucd.White" fname (ucd_get ucd_p) p in
prop "is_white_space" Uucd.white_space Uucp.White.is_white_space;
()
let test inf mods =
let do_assert m = mods = [] || List.mem m mods in
let ucd = load_ucd inf in
if do_assert `Age then assert_age ucd;
if do_assert `Alpha then assert_alpha ucd;
if do_assert `Block then assert_block ucd;
if do_assert `Break then assert_break ucd;
if do_assert `Case then assert_case ucd;
if do_assert `Cjk then assert_cjk ucd;
if do_assert `Func then assert_func ucd;
if do_assert `Gc then assert_gc ucd;
if do_assert `Gen then assert_gen ucd;
if do_assert `Hangul then assert_hangul ucd;
if do_assert `Id then assert_id ucd;
if do_assert `Name then assert_name ucd;
if do_assert `Num then assert_num ucd;
if do_assert `Script then assert_script ucd;
if do_assert `White then assert_white ucd;
log "Done.@\n";
()
let main () =
let usage = str
"Usage: %s [OPTION]... [DBFILE]\n\
\ Asserts Uucp's data against the Unicode character database DBFILE.\n\
\ DBFILE defaults to support/ucd.xml, without any option asserts all\n\
\ modules.\n\
Options:" exec
in
let inf = ref None in
let set_inf f =
if !inf = None then inf := Some f else
raise (Arg.Bad "only one Unicode character database file can be specified")
in
let mods = ref [] in
let add v = Arg.Unit (fun () -> mods := v :: !mods) in
let options = [
"-age", add `Age, " assert the Age module";
"-alpha", add `Alpha, " assert the Alpha module";
"-block", add `Block, " assert the Block module";
"-break", add `Break, " assert the Break module";
"-case", add `Case, " assert the Case module";
"-cjk", add `Cjk, " assert the CJK module";
"-func", add `Func, " assert the Func module";
"-gc", add `Gc, " assert the Gc module";
"-gen", add `Gen, " assert the Gen module";
"-hangul", add `Hangul, " assert the Hangul module";
"-id", add `Id, " assert the Id module";
"-name", add `Name, " assert the Name module";
"-num", add `Num, " assert the Num module";
"-script", add `Script, " assert the Script module";
"-white", add `White, " assert the White module"; ]
in
Arg.parse (Arg.align options) set_inf usage;
test !inf !mods
let () = main ()
---------------------------------------------------------------------------
Copyright ( c ) 2014
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2014 Daniel C. Bünzli
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
|
7b5575a9f86722642fef0fedc39a0e70bf47b0723c79836e8d4eae752807b34d | wh5a/thih | ConWithType.hs | -- ConWithType.hs
Output : 3
data Weird a b = Funny (a -> b)
data Woo a = W | N a
--fun :: Weird a b -> a -> b
fun = \w -> \x -> case w of
Funny f -> f x
-- main :: Int
main = fun (Funny inc) 2
inc :: Int -> Int
inc = \x -> x + 1
| null | https://raw.githubusercontent.com/wh5a/thih/dc5cb16ba4e998097135beb0c7b0b416cac7bfae/hatchet/examples/ConWithType.hs | haskell | ConWithType.hs
fun :: Weird a b -> a -> b
main :: Int | Output : 3
data Weird a b = Funny (a -> b)
data Woo a = W | N a
fun = \w -> \x -> case w of
Funny f -> f x
main = fun (Funny inc) 2
inc :: Int -> Int
inc = \x -> x + 1
|
626a0b46ea3dad21568e7dc359586dbd3cd525a5ac9991ccf1642f4cf6bbdf8b | plumatic/grab-bag | suggestions.clj | (ns domain.suggestions
"Schema for Suggestion Group"
(:use plumbing.core)
(:require
[schema.core :as s]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Schema
(def +suggestion-sources+
#{:facebook :twitter :curated :local :grabbag :pocket :google})
(s/defschema SuggestionGroupSource
(apply s/enum +suggestion-sources+))
(s/defschema SuggestionGroupType
{:source SuggestionGroupSource
:type (s/enum :activity :topic)})
(s/defschema SuggestionGroup
{:type SuggestionGroupType
:title String
:image (s/maybe String)})
| null | https://raw.githubusercontent.com/plumatic/grab-bag/a15e943322fbbf6f00790ce5614ba6f90de1a9b5/lib/domain/src/domain/suggestions.clj | clojure |
Schema | (ns domain.suggestions
"Schema for Suggestion Group"
(:use plumbing.core)
(:require
[schema.core :as s]))
(def +suggestion-sources+
#{:facebook :twitter :curated :local :grabbag :pocket :google})
(s/defschema SuggestionGroupSource
(apply s/enum +suggestion-sources+))
(s/defschema SuggestionGroupType
{:source SuggestionGroupSource
:type (s/enum :activity :topic)})
(s/defschema SuggestionGroup
{:type SuggestionGroupType
:title String
:image (s/maybe String)})
|
d2eaafedd22cfb60a859ee90fb0a41901b10a946f74b31725045f5a2a594d4ab | danilkolikov/dfl | Ast.hs | |
Module : Frontend . Desugaring . Grouping . Ast
Description : Desugared version of AST of DFL
Copyright : ( c ) , 2019
License : MIT
Desugared version of AST of DFL . Nodes with the same names are grouped together ,
names are disambiguated .
Module : Frontend.Desugaring.Grouping.Ast
Description : Desugared version of AST of DFL
Copyright : (c) Danil Kolikov, 2019
License : MIT
Desugared version of AST of DFL. Nodes with the same names are grouped together,
names are disambiguated.
-}
module Frontend.Desugaring.Grouping.Ast
( module Frontend.Desugaring.Grouping.Ast
, module Frontend.Desugaring.Ast
) where
import Data.List.NonEmpty (NonEmpty)
import Core.Ident (Ident)
import Frontend.Desugaring.Ast
import Frontend.Syntax.Position (WithLocation)
-- | Infix pattern
data InfixPattern
= InfixPatternApplication (WithLocation InfixPattern)
(WithLocation Ident)
(WithLocation InfixPattern)
| InfixPatternSimple (WithLocation Pattern)
deriving (Eq, Show)
-- | Pattern
data Pattern
= PatternInfix (WithLocation InfixPattern) -- ^ Infix pattern
| PatternConstr (WithLocation Ident)
[WithLocation Pattern] -- ^ Application of a constructor
| PatternRecord (WithLocation Ident)
[WithLocation PatternBinding] -- ^ Application of a record constructor
| PatternVar (WithLocation Ident)
(Maybe (WithLocation Pattern)) -- ^ Variable with possible pattern
| PatternConst (WithLocation Const) -- ^ Constant
| PatternWildcard -- ^ Wildcard
deriving (Show, Eq)
-- | Record pattern binding
data PatternBinding =
PatternBinding (WithLocation Ident) -- ^ Field name
(WithLocation Pattern) -- ^ Pattern
deriving (Show, Eq)
-- | Infix expression
data InfixExp
= InfixExpApplication (WithLocation InfixExp)
(WithLocation Ident)
(WithLocation InfixExp)
| InfixExpNegated (WithLocation Ident)
(WithLocation InfixExp)
| InfixExpSimple (WithLocation Exp)
deriving (Eq, Show)
-- | Expression
data Exp
^ Nested InfixExpression
| ExpTyped (WithLocation Exp)
[WithLocation Constraint]
(WithLocation Type) -- ^ Expression with an explicitly specified type
| ExpAbstraction (NonEmpty (WithLocation Pattern))
(WithLocation Exp) -- ^ Lambda-abstraction
| ExpLet (Expressions Exp)
(WithLocation Exp) -- ^ Let-abstraction
| ExpCase (WithLocation Exp)
(NonEmpty (WithLocation Alt)) -- ^ Case expression
| ExpDo [WithLocation Stmt]
(WithLocation Exp) -- ^ Do statement
| ExpApplication (WithLocation Exp)
(NonEmpty (WithLocation Exp)) -- ^ Application of expressions
| ExpVar (WithLocation Ident) -- ^ Variable
| ExpConstr (WithLocation Ident) -- ^ Constructor
| ExpConst (WithLocation Const) -- ^ Constant
| ExpListCompr (WithLocation Exp)
(NonEmpty (WithLocation Stmt)) -- ^ List comprehension
| ExpLeftSection (WithLocation Exp)
(WithLocation Exp) -- ^ Left section
| ExpRightSection (WithLocation Exp)
(WithLocation Exp) -- ^ Right section
| ExpRecordConstr (WithLocation Ident)
[WithLocation Binding] -- ^ Construction of a record
| ExpRecordUpdate (WithLocation Exp)
(NonEmpty (WithLocation Binding)) -- ^ Update of a record
deriving (Show, Eq)
-- | Statements in `do` blocks or in list comprehension
data Stmt
= StmtPattern (WithLocation Pattern)
(WithLocation Exp)
| StmtLet (Expressions Exp)
| StmtExp (WithLocation Exp)
deriving (Show, Eq)
-- | Record field binding
data Binding =
Binding (WithLocation Ident) -- ^ Field name
(WithLocation Exp) -- ^ Expression
deriving (Show, Eq)
-- | Alternative in `case` expressions
data Alt
= AltSimple (WithLocation Pattern) -- ^ Pattern
(WithLocation Exp) -- ^ Expression
| AltGuarded (WithLocation Pattern) -- ^ Pattern
(NonEmpty (WithLocation GuardedExp)) -- ^ Guarded expression
(Expressions Exp) -- ^ Where block
deriving (Show, Eq)
-- | Expression with a guard
data GuardedExp =
GuardedExp (NonEmpty (WithLocation Stmt)) -- ^ List of guards
(WithLocation Exp) -- ^ Expression
deriving (Show, Eq)
| null | https://raw.githubusercontent.com/danilkolikov/dfl/698a8f32e23b381afe803fc0e353293a3bf644ba/src/Frontend/Desugaring/Grouping/Ast.hs | haskell | | Infix pattern
| Pattern
^ Infix pattern
^ Application of a constructor
^ Application of a record constructor
^ Variable with possible pattern
^ Constant
^ Wildcard
| Record pattern binding
^ Field name
^ Pattern
| Infix expression
| Expression
^ Expression with an explicitly specified type
^ Lambda-abstraction
^ Let-abstraction
^ Case expression
^ Do statement
^ Application of expressions
^ Variable
^ Constructor
^ Constant
^ List comprehension
^ Left section
^ Right section
^ Construction of a record
^ Update of a record
| Statements in `do` blocks or in list comprehension
| Record field binding
^ Field name
^ Expression
| Alternative in `case` expressions
^ Pattern
^ Expression
^ Pattern
^ Guarded expression
^ Where block
| Expression with a guard
^ List of guards
^ Expression | |
Module : Frontend . Desugaring . Grouping . Ast
Description : Desugared version of AST of DFL
Copyright : ( c ) , 2019
License : MIT
Desugared version of AST of DFL . Nodes with the same names are grouped together ,
names are disambiguated .
Module : Frontend.Desugaring.Grouping.Ast
Description : Desugared version of AST of DFL
Copyright : (c) Danil Kolikov, 2019
License : MIT
Desugared version of AST of DFL. Nodes with the same names are grouped together,
names are disambiguated.
-}
module Frontend.Desugaring.Grouping.Ast
( module Frontend.Desugaring.Grouping.Ast
, module Frontend.Desugaring.Ast
) where
import Data.List.NonEmpty (NonEmpty)
import Core.Ident (Ident)
import Frontend.Desugaring.Ast
import Frontend.Syntax.Position (WithLocation)
data InfixPattern
= InfixPatternApplication (WithLocation InfixPattern)
(WithLocation Ident)
(WithLocation InfixPattern)
| InfixPatternSimple (WithLocation Pattern)
deriving (Eq, Show)
data Pattern
| PatternConstr (WithLocation Ident)
| PatternRecord (WithLocation Ident)
| PatternVar (WithLocation Ident)
deriving (Show, Eq)
data PatternBinding =
deriving (Show, Eq)
data InfixExp
= InfixExpApplication (WithLocation InfixExp)
(WithLocation Ident)
(WithLocation InfixExp)
| InfixExpNegated (WithLocation Ident)
(WithLocation InfixExp)
| InfixExpSimple (WithLocation Exp)
deriving (Eq, Show)
data Exp
^ Nested InfixExpression
| ExpTyped (WithLocation Exp)
[WithLocation Constraint]
| ExpAbstraction (NonEmpty (WithLocation Pattern))
| ExpLet (Expressions Exp)
| ExpCase (WithLocation Exp)
| ExpDo [WithLocation Stmt]
| ExpApplication (WithLocation Exp)
| ExpListCompr (WithLocation Exp)
| ExpLeftSection (WithLocation Exp)
| ExpRightSection (WithLocation Exp)
| ExpRecordConstr (WithLocation Ident)
| ExpRecordUpdate (WithLocation Exp)
deriving (Show, Eq)
data Stmt
= StmtPattern (WithLocation Pattern)
(WithLocation Exp)
| StmtLet (Expressions Exp)
| StmtExp (WithLocation Exp)
deriving (Show, Eq)
data Binding =
deriving (Show, Eq)
data Alt
deriving (Show, Eq)
data GuardedExp =
deriving (Show, Eq)
|
0489838bc311de94a16c1adf6ca952583a98d3eec7aacbb256e0fac0b3fd33e8 | tsloughter/kuberl | kuberl_v1beta1_eviction.erl | -module(kuberl_v1beta1_eviction).
-export([encode/1]).
-export_type([kuberl_v1beta1_eviction/0]).
-type kuberl_v1beta1_eviction() ::
#{ 'apiVersion' => binary(),
'deleteOptions' => kuberl_v1_delete_options:kuberl_v1_delete_options(),
'kind' => binary(),
'metadata' => kuberl_v1_object_meta:kuberl_v1_object_meta()
}.
encode(#{ 'apiVersion' := ApiVersion,
'deleteOptions' := DeleteOptions,
'kind' := Kind,
'metadata' := Metadata
}) ->
#{ 'apiVersion' => ApiVersion,
'deleteOptions' => DeleteOptions,
'kind' => Kind,
'metadata' => Metadata
}.
| null | https://raw.githubusercontent.com/tsloughter/kuberl/f02ae6680d6ea5db6e8b6c7acbee8c4f9df482e2/gen/kuberl_v1beta1_eviction.erl | erlang | -module(kuberl_v1beta1_eviction).
-export([encode/1]).
-export_type([kuberl_v1beta1_eviction/0]).
-type kuberl_v1beta1_eviction() ::
#{ 'apiVersion' => binary(),
'deleteOptions' => kuberl_v1_delete_options:kuberl_v1_delete_options(),
'kind' => binary(),
'metadata' => kuberl_v1_object_meta:kuberl_v1_object_meta()
}.
encode(#{ 'apiVersion' := ApiVersion,
'deleteOptions' := DeleteOptions,
'kind' := Kind,
'metadata' := Metadata
}) ->
#{ 'apiVersion' => ApiVersion,
'deleteOptions' => DeleteOptions,
'kind' => Kind,
'metadata' => Metadata
}.
| |
0e20d0609d395df1737405d2fb4ec48152218d137d5d9d47cc0aba8069528037 | anoma/juvix | Types.hs | module Juvix.Compiler.Backend.C.Data.Types where
import Juvix.Compiler.Backend.C.Data.BuiltinTable
import Juvix.Compiler.Backend.C.Language
import Juvix.Compiler.Internal.Language qualified as Micro
import Juvix.Prelude
newtype MiniCResult = MiniCResult
{ _resultCCode :: Text
}
data BindingInfo = BindingInfo
{ _bindingInfoExpr :: Expression,
_bindingInfoType :: CFunType
}
newtype PatternInfoTable = PatternInfoTable
{_patternBindings :: HashMap Text BindingInfo}
type CArity = Int
data ClosureInfo = ClosureInfo
{ _closureNameId :: Micro.NameId,
_closureRootName :: Text,
_closureBuiltin :: Maybe Micro.BuiltinPrim,
_closureMembers :: [CDeclType],
_closureFunType :: CFunType,
_closureCArity :: CArity
}
deriving stock (Show, Eq)
closureNamedId :: ClosureInfo -> Text
closureNamedId ClosureInfo {..} = _closureRootName <> "_" <> show (length _closureMembers)
makeLenses ''ClosureInfo
makeLenses ''MiniCResult
makeLenses ''PatternInfoTable
makeLenses ''BindingInfo
closureRootFunction :: ClosureInfo -> Text
closureRootFunction c = case c ^. closureBuiltin of
Just b -> fromMaybe unsup (builtinName b)
where
unsup :: a
unsup = error ("unsupported builtin " <> show b)
Nothing -> c ^. closureRootName
| null | https://raw.githubusercontent.com/anoma/juvix/01a44e436d9495b5f83bb8b7001225c62775cd75/src/Juvix/Compiler/Backend/C/Data/Types.hs | haskell | module Juvix.Compiler.Backend.C.Data.Types where
import Juvix.Compiler.Backend.C.Data.BuiltinTable
import Juvix.Compiler.Backend.C.Language
import Juvix.Compiler.Internal.Language qualified as Micro
import Juvix.Prelude
newtype MiniCResult = MiniCResult
{ _resultCCode :: Text
}
data BindingInfo = BindingInfo
{ _bindingInfoExpr :: Expression,
_bindingInfoType :: CFunType
}
newtype PatternInfoTable = PatternInfoTable
{_patternBindings :: HashMap Text BindingInfo}
type CArity = Int
data ClosureInfo = ClosureInfo
{ _closureNameId :: Micro.NameId,
_closureRootName :: Text,
_closureBuiltin :: Maybe Micro.BuiltinPrim,
_closureMembers :: [CDeclType],
_closureFunType :: CFunType,
_closureCArity :: CArity
}
deriving stock (Show, Eq)
closureNamedId :: ClosureInfo -> Text
closureNamedId ClosureInfo {..} = _closureRootName <> "_" <> show (length _closureMembers)
makeLenses ''ClosureInfo
makeLenses ''MiniCResult
makeLenses ''PatternInfoTable
makeLenses ''BindingInfo
closureRootFunction :: ClosureInfo -> Text
closureRootFunction c = case c ^. closureBuiltin of
Just b -> fromMaybe unsup (builtinName b)
where
unsup :: a
unsup = error ("unsupported builtin " <> show b)
Nothing -> c ^. closureRootName
| |
62acdb9cc329547b3265b76a3b3e51b7306868e8f364cbd77740491f0d745878 | petelliott/pscheme | library.scm | (define-library (pscheme compiler library)
(import (scheme base)
(scheme file)
(srfi 1)
(srfi 28)
(pscheme compiler compile)
(pscheme compiler syntax)
(pscheme string))
(export library?
library-name
library-imports
library-defines
add-library-import!
add-library-export!
add-library-define!
add-library-syntax!
library-exports
new-library
lookup-library
add-to-load-path
library-filename
current-library
lookup-syntax
lookup-global
compile-and-import)
(begin
(define-record-type library
(make-library name fresh-compile imports exports defines syntax)
library?
(name library-name)
(fresh-compile library-fresh-compile set-library-fresh-compile!)
(imports library-imports set-library-imports!)
(exports library-exports set-library-exports!)
(defines library-defines set-library-defines!)
(syntax library-syntax set-library-syntax!))
(define (add-library-import! to-lib import-lib)
(set-library-imports! to-lib (cons import-lib (library-imports to-lib))))
(define (add-library-export! to-lib export)
(set-library-exports! to-lib (cons export (library-exports to-lib))))
(define (add-library-define! to-lib name sig)
(set-library-defines! to-lib (cons (cons name sig) (library-defines to-lib))))
(define (add-library-syntax! lib name syntax)
(set-library-syntax! lib (cons (cons name syntax) (library-syntax lib))))
(define libraries '())
(define (new-library name imports exports)
(define library (make-library name (should-fresh-compile) imports exports '() '()))
(set! libraries (cons library libraries))
library)
(define (lookup-library name)
(find (lambda (lib) (equal? name (library-name lib)))
libraries))
(define load-paths '())
(define (add-to-load-path path)
(set! load-paths (cons path load-paths)))
(define (library-filename name)
(define base-name (string-append (string-join "/" (map (lambda (part) (format "~a" part)) name)) ".scm"))
(find file-exists?
(map (lambda (path) (string-append path "/" base-name))
load-paths)))
(define current-library (make-parameter (new-library '(r7rs-user) '() '())))
(define (compile-and-import name)
(define lib (or (lookup-library name)
(begin
(compile-file (library-filename name) 'library)
(lookup-library name))))
(should-fresh-compile (or (library-fresh-compile lib)
(should-fresh-compile)))
(set-library-fresh-compile! (current-library) (should-fresh-compile))
(add-library-import! (current-library) lib))
(define (find-library name curr-lib)
(or (and (assoc name (library-defines curr-lib) syntax-equal?) curr-lib)
(find (lambda (lib) (member name (library-exports lib)))
(library-imports curr-lib))))
(define (lookup-syntax name lib)
(define library (or (find-library name lib)
lib))
(define entry (assoc name (library-syntax library) syntax-equal?))
(if entry
(cdr entry)
#f))
(define (lookup-global name l)
;; TODO: we should resolve local shadowing before exported globals
(define lib (find-library name l))
(and lib
(if (syntax-node? name)
`(global ,(library-name lib) ,(syntax-node-sym name) ,(syntax-node-instance name))
`(global ,(library-name lib) ,name))))
))
| null | https://raw.githubusercontent.com/petelliott/pscheme/b529ac9d102047e332a6f03ca9a65868b0b82a59/scm/pscheme/compiler/library.scm | scheme | TODO: we should resolve local shadowing before exported globals | (define-library (pscheme compiler library)
(import (scheme base)
(scheme file)
(srfi 1)
(srfi 28)
(pscheme compiler compile)
(pscheme compiler syntax)
(pscheme string))
(export library?
library-name
library-imports
library-defines
add-library-import!
add-library-export!
add-library-define!
add-library-syntax!
library-exports
new-library
lookup-library
add-to-load-path
library-filename
current-library
lookup-syntax
lookup-global
compile-and-import)
(begin
(define-record-type library
(make-library name fresh-compile imports exports defines syntax)
library?
(name library-name)
(fresh-compile library-fresh-compile set-library-fresh-compile!)
(imports library-imports set-library-imports!)
(exports library-exports set-library-exports!)
(defines library-defines set-library-defines!)
(syntax library-syntax set-library-syntax!))
(define (add-library-import! to-lib import-lib)
(set-library-imports! to-lib (cons import-lib (library-imports to-lib))))
(define (add-library-export! to-lib export)
(set-library-exports! to-lib (cons export (library-exports to-lib))))
(define (add-library-define! to-lib name sig)
(set-library-defines! to-lib (cons (cons name sig) (library-defines to-lib))))
(define (add-library-syntax! lib name syntax)
(set-library-syntax! lib (cons (cons name syntax) (library-syntax lib))))
(define libraries '())
(define (new-library name imports exports)
(define library (make-library name (should-fresh-compile) imports exports '() '()))
(set! libraries (cons library libraries))
library)
(define (lookup-library name)
(find (lambda (lib) (equal? name (library-name lib)))
libraries))
(define load-paths '())
(define (add-to-load-path path)
(set! load-paths (cons path load-paths)))
(define (library-filename name)
(define base-name (string-append (string-join "/" (map (lambda (part) (format "~a" part)) name)) ".scm"))
(find file-exists?
(map (lambda (path) (string-append path "/" base-name))
load-paths)))
(define current-library (make-parameter (new-library '(r7rs-user) '() '())))
(define (compile-and-import name)
(define lib (or (lookup-library name)
(begin
(compile-file (library-filename name) 'library)
(lookup-library name))))
(should-fresh-compile (or (library-fresh-compile lib)
(should-fresh-compile)))
(set-library-fresh-compile! (current-library) (should-fresh-compile))
(add-library-import! (current-library) lib))
(define (find-library name curr-lib)
(or (and (assoc name (library-defines curr-lib) syntax-equal?) curr-lib)
(find (lambda (lib) (member name (library-exports lib)))
(library-imports curr-lib))))
(define (lookup-syntax name lib)
(define library (or (find-library name lib)
lib))
(define entry (assoc name (library-syntax library) syntax-equal?))
(if entry
(cdr entry)
#f))
(define (lookup-global name l)
(define lib (find-library name l))
(and lib
(if (syntax-node? name)
`(global ,(library-name lib) ,(syntax-node-sym name) ,(syntax-node-instance name))
`(global ,(library-name lib) ,name))))
))
|
fdb3705e09cb5028748e0353ee69d8821333e639a99c32c423eea0a9495841e8 | BitGameEN/bitgamex | ranch_acceptor.erl | Copyright ( c ) 2011 - 2018 , < >
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
%% copyright notice and this permission notice appear in all copies.
%%
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-module(ranch_acceptor).
-export([start_link/4]).
-export([loop/4]).
-spec start_link(inet:socket(), module(), module(), pid())
-> {ok, pid()}.
start_link(LSocket, Transport, Logger, ConnsSup) ->
Pid = spawn_link(?MODULE, loop, [LSocket, Transport, Logger, ConnsSup]),
{ok, Pid}.
-spec loop(inet:socket(), module(), module(), pid()) -> no_return().
loop(LSocket, Transport, Logger, ConnsSup) ->
_ = case Transport:accept(LSocket, infinity) of
{ok, CSocket} ->
case Transport:controlling_process(CSocket, ConnsSup) of
ok ->
%% This call will not return until process has been started
%% AND we are below the maximum number of connections.
ranch_conns_sup:start_protocol(ConnsSup, CSocket);
{error, _} ->
Transport:close(CSocket)
end;
%% Reduce the accept rate if we run out of file descriptors.
%% We can't accept anymore anyway, so we might as well wait
%% a little for the situation to resolve itself.
{error, emfile} ->
ranch:log(warning,
"Ranch acceptor reducing accept rate: out of file descriptors~n",
[], Logger),
receive after 100 -> ok end;
%% We want to crash if the listening socket got closed.
{error, Reason} when Reason =/= closed ->
ok
end,
flush(Logger),
?MODULE:loop(LSocket, Transport, Logger, ConnsSup).
flush(Logger) ->
receive Msg ->
ranch:log(warning,
"Ranch acceptor received unexpected message: ~p~n",
[Msg], Logger),
flush(Logger)
after 0 ->
ok
end.
| null | https://raw.githubusercontent.com/BitGameEN/bitgamex/151ba70a481615379f9648581a5d459b503abe19/src/deps/ranch/src/ranch_acceptor.erl | erlang |
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
This call will not return until process has been started
AND we are below the maximum number of connections.
Reduce the accept rate if we run out of file descriptors.
We can't accept anymore anyway, so we might as well wait
a little for the situation to resolve itself.
We want to crash if the listening socket got closed. | Copyright ( c ) 2011 - 2018 , < >
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
-module(ranch_acceptor).
-export([start_link/4]).
-export([loop/4]).
-spec start_link(inet:socket(), module(), module(), pid())
-> {ok, pid()}.
start_link(LSocket, Transport, Logger, ConnsSup) ->
Pid = spawn_link(?MODULE, loop, [LSocket, Transport, Logger, ConnsSup]),
{ok, Pid}.
-spec loop(inet:socket(), module(), module(), pid()) -> no_return().
loop(LSocket, Transport, Logger, ConnsSup) ->
_ = case Transport:accept(LSocket, infinity) of
{ok, CSocket} ->
case Transport:controlling_process(CSocket, ConnsSup) of
ok ->
ranch_conns_sup:start_protocol(ConnsSup, CSocket);
{error, _} ->
Transport:close(CSocket)
end;
{error, emfile} ->
ranch:log(warning,
"Ranch acceptor reducing accept rate: out of file descriptors~n",
[], Logger),
receive after 100 -> ok end;
{error, Reason} when Reason =/= closed ->
ok
end,
flush(Logger),
?MODULE:loop(LSocket, Transport, Logger, ConnsSup).
flush(Logger) ->
receive Msg ->
ranch:log(warning,
"Ranch acceptor received unexpected message: ~p~n",
[Msg], Logger),
flush(Logger)
after 0 ->
ok
end.
|
a4cc49a67ffc150c5dfcff468a8a5c9ee6f3d8f8b37578a8c49e87ab55e028bf | dannypsnl/plt-research | info.rkt | #lang info
(define collection "continuations")
(define deps '("base"))
(define build-deps '("scribble-lib" "racket-doc" "rackunit-lib"))
(define pkg-desc "Description Here")
(define version "0.0")
(define pkg-authors '(dannypsnl))
| null | https://raw.githubusercontent.com/dannypsnl/plt-research/6e8c2af4252f418cb84ac1ead49991c89c5f377b/continuations/info.rkt | racket | #lang info
(define collection "continuations")
(define deps '("base"))
(define build-deps '("scribble-lib" "racket-doc" "rackunit-lib"))
(define pkg-desc "Description Here")
(define version "0.0")
(define pkg-authors '(dannypsnl))
| |
03ae7bca214461259b34dc924c9a55d7962b828186555585a32322b35abf6005 | DHSProgram/DHS-Indicators-SPSS | WE_ASSETS_MR.sps | * Encoding: windows-1252.
*****************************************************************************************************
Program: WE_ASSETS_MR.sps
Purpose: Code to compute employment, earnings, and asset ownership in men and women
Data inputs: MR dataset
Data outputs: coded variables
Author: Shireen Assaf and translated to SPSS by Ivana Bjelic
Date last modified: Oct 19, 2019 by Ivana Bjelic
Note: The indicators below can be computed for men and women.
* For men the indicator is computed for age 15-49 in line 33. This can be commented out if the indicators are required for all men.
*****************************************************************************************************/
*----------------------------------------------------------------------------
Variables created in this file:
we_empl "Employment status in the last 12 months among those currently in a union"
we_empl_earn "Type of earnings among those employed in the past 12 months and currently in a union"
we_earn_wm_decide "Who descides on wife's cash earnings for employment in the last 12 months"
we_earn_wm_compare "Comparison of cash earnings with husband's cash earnings"
we_earn_mn_decide "Who descides on husband's cash earnings for employment in the last 12 months among men currently in a union"
we_earn_hs_decide "Who descides on husband's cash earnings for employment in the last 12 months among women currently in a union"
we_own_house "Ownership of housing"
we_own_land "Ownership of land"
we_house_deed "Title or deed possesion for owned house"
we_land_deed "Title or deed possesion for owned land"
we_bank "Use an account in a bank or other financial institution"
we_mobile "Own a mobile phone"
we_mobile_finance "Use mobile phone for financial transactions"
----------------------------------------------------------------------------.
* indicators from MR file
*** Employment and earnings ***
*Employment in the last 12 months.
do if mv502=1.
+recode mv731 (0=0) (1 thru 3=1) (8,9=sysmis) into we_empl.
end if.
variable labels we_empl "Employment status in the last 12 months among those currently in a union".
value labels we_empl 0 "No" 1 "Yes".
*Employment by type of earnings.
if any(mv731,1,2,3) & mv502=1 we_empl_earn=mv741.
apply dictionary from *
/source variables = MV741
/target variables = we_empl_earn.
variable labels we_empl_earn "Type of earnings among those employed in the past 12 months and currently in a union".
*Who decides on how husband's cash earnings are used.
do if any(mv731,1,2,3) & any(mv741,1,2) & mv502=1.
+compute we_earn_mn_decide=mv739.
+if mv739=8 we_earn_mn_decide=9.
end if.
add value labels mv739 9"Don't know/Missing".
apply dictionary from *
/source variables = MV739
/target variables = we_earn_mn_decide.
variable labels we_earn_mn_decide "Who descides on husband's cash earnings for employment in the last 12 months among men currently in a union".
*** Ownership of assets ***
*Own a house.
compute we_own_house = mv745a.
apply dictionary from *
/source variables = MV745A
/target variables = we_own_house.
variable labels we_own_house "Ownership of housing".
*Own land.
compute we_own_land = mv745b.
apply dictionary from *
/source variables = MV745B
/target variables = we_own_land.
variable labels we_own_land "Ownership of land".
*Ownership of house deed.
do if any(mv745a,1,2,3).
+recode mv745c (1=1) (2=2) (0=0) (3,8,9=9) into we_house_deed.
end if.
variable labels we_house_deed "Title or deed possesion for owned house".
value labels we_house_deed
0 "Does not have title/deed"
1 "Respondent's name on title/deed"
2 "Respondent's name is not on title/deed"
9 "Don't know/missing".
*Ownership of land deed.
do if any(mv745b,1,2,3).
+recode mv745d (1=1) (2=2) (0=0) (3,8,9=9) into we_land_deed.
end if.
variable labels we_land_deed "Title or deed possesion for owned land".
value labels we_land_deed
0 "Does not have title/deed"
1 "Respondent's name on title/deed"
2 "Respondent's name is not on title/deed"
9 "Don't know/missing".
*Own a bank account.
compute we_bank=mv170.
if mv170=8 | mv170=9 we_bank=0.
variable labels we_bank "Use an account in a bank or other financial institution".
value labels we_bank 0 "No" 1 "Yes".
*Own a mobile phone.
compute we_mobile=mv169a.
if mv169a=8 | mv169a=9 we_mobile=0.
variable labels we_mobile "Own a mobile phone".
value labels we_mobile 0 "No" 1 "Yes".
*Use mobile for finances.
do if (mv169a<>8 & mv169a<>9).
+if mv169a=1 we_mobile_finance=mv169b.
+if mv169b=8 | mv169b=9 we_mobile_finance=0.
end if.
variable labels we_mobile_finance "Use mobile phone for financial transactions".
value labels we_mobile_finance 0 "No" 1 "Yes".
| null | https://raw.githubusercontent.com/DHSProgram/DHS-Indicators-SPSS/578e6d40eff9edebda7cf0db0d9a0a52a537d98c/Chap15_WE/WE_ASSETS_MR.sps | scheme | * Encoding: windows-1252.
*****************************************************************************************************
Program: WE_ASSETS_MR.sps
Purpose: Code to compute employment, earnings, and asset ownership in men and women
Data inputs: MR dataset
Data outputs: coded variables
Author: Shireen Assaf and translated to SPSS by Ivana Bjelic
Date last modified: Oct 19, 2019 by Ivana Bjelic
Note: The indicators below can be computed for men and women.
* For men the indicator is computed for age 15-49 in line 33. This can be commented out if the indicators are required for all men.
*****************************************************************************************************/
*----------------------------------------------------------------------------
Variables created in this file:
we_empl "Employment status in the last 12 months among those currently in a union"
we_empl_earn "Type of earnings among those employed in the past 12 months and currently in a union"
we_earn_wm_decide "Who descides on wife's cash earnings for employment in the last 12 months"
we_earn_wm_compare "Comparison of cash earnings with husband's cash earnings"
we_earn_mn_decide "Who descides on husband's cash earnings for employment in the last 12 months among men currently in a union"
we_earn_hs_decide "Who descides on husband's cash earnings for employment in the last 12 months among women currently in a union"
we_own_house "Ownership of housing"
we_own_land "Ownership of land"
we_house_deed "Title or deed possesion for owned house"
we_land_deed "Title or deed possesion for owned land"
we_bank "Use an account in a bank or other financial institution"
we_mobile "Own a mobile phone"
we_mobile_finance "Use mobile phone for financial transactions"
----------------------------------------------------------------------------.
* indicators from MR file
*** Employment and earnings ***
*Employment in the last 12 months.
do if mv502=1.
+recode mv731 (0=0) (1 thru 3=1) (8,9=sysmis) into we_empl.
end if.
variable labels we_empl "Employment status in the last 12 months among those currently in a union".
value labels we_empl 0 "No" 1 "Yes".
*Employment by type of earnings.
if any(mv731,1,2,3) & mv502=1 we_empl_earn=mv741.
apply dictionary from *
/source variables = MV741
/target variables = we_empl_earn.
variable labels we_empl_earn "Type of earnings among those employed in the past 12 months and currently in a union".
*Who decides on how husband's cash earnings are used.
do if any(mv731,1,2,3) & any(mv741,1,2) & mv502=1.
+compute we_earn_mn_decide=mv739.
+if mv739=8 we_earn_mn_decide=9.
end if.
add value labels mv739 9"Don't know/Missing".
apply dictionary from *
/source variables = MV739
/target variables = we_earn_mn_decide.
variable labels we_earn_mn_decide "Who descides on husband's cash earnings for employment in the last 12 months among men currently in a union".
*** Ownership of assets ***
*Own a house.
compute we_own_house = mv745a.
apply dictionary from *
/source variables = MV745A
/target variables = we_own_house.
variable labels we_own_house "Ownership of housing".
*Own land.
compute we_own_land = mv745b.
apply dictionary from *
/source variables = MV745B
/target variables = we_own_land.
variable labels we_own_land "Ownership of land".
*Ownership of house deed.
do if any(mv745a,1,2,3).
+recode mv745c (1=1) (2=2) (0=0) (3,8,9=9) into we_house_deed.
end if.
variable labels we_house_deed "Title or deed possesion for owned house".
value labels we_house_deed
0 "Does not have title/deed"
1 "Respondent's name on title/deed"
2 "Respondent's name is not on title/deed"
9 "Don't know/missing".
*Ownership of land deed.
do if any(mv745b,1,2,3).
+recode mv745d (1=1) (2=2) (0=0) (3,8,9=9) into we_land_deed.
end if.
variable labels we_land_deed "Title or deed possesion for owned land".
value labels we_land_deed
0 "Does not have title/deed"
1 "Respondent's name on title/deed"
2 "Respondent's name is not on title/deed"
9 "Don't know/missing".
*Own a bank account.
compute we_bank=mv170.
if mv170=8 | mv170=9 we_bank=0.
variable labels we_bank "Use an account in a bank or other financial institution".
value labels we_bank 0 "No" 1 "Yes".
*Own a mobile phone.
compute we_mobile=mv169a.
if mv169a=8 | mv169a=9 we_mobile=0.
variable labels we_mobile "Own a mobile phone".
value labels we_mobile 0 "No" 1 "Yes".
*Use mobile for finances.
do if (mv169a<>8 & mv169a<>9).
+if mv169a=1 we_mobile_finance=mv169b.
+if mv169b=8 | mv169b=9 we_mobile_finance=0.
end if.
variable labels we_mobile_finance "Use mobile phone for financial transactions".
value labels we_mobile_finance 0 "No" 1 "Yes".
| |
67a455633ad0b782ada6a02d938cebc3f7c3cada339aceadca2622c64bfe51f9 | BranchTaken/Hemlock | test_wI_wX.ml | open! Basis.Rudiments
open! Basis
let test () =
File.Fmt.stdout
|> (fun formatter ->
List.fold I128.([of_string "0x8000_0000_0000_0000_0000_0000_0000_0000i128"; of_i64 (-1L);
of_i64 0L; of_i64 1L; of_string "0x7fff_ffff_ffff_ffff_ffff_ffff_ffff_ffffi128"])
~init:formatter ~f:(fun formatter i ->
formatter
|> Fmt.fmt "extend_to_i512 "
|> I128.fmt ~alt:true ~radix:Radix.Hex ~pretty:true i
|> Fmt.fmt " -> "
|> I512.fmt ~alt:true ~radix:Radix.Hex ~pretty:true (I128.extend_to_i512 i)
|> Fmt.fmt "\n"
)
)
|> Fmt.fmt "\n"
|> (fun formatter ->
List.fold I512.([of_i64 (-1L); of_i64 0L; of_i64 1L;
of_string "0x7fff_ffff_ffff_ffff_ffff_ffff_ffff_ffffi512";
of_string "0x8000_0000_0000_0000_0000_0000_0000_0000i512";
of_string "0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffffi512";
of_string "0x1_0000_0000_0000_0000_0000_0000_0000_0000i512";
of_string "0x1_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffffi512"]) ~init:formatter
~f:(fun formatter i ->
formatter
|> Fmt.fmt "trunc_of_i512/narrow_of_i512_opt "
|> I512.fmt ~alt:true ~radix:Radix.Hex ~pretty:true i
|> Fmt.fmt " -> "
|> I128.fmt ~alt:true ~radix:Radix.Hex ~pretty:true (I128.trunc_of_i512 i)
|> Fmt.fmt "/"
|> (Option.fmt I128.pp) (I128.narrow_of_i512_opt i)
|> Fmt.fmt "\n"
)
)
|> ignore
let _ = test ()
| null | https://raw.githubusercontent.com/BranchTaken/Hemlock/a07e362d66319108c1478a4cbebab765c1808b1a/bootstrap/test/basis/convert/test_wI_wX.ml | ocaml | open! Basis.Rudiments
open! Basis
let test () =
File.Fmt.stdout
|> (fun formatter ->
List.fold I128.([of_string "0x8000_0000_0000_0000_0000_0000_0000_0000i128"; of_i64 (-1L);
of_i64 0L; of_i64 1L; of_string "0x7fff_ffff_ffff_ffff_ffff_ffff_ffff_ffffi128"])
~init:formatter ~f:(fun formatter i ->
formatter
|> Fmt.fmt "extend_to_i512 "
|> I128.fmt ~alt:true ~radix:Radix.Hex ~pretty:true i
|> Fmt.fmt " -> "
|> I512.fmt ~alt:true ~radix:Radix.Hex ~pretty:true (I128.extend_to_i512 i)
|> Fmt.fmt "\n"
)
)
|> Fmt.fmt "\n"
|> (fun formatter ->
List.fold I512.([of_i64 (-1L); of_i64 0L; of_i64 1L;
of_string "0x7fff_ffff_ffff_ffff_ffff_ffff_ffff_ffffi512";
of_string "0x8000_0000_0000_0000_0000_0000_0000_0000i512";
of_string "0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffffi512";
of_string "0x1_0000_0000_0000_0000_0000_0000_0000_0000i512";
of_string "0x1_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffffi512"]) ~init:formatter
~f:(fun formatter i ->
formatter
|> Fmt.fmt "trunc_of_i512/narrow_of_i512_opt "
|> I512.fmt ~alt:true ~radix:Radix.Hex ~pretty:true i
|> Fmt.fmt " -> "
|> I128.fmt ~alt:true ~radix:Radix.Hex ~pretty:true (I128.trunc_of_i512 i)
|> Fmt.fmt "/"
|> (Option.fmt I128.pp) (I128.narrow_of_i512_opt i)
|> Fmt.fmt "\n"
)
)
|> ignore
let _ = test ()
| |
6b601a388e8c9530cb5949228a356ea2b823a71f10a0316e6958d92766fc0181 | binaryage/cljs-oops | schema.clj | (ns oops.schema
"The code for compile-time conversion of selectors to paths. Uses clojure.spec to do the heavy-lifting."
(:require [clojure.spec.alpha :as s]
[clojure.walk :refer [postwalk]]
[clojure.string :as string]
[oops.config :as config]
[oops.sdefs :as sdefs]
[oops.constants :refer [dot-access soft-access punch-access]]
[oops.reporting :refer [report-if-needed! report-offending-selector-if-needed!]]
[oops.debug :refer [debug-assert log]]))
; --- path utils ------------------------------------------------------------------------------------------------------------
(defn unescape-modifiers [s]
(string/replace s #"^\\([?!])" "$1"))
(defn parse-selector-element [element-str]
(case (first element-str)
\? [soft-access (.substring element-str 1)]
\! [punch-access (.substring element-str 1)]
[dot-access (unescape-modifiers element-str)]))
(defn unescape-dots [s]
(string/replace s #"\\\." "."))
(defn parse-selector-string [selector-str]
(let [elements (->> (string/split selector-str #"(?<!\\)\.") ;
(remove empty?)
(map unescape-dots))]
(map parse-selector-element elements)))
(defn coerce-key [destructured-key]
(let [value (second destructured-key)]
(case (first destructured-key)
:string (parse-selector-string value)
:keyword (parse-selector-string (name value)))))
(defn coerce-key-node [node]
(if (and (sequential? node)
(= (first node) :key))
[(coerce-key (second node))]
node))
(defn coerce-selector-keys [destructured-selector]
(postwalk coerce-key-node destructured-selector))
(defn coerce-selector-node [node]
(if (and (sequential? node)
(= (first node) :selector))
(vector (second node))
node))
(defn coerce-nested-selectors [destructured-selector]
(postwalk coerce-selector-node destructured-selector))
(defn standalone-modifier? [item]
(and (pos? (first item))
(empty? (second item))))
(defn detect-standalone-modifier [state item]
(if (standalone-modifier? item)
(update state :pending-modifier #(or % item)) ; in case of multiple standalone modifiers in a row, the left-most one wins
(update state :result conj item)))
(defn merge-standalone-modifier [modifier-item following-item]
(list (first modifier-item) (second following-item)))
(defn merge-standalone-modifiers [items]
(let [* (fn [state item]
(if-some [pending-modifier (:pending-modifier state)]
(let [merged-item (merge-standalone-modifier pending-modifier item)
state (assoc state :pending-modifier nil)]
(detect-standalone-modifier state merged-item))
(detect-standalone-modifier state item)))
init-state {:result []
:pending-modifier nil}
processed-items (reduce * init-state items)]
(:result processed-items)))
(defn build-selector-path [destructured-selector]
{:post [(or (nil? %) (s/valid? ::sdefs/obj-path %))]}
(let [path (when-not (= destructured-selector ::s/invalid)
(->> destructured-selector
(coerce-selector-keys)
(coerce-nested-selectors)
(flatten)
(partition 2)
(merge-standalone-modifiers)
(map vec)))]
(debug-assert (or (nil? path) (s/valid? ::sdefs/obj-path path)))
path))
(defn selector->path [selector]
(->> selector
(s/conform ::sdefs/obj-selector)
(build-selector-path)))
(defn static-selector? [selector]
(s/valid? ::sdefs/obj-selector selector))
(defn get-access-modes [path]
(map first path))
(defn find-offending-selector [selector-list offender-matcher]
(let [* (fn [selector]
(let [path (selector->path selector)
modes (get-access-modes path)]
(when (some offender-matcher modes)
selector)))]
(some * selector-list)))
(defn check-and-report-invalid-mode! [modes mode selector-list message-type]
(when (some #{mode} modes)
(let [offending-selector (find-offending-selector selector-list #{mode})]
(report-offending-selector-if-needed! offending-selector message-type))))
(defn check-static-path! [path op selector-list]
(when (config/diagnostics?)
(if (empty? path)
(report-if-needed! :static-unexpected-empty-selector)
(let [modes (get-access-modes path)]
(case op
:get (check-and-report-invalid-mode! modes punch-access selector-list :static-unexpected-punching-selector)
:set (check-and-report-invalid-mode! modes soft-access selector-list :static-unexpected-soft-selector)))))
path)
| null | https://raw.githubusercontent.com/binaryage/cljs-oops/a2b48d59047c28decb0d6334e2debbf21848e29c/src/lib/oops/schema.clj | clojure | --- path utils ------------------------------------------------------------------------------------------------------------
in case of multiple standalone modifiers in a row, the left-most one wins | (ns oops.schema
"The code for compile-time conversion of selectors to paths. Uses clojure.spec to do the heavy-lifting."
(:require [clojure.spec.alpha :as s]
[clojure.walk :refer [postwalk]]
[clojure.string :as string]
[oops.config :as config]
[oops.sdefs :as sdefs]
[oops.constants :refer [dot-access soft-access punch-access]]
[oops.reporting :refer [report-if-needed! report-offending-selector-if-needed!]]
[oops.debug :refer [debug-assert log]]))
(defn unescape-modifiers [s]
(string/replace s #"^\\([?!])" "$1"))
(defn parse-selector-element [element-str]
(case (first element-str)
\? [soft-access (.substring element-str 1)]
\! [punch-access (.substring element-str 1)]
[dot-access (unescape-modifiers element-str)]))
(defn unescape-dots [s]
(string/replace s #"\\\." "."))
(defn parse-selector-string [selector-str]
(remove empty?)
(map unescape-dots))]
(map parse-selector-element elements)))
(defn coerce-key [destructured-key]
(let [value (second destructured-key)]
(case (first destructured-key)
:string (parse-selector-string value)
:keyword (parse-selector-string (name value)))))
(defn coerce-key-node [node]
(if (and (sequential? node)
(= (first node) :key))
[(coerce-key (second node))]
node))
(defn coerce-selector-keys [destructured-selector]
(postwalk coerce-key-node destructured-selector))
(defn coerce-selector-node [node]
(if (and (sequential? node)
(= (first node) :selector))
(vector (second node))
node))
(defn coerce-nested-selectors [destructured-selector]
(postwalk coerce-selector-node destructured-selector))
(defn standalone-modifier? [item]
(and (pos? (first item))
(empty? (second item))))
(defn detect-standalone-modifier [state item]
(if (standalone-modifier? item)
(update state :result conj item)))
(defn merge-standalone-modifier [modifier-item following-item]
(list (first modifier-item) (second following-item)))
(defn merge-standalone-modifiers [items]
(let [* (fn [state item]
(if-some [pending-modifier (:pending-modifier state)]
(let [merged-item (merge-standalone-modifier pending-modifier item)
state (assoc state :pending-modifier nil)]
(detect-standalone-modifier state merged-item))
(detect-standalone-modifier state item)))
init-state {:result []
:pending-modifier nil}
processed-items (reduce * init-state items)]
(:result processed-items)))
(defn build-selector-path [destructured-selector]
{:post [(or (nil? %) (s/valid? ::sdefs/obj-path %))]}
(let [path (when-not (= destructured-selector ::s/invalid)
(->> destructured-selector
(coerce-selector-keys)
(coerce-nested-selectors)
(flatten)
(partition 2)
(merge-standalone-modifiers)
(map vec)))]
(debug-assert (or (nil? path) (s/valid? ::sdefs/obj-path path)))
path))
(defn selector->path [selector]
(->> selector
(s/conform ::sdefs/obj-selector)
(build-selector-path)))
(defn static-selector? [selector]
(s/valid? ::sdefs/obj-selector selector))
(defn get-access-modes [path]
(map first path))
(defn find-offending-selector [selector-list offender-matcher]
(let [* (fn [selector]
(let [path (selector->path selector)
modes (get-access-modes path)]
(when (some offender-matcher modes)
selector)))]
(some * selector-list)))
(defn check-and-report-invalid-mode! [modes mode selector-list message-type]
(when (some #{mode} modes)
(let [offending-selector (find-offending-selector selector-list #{mode})]
(report-offending-selector-if-needed! offending-selector message-type))))
(defn check-static-path! [path op selector-list]
(when (config/diagnostics?)
(if (empty? path)
(report-if-needed! :static-unexpected-empty-selector)
(let [modes (get-access-modes path)]
(case op
:get (check-and-report-invalid-mode! modes punch-access selector-list :static-unexpected-punching-selector)
:set (check-and-report-invalid-mode! modes soft-access selector-list :static-unexpected-soft-selector)))))
path)
|
1b81bf479a607e3494477db27e9640155739500492044b383a780733c7fd04f5 | iand675/hs-opentelemetry | Detector.hs | module OpenTelemetry.Resource.Service.Detector where
import qualified Data.Text as T
import OpenTelemetry.Resource.Service
import System.Environment (getProgName, lookupEnv)
{- | Detect a service name using the 'OTEL_SERVICE_NAME' environment
variable. Otherwise, populates the name with 'unknown_service:process_name'.
-}
detectService :: IO Service
detectService = do
mSvcName <- lookupEnv "OTEL_SERVICE_NAME"
svcName <- case mSvcName of
Nothing -> T.pack . ("unknown_service:" <>) <$> getProgName
Just svcName -> pure $ T.pack svcName
pure $
Service
{ serviceName = svcName
, serviceNamespace = Nothing
, serviceInstanceId = Nothing
, serviceVersion = Nothing
}
| null | https://raw.githubusercontent.com/iand675/hs-opentelemetry/b08550db292ca0d8b9ce9156988e6d08dd6a2e61/sdk/src/OpenTelemetry/Resource/Service/Detector.hs | haskell | | Detect a service name using the 'OTEL_SERVICE_NAME' environment
variable. Otherwise, populates the name with 'unknown_service:process_name'.
| module OpenTelemetry.Resource.Service.Detector where
import qualified Data.Text as T
import OpenTelemetry.Resource.Service
import System.Environment (getProgName, lookupEnv)
detectService :: IO Service
detectService = do
mSvcName <- lookupEnv "OTEL_SERVICE_NAME"
svcName <- case mSvcName of
Nothing -> T.pack . ("unknown_service:" <>) <$> getProgName
Just svcName -> pure $ T.pack svcName
pure $
Service
{ serviceName = svcName
, serviceNamespace = Nothing
, serviceInstanceId = Nothing
, serviceVersion = Nothing
}
|
c851ad20d58584a39646642f7a39d5fc374ba0a537b9d4074f8e3f0e4f473973 | lambdamikel/DLMAPS | syntactic-sugar12.lisp | -*- Mode : Lisp ; Syntax : Ansi - Common - Lisp ; Package : THEMATIC - SUBSTRATE ; Base : 10 -*-
(in-package :THEMATIC-SUBSTRATE)
;; Rewrites a datatype (concrete-domain) expression over PROPERTY into the
;; internal RACER attribute form, e.g. turning (min 3) into
;; (min RACER-INTERNAL%HAS-INTEGER-VALUE 3). Recurses through and/or/not.
(defmethod transform-datatype-expression ((parser nrql-abox-query-parser) property expr)
  ;; e.g. (and (min 3) (max 5) (or ...))
  (let ((attribute (get-attribute-for-datatype-property parser property)))
    (labels ((get-simple-expressions (list)
               ;; Keep only two-element CD-predicate forms like (min 3).
               (remove-if-not #'(lambda (x)
                                  (and (consp x)
                                       (not (cddr x)) ; otherwise it was already replaced!
                                       (member (to-keyword (first x))
                                               +racer-cd-predicates+)))
                              list)))
      (if (not (consp expr))
          (parser-error
           (format nil "Unrecognized concept expression ~A" expr))
        (let ((op (to-keyword (first expr))))
          (if (member op +racer-cd-predicates+)
              (if (cddr expr)
                  expr
                `(,(first expr)
                  ,attribute
                  ,(second expr)))
            (case op
              ((:and :or)
               (let* ((simple-expressions
                       (get-simple-expressions (rest expr)))
                      (atomic-expressions
                       (remove-if-not #'symbolp (rest expr)))
                      (other-expressions
                       (remove-if #'(lambda (x)
                                      (or (member x simple-expressions)
                                          (member x atomic-expressions)))
                                  (rest expr))))
                 `(,(first expr)
                   ,@(when atomic-expressions
                       (list atomic-expressions))
                   ,@(mapcar #'(lambda (simple-expr)
                                 `(,(first simple-expr)
                                   ,attribute
                                   ,(second simple-expr)))
                             simple-expressions)
                   ,@(mapcar #'(lambda (x)
                                 (transform-datatype-expression parser property x))
                             other-expressions))))
              ((:not)
               `(not ,(transform-datatype-expression parser property (second expr))))
              ((:an :a)
               (if (second expr)
                   ;; only things like INTEGER, STRING, REAL, CARDINAL can appear here!
                   ;; TYPE CHECK:
                   (if (eq (second expr) attribute)
                       `(racer:an ,attribute)
                     (if (member (to-keyword (second expr))
                                 '(:integer :string :real :cardinal :boolean))
                         (if (eq (second expr) (get-datatype-range parser property))
                             `(racer::an ,attribute)
                           `(racer:no ,attribute))
                       ;;(parser-error expr)
                       `(racer:no ,attribute)
                       ))
                 `(racer:an ,attribute)))
              ((:no)
               `(racer:no ,attribute))
              (otherwise
               (parser-error
                (format nil "Unrecognized concept expression ~A" expr))))))))))
;; Without OWL datatype support, concept expressions pass through unchanged.
#-:owl-datatype-support
(defmethod replace-syntactic-concept-expression-sugar ((parser nrql-abox-query-parser) concept)
  concept)

;; MiDeLoRa parsers never rewrite datatype sugar, even with OWL support built in.
#+(and :owl-datatype-support :midelora)
(defmethod replace-syntactic-concept-expression-sugar ((parser midelora-abox-query-parser) concept)
  concept)
;; Expands syntactic sugar for datatype properties inside a concept
;; expression: CD predicates, a/an/no and qualified restrictions on
;; datatype properties are rewritten to use the internal RACER attribute
;; (via TRANSFORM-DATATYPE-EXPRESSION). Plain CD attributes are kept as-is.
#+:owl-datatype-support
(defmethod replace-syntactic-concept-expression-sugar ((parser nrql-abox-query-parser) concept)
  (when concept
    (cond ((symbolp concept) concept)
          ((consp concept)
           (let ((op (to-keyword (first concept))))
             (if (member op +racer-cd-predicates+)
                 ;; (> DTP 30) -> (some DTP-ROLE (> DTP-ATTRIBUTE 30))
                 (let ((role (second concept)))
                   (cond ((and (role-p role (tbox (substrate parser)))
                               (cd-attribute-p role (tbox (substrate parser))))
                          concept)
                         ((is-datatype-property-p parser role)
                          `(some ,role
                                 ,(transform-datatype-expression parser role
                                                                 `(,(first concept)
                                                                   ,(third concept)))))
                         (t
                          (parser-error
                           (format nil "Unrecognized concept expression ~A" concept)))))
               (case op
                 ((:not)
                  `(not ,(replace-syntactic-concept-expression-sugar parser (second concept))))
                 ((:and :or)
                  `(,(first concept)
                    ,@(mapcar #'(lambda (x)
                                  (replace-syntactic-concept-expression-sugar parser x))
                              (rest concept))))
                 ((:all :some)
                  (let ((role (second concept))
                        (qual (third concept)))
                    (if (is-datatype-property-p parser role)
                        `(,(first concept)
                          ,role
                          ,(transform-datatype-expression parser
                                                          role
                                                          (if (symbolp qual)
                                                              `(racer:an ,qual)
                                                            qual)))
                      `(,(first concept)
                        ,role
                        ,(replace-syntactic-concept-expression-sugar parser qual)))))
                 ((:at-least :at-most :exactly)
                  (let* ((num (second concept))
                         (role (third concept))
                         (qual (fourth concept)))
                    (if (is-datatype-property-p parser role)
                        `(,(first concept)
                          ,num
                          ,role
                          ,(transform-datatype-expression parser
                                                          role
                                                          (if (symbolp qual)
                                                              `(racer:an ,qual)
                                                            qual)))
                      `(,(first concept)
                        ,num ,role ,(replace-syntactic-concept-expression-sugar parser qual)))))
                 ;; This syntax, originally valid only for attributes, is
                 ;; now simply extended to datatype properties as well!
                 ((:a :an)
                  (let ((role (second concept)))
                    (cond ((and (role-p role (tbox (substrate parser)))
                                (cd-attribute-p role (tbox (substrate parser))))
                           concept)
                          ((is-datatype-property-p parser role)
                           `(some ,role ,(transform-datatype-expression parser role `(an ,(third concept)))))
                          (t
                           (parser-error
                            (format nil "Unrecognized concept expression ~A" concept))))))
                 ((:no)
                  (let ((role (second concept)))
                    (cond ((and (role-p role (tbox (substrate parser)))
                                (cd-attribute-p role (tbox (substrate parser))))
                           concept)
                          ((is-datatype-property-p parser role)
                           `(all ,role ,(transform-datatype-expression parser role '(no))))
                          (t (parser-error
                              (format nil "Unrecognized concept expression ~A" concept))))))
                 (otherwise
                  (if (member op +racer-cd-predicates+)
                      (let ((role (second concept)))
                        (cond ((and (role-p role (tbox (substrate parser)))
                                    (cd-attribute-p role (tbox (substrate parser))))
                               concept)
                              ((is-datatype-property-p parser role)
                               `(some ,role ,(transform-datatype-expression parser
                                                                            role (third concept))))
                              (t (parser-error
                                  (format nil "Unrecognized concept expression ~A" concept)))))
                    (parser-error
                     (format nil "Unrecognized concept expression ~A" concept))))))))
          (t (parser-error
              (format nil "Unrecognized concept expression ~A" concept))))))
;;;
;;;
;;;
;; True iff PROPERTY names a role of the parser's TBox that is used as an
;; OWL datatype property.
(defmethod is-datatype-property-p ((parser nrql-abox-query-parser) property)
  (and (symbolp property)
       (role-p property (tbox (substrate parser)))
       (role-used-as-datatype-property-p property (tbox (substrate parser)))))
;; Maps a datatype property to the internal RACER attribute symbol that
;; holds its value; the attribute is named after the property's range type.
(defmethod get-attribute-for-datatype-property ((parser nrql-abox-query-parser) property)
  (intern (format nil "RACER-INTERNAL%HAS-~A-VALUE"
                  (let ((range (datatype-role-range property (tbox (substrate parser)))))
                    ;; HACK! CARDINAL has no attribute of its own; fall back to INTEGER.
                    (if (eq range 'cardinal)
                        'integer
                      range)))
          (find-package :racer)))
;; Returns the range type of PROPERTY, or NIL if it is not a datatype property.
(defmethod get-datatype-range ((parser nrql-abox-query-parser) property)
  (when (is-datatype-property-p parser property)
    (datatype-role-range property (tbox (substrate parser)))))
| null | https://raw.githubusercontent.com/lambdamikel/DLMAPS/7f8dbb9432069d41e6a7d9c13dc5b25602ad35dc/src/query/syntactic-sugar12.lisp | lisp | Syntax : Ansi - Common - Lisp ; Package : THEMATIC - SUBSTRATE ; Base : 10 -*-
sonst wurde schon ersetzt!
TYPE CHECK:
(parser-error expr)
|
(in-package :THEMATIC-SUBSTRATE)
(defmethod transform-datatype-expression ((parser nrql-abox-query-parser) property expr)
( and ( min 3 ) ( max 5 ) ( or ... ) )
(let ((attribute (get-attribute-for-datatype-property parser property)))
(labels ((get-simple-expressions (list)
(remove-if-not #'(lambda (x)
(and (consp x)
(member (to-keyword (first x))
+racer-cd-predicates+)))
list)))
(if (not (consp expr))
(parser-error
(format nil "Unrecognized concept expression ~A" expr))
(let ((op (to-keyword (first expr))))
(if (member op +racer-cd-predicates+)
(if (cddr expr)
expr
`(,(first expr)
,attribute
,(second expr)))
(case op
((:and :or)
(let* ((simple-expressions
(get-simple-expressions (rest expr)))
(atomic-expressions
(remove-if-not #'symbolp (rest expr)))
(other-expressions
(remove-if #'(lambda (x)
(or (member x simple-expressions)
(member x atomic-expressions)))
(rest expr))))
`(,(first expr)
,@(when atomic-expressions
(list atomic-expressions))
,@(mapcar #'(lambda (simple-expr)
`(,(first simple-expr)
,attribute
,(second simple-expr)))
simple-expressions)
,@(mapcar #'(lambda (x)
(transform-datatype-expression parser property x))
other-expressions))))
((:not)
`(not ,(transform-datatype-expression parser property (second expr))))
((:an :a)
(if (second expr)
hier kann nur sowas wie INTEGER , STRING , REAL , CARDINAL STEHEN !
(if (eq (second expr) attribute)
`(racer:an ,attribute)
(if (member (to-keyword (second expr))
'(:integer :string :real :cardinal :boolean))
(if (eq (second expr) (get-datatype-range parser property))
`(racer::an ,attribute)
`(racer:no ,attribute))
`(racer:no ,attribute)
))
`(racer:an ,attribute)))
((:no)
`(racer:no ,attribute))
(otherwise
(parser-error
(format nil "Unrecognized concept expression ~A" expr))))))))))
#-:owl-datatype-support
(defmethod replace-syntactic-concept-expression-sugar ((parser nrql-abox-query-parser) concept)
concept)
#+(and :owl-datatype-support :midelora)
(defmethod replace-syntactic-concept-expression-sugar ((parser midelora-abox-query-parser) concept)
concept)
#+:owl-datatype-support
(defmethod replace-syntactic-concept-expression-sugar ((parser nrql-abox-query-parser) concept)
(when concept
(cond ((symbolp concept) concept)
((consp concept)
(let ((op (to-keyword (first concept))))
(if (member op +racer-cd-predicates+)
( > DTP 30 ) - > ( some DTP - ROLE ( > DTP - ATTRIBUTE 30 ) )
(let ((role (second concept)))
(cond ((and (role-p role (tbox (substrate parser)))
(cd-attribute-p role (tbox (substrate parser))))
concept)
((is-datatype-property-p parser role)
`(some ,role
,(transform-datatype-expression parser role
`(,(first concept)
,(third concept)))))
(t
(parser-error
(format nil "Unrecognized concept expression ~A" concept)))))
(case op
((:not)
`(not ,(replace-syntactic-concept-expression-sugar parser (second concept))))
((:and :or)
`(,(first concept)
,@(mapcar #'(lambda (x)
(replace-syntactic-concept-expression-sugar parser x))
(rest concept))))
((:all :some)
(let ((role (second concept))
(qual (third concept)))
(if (is-datatype-property-p parser role)
`(,(first concept)
,role
,(transform-datatype-expression parser
role
(if (symbolp qual)
`(racer:an ,qual)
qual)))
`(,(first concept)
,role
,(replace-syntactic-concept-expression-sugar parser qual)))))
((:at-least :at-most :exactly)
(let* ((num (second concept))
(role (third concept))
(qual (fourth concept)))
(if (is-datatype-property-p parser role)
`(,(first concept)
,num
,role
,(transform-datatype-expression parser
role
(if (symbolp qual)
`(racer:an ,qual)
qual)))
`(,(first concept)
,num ,role ,(replace-syntactic-concept-expression-sugar parser qual)))))
diese , eigentlich nue fuer Attribute gueltige Syntax wird
nun einfach auf Datatype Properties ausgedehnt !
((:a :an)
(let ((role (second concept)))
(cond ((and (role-p role (tbox (substrate parser)))
(cd-attribute-p role (tbox (substrate parser))))
concept)
((is-datatype-property-p parser role)
`(some ,role ,(transform-datatype-expression parser role `(an ,(third concept)))))
(t
(parser-error
(format nil "Unrecognized concept expression ~A" concept))))))
((:no)
(let ((role (second concept)))
(cond ((and (role-p role (tbox (substrate parser)))
(cd-attribute-p role (tbox (substrate parser))))
concept)
((is-datatype-property-p parser role)
`(all ,role ,(transform-datatype-expression parser role '(no))))
(t (parser-error
(format nil "Unrecognized concept expression ~A" concept))))))
(otherwise
(if (member op +racer-cd-predicates+)
(let ((role (second concept)))
(cond ((and (role-p role (tbox (substrate parser)))
(cd-attribute-p role (tbox (substrate parser))))
concept)
((is-datatype-property-p parser role)
`(some ,role ,(transform-datatype-expression parser
role (third concept))))
(t (parser-error
(format nil "Unrecognized concept expression ~A" concept)))))
(parser-error
(format nil "Unrecognized concept expression ~A" concept))))))))
(t (parser-error
(format nil "Unrecognized concept expression ~A" concept))))))
(defmethod is-datatype-property-p ((parser nrql-abox-query-parser) property)
(and (symbolp property)
(role-p property (tbox (substrate parser)))
(role-used-as-datatype-property-p property (tbox (substrate parser)))))
(defmethod get-attribute-for-datatype-property ((parser nrql-abox-query-parser) property)
(intern (format nil "RACER-INTERNAL%HAS-~A-VALUE"
(let ((range (datatype-role-range property (tbox (substrate parser)))))
HACK !
(if (eq range 'cardinal)
'integer
range)))
(find-package :racer)))
(defmethod get-datatype-range ((parser nrql-abox-query-parser) property)
(when (is-datatype-property-p parser property)
(datatype-role-range property (tbox (substrate parser)))))
|
bfd3355a26f51d58aa11809f2f064c3baa4d0d52999ad8bbb9585caa579ea84a | VisionsGlobalEmpowerment/webchange | views_row.cljs | (ns webchange.editor-v2.course-table.views-row
(:require
[cljs-react-material-ui.icons :as ic]
[cljs-react-material-ui.reagent :as ui]
[re-frame.core :as re-frame]
[webchange.editor-v2.course-table.fields.activities.views :refer [activities]]
[webchange.editor-v2.course-table.fields.concepts.views :refer [concepts]]
[webchange.editor-v2.course-table.fields.skills.views :refer [skills]]
[webchange.editor-v2.course-table.fields.tags.views :refer [tags]]
[webchange.editor-v2.course-table.state.selection :as selection-state]
[webchange.editor-v2.course-table.utils.cell-data :refer [activity->cell-data cell-data->cell-attributes]]
[webchange.ui-deprecated.theme :refer [get-in-theme]]))
(defn- index
  "Cell renderer: the running activity index of the row."
  [props]
  (get-in props [:data :idx]))
(defn- lesson
  "Cell renderer: the 1-based lesson number of the row."
  [props]
  (inc (get-in props [:data :lesson-idx])))
(defn- level
  "Cell renderer: the 1-based level number of the row."
  [props]
  (inc (get-in props [:data :level-idx])))
;; Fallback cell renderer for columns without a registered component:
;; shows a warning icon and the column id in the theme's warning color.
(defn- default-component
  [{:keys [field]}]
  (let [color (get-in-theme [:palette :warning :default])]
    [:div {:style {:align-items "center"
                   :color color
                   :display "flex"}}
     [ic/warning {:style {:font-size "16px"
                          :margin-right "8px"}}]
     [ui/typography {:style {:color color}} (str "<" field ">")]]))
;; Registry of cell renderers: column id -> [component-fn optional-props].
;; Looked up by `get-component` and destructured by `field-cell`.
(def components {:level-idx [level]
                 :lesson-idx [lesson]
                 :idx [index]
                 :concepts [concepts]
                 :activity [activities]
                 :abbr-global [skills {:field :abbr}]
                 :skills [skills {:field :name}]
                 :tags [tags]})
(defn- get-component
  "Returns the [component optional-props] vector registered for column `id`,
  falling back to `default-component` for unknown columns."
  [id]
  ;; Bug fix: the fallback used to be the bare `default-component` fn,
  ;; which threw when callers (see `field-cell`) sequentially destructured
  ;; the result as [component component-props]. Wrap it in a vector.
  ;; (All registered values are non-nil vectors, so `get` with a default
  ;; behaves exactly like the previous contains?/get pair for known ids.)
  (get components id [default-component]))
(defn- cell-selected?
  "Whether the current selection covers the cell described by `cell-data`,
  comparing only the keys that matter for the given column `field`."
  [selection-data cell-data field]
  (let [relevant-keys (case field
                        :idx
                        [:level-idx :lesson-idx :activity-idx]

                        (:level-idx :lesson-idx :concepts)
                        [:level-idx :lesson-idx :field]

                        ;; any other column: compare on all selected keys
                        (keys selection-data))]
    (and (some? selection-data)
         (= (select-keys selection-data relevant-keys)
            (select-keys cell-data relevant-keys)))))
(defn- field-editable?
  "Returns the field keyword itself when it names an editable column,
  nil otherwise."
  [field]
  (#{:abbr-global :activity :concepts :skills :tags} field))
;; Renders one table cell for column `field` of an activity row: resolves
;; the registered cell component, derives selection/editability state from
;; the app-db selection, and decorates the cell with row-span and CSS
;; classes accordingly.
(defn- field-cell
  [{:keys [data field span] :as props}]
  (let [selection @(re-frame/subscribe [::selection-state/selection])
        cell-data (activity->cell-data data field)
        spanned? (some? span)
        selected? (cell-selected? selection cell-data field)
        editable? (field-editable? (:field cell-data))
        [component component-props] (get-component field)]
    ;; qualified `clojure.core/name` because this ns likely shadows `name`
    [ui/table-cell (cond-> (merge (:cell-props props)
                                  (cell-data->cell-attributes cell-data)
                                  {:class-name (clojure.core/name field)})
                     spanned? (assoc :row-span span)
                     selected? (update :class-name str " selected")
                     editable? (update :class-name str " editable"))
     ;; selected cells switch their component into edit mode
     [component (merge component-props
                       {:edit? selected?
                        :data data})]]))
(defn- lesson-row-selected?
  "Whether the lesson column is selected for the level/lesson this cell
  belongs to (used to highlight the whole lesson row)."
  [selection-data cell-data]
  (and (= :lesson-idx (:field selection-data))
       (= (select-keys selection-data [:level-idx :lesson-idx])
          (select-keys cell-data [:level-idx :lesson-idx]))))
;; Renders one table row for an activity: drops columns listed in
;; `skip-columns`, highlights the row when its lesson is selected, and
;; emits one `field-cell` per remaining column (row-spans via `span-columns`).
(defn activity-row
  [{:keys [data columns span-columns skip-columns]}]
  (let [filtered-columns (->> columns
                              (filter (fn [{:keys [id]}]
                                        (-> skip-columns
                                            (contains? id)
                                            (not)))))
        selection @(re-frame/subscribe [::selection-state/selection])
        cell-data (activity->cell-data data)
        lesson-selected? (lesson-row-selected? selection cell-data)]
    [ui/table-row {:class-name (if lesson-selected? "row-selected" "row-not-selected")}
     (for [{:keys [id]} filtered-columns]
       ;; React key per column id
       ^{:key id}
       [field-cell {:data data
                    :span (get span-columns id)
                    :field id}])]))
| null | https://raw.githubusercontent.com/VisionsGlobalEmpowerment/webchange/e5747e187937d85e9c92c728d52a704f323f00ef/src/cljs/webchange/editor_v2/course_table/views_row.cljs | clojure | (ns webchange.editor-v2.course-table.views-row
(:require
[cljs-react-material-ui.icons :as ic]
[cljs-react-material-ui.reagent :as ui]
[re-frame.core :as re-frame]
[webchange.editor-v2.course-table.fields.activities.views :refer [activities]]
[webchange.editor-v2.course-table.fields.concepts.views :refer [concepts]]
[webchange.editor-v2.course-table.fields.skills.views :refer [skills]]
[webchange.editor-v2.course-table.fields.tags.views :refer [tags]]
[webchange.editor-v2.course-table.state.selection :as selection-state]
[webchange.editor-v2.course-table.utils.cell-data :refer [activity->cell-data cell-data->cell-attributes]]
[webchange.ui-deprecated.theme :refer [get-in-theme]]))
(defn- index
[{:keys [data]}]
(:idx data))
(defn- lesson
[{:keys [data]}]
(-> data :lesson-idx inc))
(defn- level
[{:keys [data]}]
(-> data :level-idx inc))
(defn- default-component
[{:keys [field]}]
(let [color (get-in-theme [:palette :warning :default])]
[:div {:style {:align-items "center"
:color color
:display "flex"}}
[ic/warning {:style {:font-size "16px"
:margin-right "8px"}}]
[ui/typography {:style {:color color}} (str "<" field ">")]]))
(def components {:level-idx [level]
:lesson-idx [lesson]
:idx [index]
:concepts [concepts]
:activity [activities]
:abbr-global [skills {:field :abbr}]
:skills [skills {:field :name}]
:tags [tags]})
(defn- get-component
[id]
(if (contains? components id)
(get components id)
default-component))
(defn- cell-selected?
[selection-data cell-data field]
(let [fields-to-check (cond
(= field :idx) [:level-idx :lesson-idx :activity-idx]
(some #{field} [:level-idx :lesson-idx :concepts]) [:level-idx :lesson-idx :field]
:else (keys selection-data))]
(and (some? selection-data)
(= (select-keys selection-data fields-to-check)
(select-keys cell-data fields-to-check)))))
(defn- field-editable?
[field]
(some #{field} [:abbr-global :activity :concepts :skills :tags]))
(defn- field-cell
[{:keys [data field span] :as props}]
(let [selection @(re-frame/subscribe [::selection-state/selection])
cell-data (activity->cell-data data field)
spanned? (some? span)
selected? (cell-selected? selection cell-data field)
editable? (field-editable? (:field cell-data))
[component component-props] (get-component field)]
[ui/table-cell (cond-> (merge (:cell-props props)
(cell-data->cell-attributes cell-data)
{:class-name (clojure.core/name field)})
spanned? (assoc :row-span span)
selected? (update :class-name str " selected")
editable? (update :class-name str " editable"))
[component (merge component-props
{:edit? selected?
:data data})]]))
(defn- lesson-row-selected?
[selection-data cell-data]
(let [fields-to-check [:level-idx :lesson-idx]]
(and (= (:field selection-data) :lesson-idx)
(= (select-keys selection-data fields-to-check)
(select-keys cell-data fields-to-check)))))
(defn activity-row
[{:keys [data columns span-columns skip-columns]}]
(let [filtered-columns (->> columns
(filter (fn [{:keys [id]}]
(-> skip-columns
(contains? id)
(not)))))
selection @(re-frame/subscribe [::selection-state/selection])
cell-data (activity->cell-data data)
lesson-selected? (lesson-row-selected? selection cell-data)]
[ui/table-row {:class-name (if lesson-selected? "row-selected" "row-not-selected")}
(for [{:keys [id]} filtered-columns]
^{:key id}
[field-cell {:data data
:span (get span-columns id)
:field id}])]))
| |
9aaa8e971d6bfb7d4347b8e343403262bd5ffe3d13026073228acbd16f55a868 | chrisdone/prana | Conc.hs | # LANGUAGE Unsafe #
# LANGUAGE CPP , NoImplicitPrelude #
# OPTIONS_GHC -Wno - missing - signatures #
# OPTIONS_HADDOCK not - home #
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Conc
Copyright : ( c ) The University of Glasgow , 1994 - 2002
-- License : see libraries/base/LICENSE
--
-- Maintainer :
-- Stability : internal
Portability : non - portable ( GHC extensions )
--
-- Basic concurrency stuff.
--
-----------------------------------------------------------------------------
-- No: #hide, because bits of this module are exposed by the stm package.
-- However, we don't want this module to be the home location for the
-- bits it exports, we'd rather have Control.Concurrent and the other
-- higher level modules be the home. Hence: #not-home
module GHC.Conc
( ThreadId(..)
-- * Forking and suchlike
, forkIO
, forkIOWithUnmask
, forkOn
, forkOnWithUnmask
, numCapabilities
, getNumCapabilities
, setNumCapabilities
, getNumProcessors
, numSparks
, childHandler
, myThreadId
, killThread
, throwTo
, par
, pseq
, runSparks
, yield
, labelThread
, mkWeakThreadId
, ThreadStatus(..), BlockReason(..)
, threadStatus
, threadCapability
, newStablePtrPrimMVar, PrimMVar
-- * Waiting
, threadDelay
, registerDelay
, threadWaitRead
, threadWaitWrite
, threadWaitReadSTM
, threadWaitWriteSTM
, closeFdWith
-- * Allocation counter and limit
, setAllocationCounter
, getAllocationCounter
, enableAllocationLimit
, disableAllocationLimit
* TVars
, STM(..)
, atomically
, retry
, orElse
, throwSTM
, catchSTM
, alwaysSucceeds
, always
, TVar(..)
, newTVar
, newTVarIO
, readTVar
, readTVarIO
, writeTVar
, unsafeIOToSTM
-- * Miscellaneous
, withMVar
#if defined(mingw32_HOST_OS)
, asyncRead
, asyncWrite
, asyncDoProc
, asyncReadBA
, asyncWriteBA
#endif
#if !defined(mingw32_HOST_OS)
, Signal, HandlerFun, setHandler, runHandlers
#endif
, ensureIOManagerIsRunning
, ioManagerCapabilitiesChanged
#if defined(mingw32_HOST_OS)
, ConsoleEvent(..)
, win32ConsoleHandler
, toWin32ConsoleEvent
#endif
, setUncaughtExceptionHandler
, getUncaughtExceptionHandler
, reportError, reportStackOverflow, reportHeapOverflow
) where
import GHC.Conc.IO
import GHC.Conc.Sync
#if !defined(mingw32_HOST_OS)
import GHC.Conc.Signal
#endif
| null | https://raw.githubusercontent.com/chrisdone/prana/f2e45538937d326aff562b6d49296eaedd015662/prana-boot/packages/base-4.11.1.0/GHC/Conc.hs | haskell | ---------------------------------------------------------------------------
|
Module : GHC.Conc
License : see libraries/base/LICENSE
Maintainer :
Stability : internal
Basic concurrency stuff.
---------------------------------------------------------------------------
No: #hide, because bits of this module are exposed by the stm package.
However, we don't want this module to be the home location for the
bits it exports, we'd rather have Control.Concurrent and the other
higher level modules be the home. Hence: #not-home
* Forking and suchlike
* Waiting
* Allocation counter and limit
* Miscellaneous | # LANGUAGE Unsafe #
# LANGUAGE CPP , NoImplicitPrelude #
# OPTIONS_GHC -Wno - missing - signatures #
# OPTIONS_HADDOCK not - home #
Copyright : ( c ) The University of Glasgow , 1994 - 2002
Portability : non - portable ( GHC extensions )
module GHC.Conc
( ThreadId(..)
, forkIO
, forkIOWithUnmask
, forkOn
, forkOnWithUnmask
, numCapabilities
, getNumCapabilities
, setNumCapabilities
, getNumProcessors
, numSparks
, childHandler
, myThreadId
, killThread
, throwTo
, par
, pseq
, runSparks
, yield
, labelThread
, mkWeakThreadId
, ThreadStatus(..), BlockReason(..)
, threadStatus
, threadCapability
, newStablePtrPrimMVar, PrimMVar
, threadDelay
, registerDelay
, threadWaitRead
, threadWaitWrite
, threadWaitReadSTM
, threadWaitWriteSTM
, closeFdWith
, setAllocationCounter
, getAllocationCounter
, enableAllocationLimit
, disableAllocationLimit
* TVars
, STM(..)
, atomically
, retry
, orElse
, throwSTM
, catchSTM
, alwaysSucceeds
, always
, TVar(..)
, newTVar
, newTVarIO
, readTVar
, readTVarIO
, writeTVar
, unsafeIOToSTM
, withMVar
#if defined(mingw32_HOST_OS)
, asyncRead
, asyncWrite
, asyncDoProc
, asyncReadBA
, asyncWriteBA
#endif
#if !defined(mingw32_HOST_OS)
, Signal, HandlerFun, setHandler, runHandlers
#endif
, ensureIOManagerIsRunning
, ioManagerCapabilitiesChanged
#if defined(mingw32_HOST_OS)
, ConsoleEvent(..)
, win32ConsoleHandler
, toWin32ConsoleEvent
#endif
, setUncaughtExceptionHandler
, getUncaughtExceptionHandler
, reportError, reportStackOverflow, reportHeapOverflow
) where
import GHC.Conc.IO
import GHC.Conc.Sync
#if !defined(mingw32_HOST_OS)
import GHC.Conc.Signal
#endif
|
b976b3e6422238c9c44035f9663413a5c76d8fd627ca0200beb6c288f94d6e30 | kayhide/wakame | Row.hs | module Test.Wakame.Row where
import GHC.Generics
import Test.Tasty
import Test.Tasty.Hspec
import Test.Tasty.QuickCheck
import Test.Utils ()
import Wakame.Generics ()
import Wakame.Row (NP (..), V (..), fromRow, toRow)
-- | Simple two-field record used as the round-trip target of the
-- properties below.
data Point = Point { x :: Double, y :: Double }
  deriving (Eq, Show, Generic)

-- | 'toRow' on a pair of labelled values yields the corresponding
-- n-ary product, preserving field order.
prop_toRow :: (V '("x", Double), V '("y", Double)) -> Property
prop_toRow (x, y) =
  toRow (x, y) === x :* y :* Nil

-- | 'fromRow' on an @x@/@y@ product reconstructs the 'Point'.
prop_fromRow :: (V '("x", Double), V '("y", Double)) -> Property
prop_fromRow (x@(V x'), y@(V y')) =
  fromRow (x :* y :* Nil) === Point x' y'
| null | https://raw.githubusercontent.com/kayhide/wakame/4d16ecf2221655300b5db588c2775cfc0b8d92cd/test/tasty/Test/Wakame/Row.hs | haskell | module Test.Wakame.Row where
import GHC.Generics
import Test.Tasty
import Test.Tasty.Hspec
import Test.Tasty.QuickCheck
import Test.Utils ()
import Wakame.Generics ()
import Wakame.Row (NP (..), V (..), fromRow, toRow)
data Point = Point { x :: Double, y :: Double }
deriving (Eq, Show, Generic)
prop_toRow :: (V '("x", Double), V '("y", Double)) -> Property
prop_toRow (x, y) =
toRow (x, y) === x :* y :* Nil
prop_fromRow :: (V '("x", Double), V '("y", Double)) -> Property
prop_fromRow (x@(V x'), y@(V y')) =
fromRow (x :* y :* Nil) === Point x' y'
| |
b19dc98e2be4c1be36a36dba21bdf0a06e3acecd6b5a056ad4f015e842089e07 | active-group/reacl-c | core_testing.cljc | (ns reacl-c.test-util.core-testing
(:require [reacl-c.base :as base]
[reacl-c.dom-base :as dom]
[reacl-c.core :as c]
[active.clojure.lens :as lens]
[clojure.string :as string]
[clojure.set :as set])
(:refer-clojure :exclude [contains?]))
;; Private alias for clojure.core/contains?, since this namespace excludes
;; it via :refer-clojure and defines its own `contains?` below.
(def ^:private clj-contains? clojure.core/contains?)

;; Only pure tests: No local-/state changes, no implicit mounting (init), no effects, no subscriptions.
;; Generic structural fold over an item tree as it would appear in `state`.
;; Dispatches on the item kind:
;;  - f-leaf:      strings and lifecycle items,
;;  - f-container: fragments / nil and dom elements (receives folded children),
;;  - f-wrapper:   single-child wrappers (focus, local-state, handlers, ...),
;;  - f-dynamic:   items that compute a child from state/args (dynamic, static,
;;                 with-refs, with-async) — refs/async callbacks come from
;;                 `mk-ref`/`mk-async`,
;;  - f-other:     anything unrecognized.
(defn- reduce-item [mk-ref mk-async f-leaf f-container f-wrapper f-dynamic f-other item state]
  (let [rec (fn [state item]
              (reduce-item mk-ref mk-async f-leaf f-container f-wrapper f-dynamic f-other
                           item state))]
    (cond
      (string? item)
      (f-leaf item state)
      (base/lifecycle? item)
      (f-leaf item state)
      (base/dynamic? item)
      (f-dynamic (rec state (apply (base/dynamic-f item) state (base/dynamic-args item))) item state)
      (or (nil? item) (base/fragment? item))
      (f-container (map #(rec state %)
                        (if (nil? item) nil (base/fragment-children item)))
                   item state)
      (dom/element? item)
      (f-container (map #(rec state %)
                        (dom/element-children item))
                   item state)
      (base/static? item)
      (f-dynamic (rec state (apply (base/static-f item) (base/static-args item))) item state)
      (base/with-refs? item)
      (f-dynamic (rec state (apply (base/with-refs-f item) (repeatedly (base/with-refs-n item) mk-ref) (base/with-refs-args item))) item state)
      (base/with-async? item)
      (f-dynamic (rec state (apply (base/with-async-f item) (mk-async) (base/with-async-args item))) item state)
      ;; focus: recurse with the focused part of the state
      (base/focus? item)
      (f-wrapper (rec (lens/yank state (base/focus-lens item))
                      (base/focus-e item))
                 item state)
      ;; local-state: child state is [outer-state initial-local-state]
      (base/local-state? item)
      (f-wrapper (rec [state (base/eval-local-state-init (base/local-state-initial item))]
                      (base/local-state-e item))
                 item state)
      (base/handle-action? item)
      (f-wrapper (rec state (base/handle-action-e item))
                 item state)
      (base/refer? item)
      (f-wrapper (rec state (base/refer-e item))
                 item state)
      (base/handle-state-change? item)
      (f-wrapper (rec state (base/handle-state-change-e item))
                 item state)
      (base/handle-message? item)
      (f-wrapper (rec state (base/handle-message-e item))
                 item state)
      (base/named? item)
      (f-wrapper (rec state (base/named-e item))
                 item state)
      (base/handle-error? item)
      (f-wrapper (rec state (base/handle-error-e item))
                 item state)
      (base/keyed? item)
      (f-wrapper (rec state (base/keyed-e item))
                 item state)
      ;; TODO:?
      #_(interop/lift-react? item)
      #_item
      :else
      (f-other item state))))
(defn- unknown-item-error
  "Builds the ex-info thrown whenever an unsupported item kind is seen."
  [item]
  (let [msg (str "Unknown item: " (pr-str item))]
    (ex-info msg {:item item})))
;; A base/Ref whose dereference always throws: pure tests never have real
;; component refs to resolve.
(defn- make-dummy-ref []
  (reify base/Ref
    (-deref-ref [this] (throw (ex-info "Cannot derefence in a test environment." {})))))

;; Stand-in for the async-injection callback; any use fails loudly, since
;; pure tests cannot schedule asynchronous returns.
(defn- dummy-async [r]
  (throw (ex-info "Cannot do an async injection in a test environment." {})))
(defn render
  "Returns how an item looks like in the given state. Returns a list
  of only dom elements and strings." ;; TODO: + react items?
  [item & [state]]
  (reduce-item make-dummy-ref
               (constantly dummy-async)
               ;; leaves: strings render as themselves, lifecycle items vanish
               (fn leaf [item state]
                 (cond
                   (string? item)
                   (list item)
                   (base/lifecycle? item) nil
                   :else (throw (unknown-item-error item))))
               ;; containers: splice fragment children; dom elements keep
               ;; their own node with rendered children
               (fn container [c-res item state]
                 (cond
                   (nil? item) nil
                   (base/fragment? item) (apply concat c-res)
                   (dom/element? item) (list (lens/shove item dom/element-children (apply concat c-res)))
                   :else (throw (unknown-item-error item))))
               ;; wrappers are invisible in the rendered output
               (fn wrapper [res item state]
                 (cond
                   (or (base/focus? item)
                       (base/local-state? item)
                       (base/handle-action? item)
                       (base/refer? item)
                       (base/handle-state-change? item)
                       (base/handle-message? item)
                       (base/named? item)
                       (base/handle-error? item)
                       (base/keyed? item))
                   res
                   :else (throw (unknown-item-error item))))
               ;; dynamic items render as their computed child
               (fn dynamic [res item state]
                 (cond
                   (or (base/dynamic? item)
                       (base/with-refs? item)
                       (base/with-async? item)
                       (base/static? item))
                   res
                   :else
                   (throw (unknown-item-error item))))
               (fn other [item state]
                 ;; -> or an IRenderable extension point?
                 (throw (unknown-item-error item)))
               item
               state))
(defn- split-css-classes
  "Splits a CSS class attribute string on single spaces, trimming each
  part. nil is treated like the empty string (yielding one empty token)."
  [s]
  (->> (string/split (or s "") #" ")
       (map string/trim)))
(defn- contains-in-order?
  "Returns whether all elements of `l2` occur in `l1`, in the same
  relative order (i.e. `l2` is a subsequence of `l1`)."
  [l1 l2] ;; l1 contains l2 ?
  (or (empty? l2)
      (and (not (empty? l1))
           ;; Bug fix: on a head match, continue matching the *rest* of l2
           ;; against the rest of l1. Previously a match on just the first
           ;; element of l2 made the whole check succeed, so e.g.
           ;; ["a"] wrongly 'contained' ["a" "z"].
           (or (and (= (first l1) (first l2))
                    (contains-in-order? (rest l1) (rest l2)))
               (contains-in-order? (rest l1) l2)))))
;; Attribute containment for dom elements: every attribute in `a2` must be
;; present in `a1`, either with an equal value or — for :style and :class —
;; as a sub-match (subset of style entries, ordered subset of class names).
(defn- dom-attrs-contains? [a1 a2] ;; if a1 contains a2; resp. a2 < a1
  (reduce-kv (fn [res k v]
               (and res
                    (or (= (get a1 k) v)
                        ;; or sub matching for style and class attributes
                        (case k
                          :style
                          (let [st1 (:style a1)]
                            (reduce-kv (fn [res k v]
                                         (and res (= (get st1 k) v)))
                                       true
                                       v))
                          :class
                          (contains-in-order? (split-css-classes (:class a1))
                                              (split-css-classes v))
                          ;; any other attribute must match exactly
                          false))))
             true
             a2))
;; True for nil and for fragments without children — the two forms of the
;; 'empty item'.
(defn- item-empty? [item]
  (or (nil? item) (and (base/fragment? item) (empty? (base/fragment-children item)))))
(declare like?)

;; Whether every element of `sub-lst` matches (by equality or `like?`)
;; some element of `lst`, scanning left to right.
;; NOTE(review): a matched element is NOT consumed (`remaining` is kept at
;; the match position), so one element of `lst` can satisfy several equal
;; elements of `sub-lst` — presumably intentional for 'like' semantics,
;; but worth confirming.
(defn- list-like? [lst sub-lst]
  (let [[missing remaining]
        (reduce (fn [[n remaining] c]
                  (loop [remaining remaining]
                    (if (empty? remaining)
                      ;; no match for `c`: later elements cannot match either
                      [n nil]
                      (if (or (= (first remaining) c)
                              (like? (first remaining) c))
                        [(dec n) remaining]
                        (recur (rest remaining))))))
                [(count sub-lst) lst]
                sub-lst)]
    (zero? missing)))
;; Element-level 'like' check: same tag type, attributes of `sub-item`
;; contained in those of `item`, all of `sub-item`'s event handlers bound on
;; `item`, and children list-like. Both arguments must be dom elements.
(defn- dom-like? [item sub-item]
  (assert (dom/element? item) item)
  (assert (dom/element? sub-item) sub-item)
  (and (= (dom/element-type item) (dom/element-type sub-item))
       (dom-attrs-contains? (dom/element-attrs item) (dom/element-attrs sub-item))
       ;; item has all events defined that sub-item has:
       (set/subset? (set (keys (dom/element-events sub-item)))
                    (set (keys (dom/element-events item))))
       ;; TODO: maybe flatten out fragments in children? maybe concatenate strings?
       (list-like? (dom/element-children item)
                   (dom/element-children sub-item))))
(defn like?
  "Returns if `sub-item` is like `item`, meaning:
  - if both are a strings, then `sub-item` is a substring in `item`,
  - if both are dom elements, then `sub-item` is the same type of dom element, but may contain less attributes or children,
  - if both have children, then every child of `sub-item` is `like?` a child in `item`, and in the same order.
  "
  [item sub-item]
  ;; Note: (string/includes? "foo" "") is true, so we say 'nothing is like anything' too.
  (cond
    (string? item)
    (and (string? sub-item)
         (string/includes? item sub-item))
    ;; empty items (nil / empty fragment) are like nil or any empty-ish fragment
    (item-empty? item)
    (or (nil? sub-item)
        (and (base/fragment? sub-item)
             (list-like? nil
                         (base/fragment-children sub-item))))
    (base/fragment? item) ;; always a non-empty fragment here
    (and (base/fragment? sub-item)
         (list-like? (base/fragment-children item)
                     (base/fragment-children sub-item)))
    (dom/element? item)
    (and (dom/element? sub-item)
         (dom-like? item sub-item))
    ;; leaves and dynamic items: only exact equality counts
    (or (base/lifecycle? item)
        (base/dynamic? item)
        (base/static? item)
        (base/with-refs? item)
        (base/with-async? item))
    (= item sub-item)
    (or (base/focus? item)
        (base/local-state? item)
        (base/handle-action? item)
        (base/refer? item)
        (base/handle-state-change? item)
        (base/handle-message? item)
        (base/named? item)
        (base/handle-error? item)
        (base/keyed? item))
    ;; we could say the :e's must only be 'like?', but rest equal; but
    ;; then again, that does not work for dynamics anyway.
    (= item sub-item)
    ;; TODO:?
    #_(interop/lift-react? item)
    :else
    (throw (unknown-item-error item))))
(defn- contains-by? [f item sub-item & [options]]
(assert (every? #{:state :sub-item-state} (keys options)) (keys options))
(let [state (get options :state nil)
g (if (clj-contains? options :sub-item-state)
(let [sub-item-state (:sub-item-state options)]
(fn [item sub-item state]
(and (= state sub-item-state)
(f item sub-item))))
(fn [item sub-item state]
(f item sub-item)))]
(reduce-item make-dummy-ref
(constantly dummy-async)
(fn leaf [item state]
(g item sub-item state))
(fn container [c-res item state]
(or (g item sub-item state)
;; one or more children of item 'contain' the sub-item
(reduce #(or %1 %2) false c-res)))
(fn wrapper [res item state]
(or (g item sub-item state)
res))
(fn dynamic [res item state]
(or (g item sub-item state)
res))
(fn other [item state]
(throw (unknown-item-error item)))
item state)))
(defn contains-like?
"Returns if `item`, or any item 'below' it, is [[like?]] `sub-item`
in the given state of `item`. Options can be:
- `:state`: specifies the state of `item` used to resolve dynamic
items in it. It defaults to `nil`,
- `:sub-item-state`: if specified the sub item only matches if it
occurs in `item` with that state initially."
[item sub-item & options]
(contains-by? like? item sub-item (apply hash-map options)))
(defn contains?
"Returns if `item`, or any item 'below' it, is equal to `sub-item`
in the given state of `item`. Options can be:
- `:state`: specifies the state of `item` used to resolve dynamic
items in it. It defaults to `nil`,
- `:sub-item-state`: if specified the sub item only matches if it
occurs in `item` with that state initially."
[item sub-item & options]
(contains-by? = item sub-item (apply hash-map options)))
TODO : could / should some of these fns be shared with the ' real ' implementations ?
(defn- lift-returned [r]
TODO : move all impls of it to one place ? !
(if (base/returned? r) r (c/return :state r)))
(defn- merge-returned [r1 r2]
(base/merge-returned r1 (lift-returned r2)))
(defn- de-focus [res item state]
merge changed state in subitem back into parent state :
(let [st (base/returned-state res)]
(if (not= base/keep-state st)
(lens/shove res base/returned-state
(lens/shove state (base/focus-lens item) st))
res)))
(defn- de-local-state [res item]
;; remove local part of changed state:
(let [st (base/returned-state res)]
(if (not= base/keep-state st)
(lens/shove res base/returned-state (first st))
res)))
(defn- do-handle-actions [res item state]
(let [f (base/handle-action-f item)
{handle true pass false} (group-by (base/handle-action-pred item)
(base/returned-actions res))]
(reduce (fn [res a]
(let [state (if (not= base/keep-state (base/returned-state res))
(base/returned-state res)
state)]
(merge-returned res
(f state a))))
(lens/shove res base/returned-actions (vec pass))
handle)))
(defn- do-handle-state-change [res item state]
(let [f (base/handle-state-change-f item)]
(let [st (base/returned-state res)]
(if (not= base/keep-state st)
(merge-returned res (f state st))
res))))
(defn- run-lifecycle [item state f]
;; resolve, find all livecycle (with their state); call that.
(reduce-item make-dummy-ref
(constantly dummy-async)
(fn leaf [item state]
(cond
(base/lifecycle? item)
(lift-returned (f item state))
:else
(c/return)))
(fn container [c-res item state]
(reduce merge-returned (c/return) c-res))
(fn wrapper [res item state]
(cond
(base/focus? item)
(de-focus res item state)
(base/local-state? item)
(de-local-state res item)
(base/handle-action? item)
(do-handle-actions res item state)
(base/handle-state-change? item)
(do-handle-state-change res item state)
:else
res))
(fn dynamic [res item state]
res)
(fn other [item state]
(throw (unknown-item-error item)))
item
state))
(defn init
"Returns what happens when the given item is initialized in the
given state, which happens when it is first used in an item tree, or
if it is updated to a new state. Returns a [[core/return]] value."
[item state]
(run-lifecycle item state
(fn [it state]
(if-let [h (base/lifecycle-init it)]
(h state)
(c/return)))))
(defn finalize
"Returns what happens when the given item is finalized in the given
state, which happens when it is now longer used in an item
tree. Returns a [[core/return]] value."
[item state]
(run-lifecycle item state
(fn [it state]
(if-let [h (base/lifecycle-finish it)]
(h state)
(c/return)))))
(defn- r-comp [& fs]
(apply comp (reverse fs)))
(defn- find-handle-message [item state]
(reduce-item make-dummy-ref
(constantly dummy-async)
(fn leaf [item state]
nil)
(fn container [c-res item state]
;; containers don't pass messages; so we don't care.
nil)
(fn wrapper [res item state]
(cond
(base/handle-message? item)
{:f (base/handle-message-f item)
:state state
:post identity}
(base/focus? item)
(when res
(update res :post r-comp #(de-focus % item state)))
(base/local-state? item)
(when res
(update res :post r-comp #(de-local-state % item)))
(base/handle-action? item)
(when res
(update res :post r-comp #(do-handle-actions % item state)))
(base/handle-state-change? item)
(when res
(update res :post r-comp #(do-handle-state-change % item state)))
:else
res))
(fn dynamic [res item state]
res)
(fn other [item state]
(throw (unknown-item-error item)))
item
state))
(defn handle-message
"Returns what happens when the given message would be sent to that
item in the given state. Returns a [[core/return]] value or nil, if
the message would not be handled at all."
[item state msg]
;; Note: will not recur, e.g. when the message handler returns a new
;; :message... (forward-messages for example); to make that work, we
;; need to remove explicit ref object from the api, and only allow
;; refer-items as the message targets.
;; find applicable handle-message:
(if-let [{f :f state :state post :post} (find-handle-message item state)]
(post (f state msg))
;; message won't be processed:
nil))
| null | https://raw.githubusercontent.com/active-group/reacl-c/46e7cf3512de0c3db4edae0b770f2f26dd64f9b5/src/reacl_c/test_util/core_testing.cljc | clojure | Only pure tests: No local-/state changes, no implicit mounting (init), no effects, no subscriptions.
TODO:?
TODO: + react items?
-> or an IRenderable extension point?
l1 contains l2 ?
if a1 contains a2; resp. a2 < a1
or sub matching for style and class attributes
item has all events defined that sub-item has:
TODO: maybe flatten out fragments in children? maybe concatenate strings?
Note: (string/includes? "foo" "") is true, so we say 'nothing is like anything' too.
always a non-empty fragment here
we could say the :e's must only be 'like?', but rest equal; but
then again, that does not work for dynamics anyway.
TODO:?
one or more children of item 'contain' the sub-item
remove local part of changed state:
resolve, find all livecycle (with their state); call that.
containers don't pass messages; so we don't care.
Note: will not recur, e.g. when the message handler returns a new
:message... (forward-messages for example); to make that work, we
need to remove explicit ref object from the api, and only allow
refer-items as the message targets.
find applicable handle-message:
message won't be processed: | (ns reacl-c.test-util.core-testing
(:require [reacl-c.base :as base]
[reacl-c.dom-base :as dom]
[reacl-c.core :as c]
[active.clojure.lens :as lens]
[clojure.string :as string]
[clojure.set :as set])
(:refer-clojure :exclude [contains?]))
;; Alias for clojure.core/contains?, needed because this namespace
;; excludes and redefines `contains?` (see :refer-clojure :exclude above).
(def ^:private clj-contains? clojure.core/contains?)
;; Generic fold over an item tree. Recurses into every kind of item and
;; dispatches to one of five callbacks:
;;   f-leaf      strings and lifecycle items (no children),
;;   f-container nil, fragments and dom elements (gets the list of child
;;               results),
;;   f-wrapper   single-child wrapper items (gets the wrapped child's result),
;;   f-dynamic   items resolved by calling a function (dynamic, static,
;;               with-refs, with-async; gets the resolved child's result),
;;   f-other     anything unknown.
;; `mk-ref` creates a placeholder ref for each ref requested by a with-refs
;; item; `mk-async` is called with no arguments to obtain the injection
;; callback for with-async items. `state` is threaded downward, narrowed by
;; focus lenses and paired with the initial local state for local-state items.
(defn- reduce-item [mk-ref mk-async f-leaf f-container f-wrapper f-dynamic f-other item state]
  (let [rec (fn [state item]
              (reduce-item mk-ref mk-async f-leaf f-container f-wrapper f-dynamic f-other
                           item state))]
    (cond
      (string? item)
      (f-leaf item state)

      (base/lifecycle? item)
      (f-leaf item state)

      (base/dynamic? item)
      (f-dynamic (rec state (apply (base/dynamic-f item) state (base/dynamic-args item))) item state)

      (or (nil? item) (base/fragment? item))
      (f-container (map #(rec state %)
                        (if (nil? item) nil (base/fragment-children item)))
                   item state)

      (dom/element? item)
      (f-container (map #(rec state %)
                        (dom/element-children item))
                   item state)

      ;; static items resolve without seeing the state
      (base/static? item)
      (f-dynamic (rec state (apply (base/static-f item) (base/static-args item))) item state)

      (base/with-refs? item)
      (f-dynamic (rec state (apply (base/with-refs-f item) (repeatedly (base/with-refs-n item) mk-ref) (base/with-refs-args item))) item state)

      (base/with-async? item)
      (f-dynamic (rec state (apply (base/with-async-f item) (mk-async) (base/with-async-args item))) item state)

      ;; the child of a focus item sees only the yanked part of the state
      (base/focus? item)
      (f-wrapper (rec (lens/yank state (base/focus-lens item))
                      (base/focus-e item))
                 item state)

      ;; the child of a local-state item sees [outer-state local-state]
      (base/local-state? item)
      (f-wrapper (rec [state (base/eval-local-state-init (base/local-state-initial item))]
                      (base/local-state-e item))
                 item state)

      (base/handle-action? item)
      (f-wrapper (rec state (base/handle-action-e item))
                 item state)

      (base/refer? item)
      (f-wrapper (rec state (base/refer-e item))
                 item state)

      (base/handle-state-change? item)
      (f-wrapper (rec state (base/handle-state-change-e item))
                 item state)

      (base/handle-message? item)
      (f-wrapper (rec state (base/handle-message-e item))
                 item state)

      (base/named? item)
      (f-wrapper (rec state (base/named-e item))
                 item state)

      (base/handle-error? item)
      (f-wrapper (rec state (base/handle-error-e item))
                 item state)

      (base/keyed? item)
      (f-wrapper (rec state (base/keyed-e item))
                 item state)

      ;; TODO: + react items?
      #_(interop/lift-react? item)
      #_item

      :else
      (f-other item state))))
;; Builds the exception thrown when the walker meets an item kind it
;; does not know; the offending item is attached as ex-data.
(defn- unknown-item-error [item]
  (let [msg (str "Unknown item: " (pr-str item))]
    (ex-info msg {:item item})))
;; A base/Ref whose dereference always throws: pure test evaluation must
;; never actually resolve refs. (The "derefence" typo is part of a runtime
;; string and therefore left untouched.)
(defn- make-dummy-ref []
  (reify base/Ref
    (-deref-ref [this] (throw (ex-info "Cannot derefence in a test environment." {})))))
;; Async injection callback handed to with-async items in tests; calling
;; it is always an error.
(defn- dummy-async [_]
  (throw (ex-info "Cannot do an async injection in a test environment." {})))
(defn render
  "Returns how an item looks like in the given state. Returns a list
  of dom elements and strings."
  ;; NOTE(review): the docstring's closing quote was missing in this copy
  ;; of the file; the second line is reconstructed — confirm against
  ;; upstream.
  [item & [state]]
  (reduce-item make-dummy-ref
               (constantly dummy-async)
               ;; leaves: strings render as themselves; lifecycle items
               ;; render as nothing
               (fn leaf [item state]
                 (cond
                   (string? item)
                   (list item)

                   (base/lifecycle? item) nil

                   :else (throw (unknown-item-error item))))
               ;; containers: fragments splice their children's renderings;
               ;; dom elements keep their rendered children
               (fn container [c-res item state]
                 (cond
                   (nil? item) nil
                   (base/fragment? item) (apply concat c-res)
                   (dom/element? item) (list (lens/shove item dom/element-children (apply concat c-res)))
                   :else (throw (unknown-item-error item))))
               ;; wrapper items are transparent for rendering
               (fn wrapper [res item state]
                 (cond
                   (or (base/focus? item)
                       (base/local-state? item)
                       (base/handle-action? item)
                       (base/refer? item)
                       (base/handle-state-change? item)
                       (base/handle-message? item)
                       (base/named? item)
                       (base/handle-error? item)
                       (base/keyed? item))
                   res
                   :else (throw (unknown-item-error item))))
               ;; dynamic items render as whatever they resolved to
               (fn dynamic [res item state]
                 (cond
                   (or (base/dynamic? item)
                       (base/with-refs? item)
                       (base/with-async? item)
                       (base/static? item))
                   res
                   :else
                   (throw (unknown-item-error item))))
               (fn other [item state]
                 (throw (unknown-item-error item)))
               item
               state))
;; Splits a CSS class attribute string into its individual class names,
;; trimming whitespace; nil behaves like the empty string.
(defn- split-css-classes [s]
  (->> (string/split (or s "") #" ")
       (map string/trim)))
;; Returns whether the elements of `l2` occur among the elements of `l1`,
;; scanning `l1` left to right.
;; NOTE(review): the defn header had been lost in this copy of the file;
;; it is reconstructed from the recursive call below and the call site in
;; `dom-attrs-contains?` — confirm against upstream.
;; NOTE(review): when the heads match, the remainder of `l2` is never
;; compared against the remainder of `l1`, so effectively only the first
;; element of `l2` is searched for; verify this is intended.
(defn- contains-in-order? [l1 l2]
  (or (empty? l2)
      (and (not (empty? l1))
           (or (= (first l1) (first l2))
               (contains-in-order? (rest l1) l2)))))
;; Returns whether attribute map `a1` contains attribute map `a2`
;; (a2 <= a1): every key of `a2` must be bound in `a1` to an equal value,
;; except `:style`, where each style entry of `a2` must match the
;; corresponding entry of `a1`'s style map, and `:class`, where the
;; classes of `a2` must occur among those of `a1` in order.
;; NOTE(review): the defn header had been lost in this copy of the file;
;; it is reconstructed from the call site in `dom-like?` — confirm
;; against upstream.
(defn- dom-attrs-contains? [a1 a2]
  (reduce-kv (fn [res k v]
               (and res
                    (or (= (get a1 k) v)
                        (case k
                          :style
                          (let [st1 (:style a1)]
                            (reduce-kv (fn [res k v]
                                         (and res (= (get st1 k) v)))
                                       true
                                       v))

                          :class
                          (contains-in-order? (split-css-classes (:class a1))
                                              (split-css-classes v))

                          ;; any other key with a differing value fails
                          false))))
             true
             a2))
;; True for 'nothing at all': nil, or a fragment without any children.
(defn- item-empty? [item]
  (cond
    (nil? item) true
    (base/fragment? item) (empty? (base/fragment-children item))
    :else false))
(declare like?)
;; Returns true when every element of `sub-lst` is `=` or `like?` some
;; element of `lst`, in the same relative order (the scan over `lst` only
;; ever moves forward). `missing` counts the sub-elements for which no
;; match was found; zero means full containment.
;; NOTE(review): a matched element is not consumed from `remaining`, so a
;; single element of `lst` can satisfy several consecutive elements of
;; `sub-lst` — presumably intended; confirm.
(defn- list-like? [lst sub-lst]
  (let [[missing remaining]
        (reduce (fn [[n remaining] c]
                  ;; advance through what's left of `lst` looking for `c`
                  (loop [remaining remaining]
                    (if (empty? remaining)
                      [n nil]
                      (if (or (= (first remaining) c)
                              (like? (first remaining) c))
                        [(dec n) remaining]
                        (recur (rest remaining))))))
                [(count sub-lst) lst]
                sub-lst)]
    (zero? missing)))
;; Returns if the dom element `sub-item` is 'like' the dom element `item`:
;; same tag, attributes of `sub-item` contained in those of `item`, every
;; event bound on `sub-item` also bound on `item`, and children pairwise
;; `like?` in order.
(defn- dom-like? [item sub-item]
  (assert (dom/element? item) item)
  (assert (dom/element? sub-item) sub-item)
  (and (= (dom/element-type item) (dom/element-type sub-item))
       (dom-attrs-contains? (dom/element-attrs item) (dom/element-attrs sub-item))
       ;; item has all events defined that sub-item has:
       (set/subset? (set (keys (dom/element-events sub-item)))
                    (set (keys (dom/element-events item))))
       ;; TODO: maybe flatten out fragments in children? maybe concatenate strings?
       (list-like? (dom/element-children item)
                   (dom/element-children sub-item))))
(defn like?
  "Returns if `sub-item` is like `item`, meaning:
  - if both are a strings, then `sub-item` is a substring in `item`,
  - if both are dom elements, then `sub-item` is the same type of dom element, but may contain less attributes or children,
  - if both have children, then every child of `sub-item` is `like?` a child in `item`, and in the same order.
  "
  [item sub-item]
  ;; Note: (string/includes? "foo" "") is true, so we say 'nothing is like anything' too.
  (cond
    (string? item)
    (and (string? sub-item)
         (string/includes? item sub-item))

    (item-empty? item)
    (or (nil? sub-item)
        (and (base/fragment? sub-item)
             (list-like? nil
                         (base/fragment-children sub-item))))

    ;; This cond test line had been dropped in this copy of the file
    ;; (leaving the cond with an odd number of forms); restored from the
    ;; commented copy of the same source.
    (base/fragment? item) ;; always a non-empty fragment here
    (and (base/fragment? sub-item)
         (list-like? (base/fragment-children item)
                     (base/fragment-children sub-item)))

    (dom/element? item)
    (and (dom/element? sub-item)
         (dom-like? item sub-item))

    ;; opaque leaf-ish items only match themselves
    (or (base/lifecycle? item)
        (base/dynamic? item)
        (base/static? item)
        (base/with-refs? item)
        (base/with-async? item))
    (= item sub-item)

    ;; we could say the :e's must only be 'like?', but rest equal; but
    ;; then again, that does not work for dynamics anyway.
    (or (base/focus? item)
        (base/local-state? item)
        (base/handle-action? item)
        (base/refer? item)
        (base/handle-state-change? item)
        (base/handle-message? item)
        (base/named? item)
        (base/handle-error? item)
        (base/keyed? item))
    (= item sub-item)

    ;; TODO:?
    #_(interop/lift-react? item)

    :else
    (throw (unknown-item-error item))))
;; Shared implementation of [[contains?]] and [[contains-like?]]: walks the
;; item tree (resolving dynamic parts in the `:state` option, default nil)
;; and returns whether `(f item sub-item)` holds for any node. When the
;; `:sub-item-state` option is given, a node only matches if the state it
;; locally sees equals that value.
(defn- contains-by? [f item sub-item & [options]]
  (assert (every? #{:state :sub-item-state} (keys options)) (keys options))
  (let [state (get options :state nil)
        g (if (clj-contains? options :sub-item-state)
            (let [sub-item-state (:sub-item-state options)]
              (fn [item sub-item state]
                (and (= state sub-item-state)
                     (f item sub-item))))
            (fn [item sub-item state]
              (f item sub-item)))]
    (reduce-item make-dummy-ref
                 (constantly dummy-async)
                 (fn leaf [item state]
                   (g item sub-item state))
                 (fn container [c-res item state]
                   (or (g item sub-item state)
                       ;; one or more children of item 'contain' the sub-item
                       (reduce #(or %1 %2) false c-res)))
                 (fn wrapper [res item state]
                   (or (g item sub-item state)
                       res))
                 (fn dynamic [res item state]
                   (or (g item sub-item state)
                       res))
                 (fn other [item state]
                   (throw (unknown-item-error item)))
                 item state)))
(defn contains-like?
  "Returns if `item`, or any item 'below' it, is [[like?]] `sub-item`
  in the given state of `item`. Options can be:

  - `:state`: specifies the state of `item` used to resolve dynamic
  items in it. It defaults to `nil`,
  - `:sub-item-state`: if specified the sub item only matches if it
  occurs in `item` with that state initially."
  [item sub-item & options]
  (let [opts (apply hash-map options)]
    (contains-by? like? item sub-item opts)))
(defn contains?
  "Returns if `item`, or any item 'below' it, is equal to `sub-item`
  in the given state of `item`. Options can be:

  - `:state`: specifies the state of `item` used to resolve dynamic
  items in it. It defaults to `nil`,
  - `:sub-item-state`: if specified the sub item only matches if it
  occurs in `item` with that state initially."
  [item sub-item & options]
  (let [opts (apply hash-map options)]
    (contains-by? = item sub-item opts)))
;; TODO: could/should some of these fns be shared with the 'real' implementations?
;; Coerces a handler result into a `return` value: a plain value is
;; treated as the new state.
(defn- lift-returned [r]
  ;; TODO: move all impls of it to one place?!
  ;; (this TODO had lost its `;;` prefix in this copy, which made the
  ;; defn body invalid Clojure; restored as a comment)
  (if (base/returned? r) r (c/return :state r)))
;; Merges `r2` (a `return` value or a plain state) into `r1`.
(defn- merge-returned [r1 r2]
  (->> r2
       (lift-returned)
       (base/merge-returned r1)))
;; Translates the result of a focused child back to the parent: a state
;; change in the child is shoved through the focus lens into the parent
;; state.
(defn- de-focus [res item state]
  ;; merge changed state in subitem back into parent state:
  ;; (this comment had lost its `;;` prefix in this copy, which made the
  ;; defn body invalid Clojure; restored as a comment)
  (let [st (base/returned-state res)]
    (if (not= base/keep-state st)
      (lens/shove res base/returned-state
                  (lens/shove state (base/focus-lens item) st))
      res)))
;; Translates the result of a local-state child back to the parent: the
;; child's state is a [parent-state local-state] tuple, of which only the
;; parent part is kept. `item` is unused but kept for a uniform wrapper
;; signature.
(defn- de-local-state [res item]
  ;; remove local part of changed state:
  (let [st (base/returned-state res)]
    (if (not= base/keep-state st)
      (lens/shove res base/returned-state (first st))
      res)))
;; Applies a handle-action item to a result: actions matching the item's
;; predicate are fed (in order) to its handler and their results merged in;
;; non-matching actions pass through unchanged.
(defn- do-handle-actions [res item state]
  (let [f (base/handle-action-f item)
        {handle true pass false} (group-by (base/handle-action-pred item)
                                           (base/returned-actions res))]
    (reduce (fn [res a]
              ;; each handler call sees the most recent state: a state
              ;; change from a previous call wins over the incoming `state`
              (let [state (if (not= base/keep-state (base/returned-state res))
                            (base/returned-state res)
                            state)]
                (merge-returned res
                                (f state a))))
            ;; start from `res` with only the non-matching actions kept
            (lens/shove res base/returned-actions (vec pass))
            handle)))
;; Applies a handle-state-change item to a result: if the result carries
;; a state change, the item's handler gets (old-state new-state) and its
;; outcome is merged in; otherwise the result is untouched.
(defn- do-handle-state-change [res item state]
  (let [f  (base/handle-state-change-f item)
        st (base/returned-state res)]
    (if (= base/keep-state st)
      res
      (merge-returned res (f state st)))))
;; Resolves the item tree in `state`, calls `f` on every lifecycle item
;; found (with the state that item locally sees) and merges all resulting
;; `return` values. Wrapper items translate the merged result back into
;; their parent's frame (un-focusing state, dropping local state, handling
;; actions and state changes).
(defn- run-lifecycle [item state f]
  ;; resolve, find all livecycle (with their state); call that.
  (reduce-item make-dummy-ref
               (constantly dummy-async)
               (fn leaf [item state]
                 (cond
                   (base/lifecycle? item)
                   (lift-returned (f item state))

                   :else
                   (c/return)))
               (fn container [c-res item state]
                 ;; combine the results of all children
                 (reduce merge-returned (c/return) c-res))
               (fn wrapper [res item state]
                 (cond
                   (base/focus? item)
                   (de-focus res item state)

                   (base/local-state? item)
                   (de-local-state res item)

                   (base/handle-action? item)
                   (do-handle-actions res item state)

                   (base/handle-state-change? item)
                   (do-handle-state-change res item state)

                   :else
                   res))
               (fn dynamic [res item state]
                 res)
               (fn other [item state]
                 (throw (unknown-item-error item)))
               item
               state))
(defn init
  "Returns what happens when the given item is initialized in the
  given state, which happens when it is first used in an item tree, or
  if it is updated to a new state. Returns a [[core/return]] value."
  [item state]
  (run-lifecycle item state
                 (fn [it state]
                   ;; run the lifecycle item's init handler, if present
                   (if-let [h (base/lifecycle-init it)]
                     (h state)
                     (c/return)))))
(defn finalize
  "Returns what happens when the given item is finalized in the given
  state, which happens when it is no longer used in an item
  tree. Returns a [[core/return]] value."
  [item state]
  (run-lifecycle item state
                 (fn [it state]
                   ;; run the lifecycle item's finish handler, if present
                   (if-let [h (base/lifecycle-finish it)]
                     (h state)
                     (c/return)))))
;; Composes `fs` left-to-right: ((r-comp f g) x) applies f first, then g.
;; With no arguments this is `identity`.
(defn- r-comp [& fs]
  (reduce (fn [acc f] (comp f acc)) identity fs))
;; Walks down the item tree looking for the innermost handle-message item.
;; Returns nil, or a map with the handler `:f`, the `:state` it locally
;; sees, and `:post`, a function translating the handler's `return` value
;; back out through all enclosing wrappers (composed innermost-first via
;; `r-comp`).
(defn- find-handle-message [item state]
  (reduce-item make-dummy-ref
               (constantly dummy-async)
               (fn leaf [item state]
                 nil)
               (fn container [c-res item state]
                 ;; containers don't pass messages; so we don't care.
                 nil)
               (fn wrapper [res item state]
                 (cond
                   (base/handle-message? item)
                   {:f (base/handle-message-f item)
                    :state state
                    :post identity}

                   ;; each wrapper appends its back-translation step
                   (base/focus? item)
                   (when res
                     (update res :post r-comp #(de-focus % item state)))

                   (base/local-state? item)
                   (when res
                     (update res :post r-comp #(de-local-state % item)))

                   (base/handle-action? item)
                   (when res
                     (update res :post r-comp #(do-handle-actions % item state)))

                   (base/handle-state-change? item)
                   (when res
                     (update res :post r-comp #(do-handle-state-change % item state)))

                   :else
                   res))
               (fn dynamic [res item state]
                 res)
               (fn other [item state]
                 (throw (unknown-item-error item)))
               item
               state))
(defn handle-message
  "Returns what happens when the given message would be sent to that
  item in the given state. Returns a [[core/return]] value or nil, if
  the message would not be handled at all."
  [item state msg]
  ;; Note: will not recur, e.g. when the message handler returns a new
  ;; :message... (forward-messages for example); to make that work, we
  ;; need to remove explicit ref object from the api, and only allow
  ;; refer-items as the message targets.
  ;; find applicable handle-message:
  (if-let [{f :f state :state post :post} (find-handle-message item state)]
    (post (f state msg))
    ;; message won't be processed:
    nil))
|
66cf856ce0ca370e7d8922a069f319db7d84eb4bc2cb3edebca1f90f44e354bf | jacekschae/learn-reitit-course-files | test_system.clj | (ns cheffy.test-system
(:require [clojure.test :refer :all]
[integrant.repl.state :as state]
[ring.mock.request :as mock]
[muuntaja.core :as m]))
(defn test-endpoint
  "Runs a Ring mock request of `method` against `uri` on the app of the
  running integrant system and returns the response with its body decoded
  from JSON. `opts` may contain `:body`, which is sent as a JSON request
  body."
  ([method uri]
   (test-endpoint method uri nil))
  ([method uri opts]
   (let [app (-> state/system :cheffy/app)
         ;; note: `request` is actually the app's *response* map
         request (app (-> (mock/request method uri)
                          (cond-> (:body opts) (mock/json-body (:body opts)))))]
     (update request :body (partial m/decode "application/json")))))
(comment
(let [request (test-endpoint :get "/v1/recipes")
decoded-request (m/decode-response-body request)]
(assoc request :body decoded-request))
(test-endpoint :post "/v1/recipes" {:img "string"
:name "my name"
:prep-time 30})) | null | https://raw.githubusercontent.com/jacekschae/learn-reitit-course-files/c13a8eb622a371ad719d3d9023f1b4eff9392e4c/increments/28-auth0-test-config/test/cheffy/test_system.clj | clojure | (ns cheffy.test-system
(:require [clojure.test :refer :all]
[integrant.repl.state :as state]
[ring.mock.request :as mock]
[muuntaja.core :as m]))
(defn test-endpoint
  "Runs a Ring mock request of `method` against `uri` on the app of the
  running integrant system and returns the response with its body decoded
  from JSON. `opts` may contain `:body`, which is sent as a JSON request
  body."
  ([method uri]
   (test-endpoint method uri nil))
  ([method uri opts]
   (let [app (-> state/system :cheffy/app)
         ;; note: `request` is actually the app's *response* map
         request (app (-> (mock/request method uri)
                          (cond-> (:body opts) (mock/json-body (:body opts)))))]
     (update request :body (partial m/decode "application/json")))))
(comment
(let [request (test-endpoint :get "/v1/recipes")
decoded-request (m/decode-response-body request)]
(assoc request :body decoded-request))
(test-endpoint :post "/v1/recipes" {:img "string"
:name "my name"
:prep-time 30})) | |
bbb6d417568b328902848c9a0fe3e2abc3693e3d55030bc07b5998aa28fe0bd0 | Haskell-Things/HSlice | Orphans.hs | {- ORMOLU_DISABLE -}
- Copyright 2016 and
- Copyright 2019
-
- This program is free software : you can redistribute it and/or modify
- it under the terms of the GNU Affero General Public License as published by
- the Free Software Foundation , either version 3 of the License , or
- ( at your option ) any later version .
-
- This program is distributed in the hope that it will be useful ,
- but WITHOUT ANY WARRANTY ; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
- GNU Affero General Public License for more details .
- You should have received a copy of the GNU Affero General Public License
- along with this program . If not , see < / > .
- Copyright 2016 Noah Halford and Catherine Moresco
- Copyright 2019 Julia Longtin
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU Affero General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU Affero General Public License for more details.
- You should have received a copy of the GNU Affero General Public License
- along with this program. If not, see </>.
-}
-- This file is a container for orphan instances. these should go in their appropriate upstreams.
-- So that none of the orphans here generate warnings.
{-# OPTIONS_GHC -Wno-orphans #-}
-- So we can use ℝ in instance declarations.
{-# LANGUAGE FlexibleInstances #-}
-- So we can use 'TowardInf in instance declarations.
{-# LANGUAGE DataKinds #-}
-- So we can add generic to ℝ
{-# LANGUAGE StandaloneDeriving #-}
-- So we can add generic to ℝ
{-# LANGUAGE DeriveGeneric #-}
-- So we can add MemoTrie to ℝ
{-# LANGUAGE TypeFamilies #-}
-- So we can add MemoTrie to ℝ
{-# LANGUAGE TypeOperators #-}
module Graphics.Slicer.Orphans () where
import Prelude (Double, Integer, Int, Monoid(mempty), Ord, Semigroup((<>)), (+), (.), ($), decodeFloat, error, encodeFloat, seq, uncurry)
import Control.DeepSeq (NFData (rnf))
import Data.MemoTrie (HasTrie(enumerate, trie, untrie), Reg, (:->:), enumerateGeneric, trieGeneric, untrieGeneric)
import Data.Set (Set, elems, fromList)
import GHC.Generics (Generic)
import Graphics.Slicer.Definitions (Fastℕ(Fastℕ), ℝ)
import Numeric.Rounded.Hardware (Rounded, RoundingMode(TowardInf))
import Slist.Size (Size (Infinity, Size))
import Slist.Type (Slist (Slist))
-- | Fully evaluate a 'Slist': force both the element list and the size.
instance NFData a => NFData (Slist a) where
  rnf (Slist vals n) = rnf vals `seq` rnf n

-- | 'Infinity' carries no payload; a finite 'Size' is forced with 'seq'
-- only (WHNF). NOTE(review): assumes the payload needs no deep forcing —
-- confirm against the 'Size' definition in slist.
instance NFData Size where
  rnf Infinity = ()
  rnf (Size n) = seq n ()
| FIXME : move this to the proper place in ImplicitCAD .
instance NFData Fastℕ where
rnf a = seq a ()
instance Semigroup ℝ where
(<>) a b = a + b
instance Monoid ℝ where
mempty = 0
deriving instance Generic ℝ
deriving instance Generic Fastℕ
deriving instance Generic Size
deriving instance Generic (Slist a)
-- | Take apart a double, returning things hasTrie can already handle.
mangle :: Double -> (Integer, Int)
mangle = decodeFloat
| Accept our mangled double 's contents back from HasTrie , encoding it back into a Double .
unMangle :: (Integer, Int) -> Double
unMangle = uncurry encodeFloat
-- | Memoization trie for 'Double', keyed on the float's exact
-- (mantissa, exponent) decomposition ('decodeFloat'), so no precision is
-- lost in the key.
instance HasTrie Double where
  data Double :->: a = DoubleTrie ((Integer, Int) :->: a)
  -- build the trie over the mangled representation
  trie f = DoubleTrie $ trie $ f . unMangle
  -- look up by decomposing the double the same way
  untrie (DoubleTrie t) = untrie t . mangle
  -- doubles are not sensibly enumerable; documented dead end
  enumerate = error "cannot enumerate doubles."
-- | Memoization trie for 'Set', keyed on the ascending element list
-- ('elems'); the set is rebuilt with 'fromList' on the way in.
instance (HasTrie a, Ord a) => HasTrie (Set a) where
  data (Set a) :->: b = SetTrie ([a] :->: b)
  trie s = SetTrie $ trie $ s . fromList
  untrie (SetTrie t) = untrie t . elems
  enumerate = error "cannot enumerate sets."
instance HasTrie Size where
newtype (Size :->: b) = SizeTrie { unSizeTrie :: Reg Size :->: b }
trie = trieGeneric SizeTrie
untrie = untrieGeneric unSizeTrie
enumerate = enumerateGeneric unSizeTrie
instance HasTrie Fastℕ where
newtype (Fastℕ :->: b) = FastℕTrie { unFastℕTrie :: Reg Fastℕ :->: b }
trie = trieGeneric FastℕTrie
untrie = untrieGeneric unFastℕTrie
enumerate = enumerateGeneric unFastℕTrie
instance (HasTrie a) => HasTrie (Slist a) where
newtype ((Slist a) :->: b) = SlistTrie { unSlistTrie :: Reg (Slist a) :->: b }
trie = trieGeneric SlistTrie
untrie = untrieGeneric unSlistTrie
enumerate = enumerateGeneric unSlistTrie
instance HasTrie (Rounded 'TowardInf ℝ) where
newtype ((Rounded 'TowardInf ℝ) :->: b) = RTℝTrie { unRTℝTrie :: Reg (Rounded 'TowardInf ℝ) :->: b }
trie = trieGeneric RTℝTrie
untrie = untrieGeneric unRTℝTrie
enumerate = enumerateGeneric unRTℝTrie
| null | https://raw.githubusercontent.com/Haskell-Things/HSlice/73c6bf1f34c946cd5903306b106356fcb73d916c/Graphics/Slicer/Orphans.hs | haskell | ORMOLU_DISABLE
This file is a container for orphan instances. these should go in their appropriate upstreams.
So that none of the orphans here generate warnings.
So we can use ℝ in instance declarations.
So we can add generic to ℝ
So we can add generic to ℝ
| Take apart a double, returning things hasTrie can already handle. |
- Copyright 2016 and
- Copyright 2019
-
- This program is free software : you can redistribute it and/or modify
- it under the terms of the GNU Affero General Public License as published by
- the Free Software Foundation , either version 3 of the License , or
- ( at your option ) any later version .
-
- This program is distributed in the hope that it will be useful ,
- but WITHOUT ANY WARRANTY ; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
- GNU Affero General Public License for more details .
- You should have received a copy of the GNU Affero General Public License
- along with this program . If not , see < / > .
- Copyright 2016 Noah Halford and Catherine Moresco
- Copyright 2019 Julia Longtin
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU Affero General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU Affero General Public License for more details.
- You should have received a copy of the GNU Affero General Public License
- along with this program. If not, see </>.
-}
# OPTIONS_GHC -Wno - orphans #
# LANGUAGE FlexibleInstances #
So we can use '
# LANGUAGE DataKinds #
# LANGUAGE StandaloneDeriving #
# LANGUAGE DeriveGeneric #
So we can add MemoTrie to ℝ
# LANGUAGE TypeFamilies #
So we can add MemoTrie to ℝ
# LANGUAGE TypeOperators #
module Graphics.Slicer.Orphans () where
import Prelude (Double, Integer, Int, Monoid(mempty), Ord, Semigroup((<>)), (+), (.), ($), decodeFloat, error, encodeFloat, seq, uncurry)
import Control.DeepSeq (NFData (rnf))
import Data.MemoTrie (HasTrie(enumerate, trie, untrie), Reg, (:->:), enumerateGeneric, trieGeneric, untrieGeneric)
import Data.Set (Set, elems, fromList)
import GHC.Generics (Generic)
import Graphics.Slicer.Definitions (Fastℕ(Fastℕ), ℝ)
import Numeric.Rounded.Hardware (Rounded, RoundingMode(TowardInf))
import Slist.Size (Size (Infinity, Size))
import Slist.Type (Slist (Slist))
instance NFData a => NFData (Slist a) where
rnf (Slist vals n) = rnf vals `seq` rnf n
instance NFData Size where
rnf Infinity = ()
rnf (Size n) = seq n ()
| FIXME : move this to the proper place in ImplicitCAD .
instance NFData Fastℕ where
rnf a = seq a ()
instance Semigroup ℝ where
(<>) a b = a + b
instance Monoid ℝ where
mempty = 0
deriving instance Generic ℝ
deriving instance Generic Fastℕ
deriving instance Generic Size
deriving instance Generic (Slist a)
mangle :: Double -> (Integer, Int)
mangle = decodeFloat
| Accept our mangled double 's contents back from HasTrie , encoding it back into a Double .
unMangle :: (Integer, Int) -> Double
unMangle = uncurry encodeFloat
instance HasTrie Double where
data Double :->: a = DoubleTrie ((Integer, Int) :->: a)
trie f = DoubleTrie $ trie $ f . unMangle
untrie (DoubleTrie t) = untrie t . mangle
enumerate = error "cannot enumerate doubles."
instance (HasTrie a, Ord a) => HasTrie (Set a) where
data (Set a) :->: b = SetTrie ([a] :->: b)
trie s = SetTrie $ trie $ s . fromList
untrie (SetTrie t) = untrie t . elems
enumerate = error "cannot enumerate sets."
instance HasTrie Size where
newtype (Size :->: b) = SizeTrie { unSizeTrie :: Reg Size :->: b }
trie = trieGeneric SizeTrie
untrie = untrieGeneric unSizeTrie
enumerate = enumerateGeneric unSizeTrie
instance HasTrie Fastℕ where
newtype (Fastℕ :->: b) = FastℕTrie { unFastℕTrie :: Reg Fastℕ :->: b }
trie = trieGeneric FastℕTrie
untrie = untrieGeneric unFastℕTrie
enumerate = enumerateGeneric unFastℕTrie
instance (HasTrie a) => HasTrie (Slist a) where
newtype ((Slist a) :->: b) = SlistTrie { unSlistTrie :: Reg (Slist a) :->: b }
trie = trieGeneric SlistTrie
untrie = untrieGeneric unSlistTrie
enumerate = enumerateGeneric unSlistTrie
instance HasTrie (Rounded 'TowardInf ℝ) where
newtype ((Rounded 'TowardInf ℝ) :->: b) = RTℝTrie { unRTℝTrie :: Reg (Rounded 'TowardInf ℝ) :->: b }
trie = trieGeneric RTℝTrie
untrie = untrieGeneric unRTℝTrie
enumerate = enumerateGeneric unRTℝTrie
|
705ad4ca1ae4e1ef81ff13a7e79840af01411e8cb57e080d010b88b7a284e89a | 5HT/ant | Galley.ml |
open XNum;
open Runtime;
open Dim;
open Box;
type line_params =
{
baseline_skip : dim; (* amount of glue between baselines *)
line_skip_limit : num; (* minimal amount of glue between lines *)
line_skip : dim; (* the amount if the lines are closer together *)
leading : box -> box -> line_params -> dim;
(* penalty between two lines — NOTE(review): this text had lost its
comment delimiters; a trailing field declaration may also have been
lost here, confirm against upstream *)
};
type space_params =
{
space_factor : num;
space_skip : option dim;
xspace_skip : option dim;
victorian_spacing : bool
};
type graphics_params =
{
gp_colour : Graphic.colour;
gp_bg_colour : Graphic.colour;
gp_alpha : num
};
type galley =
{
lines : list box; (* list of all lines and glue in reverse order *)
glue : list box; (* glue at the end of the galley *)
measure : num; (* the width of the galley *)
graphics_params : graphics_params;
current_par_params : ParLayout.par_params;
current_line_params : line_params;
current_line_break_params : ParLayout.line_break_params;
current_hyphen_params : JustHyph.hyphen_params;
current_space_params : space_params;
current_math_params : MathLayout.math_params;
par_params : ParLayout.par_params;
line_params : line_params;
line_break_params : ParLayout.line_break_params;
hyphen_params : JustHyph.hyphen_params;
space_params : space_params;
math_params : MathLayout.math_params
};
(*
  |skyline_dist <line 1> <line 2>| determines the distance between the baselines of <line 1>
  and <line 2> if they were set without glue between them.
*)
value skyline_dist line1 line2 = do
{
  (* <off> is the horizontal offset of <b2> w.r.t. <b1>.
     NOTE(review): the inherited comment said "vertical offset", but <off>
     is only ever shifted by the x component of |Graphic.PutBox| below —
     confirm. *)
  let rec dist off b1 b2 = match (b1.b_contents, b2.b_contents) with
  [ (CompBox c1, CompBox _) -> dist_comp_comp off c1 b2
  | (CompBox c1, _)         -> dist_comp_simple off c1 b2
  | (_, CompBox c2)         -> dist_simple_comp off b1 c2
  | (_, _)                  -> dist_simple_simple off b1 b2
  ]
  (* two simple boxes: add depth and height when they overlap, otherwise
     take the maximum.
     NOTE(review): the non-intersection test compares <off> against
     <b2>'s width; one would expect <b1>'s width here — check before
     relying on it. *)
  and dist_simple_simple off b1 b2 = do
  {
    if off >=/ b2.b_width.d_base then
      dim_max b1.b_depth b2.b_height (* boxes do not intersect *)
    else
      dim_add b1.b_depth b2.b_height
  }
  (* simple <b1> against the contents <c2> of a composite box: maximum
     over all positioned sub-boxes, each shifted by its y offset *)
  and dist_simple_comp off b1 c2 = match c2 with
  [ [] -> b1.b_depth
  | [Graphic.PutBox x y b :: cs] -> do
    {
      let d = dim_add (dist (off +/ x.d_base) b1 b) y;
      dim_max d (dist_simple_comp off b1 cs)
    }
  | [_ :: cs] -> dist_simple_comp off b1 cs
  ]
  (* contents <c1> of a composite box against simple <b2> *)
  and dist_comp_simple off c1 b2 = match c1 with
  [ [] -> b2.b_height
  | [Graphic.PutBox x y b :: cs] -> do
    {
      let d = dim_sub (dist (off -/ x.d_base) b b2) y;
      dim_max d (dist_comp_simple off cs b2)
    }
  | [_ :: cs] -> dist_comp_simple off cs b2
  ]
  (* contents <c1> of a composite box against a composite box <b2> *)
  and dist_comp_comp off c1 b2 = match c1 with
  [ [] -> dim_zero
  | [Graphic.PutBox x y b :: cs] -> do
    {
      let d = dim_sub (dist (off -/ x.d_base) b b2) y;
      dim_max d (dist_comp_comp off cs b2)
    }
  | [_ :: cs] -> dist_comp_comp off cs b2
  ];

  dist num_zero line1 line2
};
|leading_<version > < line 1 > < line 2 > < line - params>| determines the amount of glue that has to be inserted
between two lines .
|fixed| the distance between baselines is < baseline - skip > ,
|register| the distance is a mutliple of < baseline - skip > ,
|TeX| uses the TeX - algorithm ,
|skyline| uses the TeX - algorithm with the skyline distance .
|leading_<version> <line 1> <line 2> <line-params>| determines the amount of glue that has to be inserted
between two lines.
|fixed| the distance between baselines is <baseline-skip>,
|register| the distance is a mutliple of <baseline-skip>,
|TeX| uses the TeX-algorithm,
|skyline| uses the TeX-algorithm with the skyline distance.
*)
value leading_fixed line1 line2 line_params = do
{
let dist = dim_add line1.b_depth line2.b_height;
dim_sub line_params.baseline_skip dist;
};
value leading_register line1 line2 line_params = do
{
let dist = dim_add line1.b_depth line2.b_height;
let fac = ceiling_num ((dist.d_base +/ line_params.line_skip_limit)
// line_params.baseline_skip.d_base);
dim_sub (fixed_dim (fac */ line_params.baseline_skip.d_base)) dist
};
value leading_TeX line1 line2 line_params = do
{
let dist = dim_add line1.b_depth line2.b_height;
if line_params.baseline_skip.d_base >=/
dist.d_base +/ line_params.line_skip_limit then
dim_sub line_params.baseline_skip dist
else
line_params.line_skip
};
value leading_skyline line1 line2 line_params = do
{
let simple_dist = dim_add line1.b_depth line2.b_height;
if line_params.baseline_skip.d_base >=/
simple_dist.d_base +/ line_params.line_skip_limit then
dim_sub line_params.baseline_skip simple_dist
else do
{
let dist = skyline_dist line1 line2;
if line_params.baseline_skip.d_base >=/ dist.d_base +/ line_params.line_skip_limit then
dim_sub line_params.baseline_skip simple_dist
else
dim_sub line_params.line_skip (dim_sub simple_dist dist)
}
};
(* galleys *)
value new_galley measure line_params par_params line_break_params hyphen_params space_params math_params =
{
lines = [];
glue = [];
measure = measure;
par_params = { (par_params) with ParLayout.measure = measure };
line_params = line_params;
line_break_params = line_break_params;
hyphen_params = hyphen_params;
space_params = space_params;
math_params = math_params;
current_par_params = { (par_params) with ParLayout.measure = measure };
current_line_params = line_params;
current_line_break_params = line_break_params;
current_hyphen_params = hyphen_params;
current_space_params = space_params;
current_math_params = math_params;
graphics_params =
{
gp_colour = Graphic.Grey num_zero;
gp_bg_colour = Graphic.Grey num_one;
gp_alpha = num_zero
}
};
value lines galley = List.rev (galley.glue @ galley.lines);
value get_line galley i = List.nth (galley.glue @ galley.lines) i;
value keep_lines galley lines =
{
(galley)
with
lines = List.rev lines
};
value last_line galley = match galley.lines with
[ [] -> empty_box
| [b::_] -> b
];
value modify_glue galley f =
{
(galley)
with
glue = f galley.glue
};
value measure galley = galley.measure;
value graphics_params galley = galley.graphics_params;
value par_params galley = galley.par_params;
value line_params galley = galley.line_params;
value line_break_params galley = galley.line_break_params;
value hyphen_params galley = galley.hyphen_params;
value space_params galley = galley.space_params;
value math_params galley = galley.math_params;
value current_par_params galley = galley.current_par_params;
value current_line_params galley = galley.current_line_params;
value current_line_break_params galley = galley.current_line_break_params;
value current_hyphen_params galley = galley.current_hyphen_params;
value current_space_params galley = galley.current_space_params;
value current_math_params galley = galley.current_math_params;
value set_line_params galley p = { (galley) with line_params = p };
value set_line_break_params galley p = { (galley) with line_break_params = p };
value set_hyphen_params galley p = { (galley) with hyphen_params = p };
value set_space_params galley p = { (galley) with space_params = p };
value set_math_params galley p = { (galley) with math_params = p };
value set_graphics_params galley p = { (galley) with graphics_params = p };
value set_current_line_params galley p = { (galley) with current_line_params = p };
value set_current_line_break_params galley p = { (galley) with current_line_break_params = p };
value set_current_hyphen_params galley p = { (galley) with current_hyphen_params = p };
value set_current_space_params galley p = { (galley) with current_space_params = p };
value set_current_math_params galley p = { (galley) with current_math_params = p };
value set_par_params galley p =
{
(galley)
with
par_params = { (p) with ParLayout.measure = galley.measure }
};
value set_current_par_params galley p =
{
(galley)
with
current_par_params = { (p) with ParLayout.measure = galley.measure }
};
value copy_params galley from_galley =
{
(galley)
with
graphics_params = from_galley.graphics_params;
par_params = { (from_galley.par_params)
with
ParLayout.measure = galley.measure };
line_params = from_galley.line_params;
line_break_params = from_galley.line_break_params;
hyphen_params = from_galley.hyphen_params;
space_params = from_galley.space_params;
math_params = from_galley.math_params;
current_par_params = { (from_galley.current_par_params)
with
ParLayout.measure = galley.measure };
current_line_params = from_galley.current_line_params;
current_line_break_params = from_galley.current_line_break_params;
current_hyphen_params = from_galley.current_hyphen_params;
current_space_params = from_galley.current_space_params;
current_math_params = from_galley.current_math_params
};
value reset_params galley =
{
(galley)
with
current_par_params = { (galley.par_params)
with
ParLayout.measure = galley.measure };
current_line_params = galley.line_params;
current_line_break_params = galley.line_break_params;
current_hyphen_params = galley.hyphen_params;
current_space_params = galley.space_params;
current_math_params = galley.math_params
};
(* add a line to the galley *)
value add_line galley line = do
{
(* We need to keep track of graphic state changes. *)
let update_gfx_cmds ((fg, bg, alpha) as gfx) c = match c with
[ Graphic.SetColour _ -> (Some c, bg, alpha)
| Graphic.SetBgColour _ -> (fg, Some c, alpha)
| Graphic.SetAlpha _ -> (fg, bg, Some c)
| _ -> gfx
];
(* |make_gfx_cmd_boxes <state>| returns a list of command boxes that sets the right graphics state. *)
let make_gfx_cmd_boxes (fg, bg, alpha) = do
{
(match fg with
[ Some c -> [new_command_box (`GfxCmd c)]
| None -> []
])
@ (match bg with
[ Some c -> [new_command_box (`GfxCmd c)]
| None -> []
])
@ (match alpha with
[ Some c -> [new_command_box (`GfxCmd c)]
| None -> []
])
};
let leading = match galley.lines with
[ [] -> dim_zero
| [b::_] -> galley.current_line_params.leading b line galley.current_line_params
];
let gfx = match line.b_contents with
[ CompBox bs -> List.fold_left update_gfx_cmds (None, None, None) bs
| _ -> (None, None, None)
];
{
(galley)
with
lines = [line;
new_glue_box dim_zero leading True True
:: galley.glue @ galley.lines];
glue = make_gfx_cmd_boxes gfx
}
};
(* add glue and control boxes to the galley *)
value add_glue galley box =
{
(galley)
with
glue = [box :: galley.glue]
};
(* add a paragraph to the galley *)
value add_paragraph galley loc items = do
{
(* search for v-insert boxes *)
let rec extract_inserts boxes result above below = match boxes with
[ [] -> (ListBuilder.get result, List.rev above, List.rev below)
| [b::bs] -> match b.b_contents with
[ CommandBox (`ParCmd (VInsert below_flag contents)) ->
if below_flag then
extract_inserts bs result above (List.rev contents @ below)
else
extract_inserts bs result (List.rev contents @ above) below
| _ -> do
{
ListBuilder.add result b;
extract_inserts bs result above below
}
]
];
let lines =
ParLayout.break_paragraph
loc
items
galley.current_par_params
galley.current_line_break_params
galley.current_hyphen_params;
Move this function to ParLayout ?
let rec box_lines result line_no boxes = match boxes with
[ [] -> (line_no, ListBuilder.get result)
| [b::bs] -> do
{
let (body, above, below) = extract_inserts b (ListBuilder.make ()) [] [];
ListBuilder.add result
(ParLayout.layout_line galley.measure line_no body galley.current_par_params, above, below);
box_lines
result
(line_no + 1)
bs
}
];
let insert_break galley penalty = do
{
add_glue galley (new_break_box penalty False [] [] [])
};
let insert_par_skip galley = do
{
add_glue
(insert_break galley num_zero)
(new_glue_box dim_zero galley.current_par_params.ParLayout.par_skip True True)
};
let rec insert_insertion galley boxes = match boxes with
[ [] -> galley
| [b::bs] -> insert_insertion (add_glue galley b) bs
];
let add_line_and_insertions galley line above below = do
{
insert_insertion
(add_line
(insert_insertion galley above)
line)
below
};
match box_lines (ListBuilder.make ()) 0 lines with
[ (_, []) -> galley
| (num, [(line, above, below) :: ls]) -> do
{
iter 1 (num-1) ls
(add_line_and_insertions (insert_par_skip galley) line above below)
where rec iter lines_above lines_below lines galley = match lines with
[ [] -> galley
| [(line, above, below) :: ls] ->
iter (lines_above + 1) (lines_below - 1) ls
(add_line_and_insertions
(insert_break
galley
(galley.current_line_params.club_widow_penalty lines_above lines_below))
line above below)
]
}
]
};
(*
|put_in_vbox <galley>| and |put_in_vtop <galley>| return a box containing the entire material of the
<galley>.
*)
value put_in_vbox galley = do
{
VBox.make (List.rev galley.lines)
};
value put_in_vtop galley = do
{
VBox.make_top (List.rev galley.lines)
};
| null | https://raw.githubusercontent.com/5HT/ant/6acf51f4c4ebcc06c52c595776e0293cfa2f1da4/Typesetting/Galley.ml | ocaml | amount of glue between baselines
minimal amout of glue between lines
the amout if the lines are closer together
list of all lines and glue in reverse order
glue at the end of the galley
the width of the galley
<off> is the vertical offset of <b2> w.r.t. <b1>
boxes do not intersect
galleys
add a line to the galley
We need to keep track of graphic state changes.
|make_gfx_cmd_boxes <state>| returns a list of command boxes that sets the right graphics state.
add glue and control boxes to the galley
add a paragraph to the galley
search for v-insert boxes
|put_in_vbox <galley>| and |put_in_vtop <galley>| return a box containing the entire material of the
<galley>.
|
open XNum;
open Runtime;
open Dim;
open Box;
type line_params =
{
leading : box -> box -> line_params -> dim;
penalty between two lines
};
type space_params =
{
space_factor : num;
space_skip : option dim;
xspace_skip : option dim;
victorian_spacing : bool
};
type graphics_params =
{
gp_colour : Graphic.colour;
gp_bg_colour : Graphic.colour;
gp_alpha : num
};
type galley =
{
graphics_params : graphics_params;
current_par_params : ParLayout.par_params;
current_line_params : line_params;
current_line_break_params : ParLayout.line_break_params;
current_hyphen_params : JustHyph.hyphen_params;
current_space_params : space_params;
current_math_params : MathLayout.math_params;
par_params : ParLayout.par_params;
line_params : line_params;
line_break_params : ParLayout.line_break_params;
hyphen_params : JustHyph.hyphen_params;
space_params : space_params;
math_params : MathLayout.math_params
};
|skyline_dist < line 1 > < line 2>| determines the distance between the baselines of < line 1 >
and < line 2 > if they were set without glue between them .
|skyline_dist <line 1> <line 2>| determines the distance between the baselines of <line 1>
and <line 2> if they were set without glue between them.
*)
value skyline_dist line1 line2 = do
{
let rec dist off b1 b2 = match (b1.b_contents, b2.b_contents) with
[ (CompBox c1, CompBox _) -> dist_comp_comp off c1 b2
| (CompBox c1, _) -> dist_comp_simple off c1 b2
| (_, CompBox c2) -> dist_simple_comp off b1 c2
| (_, _) -> dist_simple_simple off b1 b2
]
and dist_simple_simple off b1 b2 = do
{
if off >=/ b2.b_width.d_base then
else
dim_add b1.b_depth b2.b_height
}
and dist_simple_comp off b1 c2 = match c2 with
[ [] -> b1.b_depth
| [Graphic.PutBox x y b :: cs] -> do
{
let d = dim_add (dist (off +/ x.d_base) b1 b) y;
dim_max d (dist_simple_comp off b1 cs)
}
| [_ :: cs] -> dist_simple_comp off b1 cs
]
and dist_comp_simple off c1 b2 = match c1 with
[ [] -> b2.b_height
| [Graphic.PutBox x y b :: cs] -> do
{
let d = dim_sub (dist (off -/ x.d_base) b b2) y;
dim_max d (dist_comp_simple off cs b2)
}
| [_ :: cs] -> dist_comp_simple off cs b2
]
and dist_comp_comp off c1 b2 = match c1 with
[ [] -> dim_zero
| [Graphic.PutBox x y b :: cs] -> do
{
let d = dim_sub (dist (off -/ x.d_base) b b2) y;
dim_max d (dist_comp_comp off cs b2)
}
| [_ :: cs] -> dist_comp_comp off cs b2
];
dist num_zero line1 line2
};
|leading_<version > < line 1 > < line 2 > < line - params>| determines the amount of glue that has to be inserted
between two lines .
|fixed| the distance between baselines is < baseline - skip > ,
|register| the distance is a mutliple of < baseline - skip > ,
|TeX| uses the TeX - algorithm ,
|skyline| uses the TeX - algorithm with the skyline distance .
|leading_<version> <line 1> <line 2> <line-params>| determines the amount of glue that has to be inserted
between two lines.
|fixed| the distance between baselines is <baseline-skip>,
|register| the distance is a mutliple of <baseline-skip>,
|TeX| uses the TeX-algorithm,
|skyline| uses the TeX-algorithm with the skyline distance.
*)
value leading_fixed line1 line2 line_params = do
{
let dist = dim_add line1.b_depth line2.b_height;
dim_sub line_params.baseline_skip dist;
};
value leading_register line1 line2 line_params = do
{
let dist = dim_add line1.b_depth line2.b_height;
let fac = ceiling_num ((dist.d_base +/ line_params.line_skip_limit)
// line_params.baseline_skip.d_base);
dim_sub (fixed_dim (fac */ line_params.baseline_skip.d_base)) dist
};
value leading_TeX line1 line2 line_params = do
{
let dist = dim_add line1.b_depth line2.b_height;
if line_params.baseline_skip.d_base >=/
dist.d_base +/ line_params.line_skip_limit then
dim_sub line_params.baseline_skip dist
else
line_params.line_skip
};
value leading_skyline line1 line2 line_params = do
{
let simple_dist = dim_add line1.b_depth line2.b_height;
if line_params.baseline_skip.d_base >=/
simple_dist.d_base +/ line_params.line_skip_limit then
dim_sub line_params.baseline_skip simple_dist
else do
{
let dist = skyline_dist line1 line2;
if line_params.baseline_skip.d_base >=/ dist.d_base +/ line_params.line_skip_limit then
dim_sub line_params.baseline_skip simple_dist
else
dim_sub line_params.line_skip (dim_sub simple_dist dist)
}
};
value new_galley measure line_params par_params line_break_params hyphen_params space_params math_params =
{
lines = [];
glue = [];
measure = measure;
par_params = { (par_params) with ParLayout.measure = measure };
line_params = line_params;
line_break_params = line_break_params;
hyphen_params = hyphen_params;
space_params = space_params;
math_params = math_params;
current_par_params = { (par_params) with ParLayout.measure = measure };
current_line_params = line_params;
current_line_break_params = line_break_params;
current_hyphen_params = hyphen_params;
current_space_params = space_params;
current_math_params = math_params;
graphics_params =
{
gp_colour = Graphic.Grey num_zero;
gp_bg_colour = Graphic.Grey num_one;
gp_alpha = num_zero
}
};
value lines galley = List.rev (galley.glue @ galley.lines);
value get_line galley i = List.nth (galley.glue @ galley.lines) i;
value keep_lines galley lines =
{
(galley)
with
lines = List.rev lines
};
value last_line galley = match galley.lines with
[ [] -> empty_box
| [b::_] -> b
];
value modify_glue galley f =
{
(galley)
with
glue = f galley.glue
};
value measure galley = galley.measure;
value graphics_params galley = galley.graphics_params;
value par_params galley = galley.par_params;
value line_params galley = galley.line_params;
value line_break_params galley = galley.line_break_params;
value hyphen_params galley = galley.hyphen_params;
value space_params galley = galley.space_params;
value math_params galley = galley.math_params;
value current_par_params galley = galley.current_par_params;
value current_line_params galley = galley.current_line_params;
value current_line_break_params galley = galley.current_line_break_params;
value current_hyphen_params galley = galley.current_hyphen_params;
value current_space_params galley = galley.current_space_params;
value current_math_params galley = galley.current_math_params;
value set_line_params galley p = { (galley) with line_params = p };
value set_line_break_params galley p = { (galley) with line_break_params = p };
value set_hyphen_params galley p = { (galley) with hyphen_params = p };
value set_space_params galley p = { (galley) with space_params = p };
value set_math_params galley p = { (galley) with math_params = p };
value set_graphics_params galley p = { (galley) with graphics_params = p };
value set_current_line_params galley p = { (galley) with current_line_params = p };
value set_current_line_break_params galley p = { (galley) with current_line_break_params = p };
value set_current_hyphen_params galley p = { (galley) with current_hyphen_params = p };
value set_current_space_params galley p = { (galley) with current_space_params = p };
value set_current_math_params galley p = { (galley) with current_math_params = p };
value set_par_params galley p =
{
(galley)
with
par_params = { (p) with ParLayout.measure = galley.measure }
};
value set_current_par_params galley p =
{
(galley)
with
current_par_params = { (p) with ParLayout.measure = galley.measure }
};
value copy_params galley from_galley =
{
(galley)
with
graphics_params = from_galley.graphics_params;
par_params = { (from_galley.par_params)
with
ParLayout.measure = galley.measure };
line_params = from_galley.line_params;
line_break_params = from_galley.line_break_params;
hyphen_params = from_galley.hyphen_params;
space_params = from_galley.space_params;
math_params = from_galley.math_params;
current_par_params = { (from_galley.current_par_params)
with
ParLayout.measure = galley.measure };
current_line_params = from_galley.current_line_params;
current_line_break_params = from_galley.current_line_break_params;
current_hyphen_params = from_galley.current_hyphen_params;
current_space_params = from_galley.current_space_params;
current_math_params = from_galley.current_math_params
};
value reset_params galley =
{
(galley)
with
current_par_params = { (galley.par_params)
with
ParLayout.measure = galley.measure };
current_line_params = galley.line_params;
current_line_break_params = galley.line_break_params;
current_hyphen_params = galley.hyphen_params;
current_space_params = galley.space_params;
current_math_params = galley.math_params
};
value add_line galley line = do
{
let update_gfx_cmds ((fg, bg, alpha) as gfx) c = match c with
[ Graphic.SetColour _ -> (Some c, bg, alpha)
| Graphic.SetBgColour _ -> (fg, Some c, alpha)
| Graphic.SetAlpha _ -> (fg, bg, Some c)
| _ -> gfx
];
let make_gfx_cmd_boxes (fg, bg, alpha) = do
{
(match fg with
[ Some c -> [new_command_box (`GfxCmd c)]
| None -> []
])
@ (match bg with
[ Some c -> [new_command_box (`GfxCmd c)]
| None -> []
])
@ (match alpha with
[ Some c -> [new_command_box (`GfxCmd c)]
| None -> []
])
};
let leading = match galley.lines with
[ [] -> dim_zero
| [b::_] -> galley.current_line_params.leading b line galley.current_line_params
];
let gfx = match line.b_contents with
[ CompBox bs -> List.fold_left update_gfx_cmds (None, None, None) bs
| _ -> (None, None, None)
];
{
(galley)
with
lines = [line;
new_glue_box dim_zero leading True True
:: galley.glue @ galley.lines];
glue = make_gfx_cmd_boxes gfx
}
};
value add_glue galley box =
{
(galley)
with
glue = [box :: galley.glue]
};
value add_paragraph galley loc items = do
{
let rec extract_inserts boxes result above below = match boxes with
[ [] -> (ListBuilder.get result, List.rev above, List.rev below)
| [b::bs] -> match b.b_contents with
[ CommandBox (`ParCmd (VInsert below_flag contents)) ->
if below_flag then
extract_inserts bs result above (List.rev contents @ below)
else
extract_inserts bs result (List.rev contents @ above) below
| _ -> do
{
ListBuilder.add result b;
extract_inserts bs result above below
}
]
];
let lines =
ParLayout.break_paragraph
loc
items
galley.current_par_params
galley.current_line_break_params
galley.current_hyphen_params;
Move this function to ParLayout ?
let rec box_lines result line_no boxes = match boxes with
[ [] -> (line_no, ListBuilder.get result)
| [b::bs] -> do
{
let (body, above, below) = extract_inserts b (ListBuilder.make ()) [] [];
ListBuilder.add result
(ParLayout.layout_line galley.measure line_no body galley.current_par_params, above, below);
box_lines
result
(line_no + 1)
bs
}
];
let insert_break galley penalty = do
{
add_glue galley (new_break_box penalty False [] [] [])
};
let insert_par_skip galley = do
{
add_glue
(insert_break galley num_zero)
(new_glue_box dim_zero galley.current_par_params.ParLayout.par_skip True True)
};
let rec insert_insertion galley boxes = match boxes with
[ [] -> galley
| [b::bs] -> insert_insertion (add_glue galley b) bs
];
let add_line_and_insertions galley line above below = do
{
insert_insertion
(add_line
(insert_insertion galley above)
line)
below
};
match box_lines (ListBuilder.make ()) 0 lines with
[ (_, []) -> galley
| (num, [(line, above, below) :: ls]) -> do
{
iter 1 (num-1) ls
(add_line_and_insertions (insert_par_skip galley) line above below)
where rec iter lines_above lines_below lines galley = match lines with
[ [] -> galley
| [(line, above, below) :: ls] ->
iter (lines_above + 1) (lines_below - 1) ls
(add_line_and_insertions
(insert_break
galley
(galley.current_line_params.club_widow_penalty lines_above lines_below))
line above below)
]
}
]
};
value put_in_vbox galley = do
{
VBox.make (List.rev galley.lines)
};
value put_in_vtop galley = do
{
VBox.make_top (List.rev galley.lines)
};
|
fc8ebc0c501d7ab60c3198da01f8a41c2cdf19b811969a4e71379aa11c645003 | screenshotbot/screenshotbot-oss | test-cdn.lisp | ;; Copyright 2018-Present Modern Interpreters Inc.
;;
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(defpackage :util.test-cdn
(:use :cl
:alexandria
:fiveam))
(in-package :util.test-cdn)
(def-suite* :util.test-cdn)
(test should-be-set-up-by-default
(is (numberp util.cdn:*cdn-cache-key*)))
| null | https://raw.githubusercontent.com/screenshotbot/screenshotbot-oss/2372e102c5ef391ed63aa096f678e2ad1df36afe/src/util/tests/test-cdn.lisp | lisp | Copyright 2018-Present Modern Interpreters Inc.
| This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(defpackage :util.test-cdn
(:use :cl
:alexandria
:fiveam))
(in-package :util.test-cdn)
(def-suite* :util.test-cdn)
(test should-be-set-up-by-default
(is (numberp util.cdn:*cdn-cache-key*)))
|
071bea4d4ecd1f1da922bc73d7269968a7c836e5e82f420be3fa62addad7656a | freckle/faktory_worker_haskell | Main.hs | module Main (main) where
import Prelude
import Control.Exception.Safe
import Data.Aeson
import Faktory.Job (perform)
import Faktory.Producer
import GHC.Generics
import System.Environment (getArgs)
-- | Must match examples/consumer
newtype Job = Job { jobMessage :: String }
deriving stock Generic
deriving anyclass ToJSON
main :: IO ()
main = bracket newProducerEnv closeProducer $ \producer -> do
args <- getArgs
jobId <- perform mempty producer Job { jobMessage = unwords args }
putStrLn $ "Pushed job: " <> show jobId
| null | https://raw.githubusercontent.com/freckle/faktory_worker_haskell/1d58f03a54205430defc611356f31772421149be/examples/producer/Main.hs | haskell | | Must match examples/consumer | module Main (main) where
import Prelude
import Control.Exception.Safe
import Data.Aeson
import Faktory.Job (perform)
import Faktory.Producer
import GHC.Generics
import System.Environment (getArgs)
newtype Job = Job { jobMessage :: String }
deriving stock Generic
deriving anyclass ToJSON
main :: IO ()
main = bracket newProducerEnv closeProducer $ \producer -> do
args <- getArgs
jobId <- perform mempty producer Job { jobMessage = unwords args }
putStrLn $ "Pushed job: " <> show jobId
|
7de02afd102d1839869fe01dbfb2df1f91a9415c3deb16ef0f263dcd41a350c0 | A-Nony-Mus/Autolisp-Utilities | attredit.lsp | ;;**************************************************************************************************************;;
;;attredit ;;
;;Click on an attribute, enter value, repeat until enter is pressed ;;
Written CAB 12/7/17 ; ;
;;**************************************************************************************************************;;
(defun c:attredit ( / pt ss)
(while (setq pt (getpoint "Select Attribute: "))
(setq ss (ssget pt))
(if ss
(if (eq (cdr (assoc 0 (entget (ssname ss 0)))) "INSERT")
(command ".attipedit" pt)
)
)
)
)
| null | https://raw.githubusercontent.com/A-Nony-Mus/Autolisp-Utilities/d03e8f3feefe9377fa9c7dddd756d38011962c52/attredit.lsp | lisp | **************************************************************************************************************;;
attredit ;;
Click on an attribute, enter value, repeat until enter is pressed ;;
;
**************************************************************************************************************;; | (defun c:attredit ( / pt ss)
(while (setq pt (getpoint "Select Attribute: "))
(setq ss (ssget pt))
(if ss
(if (eq (cdr (assoc 0 (entget (ssname ss 0)))) "INSERT")
(command ".attipedit" pt)
)
)
)
)
|
d65dcf0eaf48b296b2afe6670c11d5ede891d1d8bccc9dc0f267bdb0e81f5ac2 | JacquesCarette/Drasil | PrintingInformation.hs | # LANGUAGE TemplateHaskell #
-- | Defines types and functions to gather all the information needed for printing.
module Language.Drasil.Printing.PrintingInformation where
import Control.Lens (makeLenses, Lens', (^.))
import SysInfo.Drasil (sysinfodb, SystemInformation)
import Database.Drasil (ChunkDB)
import Language.Drasil (Stage(..))
-- | Notation can be scientific or for engineering.
data Notation = Scientific
| Engineering
-- | Able to be printed.
class HasPrintingOptions c where
-- | Holds the printing notation.
getSetting :: Lens' c Notation
-- | Holds the printing configuration.
newtype PrintingConfiguration = PC { _notation :: Notation }
makeLenses ''PrintingConfiguration
-- | Finds the notation used for the 'PrintingConfiguration'.
instance HasPrintingOptions PrintingConfiguration where getSetting = notation
-- | Printing information contains a database, a stage, and a printing configuration.
data PrintingInformation = PI
{ _ckdb :: ChunkDB
, _stg :: Stage
, _configuration :: PrintingConfiguration
}
makeLenses ''PrintingInformation
-- | Finds the notation used for the 'PrintingConfiguration' within the 'PrintingInformation'.
instance HasPrintingOptions PrintingInformation where getSetting = configuration . getSetting
-- | Builds a document's printing information based on the system information.
piSys :: SystemInformation -> Stage -> PrintingConfiguration -> PrintingInformation
piSys si = PI (si ^. sysinfodb)
-- | Default configuration is for engineering.
defaultConfiguration :: PrintingConfiguration
defaultConfiguration = PC Engineering
| null | https://raw.githubusercontent.com/JacquesCarette/Drasil/c00990e67f1072c0ae5b34847d0d8ff7c452af71/code/drasil-printers/lib/Language/Drasil/Printing/PrintingInformation.hs | haskell | | Defines types and functions to gather all the information needed for printing.
| Notation can be scientific or for engineering.
| Able to be printed.
| Holds the printing notation.
| Holds the printing configuration.
| Finds the notation used for the 'PrintingConfiguration'.
| Printing information contains a database, a stage, and a printing configuration.
| Finds the notation used for the 'PrintingConfiguration' within the 'PrintingInformation'.
| Builds a document's printing information based on the system information.
| Default configuration is for engineering. | # LANGUAGE TemplateHaskell #
module Language.Drasil.Printing.PrintingInformation where
import Control.Lens (makeLenses, Lens', (^.))
import SysInfo.Drasil (sysinfodb, SystemInformation)
import Database.Drasil (ChunkDB)
import Language.Drasil (Stage(..))
-- | Notation used when rendering numbers: scientific or engineering.
data Notation = Scientific
              | Engineering

-- | Types that carry a printing 'Notation' setting.
class HasPrintingOptions c where
  -- | Lens onto the notation setting.
  getSetting :: Lens' c Notation
newtype PrintingConfiguration = PC { _notation :: Notation }
makeLenses ''PrintingConfiguration
instance HasPrintingOptions PrintingConfiguration where getSetting = notation
data PrintingInformation = PI
{ _ckdb :: ChunkDB
, _stg :: Stage
, _configuration :: PrintingConfiguration
}
makeLenses ''PrintingInformation
instance HasPrintingOptions PrintingInformation where getSetting = configuration . getSetting
-- | Builds a document's printing information from the system
-- information, reusing the system's chunk database.
piSys :: SystemInformation -> Stage -> PrintingConfiguration -> PrintingInformation
piSys si = PI (si ^. sysinfodb)

-- | Default printing configuration: engineering notation.
defaultConfiguration :: PrintingConfiguration
defaultConfiguration = PC Engineering
|
052762be193007c40620fc7df660d5d51e9f0270a886ac406aaac797a0d624d3 | vernemq/vernemq_demo_plugin | vernemq_demo_plugin.erl | -module(vernemq_demo_plugin).
-behaviour(auth_on_register_hook).
-behaviour(auth_on_subscribe_hook).
-behaviour(auth_on_publish_hook).
-export([auth_on_register/5,
auth_on_publish/6,
auth_on_subscribe/3]).
%% This file demonstrates the hooks you typically want to use
%% if your plugin deals with Authentication or Authorization.
%%
%% All it does is:
%% - authenticate every user and write the log
%% - authorize every PUBLISH and SUBSCRIBE and write it to the log
%%
You do n't need to implement all of these hooks , just the one
%% needed for your use case.
%%
%% IMPORTANT:
%% these hook functions run in the session context
%%
%% Hook: authenticate an MQTT CONNECT.  Runs in the session context.
%% Possible return values:
%%   1. ok                                 -> CONNECT is authenticated
%%   2. next                               -> leave it to other plugins to decide
%%   3. {ok, [{ModifierKey, NewVal}, ...]} -> authenticated, with changed
%%        session options:
%%        - {mountpoint, NewMountPoint :: string()}
%%        - {clean_session, NewCleanSession :: boolean()}
%%   4. {error, invalid_credentials}       -> CONNACK_CREDENTIALS is sent
%%   5. {error, Whatever}                  -> CONNACK_AUTH is sent
auth_on_register({_IpAddr, _Port} = Peer, {_MountPoint, _ClientId} = SubscriberId, UserName, Password, CleanSession) ->
    error_logger:info_msg("auth_on_register: ~p ~p ~p ~p ~p", [Peer, SubscriberId, UserName, Password, CleanSession]),
    %% Demo behaviour: log the parameters and authenticate everyone.
    ok.
%% Hook: authorize an MQTT PUBLISH.  Possible return values:
%%   1. ok                                 -> PUBLISH is authorized
%%   2. next                               -> leave it to other plugins to decide
%%   3. {ok, NewPayload :: binary()}       -> authorized, with a changed payload
%%   4. {ok, [{ModifierKey, NewVal}, ...]} -> authorized, with changed publish
%%        options:
%%        - {topic, NewTopic :: string()}
%%        - {payload, NewPayload :: binary()}
%%        - {qos, NewQoS :: 0..2}
%%        - {retain, NewRetainFlag :: boolean()}
%%   5. {error, Whatever}                  -> auth chain stops; the message is
%%        silently dropped (unless it is a Last Will message)
auth_on_publish(UserName, {_MountPoint, _ClientId} = SubscriberId, QoS, Topic, Payload, IsRetain) ->
    error_logger:info_msg("auth_on_publish: ~p ~p ~p ~p ~p ~p", [UserName, SubscriberId, QoS, Topic, Payload, IsRetain]),
    %% Demo behaviour: log the parameters and authorize everything.
    ok.
%% Hook: authorize an MQTT SUBSCRIBE.  Possible return values:
%%   1. ok                  -> SUBSCRIBE is authorized
%%   2. next                -> leave it to other plugins to decide
%%   3. {error, Whatever}   -> auth chain stops and no SUBACK is sent
auth_on_subscribe(UserName, ClientId, [{_Topic, _QoS}|_] = Topics) ->
    error_logger:info_msg("auth_on_subscribe: ~p ~p ~p", [UserName, ClientId, Topics]),
    %% Demo behaviour: log the parameters and authorize everything.
    ok.
| null | https://raw.githubusercontent.com/vernemq/vernemq_demo_plugin/c21281609141733243dcb91c5dc1972996664390/src/vernemq_demo_plugin.erl | erlang | This file demonstrates the hooks you typically want to use
if your plugin deals with Authentication or Authorization.
All it does is:
- authenticate every user and write the log
- authorize every PUBLISH and SUBSCRIBE and write it to the log
needed for your use case.
IMPORTANT:
these hook functions run in the session context
do whatever you like with the params, all that matters
is the return value of this function
- {mountpoint, NewMountPoint::string}
- {clean_session, NewCleanSession::boolean}
we return 'ok'
do whatever you like with the params, all that matters
is the return value of this function
- {topic, NewTopic::string}
- {retain, NewRetainFlag::boolean}
we return 'ok'
do whatever you like with the params, all that matters
is the return value of this function
we return 'ok' | -module(vernemq_demo_plugin).
-behaviour(auth_on_register_hook).
-behaviour(auth_on_subscribe_hook).
-behaviour(auth_on_publish_hook).
-export([auth_on_register/5,
auth_on_publish/6,
auth_on_subscribe/3]).
You do n't need to implement all of these hooks , just the one
auth_on_register({_IpAddr, _Port} = Peer, {_MountPoint, _ClientId} = SubscriberId, UserName, Password, CleanSession) ->
error_logger:info_msg("auth_on_register: ~p ~p ~p ~p ~p", [Peer, SubscriberId, UserName, Password, CleanSession]),
1 . return ' ok ' - > CONNECT is authenticated
2 . return ' next ' - > leave it to other plugins to decide
3 . return { ok , [ { ModifierKey , NewVal } ... ] } - > CONNECT is authenticated , but we might want to set some options used throughout the client session :
4 . return { error , invalid_credentials } - > CONNACK_CREDENTIALS is sent
5 . return { error , whatever } - > CONNACK_AUTH is sent
ok.
auth_on_publish(UserName, {_MountPoint, _ClientId} = SubscriberId, QoS, Topic, Payload, IsRetain) ->
error_logger:info_msg("auth_on_publish: ~p ~p ~p ~p ~p ~p", [UserName, SubscriberId, QoS, Topic, Payload, IsRetain]),
1 . return ' ok ' - > PUBLISH is authorized
2 . return ' next ' - > leave it to other plugins to decide
3 . return { ok , NewPayload::binary } - > PUBLISH is authorized , but we changed the payload
4 . return { ok , [ { ModifierKey , NewVal } ... ] } - > PUBLISH is authorized , but we might have changed different Publish Options :
- { payload , NewPayload::binary }
- { qos , NewQoS::0 .. 2 }
5 . return { error , whatever } - > auth chain is stopped , and message is silently dropped ( unless it is a Last Will message )
ok.
auth_on_subscribe(UserName, ClientId, [{_Topic, _QoS}|_] = Topics) ->
error_logger:info_msg("auth_on_subscribe: ~p ~p ~p", [UserName, ClientId, Topics]),
1 . return ' ok ' - > SUBSCRIBE is authorized
2 . return ' next ' - > leave it to other plugins to decide
3 . return { error , whatever } - > auth chain is stopped , and no SUBACK is sent
ok.
|
b63026f84f8534b8d2387735afbbf7c8be2429666481ab8db1be99534ec971cf | ice1000/learn | perimeter-of-a-rectangle.hs | module Kata where
-- | Perimeter of a rectangle with side lengths @x@ and @y@.
getPerimeter :: Int -> Int -> Int
getPerimeter x y = 2 * (x + y)
| null | https://raw.githubusercontent.com/ice1000/learn/4ce5ea1897c97f7b5b3aee46ccd994e3613a58dd/Haskell/CW-Kata/perimeter-of-a-rectangle.hs | haskell | module Kata where
getPerimeter :: Int -> Int -> Int
getPerimeter x y = sum [x, x, y, y]
| |
10e5dc29ef759c0ed4ef21a9c28b8907b511ec15d5969551590957b03e93cc66 | lordi/haskell-terminal | Theme.hs | module Hsterm.Theme (colorize) where
import Terminal.Types
import Data.Maybe (fromJust)
import Data.Colour.SRGB (Colour, sRGB24read)
-- | Palette mapping the eight basic terminal colours to hex RGB
-- strings (later parsed by 'sRGB24read').
colorMap = [ (Black, "#000000")
           , (Red, "#ff6565")
           , (Green, "#93d44f")
           , (Yellow, "#eab93d")
           , (Blue, "#204a87")
           , (Magenta, "#ce5c00")
           , (Cyan, "#89b6e2")
           , (White, "#cccccc") ]
-- | Look up the RGB value for a terminal colour.
--
-- Fix: the previous body composed @sRGB24read . fromJust . lookup
-- termcol cmap@ — but @lookup termcol cmap@ is a 'Maybe' *value*, not
-- a function, so the composition did not typecheck.  Apply the
-- functions instead.
--
-- NOTE(review): the bold and normal palettes are currently identical
-- ('colorMap' in both branches); presumably a brighter palette was
-- intended for bold text — confirm against the original design.
colorize :: TerminalColor -> Bool -> Colour
colorize termcol bold = sRGB24read (fromJust (lookup termcol cmap))
  where cmap = if bold then colorMap else colorMap
| null | https://raw.githubusercontent.com/lordi/haskell-terminal/037a1374c3e28a9cc00f9da0ec8b2887944ab943/src/Hsterm/Theme.hs | haskell | module Hsterm.Theme (colorize) where
import Terminal.Types
import Data.Maybe (fromJust)
import Data.Colour.SRGB (Colour, sRGB24read)
colorMap = [ (Black, "#000000")
, (Red, "#ff6565")
, (Green, "#93d44f")
, (Yellow, "#eab93d")
, (Blue, "#204a87")
, (Magenta, "#ce5c00")
, (Cyan, "#89b6e2")
, (White, "#cccccc") ]
colorize :: TerminalColor -> Bool -> Colour
colorize termcol bold = sRGB24read . fromJust . lookup termcol cmap
where cmap = if bold then colorMap else colorMap
| |
23f563ca73e9ceeda937e623115b7637e8a3a6a41e311003927e4e30a04a1516 | xvw/preface | pair.mli | val cases : count:int -> (string * unit Alcotest.test_case list) list
| null | https://raw.githubusercontent.com/xvw/preface/84a297e1ee2967ad4341dca875da8d2dc6d7638c/test/preface_laws_test/pair.mli | ocaml | val cases : count:int -> (string * unit Alcotest.test_case list) list
| |
e93fbbec9b3ed46dac8b059aaf7945c5e8e3e32571aff2ca4730392da8b76b1c | Workiva/eva | config.clj | Copyright 2015 - 2019 Workiva Inc.
;;
;; Licensed under the Eclipse Public License 1.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; -1.0.php
;;
;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns eva.contextual.config
(:require [clojure.spec.alpha :as s]))
(s/def ::config-options #{::as-per-config ::override})
(def ^:private config (atom {}))
(defn keep?
  "True when data carrying `tag` should be kept.
   `::override` always keeps the tag; `::as-per-config` consults the
   config atom, where tags default to enabled when not configured.
   Any other option is a programming error."
  [tag option]
  (cond
    ;; explicit override: never ignore the tag
    (= ::override option) true
    ;; per-config: enabled unless explicitly disabled
    (= ::as-per-config option) (get @config tag true)
    :else (throw (IllegalArgumentException. (format "Invalid config option: %s" option)))))

(s/fdef keep? :args (s/cat :tag keyword? :option ::config-options))
(defn set-tag!
  "Record in the config atom whether `tag` is enabled."
  [tag yes-no]
  (swap! config assoc tag yes-no))

(defn enable!
  "Mark `tag` as enabled."
  [tag]
  (set-tag! tag true))

(defn disable!
  "Mark `tag` as disabled."
  [tag]
  (set-tag! tag false))

(defn reset!
  "Drop all per-tag configuration."
  []
  (clojure.core/reset! config {}))
| null | https://raw.githubusercontent.com/Workiva/eva/b7b8a6a5215cccb507a92aa67e0168dc777ffeac/core/src/eva/contextual/config.clj | clojure |
Licensed under the Eclipse Public License 1.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-1.0.php
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
do not ignore tag
not ignored if not configured | Copyright 2015 - 2019 Workiva Inc.
distributed under the License is distributed on an " AS IS " BASIS ,
(ns eva.contextual.config
(:require [clojure.spec.alpha :as s]))
(s/def ::config-options #{::as-per-config ::override})
(def ^:private config (atom {}))
(defn keep?
[tag option]
(cond
:else (throw (IllegalArgumentException. (format "Invalid config option: %s" option)))))
(s/fdef keep? :args (s/cat :tag keyword? :option ::config-options))
(defn set-tag! [tag yes-no]
(swap! config assoc tag yes-no))
(defn enable! [tag]
(set-tag! tag true))
(defn disable! [tag]
(set-tag! tag false))
(defn reset! []
(clojure.core/reset! config {}))
|
3cb176df7e4a4ca427af72cfcb96fbd347bd80b105928b5caf052cc09894a7c9 | jrm-code-project/LISP-Machine | pfix.lisp | -*- Mode : LISP ; Package : FILE - SYSTEM ; : ZL ; ; Patch - File : T -*-
;;; FOR A LITTLE WHILE WRITE-PROPERTY-LIST WAS BROKEN.
;;; THIS KLUDGE TRIES TO READ THE PROPERTY LIST ANYWAY.
;;; EVEN GIVEN POSSIBLY INCORRECT LENGTH.
( C ) Copyright 1987 , LISP MACHINE INC
;;; See filename "Copyright.Text" for more information.
;;; *********************************************************
;;; *********************************************************
* * * NOTE : This is an EXAMPLE , not LMI supported code . * * *
;;; *** information contained in this example is subject ***
;;; *** to change without notice. The ways of doing ***
;;; *** the things contained in the example may change ***
;;; *** between system releases. Some techniques which ***
* * * are mere examples in one release may become built * * *
;;; *** in system features in the next release. Use good ***
;;; *** judgement when copying these techniques. Most ***
;;; *** examples have been motivated by specific customer ***
;;; *** requests, and may not be the best engineered ***
;;; *** or most efficient solution for someone else. ***
;;; *********************************************************
;;; *********************************************************
;; One-shot repair pass: recursively walks every directory of the LM:
;; file system (listing each directory, then visiting queued
;; subdirectories), then writes the in-core file system back to disk.
;; Presumably the act of listing re-reads each entry's property list
;; through the fixed reader above — confirm before re-use.
(DEFUN FIX-UP-AFTER-3-112-BUG ()
  (LABELS ((RECURSE (PATH)
	     (PRINT PATH)			;progress report
	     (DO ((L (FS:DIRECTORY-LIST PATH) (CDR L))
		  (DIRS))
		 ((NULL L)
		  (MAPC #'RECURSE DIRS))
	       (COND ((NULL (CAAR L)))		;skip the header entry
		     ((GET (CAR L) :DIRECTORY)
		      ;; Queue the subdirectory as a wild pathname.
		      (PUSH (SEND (SEND (CAAR L) :PATHNAME-AS-DIRECTORY) :NEW-PATHNAME
				  :NAME :WILD :TYPE :WILD :VERSION :WILD)
			    DIRS))))))
    (RECURSE (FS:PARSE-PATHNAME "LM:~;*.*#*")))
  (WRITE-CORE-FILE-SYSTEM-ONTO-DISK))
;;; USE DJ
;; Reads a counted string from STREAM: one length byte, then that many
;; characters into a fresh ART-STRING.
(DEFUN GET-STRING (STREAM)
  (LET* ((LEN (SEND STREAM :TYI))
	 (ARR (MAKE-ARRAY LEN :TYPE 'ART-STRING)))
    (SEND STREAM :STRING-IN NIL ARR 0 LEN)
    ARR))
;; Reads a property list in the on-disk dump format: a count byte,
;; then COUNT (name, tag, value) entries.  Because the count may be
;; wrong (see header note), each name-length byte is sanity-checked
;; and reading bails out early when we have probably hit the next
;; directory entry.
(DEFUN READ-PROPERTY-LIST (STREAM &AUX LIST (PAK SI:PKG-KEYWORD-PACKAGE))
  (SETQ LIST (MAKE-LIST (* (PROGN (SEND STREAM :TYI)) 2)))
  (DO ((L LIST (CDDR L))
       (LEN-PEEK))
      ((NULL L))
    (SETQ LEN-PEEK (SEND STREAM :TYIPEEK))
    (WHEN (OR (NOT LEN-PEEK) (>= LEN-PEEK #\SP))
      ;; LENGTHS OF PROPERTIES ARE ALMOST ALWAYS LESS THAN 32.
      ;; OTHERWISE WE ARE PROBABLY UP AGAINST A NEW DIRECTORY ENTRY.
      (FORMAT T "~&FOUND A LOSING PLIST: ~S~%" LEN-PEEK)
      (RETURN (RPLACD L NIL)))
    (RPLACA L (INTERN (GET-STRING STREAM) PAK))
    ;; A tag byte selects how the property value is encoded.
    (CASE (SEND STREAM :TYI)
      (0)					;value is NIL
      (1 (SETF (CADR L) T))
      (2 (SETF (CADR L) (INTERN (GET-STRING STREAM) PAK)))
      (3 (SETF (CADR L) (INTERN (GET-STRING STREAM)
				(PKG-FIND-PACKAGE (GET-STRING STREAM) :ASK))))
      (4 (SETF (CADR L) (GET-BYTES STREAM 3)))
      (5 (SETF (CADR L) (GET-STRING STREAM)))
      (6 (SETF (CADR L)
	       (LET ((*READ-BASE* 10.)
		     (*PACKAGE* SI:PKG-USER-PACKAGE)
		     (*READTABLE* SI:INITIAL-READTABLE))
		 ;; This can lose pretty badly with #<'s, etc. -- DLA
		 (CLI:READ STREAM))))
      (7 (LET* ((*READ-BASE* 10.)
		(*PACKAGE* SI:PKG-USER-PACKAGE)
		(*READTABLE* SI:INITIAL-READTABLE)
		(FORM (CLI:READ STREAM))
		(DEFAULT-CONS-AREA WORKING-STORAGE-AREA))	;<-*
	    (SETF (CADR L)
		  (IF (= (LENGTH FORM) 6)
		      (APPLY #'FS:MAKE-FASLOAD-PATHNAME FORM)
		    (EVAL FORM)))))		;Obsolete form for pathnames to be written.
      (OTHERWISE (FERROR NIL "Invalid Plist property designator."))))
  ;; If the declared count was wrong we may hold an odd number of
  ;; elements; trim to a proper (indicator value ...) plist.
  (LET ((N (LENGTH LIST)))
    (COND ((NOT (ODDP N)))
	  ((= N 1)
	   (SETQ LIST NIL))
	  ('ELSE
	   (SETF (NTHCDR (1- N) LIST) NIL))))
  LIST)
| null | https://raw.githubusercontent.com/jrm-code-project/LISP-Machine/0a448d27f40761fafabe5775ffc550637be537b2/lambda/gjcx/pkg/pfix.lisp | lisp | Package : FILE - SYSTEM ; : ZL ; ; Patch - File : T -*-
FOR A LITTLE WHILE WRITE-PROPERTY-LIST WAS BROKEN.
THIS KLUDGE TRIES TO READ THE PROPERTY LIST ANYWAY.
EVEN GIVEN POSSIBLY INCORRECT LENGTH.
See filename "Copyright.Text" for more information.
*********************************************************
*********************************************************
*** information contained in this example is subject ***
*** to change without notice. The ways of doing ***
*** the things contained in the example may change ***
*** between system releases. Some techniques which ***
*** in system features in the next release. Use good ***
*** judgement when copying these techniques. Most ***
*** examples have been motivated by specific customer ***
*** requests, and may not be the best engineered ***
*** or most efficient solution for someone else. ***
*********************************************************
*********************************************************
USE DJ
OTHERWISE WE ARE PROBABLY UP AGAINST A NEW DIRECTORY ENTRY.
<-*
Obsolete form for pathnames to be written. |
( C ) Copyright 1987 , LISP MACHINE INC
* * * NOTE : This is an EXAMPLE , not LMI supported code . * * *
* * * are mere examples in one release may become built * * *
(DEFUN FIX-UP-AFTER-3-112-BUG ()
(LABELS ((RECURSE (PATH)
(PRINT PATH)
(DO ((L (FS:DIRECTORY-LIST PATH) (CDR L))
(DIRS))
((NULL L)
(MAPC #'RECURSE DIRS))
(COND ((NULL (CAAR L)))
((GET (CAR L) :DIRECTORY)
(PUSH (SEND (SEND (CAAR L) :PATHNAME-AS-DIRECTORY) :NEW-PATHNAME
:NAME :WILD :TYPE :WILD :VERSION :WILD)
DIRS))))))
(RECURSE (FS:PARSE-PATHNAME "LM:~;*.*#*")))
(WRITE-CORE-FILE-SYSTEM-ONTO-DISK))
(DEFUN GET-STRING (STREAM)
(LET* ((LEN (SEND STREAM :TYI))
(ARR (MAKE-ARRAY LEN :TYPE 'ART-STRING)))
(SEND STREAM :STRING-IN NIL ARR 0 LEN)
ARR))
(DEFUN READ-PROPERTY-LIST (STREAM &AUX LIST (PAK SI:PKG-KEYWORD-PACKAGE))
(SETQ LIST (MAKE-LIST (* (PROGN (SEND STREAM :TYI)) 2)))
(DO ((L LIST (CDDR L))
(LEN-PEEK))
((NULL L))
(SETQ LEN-PEEK (SEND STREAM :TYIPEEK))
(WHEN (OR (NOT LEN-PEEK) (>= LEN-PEEK #\SP))
LENGTHS OF PROPERTIES ARE ALMOST ALWAYS LESS THAN 32 .
(FORMAT T "~&FOUND A LOSING PLIST: ~S~%" LEN-PEEK)
(RETURN (RPLACD L NIL)))
(RPLACA L (INTERN (GET-STRING STREAM) PAK))
(CASE (SEND STREAM :TYI)
(0)
(1 (SETF (CADR L) T))
(2 (SETF (CADR L) (INTERN (GET-STRING STREAM) PAK)))
(3 (SETF (CADR L) (INTERN (GET-STRING STREAM)
(PKG-FIND-PACKAGE (GET-STRING STREAM) :ASK))))
(4 (SETF (CADR L) (GET-BYTES STREAM 3)))
(5 (SETF (CADR L) (GET-STRING STREAM)))
(6 (SETF (CADR L)
(LET ((*READ-BASE* 10.)
(*PACKAGE* SI:PKG-USER-PACKAGE)
(*READTABLE* SI:INITIAL-READTABLE))
This can lose pretty badly with # < 's , etc . -- DLA
(CLI:READ STREAM))))
(7 (LET* ((*READ-BASE* 10.)
(*PACKAGE* SI:PKG-USER-PACKAGE)
(*READTABLE* SI:INITIAL-READTABLE)
(FORM (CLI:READ STREAM))
(SETF (CADR L)
(IF (= (LENGTH FORM) 6)
(APPLY #'FS:MAKE-FASLOAD-PATHNAME FORM)
(OTHERWISE (FERROR NIL "Invalid Plist property designator."))))
(LET ((N (LENGTH LIST)))
(COND ((NOT (ODDP N)))
((= N 1)
(SETQ LIST NIL))
('ELSE
(SETF (NTHCDR (1- N) LIST) NIL))))
LIST)
|
dbc0a9c81d7d9eba453f6afa9d3bdf529bb2f17f97d7d0f243cb1a9b0153ba15 | engstrand-config/farg | reload.scm | (define-module (farg reload)
#:use-module (guix gexp)
#:use-module (srfi srfi-1)
#:use-module (ice-9 ftw)
#:use-module (ice-9 rdelim)
#:use-module (ice-9 textual-ports)
#:export (reload-terminal-colors))
Procedures adapted from pywal , see :
;; Escape sequence assigning COLOR to "special" slot INDEX (pywal's
;; layout: e.g. 10 = foreground, 11 = background, 12 = cursor).
(define (set-special index color)
  (format #f "\x1b]~d;~a\x1b\\" index color))

;; OSC 4 escape sequence assigning COLOR to palette entry INDEX.
(define (set-color index color)
  (format #f "\x1b]4;~d;~a\x1b\\" index color))
;; Colour for KEY: an entry in the OVERRIDES alist wins; otherwise fall
;; back to the PALETTE procedure.  (An explicit #f override also falls
;; through to the palette, exactly as before.)
(define (get-color key palette overrides)
  (or (assoc-ref overrides key)
      (palette key)))
;; Builds the single string of escape sequences that retints a
;; terminal: palette entries 0-15 followed by the special slots
;; (foreground/background/cursor and pywal's extra indices).
(define (create-sequences palette overrides)
  (let ((text (get-color 'text palette overrides))
        (background (get-color 'background palette overrides)))
    (string-join
     (append (map (lambda (x) (set-color x (get-color x palette overrides)))
                  ;; colours 0 through 15
                  (iota 16 0))
             (list (set-special 10 text)
                   (set-special 11 background)
                   (set-special 12 text)
                   (set-special 13 text)
                   (set-special 17 text)
                   (set-special 19 background)
                   (set-special 232 background)
                   (set-special 256 text)
                   (set-special 257 background)))
     "")))
(define* (reload-terminal-colors palette #:optional (overrides '()))
  "Updates the colors of all open terminals using the colors in PALETTE.
By default, the colors 0-15 (directly from pywal) will be used, including
the special @code{'background} and @code{'text} colors. Each color that
is used can be overridden by specifying an additional OVERRIDES alist.
@example
;; Use default colors from palette
(reload-terminal-colors palette)
;; Override color index 0 and the named background color.
(reload-terminal-colors palette
                        `((0 . \"#FFFFFF\")
                          ('background . \"#000000\")))
@end example
"
  (let ((path "/dev/pts")
        (sequences (create-sequences palette overrides)))
    #~(begin
        ;; Fix: `scandir' lives in (ice-9 ftw); the staged code
        ;; previously imported only (ice-9 rdelim), which does not
        ;; provide it, so this failed with an unbound variable at run
        ;; time.
        (use-modules (ice-9 ftw))
        (for-each
         (lambda (file)
           ;; Write the escape sequences into every pseudo-terminal.
           (call-with-output-file (string-append #$path "/" file)
             (lambda (port) (display #$sequences port))))
         ;; pts entries are numeric; this predicate skips "ptmx" etc.
         (scandir #$path (lambda (f) (string->number f)))))))
| null | https://raw.githubusercontent.com/engstrand-config/farg/a4c3e7a459a8230decd01aa4f94b35df519c0acf/farg/reload.scm | scheme | Use default colors from palette
Override color index 0 and the named background color. | (define-module (farg reload)
#:use-module (guix gexp)
#:use-module (srfi srfi-1)
#:use-module (ice-9 ftw)
#:use-module (ice-9 rdelim)
#:use-module (ice-9 textual-ports)
#:export (reload-terminal-colors))
Procedures adapted from pywal , see :
(define (set-special index color)
(format #f "\x1b]~d;~a\x1b\\" index color))
(define (set-color index color)
(format #f "\x1b]4;~d;~a\x1b\\" index color))
(define (get-color key palette overrides)
(let ((value (assoc-ref overrides key)))
(if (eq? value #f)
(palette key)
value)))
(define (create-sequences palette overrides)
(let ((text (get-color 'text palette overrides))
(background (get-color 'background palette overrides)))
(string-join
(append (map (lambda (x) (set-color x (get-color x palette overrides)))
(iota 16 0))
(list (set-special 10 text)
(set-special 11 background)
(set-special 12 text)
(set-special 13 text)
(set-special 17 text)
(set-special 19 background)
(set-special 232 background)
(set-special 256 text)
(set-special 257 background)))
"")))
(define* (reload-terminal-colors palette #:optional (overrides '()))
"Updates the colors of all open terminals using the colors in PALETTE.
By default, the colors 0-15 (directly from pywal) will be used, including
the special @code{'background} and @code{'text} colors. Each color that
is used can be overridden by specifying an additional OVERRIDES alist.
@example
(reload-terminal-colors palette)
(reload-terminal-colors palette
`((0 . \"#FFFFFF\")
('background . \"#000000\")))
@end example
"
(let ((path "/dev/pts")
(sequences (create-sequences palette overrides)))
#~(begin
(use-modules (ice-9 rdelim))
(for-each
(lambda (file)
(call-with-output-file (string-append #$path "/" file)
(lambda (port) (display #$sequences port))))
(scandir #$path (lambda (f) (string->number f)))))))
|
83b86710ebd0cf306eee1718ef3d05137200c59a9e85c267cff0f69ff4ed0a53 | yesodweb/wai | ForceSSLSpec.hs | # LANGUAGE CPP #
{-# LANGUAGE OverloadedStrings #-}
module Network.Wai.Middleware.ForceSSLSpec
( main
, spec
) where
import Control.Monad (forM_)
import Data.ByteString (ByteString)
#if __GLASGOW_HASKELL__ < 804
import Data.Monoid ((<>))
#endif
import Network.HTTP.Types (methodPost, status200, status301, status307)
import Network.Wai
import Test.Hspec
import Network.Wai.Middleware.ForceSSL (forceSSL)
import Network.Wai.Test
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "forceSSL" (forM_ hosts $ \host -> hostSpec host)
where
hosts = ["example.com", "example.com:80", "example.com:8080"]
-- | Runs the forceSSL behaviour checks against one Host header value
-- (with and without an explicit port).
hostSpec :: ByteString -> Spec
hostSpec host = describe ("forceSSL on host " <> show host <> "") $ do
  it "redirects non-https requests to https" $ do
    resp <- runApp host forceSSL defaultRequest
    simpleStatus resp `shouldBe` status301
    simpleHeaders resp `shouldBe` [("Location", "https://" <> host)]
  -- 307 preserves the request method across the redirect.
  it "redirects with 307 in the case of a non-GET request" $ do
    resp <- runApp host forceSSL defaultRequest
      { requestMethod = methodPost }
    simpleStatus resp `shouldBe` status307
    simpleHeaders resp `shouldBe` [("Location", "https://" <> host)]
  it "does not redirect already-secure requests" $ do
    resp <- runApp host forceSSL defaultRequest { isSecure = True }
    simpleStatus resp `shouldBe` status200
  it "preserves the original host, path, and query string" $ do
    resp <- runApp host forceSSL defaultRequest
      { rawPathInfo = "/foo/bar"
      , rawQueryString = "?baz=bat"
      }
    simpleHeaders resp `shouldBe`
      [("Location", "https://" <> host <> "/foo/bar?baz=bat")]
-- | Runs a single request through the given middleware, forcing the
-- Host header to the supplied value, and returns the response.  The
-- inner application always answers 200 with an empty body.
runApp :: ByteString -> Middleware -> Request -> IO SResponse
runApp host mw req =
    runSession (request hostedReq) (mw app)
  where
    hostedReq = req { requestHeaderHost = Just host }
    app _ respond = respond (responseLBS status200 [] "")
module Network.Wai.Middleware.ForceSSLSpec
( main
, spec
) where
import Control.Monad (forM_)
import Data.ByteString (ByteString)
#if __GLASGOW_HASKELL__ < 804
import Data.Monoid ((<>))
#endif
import Network.HTTP.Types (methodPost, status200, status301, status307)
import Network.Wai
import Test.Hspec
import Network.Wai.Middleware.ForceSSL (forceSSL)
import Network.Wai.Test
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "forceSSL" (forM_ hosts $ \host -> hostSpec host)
where
hosts = ["example.com", "example.com:80", "example.com:8080"]
hostSpec :: ByteString -> Spec
hostSpec host = describe ("forceSSL on host " <> show host <> "") $ do
it "redirects non-https requests to https" $ do
resp <- runApp host forceSSL defaultRequest
simpleStatus resp `shouldBe` status301
simpleHeaders resp `shouldBe` [("Location", "https://" <> host)]
it "redirects with 307 in the case of a non-GET request" $ do
resp <- runApp host forceSSL defaultRequest
{ requestMethod = methodPost }
simpleStatus resp `shouldBe` status307
simpleHeaders resp `shouldBe` [("Location", "https://" <> host)]
it "does not redirect already-secure requests" $ do
resp <- runApp host forceSSL defaultRequest { isSecure = True }
simpleStatus resp `shouldBe` status200
it "preserves the original host, path, and query string" $ do
resp <- runApp host forceSSL defaultRequest
{ rawPathInfo = "/foo/bar"
, rawQueryString = "?baz=bat"
}
simpleHeaders resp `shouldBe`
[("Location", "https://" <> host <> "/foo/bar?baz=bat")]
runApp :: ByteString -> Middleware -> Request -> IO SResponse
runApp host mw req = runSession
(request req { requestHeaderHost = Just host }) $ mw app
where
app _ respond = respond $ responseLBS status200 [] ""
|
9fb7efa9311ea1cc9183e82a1c36158389acaf45e3ba912f5eec6afa3b558866 | lisp-mirror/clpm | info.lisp | ;;;; clpm config info
;;;;
This software is part of CLPM . See README.org for more information . See
;;;; LICENSE for license information.
(uiop:define-package #:clpm-cli/commands/config/info
(:use #:cl
#:clpm-cli/common-args
#:clpm-cli/commands/config/common
#:clpm-cli/interface-defs
#:clpm/config)
(:import-from #:uiop
#:*stdout*))
(in-package #:clpm-cli/commands/config/info)
;; adopt interface describing the `clpm config info` subcommand.
(defparameter *config-info-ui*
  (adopt:make-interface
   :name "clpm config info"
   :summary "Common Lisp Project Manager"
   :usage "config info [options]"
   :help "Common Lisp Project Manager"
   :contents (list *group-common*)))

;; Prints the config directory search path and the fully merged
;; configuration to stdout; returns T for a zero exit status.
(define-cli-command (("config" "info") *config-info-ui*) (args options)
  (declare (ignore args options))
  (format *stdout* "Config directories: ~A~%~%" *clpm-config-directories*)
  (format *stdout* "Current configuration:~%~A~%" (with-output-to-string (s) (print-config s)))
  t)
| null | https://raw.githubusercontent.com/lisp-mirror/clpm/ad9a704fcdd0df5ce30ead106706ab6cc5fb3e5b/cli/commands/config/info.lisp | lisp | clpm config info
LICENSE for license information. | This software is part of CLPM . See README.org for more information . See
(uiop:define-package #:clpm-cli/commands/config/info
(:use #:cl
#:clpm-cli/common-args
#:clpm-cli/commands/config/common
#:clpm-cli/interface-defs
#:clpm/config)
(:import-from #:uiop
#:*stdout*))
(in-package #:clpm-cli/commands/config/info)
(defparameter *config-info-ui*
(adopt:make-interface
:name "clpm config info"
:summary "Common Lisp Project Manager"
:usage "config info [options]"
:help "Common Lisp Project Manager"
:contents (list *group-common*)))
(define-cli-command (("config" "info") *config-info-ui*) (args options)
(declare (ignore args options))
(format *stdout* "Config directories: ~A~%~%" *clpm-config-directories*)
(format *stdout* "Current configuration:~%~A~%" (with-output-to-string (s) (print-config s)))
t)
|
149601df374ec027de17f5e4554994e34780044e2c102236f5d14f1fbe4abb1f | riemann/riemann | logstash.clj | (ns riemann.logstash
"Forwards events to LogStash."
(:refer-clojure :exclude [replace])
(:import
(java.net Socket
DatagramSocket
DatagramPacket
InetAddress)
(java.io Writer OutputStreamWriter BufferedWriter))
(:use [clojure.string :only [split join replace]]
clojure.tools.logging
riemann.pool
riemann.common
less.awful.ssl))
;; Minimal transport abstraction shared by the TCP, TLS and UDP clients.
(defprotocol LogStashClient
  (open [client]
    "Opens the underlying transport and returns the connected client")
  (send-line [client line]
    "Sends a formatted line to LogStash")
  (close [client]
    "Cleans up (closes sockets etc.)"))
;; TLS transport: client-certificate authenticated socket wrapped in a
;; buffered writer.  `opts` must supply :key, :cert and :ca-cert paths.
(defrecord LogStashTLSClient [^String host ^int port opts]
  LogStashClient
  (open [this]
    (let [sock (socket (ssl-context (:key opts) (:cert opts) (:ca-cert opts))
                       host
                       port)]
      ;; Perform the TLS handshake eagerly so connection problems
      ;; surface here rather than on the first write.
      (.startHandshake sock)
      (assoc this
             :socket sock
             :out (BufferedWriter. (OutputStreamWriter. (.getOutputStream sock))))))
  (send-line [this line]
    (let [out (:out this)]
      (.write ^BufferedWriter out ^String line)
      ;; Flush per line so events are not stuck in the buffer.
      (.flush ^BufferedWriter out)))
  (close [this]
    (.close ^BufferedWriter (:out this))
    (.close ^Socket (:socket this))))
;; Plain TCP transport: one socket, flushed after every line.
(defrecord LogStashTCPClient [^String host ^int port]
  LogStashClient
  (open [this]
    (let [sock (Socket. host port)]
      (assoc this
             :socket sock
             :out (OutputStreamWriter. (.getOutputStream sock)))))
  (send-line [this line]
    (let [out (:out this)]
      (.write ^OutputStreamWriter out ^String line)
      ;; Flush per line so events are not stuck in the buffer.
      (.flush ^OutputStreamWriter out)))
  (close [this]
    (.close ^OutputStreamWriter (:out this))
    (.close ^Socket (:socket this))))
;; UDP transport: each line is sent as a single datagram.
(defrecord LogStashUDPClient [^String host ^int port]
  LogStashClient
  (open [this]
    (assoc this
           :socket (DatagramSocket.)
           :host host
           :port port))
  (send-line [this line]
    ;; Fix: size the datagram by the encoded byte count, not the
    ;; character count.  For any non-ASCII payload (count line) is
    ;; smaller than the byte length, which truncated the datagram.
    (let [^bytes payload (.getBytes ^String line)
          addr (InetAddress/getByName (:host this))
          datagram (DatagramPacket. payload (alength payload) ^InetAddress addr port)]
      (.send ^DatagramSocket (:socket this) datagram)))
  (close [this]
    (.close ^DatagramSocket (:socket this))))
(defn logstash
  "Returns a function which accepts an event and sends it to logstash.
  Silently drops events when logstash is down. Attempts to reconnect
  automatically every five seconds. Use:
  (logstash {:host \"logstash.local\" :port 2003})
  Options:
  - :pool-size The number of connections to keep open. Default 4.
  - :reconnect-interval How many seconds to wait between attempts to connect.
  Default 5.
  - :claim-timeout How many seconds to wait for a logstash connection from
  the pool. Default 0.1.
  - :block-start Wait for the pool's initial connections to open
  before returning.
  - :protocol Protocol to use. Either :tcp (default), :tls or :udp.
  TLS options:
  - :key A PKCS8-encoded private key file
  - :cert The corresponding public certificate
  - :ca-cert The certificate of the CA which signed this key"
  [opts]
  (let [opts (merge {:host "127.0.0.1"
                     :port 9999
                     :protocol :tcp
                     :claim-timeout 0.1
                     :pool-size 4} opts)
        ;; Fixed-size pool of transport clients; broken connections are
        ;; regenerated on :reconnect-interval.
        pool (fixed-pool
               (fn []
                 (info "Connecting to " (select-keys opts [:host :port :protocol]))
                 (let [host (:host opts)
                       port (:port opts)
                       ;; Pick the transport record for the configured protocol.
                       client (open (condp = (:protocol opts)
                                      :tcp (LogStashTCPClient. host port)
                                      :udp (LogStashUDPClient. host port)
                                      :tls (LogStashTLSClient. host port opts)))]
                   (info "Connected")
                   client))
               (fn [client]
                 (info "Closing connection to "
                       (select-keys opts [:host :port]))
                 (close client))
               {:size (:pool-size opts)
                :block-start (:block-start opts)
                :regenerate-interval (:reconnect-interval opts)})]
    ;; The returned stream fn claims a pooled client per event and
    ;; ships the event as one JSON line.
    (fn [event]
      (with-pool [client pool (:claim-timeout opts)]
        (let [string (event-to-json (merge event {:source (:host event)}))]
          (send-line client (str string "\n")))))))
| null | https://raw.githubusercontent.com/riemann/riemann/1649687c0bd913c378701ee0b964a9863bde7c7c/src/riemann/logstash.clj | clojure | (ns riemann.logstash
"Forwards events to LogStash."
(:refer-clojure :exclude [replace])
(:import
(java.net Socket
DatagramSocket
DatagramPacket
InetAddress)
(java.io Writer OutputStreamWriter BufferedWriter))
(:use [clojure.string :only [split join replace]]
clojure.tools.logging
riemann.pool
riemann.common
less.awful.ssl))
(defprotocol LogStashClient
(open [client]
"Creates a LogStash client")
(send-line [client line]
"Sends a formatted line to LogStash")
(close [client]
"Cleans up (closes sockets etc.)"))
(defrecord LogStashTLSClient [^String host ^int port opts]
LogStashClient
(open [this]
(let [sock (socket (ssl-context (:key opts) (:cert opts) (:ca-cert opts))
host
port)]
(.startHandshake sock)
(assoc this
:socket sock
:out (BufferedWriter. (OutputStreamWriter. (.getOutputStream sock))))))
(send-line [this line]
(let [out (:out this)]
(.write ^BufferedWriter out ^String line)
(.flush ^BufferedWriter out)))
(close [this]
(.close ^BufferedWriter (:out this))
(.close ^Socket (:socket this))))
(defrecord LogStashTCPClient [^String host ^int port]
LogStashClient
(open [this]
(let [sock (Socket. host port)]
(assoc this
:socket sock
:out (OutputStreamWriter. (.getOutputStream sock)))))
(send-line [this line]
(let [out (:out this)]
(.write ^OutputStreamWriter out ^String line)
(.flush ^OutputStreamWriter out)))
(close [this]
(.close ^OutputStreamWriter (:out this))
(.close ^Socket (:socket this))))
(defrecord LogStashUDPClient [^String host ^int port]
LogStashClient
(open [this]
(assoc this
:socket (DatagramSocket.)
:host host
:port port))
(send-line [this line]
(let [bytes (.getBytes ^String line)
length (count line)
addr (InetAddress/getByName (:host this))
datagram (DatagramPacket. bytes length ^InetAddress addr port)]
(.send ^DatagramSocket (:socket this) datagram)))
(close [this]
(.close ^DatagramSocket (:socket this))))
(defn logstash
"Returns a function which accepts an event and sends it to logstash.
Silently drops events when logstash is down. Attempts to reconnect
automatically every five seconds. Use:
(logstash {:host \"logstash.local\" :port 2003})
Options:
- :pool-size The number of connections to keep open. Default 4.
- :reconnect-interval How many seconds to wait between attempts to connect.
Default 5.
- :claim-timeout How many seconds to wait for a logstash connection from
the pool. Default 0.1.
- :block-start Wait for the pool's initial connections to open
before returning.
- :protocol Protocol to use. Either :tcp (default), :tls or :udp.
TLS options:
- :key A PKCS8-encoded private key file
- :cert The corresponding public certificate
- :ca-cert The certificate of the CA which signed this key"
[opts]
(let [opts (merge {:host "127.0.0.1"
:port 9999
:protocol :tcp
:claim-timeout 0.1
:pool-size 4} opts)
pool (fixed-pool
(fn []
(info "Connecting to " (select-keys opts [:host :port :protocol]))
(let [host (:host opts)
port (:port opts)
client (open (condp = (:protocol opts)
:tcp (LogStashTCPClient. host port)
:udp (LogStashUDPClient. host port)
:tls (LogStashTLSClient. host port opts)))]
(info "Connected")
client))
(fn [client]
(info "Closing connection to "
(select-keys opts [:host :port]))
(close client))
{:size (:pool-size opts)
:block-start (:block-start opts)
:regenerate-interval (:reconnect-interval opts)})]
(fn [event]
(with-pool [client pool (:claim-timeout opts)]
(let [string (event-to-json (merge event {:source (:host event)}))]
(send-line client (str string "\n")))))))
| |
ed780ed75f9200ae228151731073f96d30aeb6f82ec8a6b15fe3cc4bfa31dc96 | cj1128/sicp-review | lazy-eval.scm | ;; lazy eval
(define (eval exp env)
(cond ((self-evaluating? exp) exp)
((variable? exp) (lookup-variable-value exp env))
((quoted? exp) (text-of-quotation exp))
((assignment? exp) (eval-assignment exp env))
((definition? exp) (eval-definition exp env))
((if? exp) (eval-if exp env))
((lambda? exp) (make-procedure (lambda-parameters exp)
(lambda-body exp)
env))
((begin? exp)
(eval-sequence (begin-actions exp) env))
((cond? exp) (eval (cond->if exp) env))
((let? exp) (eval (let->combination exp) env))
((let*? exp) (eval (let*->nested-lets exp) env))
((application? exp)
(m-apply (actual-value (operator exp) env)
(operands exp)
env))
(else
(error "Unknown expression type: EVAL" exp))))
;; redefine eval-if
(define (eval-if exp env)
(if (true? (actual-value (if-predicate exp) env))
(eval (if-consequent exp) env)
(eval (if-alternative exp) env)))
(define (actual-value exp env)
(force-it (eval exp env)))
(define (m-apply procedure arguments env)
(cond ((primitive-procedure? procedure)
(apply-primitive-procedure
procedure
(list-of-arg-values arguments env)))
((compound-procedure? procedure)
(eval-sequence
(procedure-body procedure)
(extend-environment
(procedure-parameters procedure)
(list-of-delayed-args arguments env)
(procedure-environment procedure))))
(else (error "Unknonw procedure type: APPLY" procedure))))
(define (list-of-arg-values exps env)
(if (no-operands? exps)
'()
(cons (actual-value (first-operand exps) env)
(list-of-arg-values (rest-operands exps) env))))
(define (list-of-delayed-args exps env)
(if (no-operands? exps)
'()
(cons (delay-it (first-operand exps)
env)
(list-of-delayed-args (rest-operands exps) env))))
;; implement thunks with memoization
;; this is force-it without memoization
;; (define (force-it obj)
;; (if (thunk? obj)
;; (actual-value (thunk-exp obj) (thunk-env obj))
;; obj))
(define (delay-it exp env)
(list 'thunk exp env))
(define (thunk? obj)
(tagged-list? obj 'thunk))
(define (thunk-exp thunk) (cadr thunk))
(define (thunk-env thunk) (caddr thunk))
(define (evaluated-thunk? obj)
(tagged-list? obj 'evaluated-thunk))
(define (thunk-value evaluated-thunk)
(cadr evaluated-thunk))
(define (force-it obj)
(cond ((thunk? obj)
(let ((result (actual-value (thunk-exp obj)
(thunk-env obj))))
(set-car! obj 'evaluated-thunk)
(set-car! (cdr obj) result)
(set-cdr! (cdr obj) '())
result))
((evaluated-thunk? obj) (thunk-value obj))
(else obj)))
| null | https://raw.githubusercontent.com/cj1128/sicp-review/efaa2f863b7f03c51641c22d701bac97e398a050/chapter-4/lazy-eval.scm | scheme | lazy eval
redefine eval-if
implement thunks with memoization
this is force-it without memoization
(define (force-it obj)
(if (thunk? obj)
(actual-value (thunk-exp obj) (thunk-env obj))
obj)) |
(define (eval exp env)
(cond ((self-evaluating? exp) exp)
((variable? exp) (lookup-variable-value exp env))
((quoted? exp) (text-of-quotation exp))
((assignment? exp) (eval-assignment exp env))
((definition? exp) (eval-definition exp env))
((if? exp) (eval-if exp env))
((lambda? exp) (make-procedure (lambda-parameters exp)
(lambda-body exp)
env))
((begin? exp)
(eval-sequence (begin-actions exp) env))
((cond? exp) (eval (cond->if exp) env))
((let? exp) (eval (let->combination exp) env))
((let*? exp) (eval (let*->nested-lets exp) env))
((application? exp)
(m-apply (actual-value (operator exp) env)
(operands exp)
env))
(else
(error "Unknown expression type: EVAL" exp))))
(define (eval-if exp env)
(if (true? (actual-value (if-predicate exp) env))
(eval (if-consequent exp) env)
(eval (if-alternative exp) env)))
(define (actual-value exp env)
(force-it (eval exp env)))
(define (m-apply procedure arguments env)
(cond ((primitive-procedure? procedure)
(apply-primitive-procedure
procedure
(list-of-arg-values arguments env)))
((compound-procedure? procedure)
(eval-sequence
(procedure-body procedure)
(extend-environment
(procedure-parameters procedure)
(list-of-delayed-args arguments env)
(procedure-environment procedure))))
(else (error "Unknonw procedure type: APPLY" procedure))))
(define (list-of-arg-values exps env)
(if (no-operands? exps)
'()
(cons (actual-value (first-operand exps) env)
(list-of-arg-values (rest-operands exps) env))))
(define (list-of-delayed-args exps env)
(if (no-operands? exps)
'()
(cons (delay-it (first-operand exps)
env)
(list-of-delayed-args (rest-operands exps) env))))
(define (delay-it exp env)
(list 'thunk exp env))
(define (thunk? obj)
(tagged-list? obj 'thunk))
(define (thunk-exp thunk) (cadr thunk))
(define (thunk-env thunk) (caddr thunk))
(define (evaluated-thunk? obj)
(tagged-list? obj 'evaluated-thunk))
(define (thunk-value evaluated-thunk)
(cadr evaluated-thunk))
(define (force-it obj)
(cond ((thunk? obj)
(let ((result (actual-value (thunk-exp obj)
(thunk-env obj))))
(set-car! obj 'evaluated-thunk)
(set-car! (cdr obj) result)
(set-cdr! (cdr obj) '())
result))
((evaluated-thunk? obj) (thunk-value obj))
(else obj)))
|
88064cbeaf67e6493263bfcf3ca3b6f78cfe3a83b77ea7e8a279db63dc593271 | Chris00/ocaml-benchmark | iter.ml | open Bigarray
let n = 1_000
(* Bigarrays *)
type vec = (float, float64_elt, c_layout) Array1.t
let a = Array1.create float64 c_layout n
let () = Array1.fill a 1.
let ba f (x: vec) =
for i = 0 to n - 1 do f x.{i} done
let ba_unsafe f (x: vec) =
for i = 0 to n - 1 do f (Array1.unsafe_get x i) done
(* Arrays *)
let b = Array.make n 1.
let arr f (x: float array) =
for i = 0 to n-1 do f x.(i) done
let arr_unsafe f (x: float array) =
for i = 0 to n-1 do f (Array.unsafe_get x i) done
(* Lists *)
let c = Array.to_list b
open Benchmark
let () =
(* Simulate a simple side effect *)
let z = ref 0. in
let f x = z := x in
let res = throughputN ~repeat:3 3
[("ba", (fun () -> ba f a), ());
("ba_unsafe", (fun () -> ba_unsafe f a), ());
("arr", (fun () -> arr f b), ());
("arr_unsafe", (fun () -> arr_unsafe f b), ());
("list", (fun () -> List.iter f c), ())
] in
print_endline "Iterating a function with a simple side effect:";
tabulate res
| null | https://raw.githubusercontent.com/Chris00/ocaml-benchmark/72965ad1558c6f5d47b1559cee74423cbc5e970f/examples/iter.ml | ocaml | Bigarrays
Arrays
Lists
Simulate a simple side effect | open Bigarray
let n = 1_000
type vec = (float, float64_elt, c_layout) Array1.t
let a = Array1.create float64 c_layout n
let () = Array1.fill a 1.
let ba f (x: vec) =
for i = 0 to n - 1 do f x.{i} done
let ba_unsafe f (x: vec) =
for i = 0 to n - 1 do f (Array1.unsafe_get x i) done
let b = Array.make n 1.
let arr f (x: float array) =
for i = 0 to n-1 do f x.(i) done
let arr_unsafe f (x: float array) =
for i = 0 to n-1 do f (Array.unsafe_get x i) done
let c = Array.to_list b
open Benchmark
let () =
let z = ref 0. in
let f x = z := x in
let res = throughputN ~repeat:3 3
[("ba", (fun () -> ba f a), ());
("ba_unsafe", (fun () -> ba_unsafe f a), ());
("arr", (fun () -> arr f b), ());
("arr_unsafe", (fun () -> arr_unsafe f b), ());
("list", (fun () -> List.iter f c), ())
] in
print_endline "Iterating a function with a simple side effect:";
tabulate res
|
a461328bd6f538b7a07856f73b7262fbb8c1142e99c876a764bc02e81c0f8e07 | bldl/magnolisp | test-type-infer-6.rkt | #lang magnolisp
(typedef int #:: (foreign))
(function (seven-of x) #:: ([type (exists T (-> T T))])
7)
(function (f) #:: (export)
(seven-of (cast int 0)))
(f)
| null | https://raw.githubusercontent.com/bldl/magnolisp/191d529486e688e5dda2be677ad8fe3b654e0d4f/tests/test-type-infer-6.rkt | racket | #lang magnolisp
(typedef int #:: (foreign))
(function (seven-of x) #:: ([type (exists T (-> T T))])
7)
(function (f) #:: (export)
(seven-of (cast int 0)))
(f)
| |
e3d5474e56f18c2b4636827b319d2ff587e0133196698e1b9ca8c8c6256a5963 | huangjs/cl | forma1.lisp | (declare (special $floatformat floatmax floatmin floatsmall
floatbig floatbigbig float-enote))
(defmvar $floatformat t)
;;; defaults
(defmvar floatmax 6)
(defmvar floatmin -4)
(defmvar floatbig 2)
(defmvar floatbigbig 1)
(defmvar floatsmall 3)
(defmvar float-enote 2)
(putprop 'makestring1 (get 'makestring 'subr) 'subr)
(defun makestring (form)
(cond ((and $floatformat (floatp form)) (nicefloat form))
((makestring1 form))))
(defun nicefloat (flt)
(cond ((= flt 0.0) (list 48. 46. 48.))
((< flt 0.0) (cons 45. (niceflt (abs flt))))
((niceflt (abs flt)))))
(defun niceflt (aflt)
(declare (fixnum i))
(do ((i 0)
(simflt aflt)
(fac (cond ((< aflt 1.0) 1e1) (1e-1)))
(inc (cond ((< aflt 1.0) -1) (1))))
((and (< simflt 1e1) (not (< simflt 1.0)))
(floatcheck (exploden simflt) i))
(setq simflt (* simflt fac))
(incf i inc)))
(defun floatcheck (repres pwr)
(declare (fixnum pwr))
(cond
((or (> pwr (1- floatmax)) (< pwr floatmin))
(cons (car repres)
(cons 46.
(append (fracgen (cddr repres) float-enote nil)
(cons 69.(cond ((> pwr 0)
(cons 43 (exploden pwr)))
((exploden pwr))))))))
((< pwr 0.)
((lambda (frac)
(cons 48.
(cons 46.
(cond ((equal frac '(48.)) frac)
((append (fraczeros (1- (abs pwr)))
frac))))))
(fracgen (delete 46. repres) floatsmall nil)))
((cons (car repres)
(floatnone (cddr repres)
pwr
(cond ((< pwr 3.) floatbig)
(floatbigbig)))))))
(defun fraczeros (n)
(declare (fixnum n))
(cond ((zerop n) nil) ((cons 48. (fraczeros (1- n))))))
(defun floatnone (repres pwr floatfrac)
(declare (fixnum pwr floatfrac))
(cond ((zerop pwr) (cons 46. (fracgen repres floatfrac nil)))
((cons (cond (repres (car repres)) (48.))
(floatnone (cdr repres) (1- pwr) floatfrac)))))
(defun felimin (revrep)
(cond ((null revrep) (ncons 48.))
((= (car revrep) 48.) (felimin (cdr revrep)))
((reverse revrep))))
(defun fracgen (repres floatfrac result)
(declare (fixnum floatfrac))
(cond ((null repres) (felimin result))
((zerop floatfrac) (felimin result))
((fracgen (cdr repres)
(1- floatfrac)
(cons (car repres) result)))))
| null | https://raw.githubusercontent.com/huangjs/cl/96158b3f82f82a6b7d53ef04b3b29c5c8de2dbf7/lib/maxima/share/numeric/forma1.lisp | lisp | defaults | (declare (special $floatformat floatmax floatmin floatsmall
floatbig floatbigbig float-enote))
(defmvar $floatformat t)
(defmvar floatmax 6)
(defmvar floatmin -4)
(defmvar floatbig 2)
(defmvar floatbigbig 1)
(defmvar floatsmall 3)
(defmvar float-enote 2)
(putprop 'makestring1 (get 'makestring 'subr) 'subr)
(defun makestring (form)
(cond ((and $floatformat (floatp form)) (nicefloat form))
((makestring1 form))))
(defun nicefloat (flt)
(cond ((= flt 0.0) (list 48. 46. 48.))
((< flt 0.0) (cons 45. (niceflt (abs flt))))
((niceflt (abs flt)))))
(defun niceflt (aflt)
(declare (fixnum i))
(do ((i 0)
(simflt aflt)
(fac (cond ((< aflt 1.0) 1e1) (1e-1)))
(inc (cond ((< aflt 1.0) -1) (1))))
((and (< simflt 1e1) (not (< simflt 1.0)))
(floatcheck (exploden simflt) i))
(setq simflt (* simflt fac))
(incf i inc)))
(defun floatcheck (repres pwr)
(declare (fixnum pwr))
(cond
((or (> pwr (1- floatmax)) (< pwr floatmin))
(cons (car repres)
(cons 46.
(append (fracgen (cddr repres) float-enote nil)
(cons 69.(cond ((> pwr 0)
(cons 43 (exploden pwr)))
((exploden pwr))))))))
((< pwr 0.)
((lambda (frac)
(cons 48.
(cons 46.
(cond ((equal frac '(48.)) frac)
((append (fraczeros (1- (abs pwr)))
frac))))))
(fracgen (delete 46. repres) floatsmall nil)))
((cons (car repres)
(floatnone (cddr repres)
pwr
(cond ((< pwr 3.) floatbig)
(floatbigbig)))))))
(defun fraczeros (n)
(declare (fixnum n))
(cond ((zerop n) nil) ((cons 48. (fraczeros (1- n))))))
(defun floatnone (repres pwr floatfrac)
(declare (fixnum pwr floatfrac))
(cond ((zerop pwr) (cons 46. (fracgen repres floatfrac nil)))
((cons (cond (repres (car repres)) (48.))
(floatnone (cdr repres) (1- pwr) floatfrac)))))
(defun felimin (revrep)
(cond ((null revrep) (ncons 48.))
((= (car revrep) 48.) (felimin (cdr revrep)))
((reverse revrep))))
(defun fracgen (repres floatfrac result)
(declare (fixnum floatfrac))
(cond ((null repres) (felimin result))
((zerop floatfrac) (felimin result))
((fracgen (cdr repres)
(1- floatfrac)
(cons (car repres) result)))))
|
b2806cc07aed9b4c665cfd74166b1d9bfc6c0dab25c9906ad230292c091cdc20 | McCLIM/McCLIM | package.lisp | ;;; ---------------------------------------------------------------------------
;;; License: LGPL-2.1+ (See file 'Copyright' for details).
;;; ---------------------------------------------------------------------------
;;;
( c ) copyright 2019 - 2020 Jan Moringen < >
;;;
;;; ---------------------------------------------------------------------------
;;;
Package definition for unit tests of the clouseau system .
;;;
(cl:defpackage #:clouseau.test
(:use
#:cl
#:clouseau
#:fiveam)
(:shadowing-import-from #:clouseau
#:inspect)
(:export
#:run-tests))
(cl:in-package #:clouseau.test)
(def-suite :clouseau)
(defun run-tests ()
(run! :clouseau))
| null | https://raw.githubusercontent.com/McCLIM/McCLIM/7c890f1ac79f0c6f36866c47af89398e2f05b343/Apps/Clouseau/test/package.lisp | lisp | ---------------------------------------------------------------------------
License: LGPL-2.1+ (See file 'Copyright' for details).
---------------------------------------------------------------------------
---------------------------------------------------------------------------
| ( c ) copyright 2019 - 2020 Jan Moringen < >
Package definition for unit tests of the clouseau system .
(cl:defpackage #:clouseau.test
(:use
#:cl
#:clouseau
#:fiveam)
(:shadowing-import-from #:clouseau
#:inspect)
(:export
#:run-tests))
(cl:in-package #:clouseau.test)
(def-suite :clouseau)
(defun run-tests ()
(run! :clouseau))
|
620b474afe6138cfdb6e6b770bc485d8a37598d4fca807d95c1c7d95ddab277e | ghc/packages-dph | Int.hs | {-# OPTIONS_HADDOCK hide #-}
# LANGUAGE CPP #
#include "fusion-phases.h"
| PR instance for Ints
module Data.Array.Parallel.PArray.PData.Int () where
import Data.Array.Parallel.PArray.PData.Base
import Data.Typeable as T
import qualified Data.Array.Parallel.Unlifted as U
import qualified Data.Vector as V
import Text.PrettyPrint
import Prelude as P
import Data.Array.Parallel.Pretty
-- PR -------------------------------------------------------------------------
instance PR Int where
# NOINLINE validPR #
validPR _
= True
# NOINLINE nfPR #
nfPR (PInt xx)
= xx `seq` ()
# NOINLINE similarPR #
similarPR = (==)
# NOINLINE coversPR #
coversPR weak (PInt uarr) ix
| weak = ix <= U.length uarr
| otherwise = ix < U.length uarr
# NOINLINE pprpPR #
pprpPR i
= int i
# NOINLINE pprpDataPR #
pprpDataPR (PInt uarr)
= text "PInt" <+> pprp uarr
# NOINLINE typeRepPR #
typeRepPR x = T.typeOf x
# NOINLINE typeRepDataPR #
typeRepDataPR _ = T.typeOf (5 :: Int)
# NOINLINE typeRepDatasPR #
typeRepDatasPR _ = T.typeOf (5 :: Int)
-- Constructors -------------------------------
# INLINE_PDATA emptyPR #
emptyPR
= PInt U.empty
# INLINE_PDATA replicatePR #
replicatePR len x
= PInt (U.replicate len x)
# INLINE_PDATA replicatesPR #
replicatesPR segd (PInt arr)
= PInt (U.replicate_s segd arr)
# INLINE_PDATA appendPR #
appendPR (PInt arr1) (PInt arr2)
= PInt $ arr1 U.+:+ arr2
# INLINE_PDATA appendvsPR #
appendvsPR segdResult segd1 (PInts arr1) segd2 (PInts arr2)
= PInt $ U.append_vs segdResult segd1 arr1 segd2 arr2
-- Projections --------------------------------
# INLINE_PDATA lengthPR #
lengthPR (PInt uarr)
= U.length uarr
# INLINE_PDATA indexPR #
indexPR (PInt uarr) ix
= U.index "indexPR[Int]" uarr ix
# INLINE_PDATA indexsPR #
indexsPR (PInts pvecs) srcixs
= PInt $ U.map (\(src, ix) -> U.unsafeIndex2s pvecs src ix) srcixs
# INLINE_PDATA indexvsPR #
indexvsPR (PInts arrs) vsegd srcixs
= PInt $ U.indexs_avs arrs vsegd srcixs
# INLINE_PDATA extractPR #
extractPR (PInt arr) start len
= PInt $ U.extract arr start len
# INLINE_PDATA extractssPR #
extractssPR (PInts arrs) ssegd
= PInt $ U.extracts_ass ssegd arrs
# INLINE_PDATA extractvsPR #
extractvsPR (PInts arrs) vsegd
= PInt $ U.extracts_avs vsegd arrs
-- Pack and Combine ---------------------------
# NOINLINE packByTagPR #
packByTagPR (PInt arr1) arrTags tag
= PInt $ U.packByTag arr1 arrTags tag
# NOINLINE combine2PR #
combine2PR sel (PInt arr1) (PInt arr2)
= PInt $ U.combine2 (U.tagsSel2 sel)
(U.repSel2 sel)
arr1 arr2
-- Conversions --------------------------------
# NOINLINE fromVectorPR #
fromVectorPR xx
= PInt $U.fromList $ V.toList xx
# NOINLINE toVectorPR #
toVectorPR (PInt arr)
= V.fromList $ U.toList arr
-- PDatas -------------------------------------
# INLINE_PDATA emptydPR #
emptydPR
= PInts $ U.emptys
{-# INLINE_PDATA singletondPR #-}
singletondPR (PInt arr)
= PInts $ U.singletons arr
# INLINE_PDATA lengthdPR #
lengthdPR (PInts arrs)
= U.lengths arrs
# INLINE_PDATA indexdPR #
indexdPR (PInts arrs) ix
= PInt $ arrs `U.unsafeIndexs` ix
# INLINE_PDATA appenddPR #
appenddPR (PInts xs) (PInts ys)
= PInts $ xs `U.appends` ys
# NOINLINE fromVectordPR #
fromVectordPR pdatas
= PInts
$ U.fromVectors
$ V.map (\(PInt vec) -> vec) pdatas
# NOINLINE toVectordPR #
toVectordPR (PInts vec)
= V.map PInt $ U.toVectors vec
-- Show -----------------------------------------------------------------------
deriving instance Show (PData Int)
deriving instance Show (PDatas Int)
instance PprPhysical (U.Array Int) where
pprp uarr
= text (show $ U.toList uarr)
instance PprVirtual (PData Int) where
pprv (PInt vec)
= text (show $ U.toList vec)
| null | https://raw.githubusercontent.com/ghc/packages-dph/64eca669f13f4d216af9024474a3fc73ce101793/dph-lifted-vseg/Data/Array/Parallel/PArray/PData/Int.hs | haskell | # OPTIONS_HADDOCK hide #
PR -------------------------------------------------------------------------
Constructors -------------------------------
Projections --------------------------------
Pack and Combine ---------------------------
Conversions --------------------------------
PDatas -------------------------------------
# INLINE_PDATA singletondPR #
Show ----------------------------------------------------------------------- | # LANGUAGE CPP #
#include "fusion-phases.h"
| PR instance for Ints
module Data.Array.Parallel.PArray.PData.Int () where
import Data.Array.Parallel.PArray.PData.Base
import Data.Typeable as T
import qualified Data.Array.Parallel.Unlifted as U
import qualified Data.Vector as V
import Text.PrettyPrint
import Prelude as P
import Data.Array.Parallel.Pretty
instance PR Int where
# NOINLINE validPR #
validPR _
= True
# NOINLINE nfPR #
nfPR (PInt xx)
= xx `seq` ()
# NOINLINE similarPR #
similarPR = (==)
# NOINLINE coversPR #
coversPR weak (PInt uarr) ix
| weak = ix <= U.length uarr
| otherwise = ix < U.length uarr
# NOINLINE pprpPR #
pprpPR i
= int i
# NOINLINE pprpDataPR #
pprpDataPR (PInt uarr)
= text "PInt" <+> pprp uarr
# NOINLINE typeRepPR #
typeRepPR x = T.typeOf x
# NOINLINE typeRepDataPR #
typeRepDataPR _ = T.typeOf (5 :: Int)
# NOINLINE typeRepDatasPR #
typeRepDatasPR _ = T.typeOf (5 :: Int)
# INLINE_PDATA emptyPR #
emptyPR
= PInt U.empty
# INLINE_PDATA replicatePR #
replicatePR len x
= PInt (U.replicate len x)
# INLINE_PDATA replicatesPR #
replicatesPR segd (PInt arr)
= PInt (U.replicate_s segd arr)
# INLINE_PDATA appendPR #
appendPR (PInt arr1) (PInt arr2)
= PInt $ arr1 U.+:+ arr2
# INLINE_PDATA appendvsPR #
appendvsPR segdResult segd1 (PInts arr1) segd2 (PInts arr2)
= PInt $ U.append_vs segdResult segd1 arr1 segd2 arr2
# INLINE_PDATA lengthPR #
lengthPR (PInt uarr)
= U.length uarr
# INLINE_PDATA indexPR #
indexPR (PInt uarr) ix
= U.index "indexPR[Int]" uarr ix
# INLINE_PDATA indexsPR #
indexsPR (PInts pvecs) srcixs
= PInt $ U.map (\(src, ix) -> U.unsafeIndex2s pvecs src ix) srcixs
# INLINE_PDATA indexvsPR #
indexvsPR (PInts arrs) vsegd srcixs
= PInt $ U.indexs_avs arrs vsegd srcixs
# INLINE_PDATA extractPR #
extractPR (PInt arr) start len
= PInt $ U.extract arr start len
# INLINE_PDATA extractssPR #
extractssPR (PInts arrs) ssegd
= PInt $ U.extracts_ass ssegd arrs
# INLINE_PDATA extractvsPR #
extractvsPR (PInts arrs) vsegd
= PInt $ U.extracts_avs vsegd arrs
# NOINLINE packByTagPR #
packByTagPR (PInt arr1) arrTags tag
= PInt $ U.packByTag arr1 arrTags tag
# NOINLINE combine2PR #
combine2PR sel (PInt arr1) (PInt arr2)
= PInt $ U.combine2 (U.tagsSel2 sel)
(U.repSel2 sel)
arr1 arr2
# NOINLINE fromVectorPR #
fromVectorPR xx
= PInt $U.fromList $ V.toList xx
# NOINLINE toVectorPR #
toVectorPR (PInt arr)
= V.fromList $ U.toList arr
# INLINE_PDATA emptydPR #
emptydPR
= PInts $ U.emptys
singletondPR (PInt arr)
= PInts $ U.singletons arr
# INLINE_PDATA lengthdPR #
lengthdPR (PInts arrs)
= U.lengths arrs
# INLINE_PDATA indexdPR #
indexdPR (PInts arrs) ix
= PInt $ arrs `U.unsafeIndexs` ix
# INLINE_PDATA appenddPR #
appenddPR (PInts xs) (PInts ys)
= PInts $ xs `U.appends` ys
# NOINLINE fromVectordPR #
fromVectordPR pdatas
= PInts
$ U.fromVectors
$ V.map (\(PInt vec) -> vec) pdatas
# NOINLINE toVectordPR #
toVectordPR (PInts vec)
= V.map PInt $ U.toVectors vec
deriving instance Show (PData Int)
deriving instance Show (PDatas Int)
instance PprPhysical (U.Array Int) where
pprp uarr
= text (show $ U.toList uarr)
instance PprVirtual (PData Int) where
pprv (PInt vec)
= text (show $ U.toList vec)
|
76215ffec02d1964e86c7d4a9b15f079d7e135ff838fc3681fafa411c6e162e1 | mzp/coq-ruby | coq_commands.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(* $Id: coq_commands.ml 10994 2008-05-26 16:21:31Z jnarboux $ *)
let commands = [
[(* "Abort"; *)
"Add Abstract Ring A Aplus Amult Aone Azero Ainv Aeq T.";
"Add Abstract Semi Ring A Aplus Amult Aone Azero Aeq T.";
"Add Field";
"Add LoadPath";
"Add ML Path";
"Add Morphism";
"Add Printing If";
"Add Printing Let";
"Add Rec LoadPath";
"Add Rec ML Path";
"Add Ring A Aplus Amult Aone Azero Ainv Aeq T [ c1 ... cn ]. ";
"Add Semi Ring A Aplus Amult Aone Azero Aeq T [ c1 ... cn ].";
"Add Relation";
"Add Setoid";
"Axiom";];
[(* "Back"; *) ];
["Canonical Structure";
"Chapter";
"Coercion";
"Coercion Local";
"CoFixpoint";
"CoInductive";
];
["Declare ML Module";
"Defined.";
"Definition";
"Derive Dependent Inversion";
"Derive Dependent Inversion__clear";
"Derive Inversion";
"Derive Inversion__clear";
];
["End";
"End Silent.";
"Eval";
"Extract Constant";
"Extract Inductive";
"Extraction Inline";
"Extraction Language";
"Extraction NoInline";];
["Fact";
"Fixpoint";
"Focus";];
["Global Variable";
"Goal";
"Grammar";];
["Hint";
"Hint Constructors";
"Hint Extern";
"Hint Immediate";
"Hint Resolve";
"Hint Rewrite";
"Hint Unfold";
"Hypothesis";];
["Identity Coercion";
"Implicit Arguments";
"Inductive";
"Infix";
];
["Lemma";
"Load";
"Load Verbose";
"Local";
"Ltac";
];
["Module";
"Module Type";
"Mutual Inductive";];
["Notation";
"Next Obligation";];
["Opaque";
"Obligations Tactic";];
["Parameter";
"Proof.";
"Program Definition";
"Program Fixpoint";
"Program Lemma";
"Program Theorem";
];
["Qed.";
];
["Read Module";
"Record";
"Remark";
"Remove LoadPath";
"Remove Printing If";
"Remove Printing Let";
"Require";
"Require Export";
"Require Import";
"Reset Extraction Inline";
"Restore State";
];
[ "Save.";
"Scheme";
"Section";
"Set Extraction AutoInline";
"Set Extraction Optimize";
"Set Hyps__limit";
"Set Implicit Arguments";
" Set Printing Coercion " ;
" Set Printing Coercions " ;
" Set Printing Synth " ;
"Set Printing Coercions";
"Set Printing Synth";*)
"Set Printing Wildcard";
"Set Silent.";
"Set Undo";
" Show " ;
" Show Conjectures " ;
" Show Implicits " ;
" Show Intro " ;
" Show Intros " ;
" Show Programs " ;
" Show Proof " ;
" Show Script " ;
" Show Tree " ;
"Show Conjectures";
"Show Implicits";
"Show Intro";
"Show Intros";
"Show Programs";
"Show Proof";
"Show Script";
"Show Tree";*)
"Structure";
(* "Suspend"; *)
"Syntactic Definition";
"Syntax";];
[
"Test Printing If";
"Test Printing Let";
"Test Printing Synth";
"Test Printing Wildcard";
"Theorem";
"Time";
"Transparent";];
[(* "Undo"; *)
"Unfocus";
"Unset Extraction AutoInline";
"Unset Extraction Optimize";
"Unset Hyps__limit";
"Unset Implicit Arguments";
" Unset Printing Coercion " ;
" Unset Printing Coercions " ;
" Unset Printing Synth " ;
"Unset Printing Coercion";
"Unset Printing Coercions";
"Unset Printing Synth"; *)
"Unset Printing Wildcard";
"Unset Silent.";
"Unset Undo";];
["Variable";
"Variables";];
["Write State";];
]
let state_preserving = [
"Check";
"Eval";
"Eval lazy in";
"Eval vm_compute in";
"Eval compute in";
"Extraction";
"Extraction Library";
"Extraction Module";
"Inspect";
"Locate";
"Obligations";
"Print";
"Print All.";
"Print Classes";
"Print Coercion Paths";
"Print Coercions";
"Print Extraction Inline";
"Print Grammar";
"Print Graph";
"Print Hint";
"Print Hint *";
"Print HintDb";
"Print Implicit";
"Print LoadPath";
"Print ML Modules";
"Print ML Path";
"Print Module";
"Print Module Type";
"Print Modules";
"Print Proof";
"Print Rewrite HintDb";
"Print Setoids";
"Print Scope";
"Print Scopes.";
"Print Section";
"Print Table Printing If.";
"Print Table Printing Let.";
"Print Tables.";
"Print Term";
"Print Visibility";
"Pwd.";
"Recursive Extraction";
"Recursive Extraction Library";
"Search";
"SearchAbout";
"SearchPattern";
"SearchRewrite";
"Show";
"Show Conjectures";
"Show Existentials";
"Show Implicits";
"Show Intro";
"Show Intros";
"Show Proof";
"Show Script";
"Show Tree";
"Test Printing If";
"Test Printing Let";
"Test Printing Synth";
"Test Printing Wildcard";
"Whelp Hint";
"Whelp Locate";
]
let tactics =
[
[
"abstract";
"absurd";
"apply";
"apply __ with";
"assert";
"assert (__:__)";
"assert (__:=__)";
"assumption";
"auto";
"auto with";
"autorewrite";
];
[
"case";
"case __ with";
"casetype";
"cbv";
"cbv in";
"change";
"change __ in";
"clear";
"clearbody";
"cofix";
"compare";
"compute";
"compute in";
"congruence";
"constructor";
"constructor __ with";
"contradiction";
"cut";
"cutrewrite";
];
[
"decide equality";
"decompose";
"decompose record";
"decompose sum";
"dependent inversion";
"dependent inversion __ with";
"dependent inversion__clear";
"dependent inversion__clear __ with";
"dependent rewrite ->";
"dependent rewrite <-";
"destruct";
"discriminate";
"do";
"double induction";
];
[
"eapply";
"eauto";
"eauto with";
"eexact";
"elim";
"elim __ using";
"elim __ with";
"elimtype";
"exact";
"exists";
];
[
"fail";
"field";
"first";
"firstorder";
"firstorder using";
"firstorder with";
"fix";
"fix __ with";
"fold";
"fold __ in";
"fourier";
"functional induction";
];
[
"generalize";
"generalize dependent";
];
[
"hnf";
];
[
"idtac";
"induction";
"info";
"injection";
"instantiate (__:=__)";
"intro";
"intro after";
"intro __ after";
"intros";
"intros until";
"intuition";
"inversion";
"inversion __ in";
"inversion __ using";
"inversion __ using __ in";
"inversion__clear";
"inversion__clear __ in";
];
[
"jp <n>";
"jp";
];
[
"lapply";
"lazy";
"lazy in";
"left";
];
[
"move __ after";
];
[
"omega";
];
[
"pattern";
"pose";
"pose __:=__)";
"progress";
];
[
"quote";
];
[
"red";
"red in";
"refine";
"reflexivity";
"rename __ into";
"repeat";
"replace __ with";
"rewrite";
"rewrite __ in";
"rewrite <-";
"rewrite <- __ in";
"right";
"ring";
];
[
"set";
"set (__:=__)";
"setoid__replace";
"setoid__rewrite";
"simpl";
"simpl __ in";
"simple destruct";
"simple induction";
"simple inversion";
"simplify__eq";
"solve";
"split";
" " ;
" split__Rmult " ;
"split__Rmult";
*)
"subst";
"symmetry";
"symmetry in";
];
[
"tauto";
"transitivity";
"trivial";
"try";
];
[
"unfold";
"unfold __ in";
];
]
| null | https://raw.githubusercontent.com/mzp/coq-ruby/99b9f87c4397f705d1210702416176b13f8769c1/ide/coq_commands.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
$Id: coq_commands.ml 10994 2008-05-26 16:21:31Z jnarboux $
"Abort";
"Back";
"Suspend";
"Undo"; | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
let commands = [
"Add Abstract Ring A Aplus Amult Aone Azero Ainv Aeq T.";
"Add Abstract Semi Ring A Aplus Amult Aone Azero Aeq T.";
"Add Field";
"Add LoadPath";
"Add ML Path";
"Add Morphism";
"Add Printing If";
"Add Printing Let";
"Add Rec LoadPath";
"Add Rec ML Path";
"Add Ring A Aplus Amult Aone Azero Ainv Aeq T [ c1 ... cn ]. ";
"Add Semi Ring A Aplus Amult Aone Azero Aeq T [ c1 ... cn ].";
"Add Relation";
"Add Setoid";
"Axiom";];
["Canonical Structure";
"Chapter";
"Coercion";
"Coercion Local";
"CoFixpoint";
"CoInductive";
];
["Declare ML Module";
"Defined.";
"Definition";
"Derive Dependent Inversion";
"Derive Dependent Inversion__clear";
"Derive Inversion";
"Derive Inversion__clear";
];
["End";
"End Silent.";
"Eval";
"Extract Constant";
"Extract Inductive";
"Extraction Inline";
"Extraction Language";
"Extraction NoInline";];
["Fact";
"Fixpoint";
"Focus";];
["Global Variable";
"Goal";
"Grammar";];
["Hint";
"Hint Constructors";
"Hint Extern";
"Hint Immediate";
"Hint Resolve";
"Hint Rewrite";
"Hint Unfold";
"Hypothesis";];
["Identity Coercion";
"Implicit Arguments";
"Inductive";
"Infix";
];
["Lemma";
"Load";
"Load Verbose";
"Local";
"Ltac";
];
["Module";
"Module Type";
"Mutual Inductive";];
["Notation";
"Next Obligation";];
["Opaque";
"Obligations Tactic";];
["Parameter";
"Proof.";
"Program Definition";
"Program Fixpoint";
"Program Lemma";
"Program Theorem";
];
["Qed.";
];
["Read Module";
"Record";
"Remark";
"Remove LoadPath";
"Remove Printing If";
"Remove Printing Let";
"Require";
"Require Export";
"Require Import";
"Reset Extraction Inline";
"Restore State";
];
[ "Save.";
"Scheme";
"Section";
"Set Extraction AutoInline";
"Set Extraction Optimize";
"Set Hyps__limit";
"Set Implicit Arguments";
" Set Printing Coercion " ;
" Set Printing Coercions " ;
" Set Printing Synth " ;
"Set Printing Coercions";
"Set Printing Synth";*)
"Set Printing Wildcard";
"Set Silent.";
"Set Undo";
" Show " ;
" Show Conjectures " ;
" Show Implicits " ;
" Show Intro " ;
" Show Intros " ;
" Show Programs " ;
" Show Proof " ;
" Show Script " ;
" Show Tree " ;
"Show Conjectures";
"Show Implicits";
"Show Intro";
"Show Intros";
"Show Programs";
"Show Proof";
"Show Script";
"Show Tree";*)
"Structure";
"Syntactic Definition";
"Syntax";];
[
"Test Printing If";
"Test Printing Let";
"Test Printing Synth";
"Test Printing Wildcard";
"Theorem";
"Time";
"Transparent";];
"Unfocus";
"Unset Extraction AutoInline";
"Unset Extraction Optimize";
"Unset Hyps__limit";
"Unset Implicit Arguments";
" Unset Printing Coercion " ;
" Unset Printing Coercions " ;
" Unset Printing Synth " ;
"Unset Printing Coercion";
"Unset Printing Coercions";
"Unset Printing Synth"; *)
"Unset Printing Wildcard";
"Unset Silent.";
"Unset Undo";];
["Variable";
"Variables";];
["Write State";];
]
let state_preserving = [
"Check";
"Eval";
"Eval lazy in";
"Eval vm_compute in";
"Eval compute in";
"Extraction";
"Extraction Library";
"Extraction Module";
"Inspect";
"Locate";
"Obligations";
"Print";
"Print All.";
"Print Classes";
"Print Coercion Paths";
"Print Coercions";
"Print Extraction Inline";
"Print Grammar";
"Print Graph";
"Print Hint";
"Print Hint *";
"Print HintDb";
"Print Implicit";
"Print LoadPath";
"Print ML Modules";
"Print ML Path";
"Print Module";
"Print Module Type";
"Print Modules";
"Print Proof";
"Print Rewrite HintDb";
"Print Setoids";
"Print Scope";
"Print Scopes.";
"Print Section";
"Print Table Printing If.";
"Print Table Printing Let.";
"Print Tables.";
"Print Term";
"Print Visibility";
"Pwd.";
"Recursive Extraction";
"Recursive Extraction Library";
"Search";
"SearchAbout";
"SearchPattern";
"SearchRewrite";
"Show";
"Show Conjectures";
"Show Existentials";
"Show Implicits";
"Show Intro";
"Show Intros";
"Show Proof";
"Show Script";
"Show Tree";
"Test Printing If";
"Test Printing Let";
"Test Printing Synth";
"Test Printing Wildcard";
"Whelp Hint";
"Whelp Locate";
]
let tactics =
[
[
"abstract";
"absurd";
"apply";
"apply __ with";
"assert";
"assert (__:__)";
"assert (__:=__)";
"assumption";
"auto";
"auto with";
"autorewrite";
];
[
"case";
"case __ with";
"casetype";
"cbv";
"cbv in";
"change";
"change __ in";
"clear";
"clearbody";
"cofix";
"compare";
"compute";
"compute in";
"congruence";
"constructor";
"constructor __ with";
"contradiction";
"cut";
"cutrewrite";
];
[
"decide equality";
"decompose";
"decompose record";
"decompose sum";
"dependent inversion";
"dependent inversion __ with";
"dependent inversion__clear";
"dependent inversion__clear __ with";
"dependent rewrite ->";
"dependent rewrite <-";
"destruct";
"discriminate";
"do";
"double induction";
];
[
"eapply";
"eauto";
"eauto with";
"eexact";
"elim";
"elim __ using";
"elim __ with";
"elimtype";
"exact";
"exists";
];
[
"fail";
"field";
"first";
"firstorder";
"firstorder using";
"firstorder with";
"fix";
"fix __ with";
"fold";
"fold __ in";
"fourier";
"functional induction";
];
[
"generalize";
"generalize dependent";
];
[
"hnf";
];
[
"idtac";
"induction";
"info";
"injection";
"instantiate (__:=__)";
"intro";
"intro after";
"intro __ after";
"intros";
"intros until";
"intuition";
"inversion";
"inversion __ in";
"inversion __ using";
"inversion __ using __ in";
"inversion__clear";
"inversion__clear __ in";
];
[
"jp <n>";
"jp";
];
[
"lapply";
"lazy";
"lazy in";
"left";
];
[
"move __ after";
];
[
"omega";
];
[
"pattern";
"pose";
"pose __:=__)";
"progress";
];
[
"quote";
];
[
"red";
"red in";
"refine";
"reflexivity";
"rename __ into";
"repeat";
"replace __ with";
"rewrite";
"rewrite __ in";
"rewrite <-";
"rewrite <- __ in";
"right";
"ring";
];
[
"set";
"set (__:=__)";
"setoid__replace";
"setoid__rewrite";
"simpl";
"simpl __ in";
"simple destruct";
"simple induction";
"simple inversion";
"simplify__eq";
"solve";
"split";
" " ;
" split__Rmult " ;
"split__Rmult";
*)
"subst";
"symmetry";
"symmetry in";
];
[
"tauto";
"transitivity";
"trivial";
"try";
];
[
"unfold";
"unfold __ in";
];
]
|
8b22ef617380b800286725bca5d15f48bae79fcac56c43d5ece0c0cdbfc61111 | ocurrent/ocaml-ci | gitlab.ml | include Git_forge.Make (struct
let prefix = "gitlab"
let request_abbrev = "MR"
let request_prefix = "merge-request"
let org_url ~org = Printf.sprintf "" org
let repo_url ~org ~repo = Printf.sprintf "" org repo
let commit_url ~org ~repo ~hash =
Printf.sprintf "/-/commit/%s" org repo hash
let branch_url ~org ~repo ref =
Fmt.str "/-/tree/%s" org repo ref
let request_url ~org ~repo id =
Fmt.str "/-/merge_requests/%s" org repo id
let parse_ref r =
match Astring.String.cuts ~sep:"/" r with
| "refs" :: "heads" :: branch ->
let branch = Astring.String.concat ~sep:"/" branch in
`Branch branch
| [ "refs"; "merge-requests"; id; "head" ] ->
let id = int_of_string id in
`Request id
| _ -> `Unknown r
end)
| null | https://raw.githubusercontent.com/ocurrent/ocaml-ci/0d4e392299e6039f19edb4556eb65129936e1940/web-ui/view/gitlab.ml | ocaml | include Git_forge.Make (struct
let prefix = "gitlab"
let request_abbrev = "MR"
let request_prefix = "merge-request"
let org_url ~org = Printf.sprintf "" org
let repo_url ~org ~repo = Printf.sprintf "" org repo
let commit_url ~org ~repo ~hash =
Printf.sprintf "/-/commit/%s" org repo hash
let branch_url ~org ~repo ref =
Fmt.str "/-/tree/%s" org repo ref
let request_url ~org ~repo id =
Fmt.str "/-/merge_requests/%s" org repo id
let parse_ref r =
match Astring.String.cuts ~sep:"/" r with
| "refs" :: "heads" :: branch ->
let branch = Astring.String.concat ~sep:"/" branch in
`Branch branch
| [ "refs"; "merge-requests"; id; "head" ] ->
let id = int_of_string id in
`Request id
| _ -> `Unknown r
end)
| |
e9ccd57d62ea2a3d95d5c7977601715ea6cd7d680fa81b8b594cdc14ff8d58a1 | realworldocaml/book | trimmer.ml | open Stdune
open Dune_cache_storage
module Trimming_result = struct
type t = { trimmed_bytes : int64 }
let empty = { trimmed_bytes = 0L }
CR - someday amokhov : Right now Dune does n't support large ( > 1Gb ) files on
32 - bit platforms due to the pervasive use of [ int ] for representing
individual file sizes . It 's not fundamentally difficult to switch to
[ int64 ] , so we should do it if it becomes a real issue .
32-bit platforms due to the pervasive use of [int] for representing
individual file sizes. It's not fundamentally difficult to switch to
[int64], so we should do it if it becomes a real issue. *)
let add t ~(bytes : int) =
{ trimmed_bytes = Int64.add t.trimmed_bytes (Int64.of_int bytes) }
end
let trim_broken_metadata_entries ~trimmed_so_far =
List.fold_left Version.Metadata.all ~init:trimmed_so_far
~f:(fun trimmed_so_far version ->
let metadata_entries = Layout.Versioned.list_metadata_entries version in
let file_path =
Layout.Versioned.file_path (Version.Metadata.file_version version)
in
List.fold_left metadata_entries ~init:trimmed_so_far
~f:(fun trimmed_so_far (path, rule_or_action_digest) ->
let should_be_removed =
match Metadata.Versioned.restore version ~rule_or_action_digest with
| Not_found_in_cache ->
(* A concurrent process must have removed this metadata file. No
need to try removing such "phantom" metadata files again. *)
false
| Error _exn ->
(* If a metadata file can't be restored, let's trim it. *)
true
| Restored metadata -> (
match metadata with
| Metadata.Value _ ->
(* We do not expect to see any value entries in the cache. Let's
keep them untrimmed for now. *)
false
| Metadata.Artifacts { entries; _ } ->
List.exists entries
~f:(fun { Artifacts.Metadata_entry.file_digest; _ } ->
let reference = file_path ~file_digest in
not (Path.exists reference)))
in
match should_be_removed with
| true -> (
match Path.stat path with
| Ok stats ->
let bytes = stats.st_size in
(* If another process deletes [path] and the [unlink_no_err] below
is a no-op, we take the credit and increase
[trimmed_so_far]. *)
Path.unlink_no_err path;
Trimming_result.add trimmed_so_far ~bytes
| Error _ ->
(* Alas, here we can't take any (non-zero) credit, since we don't
know the size of the deleted file. *)
trimmed_so_far)
| false -> trimmed_so_far))
let garbage_collect () =
trim_broken_metadata_entries ~trimmed_so_far:Trimming_result.empty
let files_in_cache_for_all_supported_versions () =
List.concat_map Version.File.all ~f:(fun file_version ->
Layout.Versioned.list_file_entries file_version)
(* We call a cached file "unused" if there are currently no hard links to it
from build directories. Note that [st_nlink] can return 0 if the file has
been removed since we scanned the tree -- in this case we do not want to
claim that its removal is the result of cache trimming and we, therefore,
skip it while trimming. *)
let file_exists_and_is_unused ~stats = stats.Unix.st_nlink = 1
Dune uses [ ctime ] to prioritise entries for deletion . How does this work ?
- In the [ Hardlink ] mode , an entry to become unused when it loses the last
hard link that points to it from a build directory . When this happens , the
entry 's [ ctime ] is modified . This means that the trimmer will start deleting
entries starting from the one that became unused first .
- In the [ Copy ] mode , all entries have hard link count of 1 , and so they all
appear to be unused to the trimmer . However , copying an entry to the cache ,
as well as copying it from the cache to a build directory , both change the
entry 's [ ctime ] . This means that the trimmer will start deleting entries
starting from the one that was least recently created or used .
- In the [Hardlink] mode, an entry to become unused when it loses the last
hard link that points to it from a build directory. When this happens, the
entry's [ctime] is modified. This means that the trimmer will start deleting
entries starting from the one that became unused first.
- In the [Copy] mode, all entries have hard link count of 1, and so they all
appear to be unused to the trimmer. However, copying an entry to the cache,
as well as copying it from the cache to a build directory, both change the
entry's [ctime]. This means that the trimmer will start deleting entries
starting from the one that was least recently created or used. *)
let trim ~goal =
let files = files_in_cache_for_all_supported_versions () |> List.map ~f:fst in
let files =
List.sort
~compare:(fun (_, _, ctime1) (_, _, ctime2) ->
Float.compare ctime1 ctime2)
(List.filter_map files ~f:(fun path ->
match Path.stat path with
| Ok stats ->
if file_exists_and_is_unused ~stats then
Some (path, stats.st_size, stats.st_ctime)
else None
| Error _ -> None))
in
let delete (trimmed_so_far : Trimming_result.t) (path, bytes, _) =
if trimmed_so_far.trimmed_bytes >= goal then trimmed_so_far
else (
Path.unlink_no_err path;
(* CR-someday amokhov: We should really be using block_size * #blocks
because that's how much we save actually. *)
Trimming_result.add trimmed_so_far ~bytes)
in
let trimmed_so_far =
List.fold_left ~init:Trimming_result.empty ~f:delete files
in
trim_broken_metadata_entries ~trimmed_so_far
let overhead_size () =
let files = files_in_cache_for_all_supported_versions () |> List.map ~f:fst in
let stats =
let f p =
try
let stats = Path.stat_exn p in
if file_exists_and_is_unused ~stats then Int64.of_int stats.st_size
else 0L
with Unix.Unix_error (Unix.ENOENT, _, _) -> 0L
in
List.map ~f files
in
List.fold_left ~f:Int64.add ~init:0L stats
| null | https://raw.githubusercontent.com/realworldocaml/book/d822fd065f19dbb6324bf83e0143bc73fd77dbf9/duniverse/dune_/src/dune_cache/trimmer.ml | ocaml | A concurrent process must have removed this metadata file. No
need to try removing such "phantom" metadata files again.
If a metadata file can't be restored, let's trim it.
We do not expect to see any value entries in the cache. Let's
keep them untrimmed for now.
If another process deletes [path] and the [unlink_no_err] below
is a no-op, we take the credit and increase
[trimmed_so_far].
Alas, here we can't take any (non-zero) credit, since we don't
know the size of the deleted file.
We call a cached file "unused" if there are currently no hard links to it
from build directories. Note that [st_nlink] can return 0 if the file has
been removed since we scanned the tree -- in this case we do not want to
claim that its removal is the result of cache trimming and we, therefore,
skip it while trimming.
CR-someday amokhov: We should really be using block_size * #blocks
because that's how much we save actually. | open Stdune
open Dune_cache_storage
module Trimming_result = struct
type t = { trimmed_bytes : int64 }
let empty = { trimmed_bytes = 0L }
CR - someday amokhov : Right now Dune does n't support large ( > 1Gb ) files on
32 - bit platforms due to the pervasive use of [ int ] for representing
individual file sizes . It 's not fundamentally difficult to switch to
[ int64 ] , so we should do it if it becomes a real issue .
32-bit platforms due to the pervasive use of [int] for representing
individual file sizes. It's not fundamentally difficult to switch to
[int64], so we should do it if it becomes a real issue. *)
let add t ~(bytes : int) =
{ trimmed_bytes = Int64.add t.trimmed_bytes (Int64.of_int bytes) }
end
let trim_broken_metadata_entries ~trimmed_so_far =
List.fold_left Version.Metadata.all ~init:trimmed_so_far
~f:(fun trimmed_so_far version ->
let metadata_entries = Layout.Versioned.list_metadata_entries version in
let file_path =
Layout.Versioned.file_path (Version.Metadata.file_version version)
in
List.fold_left metadata_entries ~init:trimmed_so_far
~f:(fun trimmed_so_far (path, rule_or_action_digest) ->
let should_be_removed =
match Metadata.Versioned.restore version ~rule_or_action_digest with
| Not_found_in_cache ->
false
| Error _exn ->
true
| Restored metadata -> (
match metadata with
| Metadata.Value _ ->
false
| Metadata.Artifacts { entries; _ } ->
List.exists entries
~f:(fun { Artifacts.Metadata_entry.file_digest; _ } ->
let reference = file_path ~file_digest in
not (Path.exists reference)))
in
match should_be_removed with
| true -> (
match Path.stat path with
| Ok stats ->
let bytes = stats.st_size in
Path.unlink_no_err path;
Trimming_result.add trimmed_so_far ~bytes
| Error _ ->
trimmed_so_far)
| false -> trimmed_so_far))
let garbage_collect () =
trim_broken_metadata_entries ~trimmed_so_far:Trimming_result.empty
let files_in_cache_for_all_supported_versions () =
List.concat_map Version.File.all ~f:(fun file_version ->
Layout.Versioned.list_file_entries file_version)
let file_exists_and_is_unused ~stats = stats.Unix.st_nlink = 1
Dune uses [ ctime ] to prioritise entries for deletion . How does this work ?
- In the [ Hardlink ] mode , an entry to become unused when it loses the last
hard link that points to it from a build directory . When this happens , the
entry 's [ ctime ] is modified . This means that the trimmer will start deleting
entries starting from the one that became unused first .
- In the [ Copy ] mode , all entries have hard link count of 1 , and so they all
appear to be unused to the trimmer . However , copying an entry to the cache ,
as well as copying it from the cache to a build directory , both change the
entry 's [ ctime ] . This means that the trimmer will start deleting entries
starting from the one that was least recently created or used .
- In the [Hardlink] mode, an entry to become unused when it loses the last
hard link that points to it from a build directory. When this happens, the
entry's [ctime] is modified. This means that the trimmer will start deleting
entries starting from the one that became unused first.
- In the [Copy] mode, all entries have hard link count of 1, and so they all
appear to be unused to the trimmer. However, copying an entry to the cache,
as well as copying it from the cache to a build directory, both change the
entry's [ctime]. This means that the trimmer will start deleting entries
starting from the one that was least recently created or used. *)
let trim ~goal =
let files = files_in_cache_for_all_supported_versions () |> List.map ~f:fst in
let files =
List.sort
~compare:(fun (_, _, ctime1) (_, _, ctime2) ->
Float.compare ctime1 ctime2)
(List.filter_map files ~f:(fun path ->
match Path.stat path with
| Ok stats ->
if file_exists_and_is_unused ~stats then
Some (path, stats.st_size, stats.st_ctime)
else None
| Error _ -> None))
in
let delete (trimmed_so_far : Trimming_result.t) (path, bytes, _) =
if trimmed_so_far.trimmed_bytes >= goal then trimmed_so_far
else (
Path.unlink_no_err path;
Trimming_result.add trimmed_so_far ~bytes)
in
let trimmed_so_far =
List.fold_left ~init:Trimming_result.empty ~f:delete files
in
trim_broken_metadata_entries ~trimmed_so_far
let overhead_size () =
let files = files_in_cache_for_all_supported_versions () |> List.map ~f:fst in
let stats =
let f p =
try
let stats = Path.stat_exn p in
if file_exists_and_is_unused ~stats then Int64.of_int stats.st_size
else 0L
with Unix.Unix_error (Unix.ENOENT, _, _) -> 0L
in
List.map ~f files
in
List.fold_left ~f:Int64.add ~init:0L stats
|
5a09a7ca3f6901f90930933c6f1b6fc147344604dbd7020bb7f3010c1c27d4b5 | namin/biohacker | tnst.lisp | ;; -*- Mode: Lisp; -*-
A simple domain theory for TGizmo
Last Edited : 1/29/93 , by KDF
Copyright ( c ) 1991 - 1993 , , Northwestern University ,
and , the Xerox Corporation .
;;; All rights reserved.
;;; See the file legal.txt for a paragraph stating scope of permission
;;; and disclaimer of warranty. The above copyright notice and that
;;; paragraph must be included in any separate copy of this file.
(in-package :COMMON-LISP-USER)
(defentity (Container ?can)
(quantity (pressure ?can))) ;; at bottom
(defentity (fluid-path ?path))
(defentity (heat-path ?path))
(defentity (Physob ?phob)
(quantity (heat ?phob))
(quantity (temperature ?phob))
(> (A (heat ?phob)) ZERO)
(> (A (temperature ?phob)) ZERO)
(qprop (temperature ?phob) (heat ?phob)))
(defentity (Temperature-Source ?phob)
(quantity (heat ?phob))
(quantity (temperature ?phob))
(> (A (heat ?phob)) ZERO)
(> (A (temperature ?phob)) ZERO))
(defrule Contained-Stuff-Existence
((Container ?can)(Phase ?st)(Substance ?sub))
;; Assume that every kind of substance can exist in
;; in every phase inside every container.
(quantity ((amount-of ?sub ?st) ?can))
(>= (A ((amount-of ?sub ?st) ?can)) ZERO))
(defview (Contained-Stuff (C-S ?sub ?st ?can))
:INDIVIDUALS ((?can (container ?can)
(substance ?sub)
(phase ?st)))
:QUANTITY-CONDITIONS ((> (A ((amount-of ?sub ?st) ?can)) ZERO))
:RELATIONS ((Only-During (Exists (C-S ?sub ?st ?can)))
(Contained-stuff (C-S ?sub ?st ?can))
(quantity (TBoil (C-S ?sub ?st ?can)))
(> (A (TBoil (C-S ?sub ?st ?can))) ZERO)))
(defentity (Contained-Stuff (C-S ?sub liquid ?can))
(Contained-Liquid (C-S ?sub liquid ?can)))
(defentity (Contained-Liquid (C-S ?sub liquid ?can))
(physob (C-S ?sub liquid ?can))
(quantity (level (C-S ?sub liquid ?can)))
(qprop (level (C-S ?sub liquid ?can))
((Amount-of ?sub liquid) ?can))
(qprop (pressure ?can) (level (C-S ?sub liquid ?can))))
(defentity (Contained-Stuff (C-S ?sub gas ?can))
(Contained-gas (C-S ?sub gas ?can)))
(defentity (Contained-Gas (C-S ?sub gas ?can))
(physob (C-S ?sub gas ?can))
(qprop (pressure ?can)
(temperature (C-S ?sub gas ?can)))
(qprop (pressure ?can)
((amount-of ?sub gas) ?can)))
;;;; Flow processes
(defprocess (heat-flow ?src ?path ?dst)
:INDIVIDUALS ((?src (Quantity (heat ?src)))
(?path (Heat-Connection ?path ?src ?dst))
(?dst (Quantity (heat ?dst))))
:PRECONDITIONS ((Heat-Aligned ?path))
:QUANTITY-CONDITIONS ((> (A (temperature ?src))
(A (temperature ?dst))))
:RELATIONS ((Quantity (flow-rate ?self))
(> (A (flow-rate ?self)) zero)
(Qprop (flow-rate ?self) (temperature ?src))
(Qprop- (flow-rate ?self) (temperature ?dst)))
:INFLUENCES ((I- (heat ?src) (flow-rate ?self))
(I+ (heat ?dst) (flow-rate ?self))))
(defprocess (fluid-flow (C-S ?sub ?st ?src) ?path ?dst)
:INDIVIDUALS ((?src (container ?src)
(substance ?sub) (phase ?st))
(?path (fluid-Connection ?path ?src ?dst))
(?dst (container ?dst)))
:PRECONDITIONS ((Aligned ?path))
:QUANTITY-CONDITIONS ((> (A ((amount-of ?sub ?st) ?src)) ZERO)
( > ( A ( ( amount - of ? sub ? st ) ? dst ) ) ZERO ) ; simplification
(> (A (pressure ?src)) (A (pressure ?dst))))
:RELATIONS ((Quantity (flow-rate ?self))
(> (A (flow-rate ?self)) zero)
(Qprop (flow-rate ?self) (pressure ?src))
(Qprop- (flow-rate ?self) (pressure ?dst)))
:INFLUENCES ((I- ((amount-of ?sub ?st) ?src) (flow-rate ?self))
(I+ ((amount-of ?sub ?st) ?dst) (flow-rate ?self))))
;;;; Phase changes
(defprocess (boiling (C-S ?sub liquid ?can)
(heat-flow ?ht-src ?hpath (C-S ?sub liquid ?can)))
:INDIVIDUALS ((?sub (substance ?sub))
(?can (container ?can)
(Contained-Liquid (C-S ?sub liquid ?can)))
(?hpath (heat-path ?hpath))
(?ht-src (heat-connection ?hpath ?ht-src (C-S ?sub liquid ?can))))
:QUANTITY-CONDITIONS ((> (A ((amount-of ?sub liquid) ?can)) zero)
(Active (heat-flow ?ht-src ?hpath (C-S ?sub liquid ?can)))
(>= (A (temperature (C-S ?sub liquid ?can)))
(A (tboil (C-S ?sub liquid ?can)))))
:RELATIONS ((quantity (generation-rate ?self))
(:IMPLIES (Exists (C-S ?sub gas ?can))
(= (A (temperature (C-S ?sub gas ?can)))
(A (temperature (C-S ?sub liquid ?can)))))
(> (A (generation-rate ?self)) zero))
:INFLUENCES ((I+ ((amount-of ?sub gas) ?can) (generation-rate ?self))
(I- ((amount-of ?sub liquid) ?can) (generation-rate ?self))
(I- (heat (C-S ?sub liquid ?can))
(flow-rate (heat-flow ?ht-src ?hpath (C-S ?sub liquid ?can))))))
| null | https://raw.githubusercontent.com/namin/biohacker/6b5da4c51c9caa6b5e1a68b046af171708d1af64/BPS/tgizmo/tnst.lisp | lisp | -*- Mode: Lisp; -*-
All rights reserved.
See the file legal.txt for a paragraph stating scope of permission
and disclaimer of warranty. The above copyright notice and that
paragraph must be included in any separate copy of this file.
at bottom
Assume that every kind of substance can exist in
in every phase inside every container.
Flow processes
simplification
Phase changes |
A simple domain theory for TGizmo
Last Edited : 1/29/93 , by KDF
Copyright ( c ) 1991 - 1993 , , Northwestern University ,
and , the Xerox Corporation .
(in-package :COMMON-LISP-USER)
(defentity (Container ?can)
(defentity (fluid-path ?path))
(defentity (heat-path ?path))
(defentity (Physob ?phob)
(quantity (heat ?phob))
(quantity (temperature ?phob))
(> (A (heat ?phob)) ZERO)
(> (A (temperature ?phob)) ZERO)
(qprop (temperature ?phob) (heat ?phob)))
(defentity (Temperature-Source ?phob)
(quantity (heat ?phob))
(quantity (temperature ?phob))
(> (A (heat ?phob)) ZERO)
(> (A (temperature ?phob)) ZERO))
(defrule Contained-Stuff-Existence
((Container ?can)(Phase ?st)(Substance ?sub))
(quantity ((amount-of ?sub ?st) ?can))
(>= (A ((amount-of ?sub ?st) ?can)) ZERO))
(defview (Contained-Stuff (C-S ?sub ?st ?can))
:INDIVIDUALS ((?can (container ?can)
(substance ?sub)
(phase ?st)))
:QUANTITY-CONDITIONS ((> (A ((amount-of ?sub ?st) ?can)) ZERO))
:RELATIONS ((Only-During (Exists (C-S ?sub ?st ?can)))
(Contained-stuff (C-S ?sub ?st ?can))
(quantity (TBoil (C-S ?sub ?st ?can)))
(> (A (TBoil (C-S ?sub ?st ?can))) ZERO)))
(defentity (Contained-Stuff (C-S ?sub liquid ?can))
(Contained-Liquid (C-S ?sub liquid ?can)))
(defentity (Contained-Liquid (C-S ?sub liquid ?can))
(physob (C-S ?sub liquid ?can))
(quantity (level (C-S ?sub liquid ?can)))
(qprop (level (C-S ?sub liquid ?can))
((Amount-of ?sub liquid) ?can))
(qprop (pressure ?can) (level (C-S ?sub liquid ?can))))
(defentity (Contained-Stuff (C-S ?sub gas ?can))
(Contained-gas (C-S ?sub gas ?can)))
(defentity (Contained-Gas (C-S ?sub gas ?can))
(physob (C-S ?sub gas ?can))
(qprop (pressure ?can)
(temperature (C-S ?sub gas ?can)))
(qprop (pressure ?can)
((amount-of ?sub gas) ?can)))
(defprocess (heat-flow ?src ?path ?dst)
:INDIVIDUALS ((?src (Quantity (heat ?src)))
(?path (Heat-Connection ?path ?src ?dst))
(?dst (Quantity (heat ?dst))))
:PRECONDITIONS ((Heat-Aligned ?path))
:QUANTITY-CONDITIONS ((> (A (temperature ?src))
(A (temperature ?dst))))
:RELATIONS ((Quantity (flow-rate ?self))
(> (A (flow-rate ?self)) zero)
(Qprop (flow-rate ?self) (temperature ?src))
(Qprop- (flow-rate ?self) (temperature ?dst)))
:INFLUENCES ((I- (heat ?src) (flow-rate ?self))
(I+ (heat ?dst) (flow-rate ?self))))
(defprocess (fluid-flow (C-S ?sub ?st ?src) ?path ?dst)
:INDIVIDUALS ((?src (container ?src)
(substance ?sub) (phase ?st))
(?path (fluid-Connection ?path ?src ?dst))
(?dst (container ?dst)))
:PRECONDITIONS ((Aligned ?path))
:QUANTITY-CONDITIONS ((> (A ((amount-of ?sub ?st) ?src)) ZERO)
(> (A (pressure ?src)) (A (pressure ?dst))))
:RELATIONS ((Quantity (flow-rate ?self))
(> (A (flow-rate ?self)) zero)
(Qprop (flow-rate ?self) (pressure ?src))
(Qprop- (flow-rate ?self) (pressure ?dst)))
:INFLUENCES ((I- ((amount-of ?sub ?st) ?src) (flow-rate ?self))
(I+ ((amount-of ?sub ?st) ?dst) (flow-rate ?self))))
(defprocess (boiling (C-S ?sub liquid ?can)
(heat-flow ?ht-src ?hpath (C-S ?sub liquid ?can)))
:INDIVIDUALS ((?sub (substance ?sub))
(?can (container ?can)
(Contained-Liquid (C-S ?sub liquid ?can)))
(?hpath (heat-path ?hpath))
(?ht-src (heat-connection ?hpath ?ht-src (C-S ?sub liquid ?can))))
:QUANTITY-CONDITIONS ((> (A ((amount-of ?sub liquid) ?can)) zero)
(Active (heat-flow ?ht-src ?hpath (C-S ?sub liquid ?can)))
(>= (A (temperature (C-S ?sub liquid ?can)))
(A (tboil (C-S ?sub liquid ?can)))))
:RELATIONS ((quantity (generation-rate ?self))
(:IMPLIES (Exists (C-S ?sub gas ?can))
(= (A (temperature (C-S ?sub gas ?can)))
(A (temperature (C-S ?sub liquid ?can)))))
(> (A (generation-rate ?self)) zero))
:INFLUENCES ((I+ ((amount-of ?sub gas) ?can) (generation-rate ?self))
(I- ((amount-of ?sub liquid) ?can) (generation-rate ?self))
(I- (heat (C-S ?sub liquid ?can))
(flow-rate (heat-flow ?ht-src ?hpath (C-S ?sub liquid ?can))))))
|
987f2fe7cd817627c69d7e41646ffddd8355d7ca0519528ab73395e5901e03f3 | huangz1990/real-world-haskell-cn | AltSupply.hs | file : ch15 / AltSupply.hs
unwrapS :: Supply s a -> State [s] a
unwrapS (S s) = s
instance Monad (Supply s) where
s >>= m = S (unwrapS s >>= unwrapS . m)
return = S . return
| null | https://raw.githubusercontent.com/huangz1990/real-world-haskell-cn/f67b07dd846b1950d17ff941d650089fcbbe9586/code/ch15/AltSupply.hs | haskell | file : ch15 / AltSupply.hs
unwrapS :: Supply s a -> State [s] a
unwrapS (S s) = s
instance Monad (Supply s) where
s >>= m = S (unwrapS s >>= unwrapS . m)
return = S . return
| |
f4e5557bc469602287eaaa5bae6c402f796414871d6b24e0024ace2cea5bd56c | roddyyaga/ocoi | jwt_utils.ml | open Base
let make_token ~jwk claims =
let header = Jose.Header.make_header ~typ:"JWT" jwk in
let payload =
let open Jose.Jwt in
List.fold ~init:empty_payload
~f:(fun payload (key, value) -> payload |> add_claim key (`String value))
claims
in
Jose.Jwt.sign ~header ~payload jwk |> function
| Ok t -> t
| Error (`Msg m) -> failwith m
let make_and_encode ~jwk claims = make_token ~jwk claims |> Jose.Jwt.to_string
let verify_and_decode ~jwk token_string =
let open Result.Monad_infix in
Jose.Jwt.of_string token_string >>= Jose.Jwt.validate ~jwk >>| fun token ->
token.payload
let get_claim claim payload =
Yojson.Safe.Util.(payload |> member claim |> to_string_option)
| null | https://raw.githubusercontent.com/roddyyaga/ocoi/0a07e9457add9890cb507ac8bb4e55044d86a640/ocoi/lib/handlers/jwt_utils.ml | ocaml | open Base
let make_token ~jwk claims =
let header = Jose.Header.make_header ~typ:"JWT" jwk in
let payload =
let open Jose.Jwt in
List.fold ~init:empty_payload
~f:(fun payload (key, value) -> payload |> add_claim key (`String value))
claims
in
Jose.Jwt.sign ~header ~payload jwk |> function
| Ok t -> t
| Error (`Msg m) -> failwith m
let make_and_encode ~jwk claims = make_token ~jwk claims |> Jose.Jwt.to_string
let verify_and_decode ~jwk token_string =
let open Result.Monad_infix in
Jose.Jwt.of_string token_string >>= Jose.Jwt.validate ~jwk >>| fun token ->
token.payload
let get_claim claim payload =
Yojson.Safe.Util.(payload |> member claim |> to_string_option)
| |
80ce783d6867119bb1676ef2c09d0bb177c7062be2466e95e29e4bde1e5bd402 | syntax-objects/syntax-parse-example | syntax-class-contract-test.rkt | #lang racket/base
(module+ test
(require rackunit
syntax/macro-testing
(for-syntax
racket/base
racket/contract
(only-in syntax/parse str)
(only-in syntax/parse/experimental/reflect reify-syntax-class)
syntax-parse-example/syntax-class-contract/syntax-class-contract))
(test-case "str"
(define-syntax add-hello
(contract
(-> (syntax-class-contract (reify-syntax-class str))
syntax?)
(lambda (stx)
(let ([orig-str (syntax-e (cadr (syntax-e stx)))])
(with-syntax ([new-str (string-append "hello" " " orig-str)])
#'new-str)))
'this-macro
'the-macro-user))
(check-equal? (add-hello "world") "hello world")
(check-exn exn:fail?
(lambda () (convert-compile-time-error (add-hello 42)))))
)
| null | https://raw.githubusercontent.com/syntax-objects/syntax-parse-example/0675ce0717369afcde284202ec7df661d7af35aa/syntax-class-contract/syntax-class-contract-test.rkt | racket | #lang racket/base
(module+ test
(require rackunit
syntax/macro-testing
(for-syntax
racket/base
racket/contract
(only-in syntax/parse str)
(only-in syntax/parse/experimental/reflect reify-syntax-class)
syntax-parse-example/syntax-class-contract/syntax-class-contract))
(test-case "str"
(define-syntax add-hello
(contract
(-> (syntax-class-contract (reify-syntax-class str))
syntax?)
(lambda (stx)
(let ([orig-str (syntax-e (cadr (syntax-e stx)))])
(with-syntax ([new-str (string-append "hello" " " orig-str)])
#'new-str)))
'this-macro
'the-macro-user))
(check-equal? (add-hello "world") "hello world")
(check-exn exn:fail?
(lambda () (convert-compile-time-error (add-hello 42)))))
)
| |
b3a34074e09a519e24c12e80b3f815d83a0ec434f4ad153a3acc05676a35720d | yi-editor/yi | Rectangle.hs | {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Rectangle
-- License : GPL-2
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
-- emacs-style rectangle manipulation functions.
module Yi.Rectangle where
import Control.Monad (forM_)
import Data.List (sort, transpose)
import Data.Monoid ((<>))
import qualified Data.Text as T (Text, concat, justifyLeft, length)
import Yi.Buffer
import Yi.Editor (EditorM, getRegE, setRegE, withCurrentBuffer)
import qualified Yi.Rope as R
import Yi.String (lines', mapLines, unlines')
-- | Get the selected region as a rectangle.
-- Returns the region extended to lines, plus the start and end columns of the rectangle.
getRectangle :: BufferM (Region, Int, Int)
getRectangle = do
r <- getSelectRegionB
extR <- unitWiseRegion Line r
[lowCol,highCol] <- sort <$> mapM colOf [regionStart r, regionEnd r]
return (extR, lowCol, highCol)
-- | Split text at the boundaries given
multiSplit :: [Int] -> R.YiString -> [R.YiString]
multiSplit [] l = [l]
multiSplit (x:xs) l = left : multiSplit (fmap (subtract x) xs) right
where (left, right) = R.splitAt x l
onRectangle :: (Int -> Int -> R.YiString -> R.YiString) -> BufferM ()
onRectangle f = do
(reg, l, r) <- getRectangle
modifyRegionB (mapLines (f l r)) reg
openRectangle :: BufferM ()
openRectangle = onRectangle openLine
where
openLine l r line =
left <> R.replicateChar (r - l) ' ' <> right
where (left, right) = R.splitAt l line
stringRectangle :: R.YiString -> BufferM ()
stringRectangle inserted = onRectangle stringLine
where stringLine l r line = left <> inserted <> right
where [left,_,right] = multiSplit [l,r] line
killRectangle :: EditorM ()
killRectangle = do
cutted <- withCurrentBuffer $ do
(reg, l, r) <- getRectangle
text <- readRegionB reg
let (cutted, rest) = unzip $ fmap cut $ R.lines' text
cut :: R.YiString -> (R.YiString, R.YiString)
cut line = let [left,mid,right] = multiSplit [l,r] line
in (mid, left <> right)
replaceRegionB reg (R.unlines rest)
return cutted
setRegE (R.unlines cutted)
yankRectangle :: EditorM ()
yankRectangle = do
text <- R.lines' <$> getRegE
withCurrentBuffer $ forM_ text $ \t -> do
savingPointB $ insertN t
lineDown
| null | https://raw.githubusercontent.com/yi-editor/yi/58c239e3a77cef8f4f77e94677bd6a295f585f5f/yi-core/src/Yi/Rectangle.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE PackageImports #
# OPTIONS_HADDOCK show-extensions #
|
Module : Yi.Rectangle
License : GPL-2
Maintainer :
Stability : experimental
Portability : portable
emacs-style rectangle manipulation functions.
| Get the selected region as a rectangle.
Returns the region extended to lines, plus the start and end columns of the rectangle.
| Split text at the boundaries given |
module Yi.Rectangle where
import Control.Monad (forM_)
import Data.List (sort, transpose)
import Data.Monoid ((<>))
import qualified Data.Text as T (Text, concat, justifyLeft, length)
import Yi.Buffer
import Yi.Editor (EditorM, getRegE, setRegE, withCurrentBuffer)
import qualified Yi.Rope as R
import Yi.String (lines', mapLines, unlines')
getRectangle :: BufferM (Region, Int, Int)
getRectangle = do
r <- getSelectRegionB
extR <- unitWiseRegion Line r
[lowCol,highCol] <- sort <$> mapM colOf [regionStart r, regionEnd r]
return (extR, lowCol, highCol)
multiSplit :: [Int] -> R.YiString -> [R.YiString]
multiSplit [] l = [l]
multiSplit (x:xs) l = left : multiSplit (fmap (subtract x) xs) right
where (left, right) = R.splitAt x l
onRectangle :: (Int -> Int -> R.YiString -> R.YiString) -> BufferM ()
onRectangle f = do
(reg, l, r) <- getRectangle
modifyRegionB (mapLines (f l r)) reg
openRectangle :: BufferM ()
openRectangle = onRectangle openLine
where
openLine l r line =
left <> R.replicateChar (r - l) ' ' <> right
where (left, right) = R.splitAt l line
stringRectangle :: R.YiString -> BufferM ()
stringRectangle inserted = onRectangle stringLine
where stringLine l r line = left <> inserted <> right
where [left,_,right] = multiSplit [l,r] line
killRectangle :: EditorM ()
killRectangle = do
cutted <- withCurrentBuffer $ do
(reg, l, r) <- getRectangle
text <- readRegionB reg
let (cutted, rest) = unzip $ fmap cut $ R.lines' text
cut :: R.YiString -> (R.YiString, R.YiString)
cut line = let [left,mid,right] = multiSplit [l,r] line
in (mid, left <> right)
replaceRegionB reg (R.unlines rest)
return cutted
setRegE (R.unlines cutted)
yankRectangle :: EditorM ()
yankRectangle = do
text <- R.lines' <$> getRegE
withCurrentBuffer $ forM_ text $ \t -> do
savingPointB $ insertN t
lineDown
|
2c51e62f62de7c1385c5a612e78df5a2a5e329da0e1299f92123007feef97acf | ori-sky/hs-etch | Context.hs | # LANGUAGE FlexibleContexts #
module Etch.CodeGen.Context where
import qualified Data.HashMap.Lazy as HM
import Data.Maybe (maybe)
import Data.Text (Text)
import Control.Monad.State
type Scope a = HM.HashMap Text a
data Context a = Context { contextScopes :: [Scope a]
, contextNextID :: Integer
}
defaultContext :: Context a
defaultContext = Context { contextScopes = [HM.empty]
, contextNextID = 0
}
contextLookup :: Text -> Context a -> Maybe a
contextLookup name Context { contextScopes = scopes } = f scopes
where f (scope:xs) = maybe (f xs) pure (HM.lookup name scope)
f [] = Nothing
contextInsert :: Text -> a -> Context a -> Context a
contextInsert name value ctx@(Context { contextScopes = (scope:xs) }) =
ctx { contextScopes = HM.insert name value scope : xs }
contextInsert _ _ ctx = ctx
contextInsertScope :: Scope a -> Context a -> Context a
contextInsertScope scope ctx@(Context { contextScopes = scopes }) =
ctx { contextScopes = scope : scopes }
contextStateNextID :: MonadState (Context a) m => m Integer
contextStateNextID = do
ctx@(Context { contextNextID = nextID }) <- get
put ctx { contextNextID = succ nextID }
pure nextID
| null | https://raw.githubusercontent.com/ori-sky/hs-etch/9a90df6090f0a9bf64962bd41ffe4469a9dcbd68/src/Etch/CodeGen/Context.hs | haskell | # LANGUAGE FlexibleContexts #
module Etch.CodeGen.Context where
import qualified Data.HashMap.Lazy as HM
import Data.Maybe (maybe)
import Data.Text (Text)
import Control.Monad.State
type Scope a = HM.HashMap Text a
data Context a = Context { contextScopes :: [Scope a]
, contextNextID :: Integer
}
defaultContext :: Context a
defaultContext = Context { contextScopes = [HM.empty]
, contextNextID = 0
}
contextLookup :: Text -> Context a -> Maybe a
contextLookup name Context { contextScopes = scopes } = f scopes
where f (scope:xs) = maybe (f xs) pure (HM.lookup name scope)
f [] = Nothing
contextInsert :: Text -> a -> Context a -> Context a
contextInsert name value ctx@(Context { contextScopes = (scope:xs) }) =
ctx { contextScopes = HM.insert name value scope : xs }
contextInsert _ _ ctx = ctx
contextInsertScope :: Scope a -> Context a -> Context a
contextInsertScope scope ctx@(Context { contextScopes = scopes }) =
ctx { contextScopes = scope : scopes }
contextStateNextID :: MonadState (Context a) m => m Integer
contextStateNextID = do
ctx@(Context { contextNextID = nextID }) <- get
put ctx { contextNextID = succ nextID }
pure nextID
| |
dcbf7409ef6e74456f50bfbdf2d0490dbf3252a99f7e6b78cbb3f60297bc193c | benzap/eden | operator.cljc | (ns eden.stdlib.operator
(:require
[eden.def :refer [set-var!]]))
(def operator
{:add +
:sub -
:mult *
:div /
:not not
:and #(and %1 %2)
:or #(or %1 %2)})
(defn import-stdlib-operator [eden]
(-> eden
(set-var! 'operator operator)))
| null | https://raw.githubusercontent.com/benzap/eden/dbfa63dc18dbc5ef18a9b2b16dbb7af0e633f6d0/src/eden/stdlib/operator.cljc | clojure | (ns eden.stdlib.operator
(:require
[eden.def :refer [set-var!]]))
(def operator
{:add +
:sub -
:mult *
:div /
:not not
:and #(and %1 %2)
:or #(or %1 %2)})
(defn import-stdlib-operator [eden]
(-> eden
(set-var! 'operator operator)))
| |
b9ec0d286585a2864b39e52f35b0b9b81edfb5c54779e256f90ae887a38de2ca | oklm-wsh/MrMime | rfc5322.ml | let locate buff off len f =
let idx = ref 0 in
while !idx < len && f (Internal_buffer.get buff (off + !idx))
do incr idx done;
!idx
type phrase =
[ `Dot
| `Word of Rfc822.word
| `Encoded of (string * Rfc2047.raw) ] list
type domain =
[ `Domain of string list
| `Literal of Rfc5321.literal_domain ]
type mailbox =
{ name : phrase option
; local : Rfc822.local
; domain : domain * domain list }
type group =
{ name : phrase
; mailbox : mailbox list }
type address = [ `Group of group | `Mailbox of mailbox ]
type month =
| Jan | Feb | Mar | Apr | May | Jun
| Jul | Aug | Sep | Oct | Nov | Dec
type day =
| Mon | Tue | Wed
| Thu | Fri | Sat
| Sun
type zone =
| UT | GMT
| EST | EDT
| CST | CDT
| MST | MDT
| PST | PDT
| Military_zone of char
| TZ of int
type date =
{ day : day option
; date : int * month * int
; time : int * int * int option
; zone : zone }
type unstructured =
[ `Text of string | `CR of int | `LF of int | `CRLF | `WSP
| `Encoded of (string * Rfc2047.raw) ] list
type phrase_or_msg_id =
[ `Phrase of phrase | `MsgID of Rfc822.msg_id ]
type resent =
[ `ResentDate of date
| `ResentFrom of mailbox list
| `ResentSender of mailbox
| `ResentTo of address list
| `ResentCc of address list
| `ResentBcc of address list
| `ResentMessageID of Rfc822.msg_id
| `ResentReplyTo of address list ]
type trace =
[ `Trace of ((Rfc822.local * (domain * domain list)) option
* ([ `Addr of Rfc822.local * (domain * domain list)
| `Domain of domain
| `Word of Rfc822.word ] list * date option) list) ]
type field_header =
[ `Date of date
| `From of mailbox list
| `Sender of mailbox
| `ReplyTo of address list
| `To of address list
| `Cc of address list
| `Bcc of address list
| `MessageID of Rfc822.msg_id
| `InReplyTo of phrase_or_msg_id list
| `References of phrase_or_msg_id list
| `Subject of unstructured
| `Comments of unstructured
| `Keywords of phrase list
| `Field of string * unstructured
| `Unsafe of string * unstructured ]
type skip =
[ `Skip of string ]
type field =
[ field_header | resent | trace | skip ]
open Parser
open Parser.Convenience
type err += Incomplete_address_literal
let domain_literal =
(option () Rfc822.cfws)
*> char '['
*> (many ((option (false, false, false) Rfc822.fws)
*> ((Rfc6532.str Rfc822.is_dtext) <|> (Rfc822.quoted_pair >>| String.make 1)))
>>| String.concat "")
<* (option (false, false, false) Rfc822.fws)
<* char ']'
<* (option () Rfc822.cfws)
>>= fun content ->
{ f = fun i s fail succ ->
let b = Input.create_by ~proof:(Input.proof i) (String.length content) in
Input.write b (Internal_buffer.from_string ~proof:(Input.proof b) content) 0 (String.length content);
let compute = function
| Read _ -> fail i s [] Incomplete_address_literal
| Fail (_, err) -> fail i s [] err
| Done v -> succ i s v
in
compute @@ only b Rfc5321.address_literal }
let domain =
(Rfc822.obs_domain >>| fun domain -> `Domain domain)
<|> (domain_literal >>| fun literal -> `Literal literal)
<|> (Rfc822.dot_atom >>| fun domain -> `Domain domain)
let addr_spec =
Rfc822.local_part
>>= fun local -> char '@'
*> domain
>>= fun domain -> return (local, domain)
let word' =
(option () Rfc822.cfws
*> (Rfc2047.inline_encoded_string >>| fun x -> `Encoded x)
<* option () Rfc822.cfws)
<|> (Rfc822.word >>| fun x -> `Word x)
let obs_phrase =
word'
>>= fun first ->
fix (fun m -> (lift2 (function
| (`Dot | `Word _ | `Encoded _) as x -> fun r -> x :: r
| `CFWS -> fun r -> r)
(word'
<|> (char '.' >>| fun _ -> `Dot)
<|> (Rfc822.cfws >>| fun () -> `CFWS))
m)
<|> return [])
>>| fun rest -> first :: rest
let phrase = obs_phrase <|> (one word')
let display_name = phrase
let obs_domain_list =
let first =
fix (fun m -> (lift2 (fun _ _ -> ()) (Rfc822.cfws
<|> (char ',' >>| fun _ -> ())) m)
<|> return ())
in
let rest =
fix (fun m -> (lift2 (function `Sep -> fun r -> r
| `Domain x -> fun r -> x :: r)
(char ','
*> (option () Rfc822.cfws)
*> (option `Sep (char '@'
*> domain
>>| fun x -> `Domain x)))
m)
<|> return [])
in
first *> char '@' *> domain
>>= fun x -> rest
>>| fun r -> x :: r
let obs_route = obs_domain_list <* char ':'
let obs_angle_addr =
(option () Rfc822.cfws)
*> char '<'
*> obs_route
>>= fun domains -> addr_spec
>>= fun (local, domain) -> char '>'
*> (option () Rfc822.cfws)
>>| fun () -> (local, (domain, domains))
let angle_addr =
obs_angle_addr
<|> ((option () Rfc822.cfws)
*> char '<'
*> addr_spec
>>= fun (local, domain) -> char '>'
*> (option () Rfc822.cfws)
>>| fun _ -> (local, (domain, [])))
let name_addr =
(option None (display_name >>| fun x -> Some x))
>>= fun name -> angle_addr
>>| fun addr -> (name, addr)
let mailbox =
(name_addr
<|> (addr_spec >>| fun (local, domain) -> (None, (local, (domain, [])))))
>>| (fun (name, (local, domain)) -> { name; local; domain; })
let obs_mbox_list =
let many' p =
fix (fun m -> (lift2 (fun _ _ -> ()) p m) <|> return ())
in
let rest =
fix (fun m -> (lift2 (function `Mailbox x -> fun r -> x :: r
| `Sep -> fun r -> r)
(char ',' *> (option `Sep ((mailbox >>| fun m -> `Mailbox m)
<|> (Rfc822.cfws >>| fun () -> `Sep))))
m)
<|> return [])
in
(many' ((option () Rfc822.cfws) *> char ','))
*> mailbox
>>= fun x -> rest
>>| fun r -> x :: r
let obs_group_list =
let many' p =
fix (fun m -> (lift2 (fun _ _ -> ()) p m) <|> return ())
in
let one' p =
lift2 (fun _ _ -> ()) p (many' p)
in
one' ((option () Rfc822.cfws) *> char ',') *> (option () Rfc822.cfws)
let mailbox_list =
obs_mbox_list
<|> (mailbox >>= fun x -> (many (char ',' *> mailbox)) >>| fun r -> x :: r)
let group_list =
mailbox_list
<|> (obs_group_list >>| fun () -> [])
<|> (Rfc822.cfws >>| fun () -> [])
let group =
display_name
>>= fun name -> char ':'
*> (option [] group_list <?> "group-list")
>>= fun lst -> char ';'
*> (option () Rfc822.cfws)
>>| fun _ -> { name; mailbox = lst; }
let address =
(group >>| fun g -> `Group g)
<|> (mailbox >>| fun m -> `Mailbox m)
let obs_addr_list =
let many' p =
fix (fun m -> (lift2 (fun _ _ -> ()) p m) <|> return ())
in
let rest =
fix (fun m -> (lift2 (function `Addr x -> fun r -> x :: r
| `Sep -> fun r -> r)
(char ',' *> (option `Sep ((address >>| fun a -> `Addr a)
<|> (Rfc822.cfws >>| fun () -> `Sep))))
m)
<|> return [])
in
(many' ((option () Rfc822.cfws) *> char ','))
*> address
>>= fun x -> rest
>>| fun r -> x :: r
let address_list =
obs_addr_list
<|> (address >>= fun x -> (many (char ',' *> address)) >>| fun r -> x :: r)
let is_digit = function '0' .. '9' -> true | _ -> false
let obs_hour =
(option () Rfc822.cfws)
*> repeat (Some 2) (Some 2) is_digit
<* (option () Rfc822.cfws)
>>| int_of_string
let obs_minute =
(option () Rfc822.cfws)
*> repeat (Some 2) (Some 2) is_digit
>>| int_of_string
let obs_second =
(option () Rfc822.cfws)
*> repeat (Some 2) (Some 2) is_digit
>>| int_of_string
let hour = obs_hour <|> (repeat (Some 2) (Some 2) is_digit >>| int_of_string)
let minute = obs_minute <|> (repeat (Some 2) (Some 2) is_digit >>| int_of_string)
let second = obs_second <|> (repeat (Some 2) (Some 2) is_digit >>| int_of_string)
let obs_year =
(option () Rfc822.cfws)
*> (repeat (Some 2) None is_digit)
<* (option () Rfc822.cfws)
>>| int_of_string
let year =
(Rfc822.fws
*> (repeat (Some 4) None is_digit)
<* Rfc822.fws
>>| int_of_string)
<|> obs_year
let obs_day =
(option () Rfc822.cfws)
*> (repeat (Some 1) (Some 2) is_digit)
<* (option () Rfc822.cfws)
>>| int_of_string
let day =
obs_day
<|> ((option (false, false, false) Rfc822.fws)
*> (repeat (Some 1) (Some 2) is_digit)
<* Rfc822.fws
>>| int_of_string)
let month =
let string s = string (fun x -> x) s in
(string "Jan" *> return Jan)
<|> (string "Feb" *> return Feb)
<|> (string "Mar" *> return Mar)
<|> (string "Apr" *> return Apr)
<|> (string "May" *> return May)
<|> (string "Jun" *> return Jun)
<|> (string "Jul" *> return Jul)
<|> (string "Aug" *> return Aug)
<|> (string "Sep" *> return Sep)
<|> (string "Oct" *> return Oct)
<|> (string "Nov" *> return Nov)
<|> (string "Dec" *> return Dec)
let day_name =
let string s = string (fun x -> x) s in
(string "Mon" *> return Mon)
<|> (string "Tue" *> return Tue)
<|> (string "Wed" *> return Wed)
<|> (string "Thu" *> return Thu)
<|> (string "Fri" *> return Fri)
<|> (string "Sat" *> return Sat)
<|> (string "Sun" *> return Sun)
let obs_day_of_week =
(option () Rfc822.cfws)
*> day_name
<* (option () Rfc822.cfws)
let day_of_week =
obs_day_of_week
<|> ((option (false, false, false) Rfc822.fws) *> day_name)
let date =
lift3 (fun day month year -> (day, month, year)) day month year
let time_of_day =
hour
>>= fun hour -> char ':' *> minute
>>= fun minute -> option None ((option () Rfc822.cfws)
*> char ':'
*> second
>>| fun second -> Some second)
>>| fun second -> (hour, minute, second)
let is_military_zone = function
| '\065' .. '\073'
| '\075' .. '\090'
| '\097' .. '\105'
| '\107' .. '\122' -> true
| _ -> false
let obs_zone =
let string s = string (fun x -> x) s in
(string "UT" *> return UT)
<|> (string "GMT" *> return GMT)
<|> (string "EST" *> return EST)
<|> (string "EDT" *> return EDT)
<|> (string "CST" *> return CST)
<|> (string "CDT" *> return CDT)
<|> (string "MST" *> return MST)
<|> (string "MDT" *> return MDT)
<|> (string "PST" *> return PST)
<|> (string "PDT" *> return PDT)
<|> (satisfy is_military_zone >>= fun z -> return (Military_zone z))
let zone =
(Rfc822.fws *> satisfy (function '+' | '-' -> true | _ -> false)
>>= fun sign -> repeat (Some 4) (Some 4) is_digit
>>| fun zone ->
if sign = '-'
then TZ (- (int_of_string zone))
else TZ (int_of_string zone))
<|> ((option () Rfc822.cfws) *> obs_zone)
let time = lift2 (fun time zone -> (time, zone)) time_of_day zone
let date_time =
lift3
(fun day date (time, zone) -> { day; date; time; zone; })
(option None (day_of_week >>= fun day -> char ',' *> return (Some day)))
date time
<* (option () Rfc822.cfws)
let is_obs_utext = function
| '\000' -> true
| c -> Rfc822.is_obs_no_ws_ctl c || Rfc822.is_vchar c
let obs_unstruct : unstructured t =
let many' p = fix (fun m -> (lift2 (fun _ r -> r + 1) p m) <|> return 0) in
let word =
(Rfc2047.inline_encoded_string >>| fun e -> `Encoded e)
<|> (Rfc6532.str is_obs_utext >>| fun e -> `Text e) in
let safe_lfcr =
many' (char '\n') >>= fun lf ->
many' (char '\r') >>= fun cr ->
peek_chr >>= fun chr -> match lf, cr, chr with
| 0, 0, _ -> return []
| n, 0, _ -> return [`LF n]
| n, 1, Some '\n' ->
{ f = fun i s _fail succ ->
Input.rollback i (Internal_buffer.from_string ~proof:(Input.proof i) "\r");
succ i s () } *> return (if n <> 0 then [`LF n] else [])
| n, m, Some '\n' ->
{ f = fun i s _fail succ ->
Input.rollback i (Internal_buffer.from_string ~proof:(Input.proof i) "\r");
succ i s () } *> return (if n <> 0 then [`LF n; `CR (m - 1)] else [`CR (m - 1)])
| n, _m, _ -> return [`LF n; `CR 128]
in
many
((safe_lfcr >>= fun pre -> many (word >>= fun word -> safe_lfcr >>| fun rst -> word :: rst) >>| fun rst -> List.concat (pre :: rst))
<|> (Rfc822.fws >>| function
| true, true, true -> [`WSP; `CRLF; `WSP]
| false, true, true -> [`CRLF; `WSP]
| true, true, false -> [`WSP; `CRLF]
| false, true, false -> [`CRLF]
| true, false, true -> [`WSP; `WSP]
| true, false, false
| false, false, true -> [`WSP]
| false, false, false -> []))
>>| List.concat
let make n f =
let rec aux acc = function
| 0 -> List.rev acc
| n -> aux (f n :: acc) (n - 1)
in
aux [] n
let unstructured =
let many' p = fix (fun m -> (lift2 (fun _ r -> r + 1) p m) <|> return 0) in
obs_unstruct
<|> (many (option (false, false, false) Rfc822.fws
>>= fun (has_wsp, has_crlf, has_wsp') -> Rfc6532.str Rfc822.is_vchar
>>| fun text -> match has_wsp, has_crlf, has_wsp' with
| true, true, true -> [`WSP; `CRLF; `WSP; `Text text]
| false, true, true -> [`CRLF; `WSP; `Text text]
| true, true, false -> [`WSP; `CRLF; `Text text]
| false, true, false -> [`CRLF; `Text text]
| true, false, true -> [`WSP; `WSP; `Text text]
| true, false, false
| false, false, true -> [`WSP; `Text text]
| false, false, false -> [`Text text])
>>= fun pre -> many' (char '\x09' <|> char '\x20')
>>| fun n -> List.concat pre @ make n (fun _ -> `WSP))
let phrase_or_msg_id =
many ((phrase >>| fun v -> `Phrase v) <|> (Rfc822.msg_id >>| fun v -> `MsgID v))
let obs_phrase_list =
(option [] (phrase <|> (Rfc822.cfws *> return [])))
>>= fun pre -> many (char ',' *> (option [] (phrase <|> (Rfc822.cfws *> return []))))
>>| fun rst -> (pre :: rst)
let keywords =
let sep s p = fix (fun m -> lift2 (fun x r -> x :: r) p ((s *> m) <|> return [])) in
obs_phrase_list
<|> (sep (char ',') phrase)
let is_ftext = function
| '\033' .. '\057'
| '\059' .. '\126' -> true
| _ -> false
let implode l =
let s = Bytes.create (List.length l) in
let rec aux i = function
| [] -> s
| x :: r -> Bytes.set s i x; aux (i + 1) r
in
aux 0 l
let received_token =
(addr_spec >>| fun (local, domain) -> `Addr (local, (domain, [])))
<|> (angle_addr >>| fun v -> `Addr v)
<|> (domain >>| fun v -> `Domain v)
<|> (Rfc822.word >>| fun v -> `Word v)
let received =
many received_token
>>= fun lst -> option None (char ';' *> date_time >>| fun v -> Some v)
>>| fun rst -> (lst, rst)
let path =
((angle_addr >>| fun v -> Some v)
<|> (option () Rfc822.cfws
*> char '<'
*> option () Rfc822.cfws
*> char '>'
*> option () Rfc822.cfws
*> return None))
<|> (addr_spec >>| fun (local, domain) -> Some (local, (domain, [])))
let field_name =
one (satisfy is_ftext) >>| implode
let trace path =
let r =
string
String.lowercase_ascii
"Received"
*> (many (satisfy (function '\x09' | '\x20' -> true | _ -> false)))
*> char ':'
*> received <* Rfc822.crlf
in match path with
(* we recognize Return-Path *)
| Some path -> one r >>| fun traces -> (path, traces)
(* we recognize Received *)
| None -> received <* Rfc822.crlf >>= fun pre -> many r >>| fun rst -> (None, pre :: rst)
let field extend field_name =
match String.lowercase_ascii field_name with
| "date" -> date_time <* Rfc822.crlf >>| fun v -> `Date v
| "from" -> mailbox_list <* Rfc822.crlf >>| fun v -> `From v
| "sender" -> mailbox <* Rfc822.crlf >>| fun v -> `Sender v
| "reply-to" -> address_list <* Rfc822.crlf >>| fun v -> `ReplyTo v
| "to" -> address_list <* Rfc822.crlf >>| fun v -> `To v
| "cc" -> address_list <* Rfc822.crlf >>| fun v -> `Cc v
| "bcc" -> address_list <* Rfc822.crlf >>| fun v -> `Bcc v
| "message-id" -> Rfc822.msg_id <* Rfc822.crlf >>| fun v -> `MessageID v
| "in-reply-to" -> phrase_or_msg_id <* Rfc822.crlf >>| fun v -> `InReplyTo v
| "references" -> phrase_or_msg_id <* Rfc822.crlf >>| fun v -> `References v
| "subject" -> unstructured <* Rfc822.crlf >>| fun v -> `Subject v
| "comments" -> unstructured <* Rfc822.crlf >>| fun v -> `Comments v
| "keywords" -> keywords <* Rfc822.crlf >>| fun v -> `Keywords v
| "resent-date" -> date_time <* Rfc822.crlf >>| fun v -> `ResentDate v
| "resent-from" -> mailbox_list <* Rfc822.crlf >>| fun v -> `ResentFrom v
| "resent-sender" -> mailbox <* Rfc822.crlf >>| fun v -> `ResentSender v
| "resent-to" -> address_list <* Rfc822.crlf >>| fun v -> `ResentTo v
| "resent-cc" -> address_list <* Rfc822.crlf >>| fun v -> `ResentCc v
| "resent-bcc" -> address_list <* Rfc822.crlf >>| fun v -> `ResentBcc v
| "resent-message-id" -> Rfc822.msg_id <* Rfc822.crlf >>| fun v -> `ResentMessageID v
| "resent-reply-to" -> address_list <* Rfc822.crlf >>| fun v -> `ResentReplyTo v
| "received" -> trace None >>| fun v -> `Trace v
| "return-path" -> path <* Rfc822.crlf
>>= fun v -> trace (Some v)
>>| fun v -> `Trace v
| _ ->
((extend field_name)
<|> (unstructured <* Rfc822.crlf >>| fun v -> `Field (field_name, v)))
let sp = Format.sprintf
let field extend field_name =
(field extend field_name)
<|> ((unstructured <* Rfc822.crlf >>| fun v -> `Unsafe (field_name, v)) <?> (sp "Unsafe %s" field_name))
type err += Nothing_to_do
let skip =
let fix' f =
let rec u a = lazy (f r a)
and r a = { f = fun i s fail succ ->
Lazy.(force (u a)).f i s fail succ }
in r
in
{ f = fun i s fail' succ ->
let buffer = Buffer.create 16 in
let consume =
{ f = fun i s _fail succ ->
let n = Input.transmit i @@ fun buff off len ->
let len' = locate buff off len ((<>) '\r') in
Buffer.add_bytes buffer (Internal_buffer.sub_string buff off len');
len'
in
succ i s n }
in
let succ' i s () =
succ i s (Buffer.contents buffer) in
let r = (fix' @@ fun m consumed -> consume >>= fun n -> peek_chr >>= fun chr ->
match consumed + n, chr with
| 0, _ -> fail Nothing_to_do
| n, Some _ -> (Rfc822.crlf <|> m n)
| _n, None -> return ()) in
(r 0).f i s fail' succ' }
let header extend =
many ((field_name
<* (many (satisfy (function '\x09' | '\x20' -> true | _ -> false)))
<* char ':'
>>= fun field_name -> field extend (Bytes.to_string field_name))
<|> (skip >>| fun v -> `Skip v))
let line buffer boundary =
{ f = fun i s _fail succ ->
let store buff off len =
let len' = locate buff off len ((<>) '\r') in
Buffer.add_bytes buffer (Internal_buffer.sub_string buff off len');
len'
in
let _ = Input.transmit i store in
succ i s () } *> peek_chr >>= function
| None -> return (`End false)
| Some '\r' ->
(boundary *> return (`End true))
<|> (Rfc822.crlf *> { f = fun i s _fail succ ->
Buffer.add_char buffer '\n';
succ i s `Continue })
<|> (advance 1 *> { f = fun i s _fail succ ->
Buffer.add_char buffer '\r';
succ i s `Continue })
| Some _chr -> return `Continue
let decode boundary rollback buffer =
(fix @@ fun m -> line buffer boundary >>= function
| `End r -> return (r, Buffer.to_bytes buffer)
| _ -> m)
>>= function
| true, content -> rollback *> return content
| false, content -> return content
let decode boundary rollback =
decode boundary rollback (Buffer.create 16)
let decode boundary rollback =
{ f = fun i s fail succ - >
let buffer = Buffer.create 16 in
let store buff off len =
let len ' = locate buff off len ( ( < > ) ' \r ' ) in
Buffer.add_string buffer ( ) ;
len '
in
( fix @@ fun m - >
{ f = fun i s fail succ - >
let n = Input.transmit i store in
succ i s n } * > peek_chr > > = function
| Some ' \r ' - >
( boundary * > return ( true , Buffer.contents buffer ) )
< | > ( Rfc822.crlf > > = fun ( ) - > Buffer.add_char buffer ' \n ' ;
m )
< | > ( advance 1 > > = fun ( ) - > Buffer.add_char buffer ' \r ' ;
m )
| Some chr - > m
| None - > return ( false , Buffer.contents buffer)).f i s fail succ }
> > = function
| true , content - > rollback * > return content
| false , content - > return content
let decode boundary rollback =
{ f = fun i s fail succ ->
let buffer = Buffer.create 16 in
let store buff off len =
let len' = locate buff off len ((<>) '\r') in
Buffer.add_string buffer (Internal_buffer.sub_string buff off len');
len'
in
(fix @@ fun m ->
{ f = fun i s fail succ ->
let n = Input.transmit i store in
succ i s n } *> peek_chr >>= function
| Some '\r' ->
(boundary *> return (true, Buffer.contents buffer))
<|> (Rfc822.crlf >>= fun () -> Buffer.add_char buffer '\n';
m)
<|> (advance 1 >>= fun () -> Buffer.add_char buffer '\r';
m)
| Some chr -> m
| None -> return (false, Buffer.contents buffer)).f i s fail succ }
>>= function
| true, content -> rollback *> return content
| false, content -> return content
*)
| null | https://raw.githubusercontent.com/oklm-wsh/MrMime/4d2a9dc75905927a092c0424cff7462e2b26bb96/lib/rfc5322.ml | ocaml | we recognize Return-Path
we recognize Received | let locate buff off len f =
let idx = ref 0 in
while !idx < len && f (Internal_buffer.get buff (off + !idx))
do incr idx done;
!idx
type phrase =
[ `Dot
| `Word of Rfc822.word
| `Encoded of (string * Rfc2047.raw) ] list
type domain =
[ `Domain of string list
| `Literal of Rfc5321.literal_domain ]
type mailbox =
{ name : phrase option
; local : Rfc822.local
; domain : domain * domain list }
type group =
{ name : phrase
; mailbox : mailbox list }
type address = [ `Group of group | `Mailbox of mailbox ]
type month =
| Jan | Feb | Mar | Apr | May | Jun
| Jul | Aug | Sep | Oct | Nov | Dec
type day =
| Mon | Tue | Wed
| Thu | Fri | Sat
| Sun
type zone =
| UT | GMT
| EST | EDT
| CST | CDT
| MST | MDT
| PST | PDT
| Military_zone of char
| TZ of int
type date =
{ day : day option
; date : int * month * int
; time : int * int * int option
; zone : zone }
type unstructured =
[ `Text of string | `CR of int | `LF of int | `CRLF | `WSP
| `Encoded of (string * Rfc2047.raw) ] list
type phrase_or_msg_id =
[ `Phrase of phrase | `MsgID of Rfc822.msg_id ]
type resent =
[ `ResentDate of date
| `ResentFrom of mailbox list
| `ResentSender of mailbox
| `ResentTo of address list
| `ResentCc of address list
| `ResentBcc of address list
| `ResentMessageID of Rfc822.msg_id
| `ResentReplyTo of address list ]
type trace =
[ `Trace of ((Rfc822.local * (domain * domain list)) option
* ([ `Addr of Rfc822.local * (domain * domain list)
| `Domain of domain
| `Word of Rfc822.word ] list * date option) list) ]
type field_header =
[ `Date of date
| `From of mailbox list
| `Sender of mailbox
| `ReplyTo of address list
| `To of address list
| `Cc of address list
| `Bcc of address list
| `MessageID of Rfc822.msg_id
| `InReplyTo of phrase_or_msg_id list
| `References of phrase_or_msg_id list
| `Subject of unstructured
| `Comments of unstructured
| `Keywords of phrase list
| `Field of string * unstructured
| `Unsafe of string * unstructured ]
type skip =
[ `Skip of string ]
type field =
[ field_header | resent | trace | skip ]
open Parser
open Parser.Convenience
type err += Incomplete_address_literal
let domain_literal =
(option () Rfc822.cfws)
*> char '['
*> (many ((option (false, false, false) Rfc822.fws)
*> ((Rfc6532.str Rfc822.is_dtext) <|> (Rfc822.quoted_pair >>| String.make 1)))
>>| String.concat "")
<* (option (false, false, false) Rfc822.fws)
<* char ']'
<* (option () Rfc822.cfws)
>>= fun content ->
{ f = fun i s fail succ ->
let b = Input.create_by ~proof:(Input.proof i) (String.length content) in
Input.write b (Internal_buffer.from_string ~proof:(Input.proof b) content) 0 (String.length content);
let compute = function
| Read _ -> fail i s [] Incomplete_address_literal
| Fail (_, err) -> fail i s [] err
| Done v -> succ i s v
in
compute @@ only b Rfc5321.address_literal }
let domain =
(Rfc822.obs_domain >>| fun domain -> `Domain domain)
<|> (domain_literal >>| fun literal -> `Literal literal)
<|> (Rfc822.dot_atom >>| fun domain -> `Domain domain)
let addr_spec =
Rfc822.local_part
>>= fun local -> char '@'
*> domain
>>= fun domain -> return (local, domain)
let word' =
(option () Rfc822.cfws
*> (Rfc2047.inline_encoded_string >>| fun x -> `Encoded x)
<* option () Rfc822.cfws)
<|> (Rfc822.word >>| fun x -> `Word x)
let obs_phrase =
word'
>>= fun first ->
fix (fun m -> (lift2 (function
| (`Dot | `Word _ | `Encoded _) as x -> fun r -> x :: r
| `CFWS -> fun r -> r)
(word'
<|> (char '.' >>| fun _ -> `Dot)
<|> (Rfc822.cfws >>| fun () -> `CFWS))
m)
<|> return [])
>>| fun rest -> first :: rest
let phrase = obs_phrase <|> (one word')
let display_name = phrase
let obs_domain_list =
let first =
fix (fun m -> (lift2 (fun _ _ -> ()) (Rfc822.cfws
<|> (char ',' >>| fun _ -> ())) m)
<|> return ())
in
let rest =
fix (fun m -> (lift2 (function `Sep -> fun r -> r
| `Domain x -> fun r -> x :: r)
(char ','
*> (option () Rfc822.cfws)
*> (option `Sep (char '@'
*> domain
>>| fun x -> `Domain x)))
m)
<|> return [])
in
first *> char '@' *> domain
>>= fun x -> rest
>>| fun r -> x :: r
let obs_route = obs_domain_list <* char ':'
let obs_angle_addr =
(option () Rfc822.cfws)
*> char '<'
*> obs_route
>>= fun domains -> addr_spec
>>= fun (local, domain) -> char '>'
*> (option () Rfc822.cfws)
>>| fun () -> (local, (domain, domains))
let angle_addr =
obs_angle_addr
<|> ((option () Rfc822.cfws)
*> char '<'
*> addr_spec
>>= fun (local, domain) -> char '>'
*> (option () Rfc822.cfws)
>>| fun _ -> (local, (domain, [])))
let name_addr =
(option None (display_name >>| fun x -> Some x))
>>= fun name -> angle_addr
>>| fun addr -> (name, addr)
let mailbox =
(name_addr
<|> (addr_spec >>| fun (local, domain) -> (None, (local, (domain, [])))))
>>| (fun (name, (local, domain)) -> { name; local; domain; })
let obs_mbox_list =
let many' p =
fix (fun m -> (lift2 (fun _ _ -> ()) p m) <|> return ())
in
let rest =
fix (fun m -> (lift2 (function `Mailbox x -> fun r -> x :: r
| `Sep -> fun r -> r)
(char ',' *> (option `Sep ((mailbox >>| fun m -> `Mailbox m)
<|> (Rfc822.cfws >>| fun () -> `Sep))))
m)
<|> return [])
in
(many' ((option () Rfc822.cfws) *> char ','))
*> mailbox
>>= fun x -> rest
>>| fun r -> x :: r
let obs_group_list =
let many' p =
fix (fun m -> (lift2 (fun _ _ -> ()) p m) <|> return ())
in
let one' p =
lift2 (fun _ _ -> ()) p (many' p)
in
one' ((option () Rfc822.cfws) *> char ',') *> (option () Rfc822.cfws)
let mailbox_list =
obs_mbox_list
<|> (mailbox >>= fun x -> (many (char ',' *> mailbox)) >>| fun r -> x :: r)
let group_list =
mailbox_list
<|> (obs_group_list >>| fun () -> [])
<|> (Rfc822.cfws >>| fun () -> [])
let group =
display_name
>>= fun name -> char ':'
*> (option [] group_list <?> "group-list")
>>= fun lst -> char ';'
*> (option () Rfc822.cfws)
>>| fun _ -> { name; mailbox = lst; }
let address =
(group >>| fun g -> `Group g)
<|> (mailbox >>| fun m -> `Mailbox m)
let obs_addr_list =
let many' p =
fix (fun m -> (lift2 (fun _ _ -> ()) p m) <|> return ())
in
let rest =
fix (fun m -> (lift2 (function `Addr x -> fun r -> x :: r
| `Sep -> fun r -> r)
(char ',' *> (option `Sep ((address >>| fun a -> `Addr a)
<|> (Rfc822.cfws >>| fun () -> `Sep))))
m)
<|> return [])
in
(many' ((option () Rfc822.cfws) *> char ','))
*> address
>>= fun x -> rest
>>| fun r -> x :: r
let address_list =
obs_addr_list
<|> (address >>= fun x -> (many (char ',' *> address)) >>| fun r -> x :: r)
let is_digit = function '0' .. '9' -> true | _ -> false
let obs_hour =
(option () Rfc822.cfws)
*> repeat (Some 2) (Some 2) is_digit
<* (option () Rfc822.cfws)
>>| int_of_string
let obs_minute =
(option () Rfc822.cfws)
*> repeat (Some 2) (Some 2) is_digit
>>| int_of_string
let obs_second =
(option () Rfc822.cfws)
*> repeat (Some 2) (Some 2) is_digit
>>| int_of_string
let hour = obs_hour <|> (repeat (Some 2) (Some 2) is_digit >>| int_of_string)
let minute = obs_minute <|> (repeat (Some 2) (Some 2) is_digit >>| int_of_string)
let second = obs_second <|> (repeat (Some 2) (Some 2) is_digit >>| int_of_string)
let obs_year =
(option () Rfc822.cfws)
*> (repeat (Some 2) None is_digit)
<* (option () Rfc822.cfws)
>>| int_of_string
let year =
(Rfc822.fws
*> (repeat (Some 4) None is_digit)
<* Rfc822.fws
>>| int_of_string)
<|> obs_year
let obs_day =
(option () Rfc822.cfws)
*> (repeat (Some 1) (Some 2) is_digit)
<* (option () Rfc822.cfws)
>>| int_of_string
let day =
obs_day
<|> ((option (false, false, false) Rfc822.fws)
*> (repeat (Some 1) (Some 2) is_digit)
<* Rfc822.fws
>>| int_of_string)
let month =
let string s = string (fun x -> x) s in
(string "Jan" *> return Jan)
<|> (string "Feb" *> return Feb)
<|> (string "Mar" *> return Mar)
<|> (string "Apr" *> return Apr)
<|> (string "May" *> return May)
<|> (string "Jun" *> return Jun)
<|> (string "Jul" *> return Jul)
<|> (string "Aug" *> return Aug)
<|> (string "Sep" *> return Sep)
<|> (string "Oct" *> return Oct)
<|> (string "Nov" *> return Nov)
<|> (string "Dec" *> return Dec)
let day_name =
let string s = string (fun x -> x) s in
(string "Mon" *> return Mon)
<|> (string "Tue" *> return Tue)
<|> (string "Wed" *> return Wed)
<|> (string "Thu" *> return Thu)
<|> (string "Fri" *> return Fri)
<|> (string "Sat" *> return Sat)
<|> (string "Sun" *> return Sun)
let obs_day_of_week =
(option () Rfc822.cfws)
*> day_name
<* (option () Rfc822.cfws)
let day_of_week =
obs_day_of_week
<|> ((option (false, false, false) Rfc822.fws) *> day_name)
let date =
lift3 (fun day month year -> (day, month, year)) day month year
let time_of_day =
hour
>>= fun hour -> char ':' *> minute
>>= fun minute -> option None ((option () Rfc822.cfws)
*> char ':'
*> second
>>| fun second -> Some second)
>>| fun second -> (hour, minute, second)
let is_military_zone = function
| '\065' .. '\073'
| '\075' .. '\090'
| '\097' .. '\105'
| '\107' .. '\122' -> true
| _ -> false
let obs_zone =
let string s = string (fun x -> x) s in
(string "UT" *> return UT)
<|> (string "GMT" *> return GMT)
<|> (string "EST" *> return EST)
<|> (string "EDT" *> return EDT)
<|> (string "CST" *> return CST)
<|> (string "CDT" *> return CDT)
<|> (string "MST" *> return MST)
<|> (string "MDT" *> return MDT)
<|> (string "PST" *> return PST)
<|> (string "PDT" *> return PDT)
<|> (satisfy is_military_zone >>= fun z -> return (Military_zone z))
let zone =
(Rfc822.fws *> satisfy (function '+' | '-' -> true | _ -> false)
>>= fun sign -> repeat (Some 4) (Some 4) is_digit
>>| fun zone ->
if sign = '-'
then TZ (- (int_of_string zone))
else TZ (int_of_string zone))
<|> ((option () Rfc822.cfws) *> obs_zone)
let time = lift2 (fun time zone -> (time, zone)) time_of_day zone
let date_time =
lift3
(fun day date (time, zone) -> { day; date; time; zone; })
(option None (day_of_week >>= fun day -> char ',' *> return (Some day)))
date time
<* (option () Rfc822.cfws)
let is_obs_utext = function
| '\000' -> true
| c -> Rfc822.is_obs_no_ws_ctl c || Rfc822.is_vchar c
let obs_unstruct : unstructured t =
let many' p = fix (fun m -> (lift2 (fun _ r -> r + 1) p m) <|> return 0) in
let word =
(Rfc2047.inline_encoded_string >>| fun e -> `Encoded e)
<|> (Rfc6532.str is_obs_utext >>| fun e -> `Text e) in
let safe_lfcr =
many' (char '\n') >>= fun lf ->
many' (char '\r') >>= fun cr ->
peek_chr >>= fun chr -> match lf, cr, chr with
| 0, 0, _ -> return []
| n, 0, _ -> return [`LF n]
| n, 1, Some '\n' ->
{ f = fun i s _fail succ ->
Input.rollback i (Internal_buffer.from_string ~proof:(Input.proof i) "\r");
succ i s () } *> return (if n <> 0 then [`LF n] else [])
| n, m, Some '\n' ->
{ f = fun i s _fail succ ->
Input.rollback i (Internal_buffer.from_string ~proof:(Input.proof i) "\r");
succ i s () } *> return (if n <> 0 then [`LF n; `CR (m - 1)] else [`CR (m - 1)])
| n, _m, _ -> return [`LF n; `CR 128]
in
many
((safe_lfcr >>= fun pre -> many (word >>= fun word -> safe_lfcr >>| fun rst -> word :: rst) >>| fun rst -> List.concat (pre :: rst))
<|> (Rfc822.fws >>| function
| true, true, true -> [`WSP; `CRLF; `WSP]
| false, true, true -> [`CRLF; `WSP]
| true, true, false -> [`WSP; `CRLF]
| false, true, false -> [`CRLF]
| true, false, true -> [`WSP; `WSP]
| true, false, false
| false, false, true -> [`WSP]
| false, false, false -> []))
>>| List.concat
let make n f =
let rec aux acc = function
| 0 -> List.rev acc
| n -> aux (f n :: acc) (n - 1)
in
aux [] n
let unstructured =
let many' p = fix (fun m -> (lift2 (fun _ r -> r + 1) p m) <|> return 0) in
obs_unstruct
<|> (many (option (false, false, false) Rfc822.fws
>>= fun (has_wsp, has_crlf, has_wsp') -> Rfc6532.str Rfc822.is_vchar
>>| fun text -> match has_wsp, has_crlf, has_wsp' with
| true, true, true -> [`WSP; `CRLF; `WSP; `Text text]
| false, true, true -> [`CRLF; `WSP; `Text text]
| true, true, false -> [`WSP; `CRLF; `Text text]
| false, true, false -> [`CRLF; `Text text]
| true, false, true -> [`WSP; `WSP; `Text text]
| true, false, false
| false, false, true -> [`WSP; `Text text]
| false, false, false -> [`Text text])
>>= fun pre -> many' (char '\x09' <|> char '\x20')
>>| fun n -> List.concat pre @ make n (fun _ -> `WSP))
let phrase_or_msg_id =
many ((phrase >>| fun v -> `Phrase v) <|> (Rfc822.msg_id >>| fun v -> `MsgID v))
let obs_phrase_list =
(option [] (phrase <|> (Rfc822.cfws *> return [])))
>>= fun pre -> many (char ',' *> (option [] (phrase <|> (Rfc822.cfws *> return []))))
>>| fun rst -> (pre :: rst)
let keywords =
let sep s p = fix (fun m -> lift2 (fun x r -> x :: r) p ((s *> m) <|> return [])) in
obs_phrase_list
<|> (sep (char ',') phrase)
let is_ftext = function
| '\033' .. '\057'
| '\059' .. '\126' -> true
| _ -> false
let implode l =
let s = Bytes.create (List.length l) in
let rec aux i = function
| [] -> s
| x :: r -> Bytes.set s i x; aux (i + 1) r
in
aux 0 l
let received_token =
(addr_spec >>| fun (local, domain) -> `Addr (local, (domain, [])))
<|> (angle_addr >>| fun v -> `Addr v)
<|> (domain >>| fun v -> `Domain v)
<|> (Rfc822.word >>| fun v -> `Word v)
let received =
many received_token
>>= fun lst -> option None (char ';' *> date_time >>| fun v -> Some v)
>>| fun rst -> (lst, rst)
let path =
((angle_addr >>| fun v -> Some v)
<|> (option () Rfc822.cfws
*> char '<'
*> option () Rfc822.cfws
*> char '>'
*> option () Rfc822.cfws
*> return None))
<|> (addr_spec >>| fun (local, domain) -> Some (local, (domain, [])))
let field_name =
one (satisfy is_ftext) >>| implode
let trace path =
let r =
string
String.lowercase_ascii
"Received"
*> (many (satisfy (function '\x09' | '\x20' -> true | _ -> false)))
*> char ':'
*> received <* Rfc822.crlf
in match path with
| Some path -> one r >>| fun traces -> (path, traces)
| None -> received <* Rfc822.crlf >>= fun pre -> many r >>| fun rst -> (None, pre :: rst)
let field extend field_name =
match String.lowercase_ascii field_name with
| "date" -> date_time <* Rfc822.crlf >>| fun v -> `Date v
| "from" -> mailbox_list <* Rfc822.crlf >>| fun v -> `From v
| "sender" -> mailbox <* Rfc822.crlf >>| fun v -> `Sender v
| "reply-to" -> address_list <* Rfc822.crlf >>| fun v -> `ReplyTo v
| "to" -> address_list <* Rfc822.crlf >>| fun v -> `To v
| "cc" -> address_list <* Rfc822.crlf >>| fun v -> `Cc v
| "bcc" -> address_list <* Rfc822.crlf >>| fun v -> `Bcc v
| "message-id" -> Rfc822.msg_id <* Rfc822.crlf >>| fun v -> `MessageID v
| "in-reply-to" -> phrase_or_msg_id <* Rfc822.crlf >>| fun v -> `InReplyTo v
| "references" -> phrase_or_msg_id <* Rfc822.crlf >>| fun v -> `References v
| "subject" -> unstructured <* Rfc822.crlf >>| fun v -> `Subject v
| "comments" -> unstructured <* Rfc822.crlf >>| fun v -> `Comments v
| "keywords" -> keywords <* Rfc822.crlf >>| fun v -> `Keywords v
| "resent-date" -> date_time <* Rfc822.crlf >>| fun v -> `ResentDate v
| "resent-from" -> mailbox_list <* Rfc822.crlf >>| fun v -> `ResentFrom v
| "resent-sender" -> mailbox <* Rfc822.crlf >>| fun v -> `ResentSender v
| "resent-to" -> address_list <* Rfc822.crlf >>| fun v -> `ResentTo v
| "resent-cc" -> address_list <* Rfc822.crlf >>| fun v -> `ResentCc v
| "resent-bcc" -> address_list <* Rfc822.crlf >>| fun v -> `ResentBcc v
| "resent-message-id" -> Rfc822.msg_id <* Rfc822.crlf >>| fun v -> `ResentMessageID v
| "resent-reply-to" -> address_list <* Rfc822.crlf >>| fun v -> `ResentReplyTo v
| "received" -> trace None >>| fun v -> `Trace v
| "return-path" -> path <* Rfc822.crlf
>>= fun v -> trace (Some v)
>>| fun v -> `Trace v
| _ ->
((extend field_name)
<|> (unstructured <* Rfc822.crlf >>| fun v -> `Field (field_name, v)))
let sp = Format.sprintf
let field extend field_name =
(field extend field_name)
<|> ((unstructured <* Rfc822.crlf >>| fun v -> `Unsafe (field_name, v)) <?> (sp "Unsafe %s" field_name))
type err += Nothing_to_do
let skip =
let fix' f =
let rec u a = lazy (f r a)
and r a = { f = fun i s fail succ ->
Lazy.(force (u a)).f i s fail succ }
in r
in
{ f = fun i s fail' succ ->
let buffer = Buffer.create 16 in
let consume =
{ f = fun i s _fail succ ->
let n = Input.transmit i @@ fun buff off len ->
let len' = locate buff off len ((<>) '\r') in
Buffer.add_bytes buffer (Internal_buffer.sub_string buff off len');
len'
in
succ i s n }
in
let succ' i s () =
succ i s (Buffer.contents buffer) in
let r = (fix' @@ fun m consumed -> consume >>= fun n -> peek_chr >>= fun chr ->
match consumed + n, chr with
| 0, _ -> fail Nothing_to_do
| n, Some _ -> (Rfc822.crlf <|> m n)
| _n, None -> return ()) in
(r 0).f i s fail' succ' }
let header extend =
many ((field_name
<* (many (satisfy (function '\x09' | '\x20' -> true | _ -> false)))
<* char ':'
>>= fun field_name -> field extend (Bytes.to_string field_name))
<|> (skip >>| fun v -> `Skip v))
let line buffer boundary =
{ f = fun i s _fail succ ->
let store buff off len =
let len' = locate buff off len ((<>) '\r') in
Buffer.add_bytes buffer (Internal_buffer.sub_string buff off len');
len'
in
let _ = Input.transmit i store in
succ i s () } *> peek_chr >>= function
| None -> return (`End false)
| Some '\r' ->
(boundary *> return (`End true))
<|> (Rfc822.crlf *> { f = fun i s _fail succ ->
Buffer.add_char buffer '\n';
succ i s `Continue })
<|> (advance 1 *> { f = fun i s _fail succ ->
Buffer.add_char buffer '\r';
succ i s `Continue })
| Some _chr -> return `Continue
let decode boundary rollback buffer =
(fix @@ fun m -> line buffer boundary >>= function
| `End r -> return (r, Buffer.to_bytes buffer)
| _ -> m)
>>= function
| true, content -> rollback *> return content
| false, content -> return content
let decode boundary rollback =
decode boundary rollback (Buffer.create 16)
let decode boundary rollback =
{ f = fun i s fail succ - >
let buffer = Buffer.create 16 in
let store buff off len =
let len ' = locate buff off len ( ( < > ) ' \r ' ) in
Buffer.add_string buffer ( ) ;
len '
in
( fix @@ fun m - >
{ f = fun i s fail succ - >
let n = Input.transmit i store in
succ i s n } * > peek_chr > > = function
| Some ' \r ' - >
( boundary * > return ( true , Buffer.contents buffer ) )
< | > ( Rfc822.crlf > > = fun ( ) - > Buffer.add_char buffer ' \n ' ;
m )
< | > ( advance 1 > > = fun ( ) - > Buffer.add_char buffer ' \r ' ;
m )
| Some chr - > m
| None - > return ( false , Buffer.contents buffer)).f i s fail succ }
> > = function
| true , content - > rollback * > return content
| false , content - > return content
let decode boundary rollback =
{ f = fun i s fail succ ->
let buffer = Buffer.create 16 in
let store buff off len =
let len' = locate buff off len ((<>) '\r') in
Buffer.add_string buffer (Internal_buffer.sub_string buff off len');
len'
in
(fix @@ fun m ->
{ f = fun i s fail succ ->
let n = Input.transmit i store in
succ i s n } *> peek_chr >>= function
| Some '\r' ->
(boundary *> return (true, Buffer.contents buffer))
<|> (Rfc822.crlf >>= fun () -> Buffer.add_char buffer '\n';
m)
<|> (advance 1 >>= fun () -> Buffer.add_char buffer '\r';
m)
| Some chr -> m
| None -> return (false, Buffer.contents buffer)).f i s fail succ }
>>= function
| true, content -> rollback *> return content
| false, content -> return content
*)
|
e286b15d3fb006110eecf25f15d1928f375c9a17193c50dbf94dc7ed6c9e4d88 | pariyatti/kosa | routes.clj | (ns kosa.routes
(:refer-clojure :exclude [resources])
(:require [kosa.library.handler]
[kosa.library.artefacts.image.handler :as image-handler]
[kosa.library.artefacts.image.spec]
[kosa.api.handler :as api-handler]
[kosa.auth.handler :as auth-handler]
[kosa.mobile.handler]
[kosa.mobile.today.pali-word.handler :as pali-word-handler]
[kosa.mobile.today.pali-word.spec]
[kosa.mobile.today.words-of-buddha.handler]
[kosa.mobile.today.doha.handler]
[kosa.mobile.today.stacked-inspiration.handler :as stacked-inspiration-handler]
[kosa.mobile.today.stacked-inspiration.spec]
[kuti.dispatch :refer [resources]]
[kuti.dispatch.json :as dispatch-json]
[muuntaja.core :as m]
[reitit.ring :as rr]
[reitit.coercion.spec :as c]
[reitit.dev.pretty :as pretty]
[ring.util.response :as resp]
[kosa.middleware.validation :refer [wrap-spec-validation]]
[kosa.middleware]
[buddy.auth :as auth]))
(defn pong [_request]
(resp/response "pong"))
(defn redirect [location]
{:status 307
:headers {"Location" location}
:body (str "Redirect to " location)})
(def default-handler
(rr/routes
(rr/create-resource-handler {:path "/uploads" :root "storage"})
(rr/create-resource-handler {:path "/css" :root "public/css"})
(rr/create-resource-handler {:path "/js" :root "public/js"})
(rr/create-resource-handler {:path "/cljs" :root "public/cljs"})
(rr/create-resource-handler {:path "/images" :root "public/images"})
(rr/create-resource-handler {:path "/" :root "public"})
(rr/routes
(rr/redirect-trailing-slash-handler {:method :strip})
(rr/create-default-handler
{:not-found (constantly {:status 404, :body "404: Not Found."})
:method-not-allowed (constantly {:status 405, :body "405: Method Not Allowed."})
:not-acceptable (constantly {:status 406, :body "406: Not Acceptable."})}))))
(def router
"auth exceptions are hard-coded in `kosa.middleware.auth/always-allow?`"
(rr/router
["/" [["" {:name ::root
:handler (fn [req] (if (auth/authenticated? req)
(redirect "/mobile")
(redirect "/login")))}]
["ping" {:name ::ping
:handler pong}]
["status" {:name ::status
:handler api-handler/status}]
["api/v1/today.json" {:name :kosa.routes.api/today
:handler api-handler/today}]
["api/v1/today/pali-words/{id}.json" {:name :kosa.routes.api/show-pali-word
:get kosa.mobile.today.pali-word.handler/show-json}]
["api/v1/today/words-of-buddha/{id}.json" {:name :kosa.routes.api/show-words-of-buddha
:get kosa.mobile.today.words-of-buddha.handler/show-json}]
["api/v1/today/doha/{id}.json" {:name :kosa.routes.api/show-doha
:get kosa.mobile.today.doha.handler/show-json}]
["api/v1/today/stacked-inspiration/{id}.json" {:name :kosa.routes.api/show-stacked-inspiration
:get kosa.mobile.today.stacked-inspiration.handler/show-json}]
["api/v1/search.json" {:name :kosa.routes.api/search
:handler api-handler/search}]
["login" {:name ::login
:handler auth-handler/login}]
["library" [["" {:name ::library-index
:handler kosa.library.handler/index}]
["/artefacts/" (resources :images)]]]
["mobile" [["" {:name ::mobile-index
:handler kosa.mobile.handler/index}]
["/today/" (resources :pali-words
:stacked-inspirations)]]]]]
;; CRUD resources conflict between /new and /:id
;; consider {:conflicting true} instead
{:conflicts nil
;; WARNING: these diffs are very handy, but very slow:
: reitit.middleware/transform reitit.ring.middleware.dev/print-request-diffs
:data {:muuntaja dispatch-json/muuntaja-instance
:coercion c/coercion
:middleware kosa.middleware/router-bundle}
:exception pretty/exception}))
| null | https://raw.githubusercontent.com/pariyatti/kosa/7d2dee40104f2e86d0fe6c2d2d2171b2e059def7/src/kosa/routes.clj | clojure | CRUD resources conflict between /new and /:id
consider {:conflicting true} instead
WARNING: these diffs are very handy, but very slow: | (ns kosa.routes
(:refer-clojure :exclude [resources])
(:require [kosa.library.handler]
[kosa.library.artefacts.image.handler :as image-handler]
[kosa.library.artefacts.image.spec]
[kosa.api.handler :as api-handler]
[kosa.auth.handler :as auth-handler]
[kosa.mobile.handler]
[kosa.mobile.today.pali-word.handler :as pali-word-handler]
[kosa.mobile.today.pali-word.spec]
[kosa.mobile.today.words-of-buddha.handler]
[kosa.mobile.today.doha.handler]
[kosa.mobile.today.stacked-inspiration.handler :as stacked-inspiration-handler]
[kosa.mobile.today.stacked-inspiration.spec]
[kuti.dispatch :refer [resources]]
[kuti.dispatch.json :as dispatch-json]
[muuntaja.core :as m]
[reitit.ring :as rr]
[reitit.coercion.spec :as c]
[reitit.dev.pretty :as pretty]
[ring.util.response :as resp]
[kosa.middleware.validation :refer [wrap-spec-validation]]
[kosa.middleware]
[buddy.auth :as auth]))
(defn pong [_request]
(resp/response "pong"))
(defn redirect [location]
{:status 307
:headers {"Location" location}
:body (str "Redirect to " location)})
(def default-handler
(rr/routes
(rr/create-resource-handler {:path "/uploads" :root "storage"})
(rr/create-resource-handler {:path "/css" :root "public/css"})
(rr/create-resource-handler {:path "/js" :root "public/js"})
(rr/create-resource-handler {:path "/cljs" :root "public/cljs"})
(rr/create-resource-handler {:path "/images" :root "public/images"})
(rr/create-resource-handler {:path "/" :root "public"})
(rr/routes
(rr/redirect-trailing-slash-handler {:method :strip})
(rr/create-default-handler
{:not-found (constantly {:status 404, :body "404: Not Found."})
:method-not-allowed (constantly {:status 405, :body "405: Method Not Allowed."})
:not-acceptable (constantly {:status 406, :body "406: Not Acceptable."})}))))
(def router
"auth exceptions are hard-coded in `kosa.middleware.auth/always-allow?`"
(rr/router
["/" [["" {:name ::root
:handler (fn [req] (if (auth/authenticated? req)
(redirect "/mobile")
(redirect "/login")))}]
["ping" {:name ::ping
:handler pong}]
["status" {:name ::status
:handler api-handler/status}]
["api/v1/today.json" {:name :kosa.routes.api/today
:handler api-handler/today}]
["api/v1/today/pali-words/{id}.json" {:name :kosa.routes.api/show-pali-word
:get kosa.mobile.today.pali-word.handler/show-json}]
["api/v1/today/words-of-buddha/{id}.json" {:name :kosa.routes.api/show-words-of-buddha
:get kosa.mobile.today.words-of-buddha.handler/show-json}]
["api/v1/today/doha/{id}.json" {:name :kosa.routes.api/show-doha
:get kosa.mobile.today.doha.handler/show-json}]
["api/v1/today/stacked-inspiration/{id}.json" {:name :kosa.routes.api/show-stacked-inspiration
:get kosa.mobile.today.stacked-inspiration.handler/show-json}]
["api/v1/search.json" {:name :kosa.routes.api/search
:handler api-handler/search}]
["login" {:name ::login
:handler auth-handler/login}]
["library" [["" {:name ::library-index
:handler kosa.library.handler/index}]
["/artefacts/" (resources :images)]]]
["mobile" [["" {:name ::mobile-index
:handler kosa.mobile.handler/index}]
["/today/" (resources :pali-words
:stacked-inspirations)]]]]]
{:conflicts nil
: reitit.middleware/transform reitit.ring.middleware.dev/print-request-diffs
:data {:muuntaja dispatch-json/muuntaja-instance
:coercion c/coercion
:middleware kosa.middleware/router-bundle}
:exception pretty/exception}))
|
626e9fe59ba3c222a5139c22f4a00256950000b069168dfedfcff81f9301b5a2 | Guest0x0/trebor | Unification.ml |
open Syntax
open Value
open Eval
let rec make_fun n body =
if n = 0
then body
else make_fun (n - 1) (Core.Fun("", body))
let close_value g level value =
Quote.value_to_core g level value
|> make_fun level
|> eval g 0 []
let env_to_elim level env =
let args =
env
|> List.mapi (fun idx (kind, typ, _) -> kind, stuck_local (level - idx - 1))
|> List.filter_map (function (Bound, arg) -> Some arg | _ -> None)
in
List.fold_right (fun arg elim -> App(elim, arg)) args EmptyElim
type renaming =
{ dom : int
; cod : int
; map : (int * value) list }
let empty_renaming = { dom = 0; cod = 0; map = [] }
let add_boundvar ren =
{ dom = ren.dom + 1
; cod = ren.cod + 1
; map = (ren.dom, stuck_local ren.cod) :: ren.map }
exception UnificationFailure
let rec invert_value g value dst ren =
match force g value with
| Stuck(Local lvl, EmptyElim) when not (List.mem_assoc lvl ren.map) ->
{ ren with map = (lvl, dst) :: ren.map }
| Pair(fst, snd) ->
let ren = invert_value g fst (project dst Fst) ren in
invert_value g snd (project dst Snd) ren
| _ ->
raise UnificationFailure
let rec elim_to_renaming g level = function
| EmptyElim ->
{ empty_renaming with dom = level }
| App(elim', arg) ->
let ren = elim_to_renaming g level elim' in
invert_value g arg (stuck_local ren.cod) { ren with cod = ren.cod + 1 }
| Proj(_, _) ->
raise RuntimeError
let rec value_should_be_pruned g ren value =
match force g value with
| Stuck(Local lvl, EmptyElim) -> not (List.mem_assoc lvl ren.map)
| Pair(fst, snd) -> value_should_be_pruned g ren fst
|| value_should_be_pruned g ren snd
| _ -> raise UnificationFailure
let rec rename_value g m ren value =
match force g value with
| Stuck(Meta m', _) when m' = m ->
raise UnificationFailure
| Stuck(Meta m', elim) ->
let (m', elim') = prune_meta g m' (value_should_be_pruned g ren) elim in
rename_elim g m ren (Core.Meta m') elim'
| Stuck(head, elim) ->
rename_elim g m ren (rename_head g m ren head) elim
| Type ulevel ->
Core.Type ulevel
| TyFun(name, kind, a, b) ->
Core.TyFun( name, kind
, rename_value g m ren a
, rename_value g m (add_boundvar ren) (b @@ stuck_local ren.dom))
| Fun(name, f) ->
Core.Fun(name, rename_value g m (add_boundvar ren) (f @@ stuck_local ren.dom))
| TyPair(name, a, b) ->
Core.TyPair( name
, rename_value g m ren a
, rename_value g m (add_boundvar ren) (b @@ stuck_local ren.dom))
| Pair(fst, snd) ->
Core.Pair(rename_value g m ren fst, rename_value g m ren snd)
| TyEq((lhs, lhs_typ), (rhs, rhs_typ)) ->
Core.TyEq( (rename_value g m ren lhs, rename_value g m ren lhs_typ)
, (rename_value g m ren rhs, rename_value g m ren rhs_typ) )
and rename_head g m ren = function
| Local lvl ->
begin match List.assoc lvl ren.map with
| value -> Quote.value_to_core g ren.cod value
| exception Not_found -> raise UnificationFailure
end
| Coe { ulevel; coerced; lhs; rhs; eq } ->
Core.Coe { ulevel
; coerced = rename_value g m ren coerced
; lhs = rename_value g m ren lhs
; rhs = rename_value g m ren rhs
; eq = lazy(rename_value g m ren @@ Lazy.force eq) }
| head ->
Quote.head_to_core g ren.dom head
and rename_elim g m ren headC = function
| EmptyElim ->
headC
| App(elim', arg) ->
Core.App(rename_elim g m ren headC elim', rename_value g m ren arg)
| Proj(elim', field) ->
Core.Proj(rename_elim g m ren headC elim', field)
and prune_meta g m f elim =
let open struct
type state =
{ result_typ : typ
; env : Core.env
; pruned_elim : elimination
; new_meta_elim : elimination
; pruning_ren : renaming }
end in
let rec loop typ elim =
match elim with
| EmptyElim ->
{ result_typ = typ
; env = []
; pruned_elim = EmptyElim
; new_meta_elim = EmptyElim
; pruning_ren = empty_renaming }
| App(elim', argv) ->
let state = loop typ elim' in
begin match force g state.result_typ with
| TyFun(name, _, a, b) ->
if f argv
then
{ result_typ = b (stuck_local state.pruning_ren.dom)
; env =
(Bound, name, rename_value g (-1) state.pruning_ren a) :: state.env
; pruned_elim = state.pruned_elim
; new_meta_elim = state.new_meta_elim
; pruning_ren = { state.pruning_ren with dom = state.pruning_ren.dom + 1 } }
else
{ result_typ = b (stuck_local state.pruning_ren.dom)
; env = state.env
; pruned_elim = App(state.pruned_elim, argv)
; new_meta_elim = App( state.new_meta_elim
, Stuck(Local state.pruning_ren.dom, EmptyElim) )
; pruning_ren = add_boundvar state.pruning_ren }
| _ ->
raise RuntimeError
end
| _ ->
raise RuntimeError
in
let (Free typ | Solved(typ, _)) = g#find_meta m in
let state = loop typ elim in
if state.pruning_ren.dom = state.pruning_ren.cod
then (m, elim)
else
let new_meta_typ =
List.fold_left
(fun ret_typ (_, name, typ) -> Core.TyFun(name, Explicit, typ, ret_typ))
(rename_value g (-1) state.pruning_ren state.result_typ) state.env
|> Eval.eval g 0 []
in
let new_meta = g#fresh_meta new_meta_typ in
let solution =
close_value g state.pruning_ren.dom
@@ Stuck(Meta new_meta, state.new_meta_elim)
in
g#solve_meta m solution;
(new_meta, state.pruned_elim)
let rec discard_defined_vars g env =
match env with
| [] ->
([], empty_renaming)
| (kind, name, typ) :: env' ->
let env', ren = discard_defined_vars g env' in
match kind with
| Bound -> ( (kind, name, rename_value g (-1) ren typ) :: env', add_boundvar ren )
| Defined -> (env', { ren with dom = ren.dom + 1 })
let env_to_tyfun g (env : Value.env) ret_typ =
let env', ren = discard_defined_vars g env in
Eval.eval g 0 []
@@ List.fold_left
(fun ret_typ (_, name, arg_typ) -> Core.TyFun(name, Explicit, arg_typ, ret_typ))
(rename_value g (-1) ren ret_typ) env'
let decompose_pair g meta elim =
let rec loop elim =
match elim with
| EmptyElim ->
begin match g#find_meta meta with
| Free typ -> typ, 0, [], meta, elim
| _ -> raise RuntimeError
end
| App(elim', arg) ->
let typ, level, env, meta', elim' = loop elim' in
begin match Eval.force g typ with
| TyFun(name, _, a, b) ->
( b (stuck_local level)
, level + 1
, (Bound, name, a) :: env
, meta'
, App(elim', arg) )
| _ ->
raise RuntimeError
end
| Proj(elim', field) ->
let typ, level, env, meta', elim' = loop elim' in
begin match typ with
| TyPair(_, fst_typ, snd_typ) ->
let fst_meta = g#fresh_meta (env_to_tyfun g env fst_typ) in
let fstV = Stuck(Meta fst_meta, env_to_elim level env) in
let snd_typ = snd_typ fstV in
let snd_meta = g#fresh_meta (env_to_tyfun g env snd_typ) in
let sndV = Stuck(Meta snd_meta, env_to_elim level env) in
g#solve_meta meta' (close_value g level @@ Pair(fstV, sndV));
begin match field with
| Fst -> (fst_typ, level, env, fst_meta, elim')
| Snd -> (snd_typ, level, env, snd_meta, elim')
end
| _ ->
raise UnificationFailure
end
in
let _, _, _, meta', elim' = loop elim in
meta', elim'
let rec unify_value g level env typ v1 v2 =
match force g typ, force g v1, force g v2 with
| TyFun(name, _, a, b), f1, f2 ->
let var = stuck_local level in
unify_value g (level + 1) ((Bound, name, a) :: env) (b var)
(apply f1 var) (apply f2 var)
| TyPair(_, a, b), p1, p2 ->
let fst1 = project p1 Fst in
let fst2 = project p2 Fst in
unify_value g level env a fst1 fst2;
unify_value g level env (b fst1) (project p1 Snd) (project p2 Snd)
| _, Stuck(Meta m1, elim1), Stuck(Meta m2, elim2) when m1 = m2 ->
let (Free typ | Solved(typ, _)) = g#find_meta m1 in
ignore (unify_elim g level env (Meta m1) typ elim1 elim2)
| _, Stuck(Meta meta, elim), v
| _, v, Stuck(Meta meta, elim) ->
let meta, elim = decompose_pair g meta elim in
let ren = elim_to_renaming g level elim in
let body = rename_value g meta ren v in
g#solve_meta meta (Eval.eval g 0 [] @@ make_fun ren.cod body)
| TyEq _, _, _ ->
()
| Type _, typv1, typv2 ->
unify_typ_aux `Equal g level env typv1 typv2
| Stuck _, Stuck(head1, elim1), Stuck(head2, elim2) ->
let typ = unify_head g level env head1 head2 in
ignore (unify_elim g level env head1 typ elim1 elim2)
| _ ->
raise RuntimeError
and unify_head g level env head1 head2 =
match head1, head2 with
| TopVar(shift1, name1), TopVar(shift2, name2) when shift1 = shift2 && name1 = name2 ->
let (AxiomDecl typ | Definition(typ, _)) = g#find_global name1 in
typ shift1
| Local lvl1, Local lvl2 when lvl1 = lvl2 ->
let (_, _, typ) = List.nth env (level - lvl1 - 1) in
typ
| Coe coe1, Coe coe2 when coe1.ulevel = coe2.ulevel ->
unify_value g level env (Type coe1.ulevel) coe1.lhs coe2.lhs;
unify_value g level env (Type coe1.ulevel) coe1.rhs coe2.rhs;
unify_value g level env coe1.lhs coe1.coerced coe2.coerced;
coe1.rhs
| _ ->
raise UnificationFailure
and unify_elim g level env head head_typ elim1 elim2 =
match elim1, elim2 with
| EmptyElim, EmptyElim ->
head_typ
| App(elim1', arg1), App(elim2', arg2) ->
begin match force g @@ unify_elim g level env head head_typ elim1' elim2' with
| TyFun(_, _, a, b) -> unify_value g level env a arg1 arg2; b arg1
| _ -> raise RuntimeError
end
| Proj(elim1', field1), Proj(elim2', field2) when field1 = field2 ->
begin match force g @@ unify_elim g level env head head_typ elim1' elim2', field1 with
| TyPair(_, a, _), Fst -> a
| TyPair(_, _, b), Snd -> b (Stuck(head, Proj(elim1', Fst)))
| _ -> raise RuntimeError
end
| _ ->
raise UnificationFailure
and unify_typ_aux (mode : [`Subtyp | `Equal]) g level env sub sup =
match force g sub, force g sup with
| Type ulevel1, Type ulevel2 ->
begin match mode with
| `Subtyp when ulevel1 <= ulevel2 -> ()
| `Equal when ulevel1 = ulevel2 -> ()
| _ -> raise UnificationFailure
end
| TyFun(name, kind1, a1, b1), TyFun(_, kind2, a2, b2) when kind1 = kind2 ->
unify_typ_aux mode g level env a2 a1;
let var = stuck_local level in
unify_typ_aux mode g (level + 1) ((Bound, name, a2) :: env) (b1 var) (b2 var)
| TyPair(name, a1, b1), TyPair(_, a2, b2) ->
unify_typ_aux mode g level env a1 a2;
let var = stuck_local level in
unify_typ_aux mode g (level + 1) ((Bound, name, a1) :: env) (b1 var) (b2 var)
| TyEq((lhs1, lhs_typ1), (rhs1, rhs_typ1))
, TyEq((lhs2, lhs_typ2), (rhs2, rhs_typ2)) ->
unify_typ_aux mode g level env lhs_typ1 lhs_typ2;
unify_typ_aux mode g level env rhs_typ1 rhs_typ2;
unify_value g level env lhs_typ1 lhs1 lhs2;
unify_value g level env rhs_typ1 rhs1 rhs2
| Stuck(Meta m1, elim1), Stuck(Meta m2, elim2) when m1 = m2 ->
let (Free typ | Solved(typ, _)) = g#find_meta m1 in
ignore (unify_elim g level env (Meta m1) typ elim1 elim2)
| Stuck(Meta meta, elim), v
| v, Stuck(Meta meta, elim) ->
let meta, elim = decompose_pair g meta elim in
let ren = elim_to_renaming g level elim in
let body = rename_value g meta ren v in
g#solve_meta meta (Eval.eval g 0 [] @@ make_fun ren.cod body)
| Stuck(head1, elim1), Stuck(head2, elim2) ->
let typ = unify_head g level env head1 head2 in
ignore (unify_elim g level env head1 typ elim1 elim2)
| _ ->
raise UnificationFailure
let unify_typ g = unify_typ_aux `Equal g
let subtyp g = unify_typ_aux `Subtyp g
let refine_to_function g level env typ =
match Eval.force g typ with
| TyFun(_, Explicit, a, b) ->
(a, b)
| Stuck(Meta _ , _ ) - >
let arg_meta = ( env_to_tyfun g env @@ Type ulevel ) in
let elim = env_to_elim level env in
let a = Stuck(Type ulevel , Meta ( " " , arg_meta ) , elim ) in
let env ' = ( " " , a , ` Bound ) : : env in
let ret_meta = g#fresh_meta ( env_to_tyfun g env ' @@ Type ulevel ) in
let b v = Stuck(Type ulevel , Meta ( " " , ret_meta ) , App(elim , a , v ) ) in
level ( TyFun ( " " , Explicit , a , b ) ) ;
( a , b )
| Stuck(Meta _, _) ->
let arg_meta = g#fresh_meta (env_to_tyfun g env @@ Type ulevel) in
let elim = env_to_elim level env in
let a = Stuck(Type ulevel, Meta("", arg_meta), elim) in
let env' = ("", a, `Bound) :: env in
let ret_meta = g#fresh_meta (env_to_tyfun g env' @@ Type ulevel) in
let b v = Stuck(Type ulevel, Meta("", ret_meta), App(elim, a, v)) in
subtyp g level typ (TyFun("", Explicit, a, b));
(a, b)
*)
| _ ->
raise UnificationFailure
let refine_to_pair g level env typ =
match Eval.force g typ with
| TyPair(_, a, b) ->
(a, b)
| Stuck(Type ulevel , , _ ) - >
let fst_meta = g#fresh_meta ( env_to_tyfun g env @@ Type ulevel ) in
let elim = env_to_elim level env in
let fst_typ = Stuck(Type ulevel , Meta ( " " , fst_meta ) , elim ) in
let env ' = ( " " , fst_typ , ` Bound ) : : env in
let snd_meta = g#fresh_meta ( env_to_tyfun g env ' @@ Type ulevel ) in
let snd_typ v = Stuck(Type ulevel , Meta ( " " , snd_meta ) , App(elim , fst_typ , v ) ) in
level ( TyPair ( " " , fst_typ , snd_typ ) ) ;
( fst_typ , snd_typ )
| Stuck(Type ulevel, Meta _, _) ->
let fst_meta = g#fresh_meta (env_to_tyfun g env @@ Type ulevel) in
let elim = env_to_elim level env in
let fst_typ = Stuck(Type ulevel, Meta("", fst_meta), elim) in
let env' = ("", fst_typ, `Bound) :: env in
let snd_meta = g#fresh_meta (env_to_tyfun g env' @@ Type ulevel) in
let snd_typ v = Stuck(Type ulevel, Meta("", snd_meta), App(elim, fst_typ, v)) in
subtyp g level typ (TyPair("", fst_typ, snd_typ));
(fst_typ, snd_typ)
*)
| _ ->
raise UnificationFailure
| null | https://raw.githubusercontent.com/Guest0x0/trebor/c6b6c099e3e848979bd8f501d28c4c2f35f1235e/Kernel/Unification.ml | ocaml |
open Syntax
open Value
open Eval
let rec make_fun n body =
if n = 0
then body
else make_fun (n - 1) (Core.Fun("", body))
let close_value g level value =
Quote.value_to_core g level value
|> make_fun level
|> eval g 0 []
let env_to_elim level env =
let args =
env
|> List.mapi (fun idx (kind, typ, _) -> kind, stuck_local (level - idx - 1))
|> List.filter_map (function (Bound, arg) -> Some arg | _ -> None)
in
List.fold_right (fun arg elim -> App(elim, arg)) args EmptyElim
type renaming =
{ dom : int
; cod : int
; map : (int * value) list }
let empty_renaming = { dom = 0; cod = 0; map = [] }
let add_boundvar ren =
{ dom = ren.dom + 1
; cod = ren.cod + 1
; map = (ren.dom, stuck_local ren.cod) :: ren.map }
exception UnificationFailure
let rec invert_value g value dst ren =
match force g value with
| Stuck(Local lvl, EmptyElim) when not (List.mem_assoc lvl ren.map) ->
{ ren with map = (lvl, dst) :: ren.map }
| Pair(fst, snd) ->
let ren = invert_value g fst (project dst Fst) ren in
invert_value g snd (project dst Snd) ren
| _ ->
raise UnificationFailure
let rec elim_to_renaming g level = function
| EmptyElim ->
{ empty_renaming with dom = level }
| App(elim', arg) ->
let ren = elim_to_renaming g level elim' in
invert_value g arg (stuck_local ren.cod) { ren with cod = ren.cod + 1 }
| Proj(_, _) ->
raise RuntimeError
let rec value_should_be_pruned g ren value =
match force g value with
| Stuck(Local lvl, EmptyElim) -> not (List.mem_assoc lvl ren.map)
| Pair(fst, snd) -> value_should_be_pruned g ren fst
|| value_should_be_pruned g ren snd
| _ -> raise UnificationFailure
let rec rename_value g m ren value =
match force g value with
| Stuck(Meta m', _) when m' = m ->
raise UnificationFailure
| Stuck(Meta m', elim) ->
let (m', elim') = prune_meta g m' (value_should_be_pruned g ren) elim in
rename_elim g m ren (Core.Meta m') elim'
| Stuck(head, elim) ->
rename_elim g m ren (rename_head g m ren head) elim
| Type ulevel ->
Core.Type ulevel
| TyFun(name, kind, a, b) ->
Core.TyFun( name, kind
, rename_value g m ren a
, rename_value g m (add_boundvar ren) (b @@ stuck_local ren.dom))
| Fun(name, f) ->
Core.Fun(name, rename_value g m (add_boundvar ren) (f @@ stuck_local ren.dom))
| TyPair(name, a, b) ->
Core.TyPair( name
, rename_value g m ren a
, rename_value g m (add_boundvar ren) (b @@ stuck_local ren.dom))
| Pair(fst, snd) ->
Core.Pair(rename_value g m ren fst, rename_value g m ren snd)
| TyEq((lhs, lhs_typ), (rhs, rhs_typ)) ->
Core.TyEq( (rename_value g m ren lhs, rename_value g m ren lhs_typ)
, (rename_value g m ren rhs, rename_value g m ren rhs_typ) )
and rename_head g m ren = function
| Local lvl ->
begin match List.assoc lvl ren.map with
| value -> Quote.value_to_core g ren.cod value
| exception Not_found -> raise UnificationFailure
end
| Coe { ulevel; coerced; lhs; rhs; eq } ->
Core.Coe { ulevel
; coerced = rename_value g m ren coerced
; lhs = rename_value g m ren lhs
; rhs = rename_value g m ren rhs
; eq = lazy(rename_value g m ren @@ Lazy.force eq) }
| head ->
Quote.head_to_core g ren.dom head
and rename_elim g m ren headC = function
| EmptyElim ->
headC
| App(elim', arg) ->
Core.App(rename_elim g m ren headC elim', rename_value g m ren arg)
| Proj(elim', field) ->
Core.Proj(rename_elim g m ren headC elim', field)
and prune_meta g m f elim =
let open struct
type state =
{ result_typ : typ
; env : Core.env
; pruned_elim : elimination
; new_meta_elim : elimination
; pruning_ren : renaming }
end in
let rec loop typ elim =
match elim with
| EmptyElim ->
{ result_typ = typ
; env = []
; pruned_elim = EmptyElim
; new_meta_elim = EmptyElim
; pruning_ren = empty_renaming }
| App(elim', argv) ->
let state = loop typ elim' in
begin match force g state.result_typ with
| TyFun(name, _, a, b) ->
if f argv
then
{ result_typ = b (stuck_local state.pruning_ren.dom)
; env =
(Bound, name, rename_value g (-1) state.pruning_ren a) :: state.env
; pruned_elim = state.pruned_elim
; new_meta_elim = state.new_meta_elim
; pruning_ren = { state.pruning_ren with dom = state.pruning_ren.dom + 1 } }
else
{ result_typ = b (stuck_local state.pruning_ren.dom)
; env = state.env
; pruned_elim = App(state.pruned_elim, argv)
; new_meta_elim = App( state.new_meta_elim
, Stuck(Local state.pruning_ren.dom, EmptyElim) )
; pruning_ren = add_boundvar state.pruning_ren }
| _ ->
raise RuntimeError
end
| _ ->
raise RuntimeError
in
let (Free typ | Solved(typ, _)) = g#find_meta m in
let state = loop typ elim in
if state.pruning_ren.dom = state.pruning_ren.cod
then (m, elim)
else
let new_meta_typ =
List.fold_left
(fun ret_typ (_, name, typ) -> Core.TyFun(name, Explicit, typ, ret_typ))
(rename_value g (-1) state.pruning_ren state.result_typ) state.env
|> Eval.eval g 0 []
in
let new_meta = g#fresh_meta new_meta_typ in
let solution =
close_value g state.pruning_ren.dom
@@ Stuck(Meta new_meta, state.new_meta_elim)
in
g#solve_meta m solution;
(new_meta, state.pruned_elim)
let rec discard_defined_vars g env =
match env with
| [] ->
([], empty_renaming)
| (kind, name, typ) :: env' ->
let env', ren = discard_defined_vars g env' in
match kind with
| Bound -> ( (kind, name, rename_value g (-1) ren typ) :: env', add_boundvar ren )
| Defined -> (env', { ren with dom = ren.dom + 1 })
let env_to_tyfun g (env : Value.env) ret_typ =
let env', ren = discard_defined_vars g env in
Eval.eval g 0 []
@@ List.fold_left
(fun ret_typ (_, name, arg_typ) -> Core.TyFun(name, Explicit, arg_typ, ret_typ))
(rename_value g (-1) ren ret_typ) env'
let decompose_pair g meta elim =
let rec loop elim =
match elim with
| EmptyElim ->
begin match g#find_meta meta with
| Free typ -> typ, 0, [], meta, elim
| _ -> raise RuntimeError
end
| App(elim', arg) ->
let typ, level, env, meta', elim' = loop elim' in
begin match Eval.force g typ with
| TyFun(name, _, a, b) ->
( b (stuck_local level)
, level + 1
, (Bound, name, a) :: env
, meta'
, App(elim', arg) )
| _ ->
raise RuntimeError
end
| Proj(elim', field) ->
let typ, level, env, meta', elim' = loop elim' in
begin match typ with
| TyPair(_, fst_typ, snd_typ) ->
let fst_meta = g#fresh_meta (env_to_tyfun g env fst_typ) in
let fstV = Stuck(Meta fst_meta, env_to_elim level env) in
let snd_typ = snd_typ fstV in
let snd_meta = g#fresh_meta (env_to_tyfun g env snd_typ) in
let sndV = Stuck(Meta snd_meta, env_to_elim level env) in
g#solve_meta meta' (close_value g level @@ Pair(fstV, sndV));
begin match field with
| Fst -> (fst_typ, level, env, fst_meta, elim')
| Snd -> (snd_typ, level, env, snd_meta, elim')
end
| _ ->
raise UnificationFailure
end
in
let _, _, _, meta', elim' = loop elim in
meta', elim'
let rec unify_value g level env typ v1 v2 =
match force g typ, force g v1, force g v2 with
| TyFun(name, _, a, b), f1, f2 ->
let var = stuck_local level in
unify_value g (level + 1) ((Bound, name, a) :: env) (b var)
(apply f1 var) (apply f2 var)
| TyPair(_, a, b), p1, p2 ->
let fst1 = project p1 Fst in
let fst2 = project p2 Fst in
unify_value g level env a fst1 fst2;
unify_value g level env (b fst1) (project p1 Snd) (project p2 Snd)
| _, Stuck(Meta m1, elim1), Stuck(Meta m2, elim2) when m1 = m2 ->
let (Free typ | Solved(typ, _)) = g#find_meta m1 in
ignore (unify_elim g level env (Meta m1) typ elim1 elim2)
| _, Stuck(Meta meta, elim), v
| _, v, Stuck(Meta meta, elim) ->
let meta, elim = decompose_pair g meta elim in
let ren = elim_to_renaming g level elim in
let body = rename_value g meta ren v in
g#solve_meta meta (Eval.eval g 0 [] @@ make_fun ren.cod body)
| TyEq _, _, _ ->
()
| Type _, typv1, typv2 ->
unify_typ_aux `Equal g level env typv1 typv2
| Stuck _, Stuck(head1, elim1), Stuck(head2, elim2) ->
let typ = unify_head g level env head1 head2 in
ignore (unify_elim g level env head1 typ elim1 elim2)
| _ ->
raise RuntimeError
and unify_head g level env head1 head2 =
match head1, head2 with
| TopVar(shift1, name1), TopVar(shift2, name2) when shift1 = shift2 && name1 = name2 ->
let (AxiomDecl typ | Definition(typ, _)) = g#find_global name1 in
typ shift1
| Local lvl1, Local lvl2 when lvl1 = lvl2 ->
let (_, _, typ) = List.nth env (level - lvl1 - 1) in
typ
| Coe coe1, Coe coe2 when coe1.ulevel = coe2.ulevel ->
unify_value g level env (Type coe1.ulevel) coe1.lhs coe2.lhs;
unify_value g level env (Type coe1.ulevel) coe1.rhs coe2.rhs;
unify_value g level env coe1.lhs coe1.coerced coe2.coerced;
coe1.rhs
| _ ->
raise UnificationFailure
and unify_elim g level env head head_typ elim1 elim2 =
match elim1, elim2 with
| EmptyElim, EmptyElim ->
head_typ
| App(elim1', arg1), App(elim2', arg2) ->
begin match force g @@ unify_elim g level env head head_typ elim1' elim2' with
| TyFun(_, _, a, b) -> unify_value g level env a arg1 arg2; b arg1
| _ -> raise RuntimeError
end
| Proj(elim1', field1), Proj(elim2', field2) when field1 = field2 ->
begin match force g @@ unify_elim g level env head head_typ elim1' elim2', field1 with
| TyPair(_, a, _), Fst -> a
| TyPair(_, _, b), Snd -> b (Stuck(head, Proj(elim1', Fst)))
| _ -> raise RuntimeError
end
| _ ->
raise UnificationFailure
and unify_typ_aux (mode : [`Subtyp | `Equal]) g level env sub sup =
match force g sub, force g sup with
| Type ulevel1, Type ulevel2 ->
begin match mode with
| `Subtyp when ulevel1 <= ulevel2 -> ()
| `Equal when ulevel1 = ulevel2 -> ()
| _ -> raise UnificationFailure
end
| TyFun(name, kind1, a1, b1), TyFun(_, kind2, a2, b2) when kind1 = kind2 ->
unify_typ_aux mode g level env a2 a1;
let var = stuck_local level in
unify_typ_aux mode g (level + 1) ((Bound, name, a2) :: env) (b1 var) (b2 var)
| TyPair(name, a1, b1), TyPair(_, a2, b2) ->
unify_typ_aux mode g level env a1 a2;
let var = stuck_local level in
unify_typ_aux mode g (level + 1) ((Bound, name, a1) :: env) (b1 var) (b2 var)
| TyEq((lhs1, lhs_typ1), (rhs1, rhs_typ1))
, TyEq((lhs2, lhs_typ2), (rhs2, rhs_typ2)) ->
unify_typ_aux mode g level env lhs_typ1 lhs_typ2;
unify_typ_aux mode g level env rhs_typ1 rhs_typ2;
unify_value g level env lhs_typ1 lhs1 lhs2;
unify_value g level env rhs_typ1 rhs1 rhs2
| Stuck(Meta m1, elim1), Stuck(Meta m2, elim2) when m1 = m2 ->
let (Free typ | Solved(typ, _)) = g#find_meta m1 in
ignore (unify_elim g level env (Meta m1) typ elim1 elim2)
| Stuck(Meta meta, elim), v
| v, Stuck(Meta meta, elim) ->
let meta, elim = decompose_pair g meta elim in
let ren = elim_to_renaming g level elim in
let body = rename_value g meta ren v in
g#solve_meta meta (Eval.eval g 0 [] @@ make_fun ren.cod body)
| Stuck(head1, elim1), Stuck(head2, elim2) ->
let typ = unify_head g level env head1 head2 in
ignore (unify_elim g level env head1 typ elim1 elim2)
| _ ->
raise UnificationFailure
let unify_typ g = unify_typ_aux `Equal g
let subtyp g = unify_typ_aux `Subtyp g
let refine_to_function g level env typ =
match Eval.force g typ with
| TyFun(_, Explicit, a, b) ->
(a, b)
| Stuck(Meta _ , _ ) - >
let arg_meta = ( env_to_tyfun g env @@ Type ulevel ) in
let elim = env_to_elim level env in
let a = Stuck(Type ulevel , Meta ( " " , arg_meta ) , elim ) in
let env ' = ( " " , a , ` Bound ) : : env in
let ret_meta = g#fresh_meta ( env_to_tyfun g env ' @@ Type ulevel ) in
let b v = Stuck(Type ulevel , Meta ( " " , ret_meta ) , App(elim , a , v ) ) in
level ( TyFun ( " " , Explicit , a , b ) ) ;
( a , b )
| Stuck(Meta _, _) ->
let arg_meta = g#fresh_meta (env_to_tyfun g env @@ Type ulevel) in
let elim = env_to_elim level env in
let a = Stuck(Type ulevel, Meta("", arg_meta), elim) in
let env' = ("", a, `Bound) :: env in
let ret_meta = g#fresh_meta (env_to_tyfun g env' @@ Type ulevel) in
let b v = Stuck(Type ulevel, Meta("", ret_meta), App(elim, a, v)) in
subtyp g level typ (TyFun("", Explicit, a, b));
(a, b)
*)
| _ ->
raise UnificationFailure
let refine_to_pair g level env typ =
match Eval.force g typ with
| TyPair(_, a, b) ->
(a, b)
| Stuck(Type ulevel , , _ ) - >
let fst_meta = g#fresh_meta ( env_to_tyfun g env @@ Type ulevel ) in
let elim = env_to_elim level env in
let fst_typ = Stuck(Type ulevel , Meta ( " " , fst_meta ) , elim ) in
let env ' = ( " " , fst_typ , ` Bound ) : : env in
let snd_meta = g#fresh_meta ( env_to_tyfun g env ' @@ Type ulevel ) in
let snd_typ v = Stuck(Type ulevel , Meta ( " " , snd_meta ) , App(elim , fst_typ , v ) ) in
level ( TyPair ( " " , fst_typ , snd_typ ) ) ;
( fst_typ , snd_typ )
| Stuck(Type ulevel, Meta _, _) ->
let fst_meta = g#fresh_meta (env_to_tyfun g env @@ Type ulevel) in
let elim = env_to_elim level env in
let fst_typ = Stuck(Type ulevel, Meta("", fst_meta), elim) in
let env' = ("", fst_typ, `Bound) :: env in
let snd_meta = g#fresh_meta (env_to_tyfun g env' @@ Type ulevel) in
let snd_typ v = Stuck(Type ulevel, Meta("", snd_meta), App(elim, fst_typ, v)) in
subtyp g level typ (TyPair("", fst_typ, snd_typ));
(fst_typ, snd_typ)
*)
| _ ->
raise UnificationFailure
| |
58586a07dc2752dc5b5d2958e309dc484d792273bc26b99776e8ff00efc28dec | melange-re/melange | undef_regression_test.ml |
external size_of_t : Obj.t -> 'a Js.undefined = "length" [@@bs.get]
let f obj =
if Js.typeof obj = "function" then
()
else
let size = size_of_t obj in
match Js.Undefined.toOption size with
| None -> ()
| Some s -> Js.log s (* TODO: This case should be peepwholed ..*)
| null | https://raw.githubusercontent.com/melange-re/melange/246e6df78fe3b6cc124cb48e5a37fdffd99379ed/jscomp/test/undef_regression_test.ml | ocaml | TODO: This case should be peepwholed .. |
external size_of_t : Obj.t -> 'a Js.undefined = "length" [@@bs.get]
let f obj =
if Js.typeof obj = "function" then
()
else
let size = size_of_t obj in
match Js.Undefined.toOption size with
| None -> ()
|
db45423d06c2bc8dbb17344c4c26f1e0d5037fec3be6412b459dd846d5a35321 | 8c6794b6/guile-tjit | weak-vector.scm | ;;; installed-scm-file
Copyright ( C ) 2003 , 2006 , 2011 , 2014 Free Software Foundation , Inc.
;;;;
;;;; This library is free software; you can redistribute it and/or
;;;; modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 3 of the License , or ( at your option ) any later version .
;;;;
;;;; This library is distributed in the hope that it will be useful,
;;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;;;; Lesser General Public License for more details.
;;;;
You should have received a copy of the GNU Lesser General Public
;;;; License along with this library; if not, write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA
;;;;
(define-module (ice-9 weak-vector)
#:export (make-weak-vector
list->weak-vector
weak-vector
weak-vector?
weak-vector-ref
weak-vector-set!))
(eval-when (load eval compile)
(load-extension (string-append "libguile-" (effective-version))
"scm_init_weak_vector_builtins"))
| null | https://raw.githubusercontent.com/8c6794b6/guile-tjit/9566e480af2ff695e524984992626426f393414f/module/ice-9/weak-vector.scm | scheme | installed-scm-file
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
either
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this library; if not, write to the Free Software
|
Copyright ( C ) 2003 , 2006 , 2011 , 2014 Free Software Foundation , Inc.
version 3 of the License , or ( at your option ) any later version .
You should have received a copy of the GNU Lesser General Public
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA
(define-module (ice-9 weak-vector)
#:export (make-weak-vector
list->weak-vector
weak-vector
weak-vector?
weak-vector-ref
weak-vector-set!))
(eval-when (load eval compile)
(load-extension (string-append "libguile-" (effective-version))
"scm_init_weak_vector_builtins"))
|
a0e4f643a84ef33f33a7f19afdbe8f082f56fa28613c77fe75cb8e25cbc978f0 | potatosalad/erlang-jose | jose_curve25519_libsodium.erl | -*- mode : erlang ; tab - width : 4 ; indent - tabs - mode : 1 ; st - rulers : [ 70 ] -*-
%% vim: ts=4 sw=4 ft=erlang noet
%%%-------------------------------------------------------------------
@author < >
2014 - 2022 ,
%%% @doc
%%%
%%% @end
Created : 02 Jan 2016 by < >
%%%-------------------------------------------------------------------
-module(jose_curve25519_libsodium).
-behaviour(jose_curve25519).
%% jose_curve25519 callbacks
-export([eddsa_keypair/0]).
-export([eddsa_keypair/1]).
-export([eddsa_secret_to_public/1]).
-export([ed25519_sign/2]).
-export([ed25519_verify/3]).
-export([ed25519ctx_sign/3]).
-export([ed25519ctx_verify/4]).
-export([ed25519ph_sign/2]).
-export([ed25519ph_sign/3]).
-export([ed25519ph_verify/3]).
-export([ed25519ph_verify/4]).
-export([x25519_keypair/0]).
-export([x25519_keypair/1]).
-export([x25519_secret_to_public/1]).
-export([x25519_shared_secret/2]).
Macros
-define(FALLBACK_MOD, jose_curve25519_fallback).
%%====================================================================
%% jose_curve25519 callbacks
%%====================================================================
EdDSA
eddsa_keypair() ->
libsodium_crypto_sign_ed25519:keypair().
eddsa_keypair(Seed) ->
libsodium_crypto_sign_ed25519:seed_keypair(Seed).
eddsa_secret_to_public(SecretKey) ->
{PK, _} = libsodium_crypto_sign_ed25519:seed_keypair(SecretKey),
PK.
% Ed25519
ed25519_sign(Message, SecretKey) ->
libsodium_crypto_sign_ed25519:detached(Message, SecretKey).
ed25519_verify(Signature, Message, PublicKey) ->
try libsodium_crypto_sign_ed25519:verify_detached(Signature, Message, PublicKey) of
0 ->
true;
_ ->
false
catch
_:_:_ ->
false
end.
% Ed25519ctx
ed25519ctx_sign(Message, SecretKey, Context) ->
?FALLBACK_MOD:ed25519ctx_sign(Message, SecretKey, Context).
ed25519ctx_verify(Signature, Message, PublicKey, Context) ->
?FALLBACK_MOD:ed25519ctx_verify(Signature, Message, PublicKey, Context).
% Ed25519ph
ed25519ph_sign(Message, SecretKey) ->
State0 = libsodium_crypto_sign_ed25519ph:init(),
State1 = libsodium_crypto_sign_ed25519ph:update(State0, Message),
libsodium_crypto_sign_ed25519ph:final_create(State1, SecretKey).
ed25519ph_sign(Message, SecretKey, Context) ->
?FALLBACK_MOD:ed25519ph_sign(Message, SecretKey, Context).
ed25519ph_verify(Signature, Message, PublicKey) ->
State0 = libsodium_crypto_sign_ed25519ph:init(),
State1 = libsodium_crypto_sign_ed25519ph:update(State0, Message),
try libsodium_crypto_sign_ed25519ph:final_verify(State1, Signature, PublicKey) of
0 ->
true;
_ ->
false
catch
_:_:_ ->
false
end.
ed25519ph_verify(Signature, Message, PublicKey, Context) ->
?FALLBACK_MOD:ed25519ph_verify(Signature, Message, PublicKey, Context).
% X25519
x25519_keypair() ->
libsodium_crypto_box_curve25519xchacha20poly1305:keypair().
x25519_keypair(SK = << _:32/binary >>) ->
PK = x25519_secret_to_public(SK),
{PK, SK}.
x25519_secret_to_public(SecretKey) ->
libsodium_crypto_scalarmult_curve25519:base(SecretKey).
x25519_shared_secret(MySecretKey, YourPublicKey) ->
libsodium_crypto_scalarmult_curve25519:crypto_scalarmult_curve25519(MySecretKey, YourPublicKey).
| null | https://raw.githubusercontent.com/potatosalad/erlang-jose/dbc4074066080692246afe613345ef6becc2a3fe/src/jwa/curve25519/jose_curve25519_libsodium.erl | erlang | vim: ts=4 sw=4 ft=erlang noet
-------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
jose_curve25519 callbacks
====================================================================
jose_curve25519 callbacks
====================================================================
Ed25519
Ed25519ctx
Ed25519ph
X25519 | -*- mode : erlang ; tab - width : 4 ; indent - tabs - mode : 1 ; st - rulers : [ 70 ] -*-
@author < >
2014 - 2022 ,
Created : 02 Jan 2016 by < >
-module(jose_curve25519_libsodium).
-behaviour(jose_curve25519).
-export([eddsa_keypair/0]).
-export([eddsa_keypair/1]).
-export([eddsa_secret_to_public/1]).
-export([ed25519_sign/2]).
-export([ed25519_verify/3]).
-export([ed25519ctx_sign/3]).
-export([ed25519ctx_verify/4]).
-export([ed25519ph_sign/2]).
-export([ed25519ph_sign/3]).
-export([ed25519ph_verify/3]).
-export([ed25519ph_verify/4]).
-export([x25519_keypair/0]).
-export([x25519_keypair/1]).
-export([x25519_secret_to_public/1]).
-export([x25519_shared_secret/2]).
Macros
-define(FALLBACK_MOD, jose_curve25519_fallback).
EdDSA
eddsa_keypair() ->
libsodium_crypto_sign_ed25519:keypair().
eddsa_keypair(Seed) ->
libsodium_crypto_sign_ed25519:seed_keypair(Seed).
eddsa_secret_to_public(SecretKey) ->
{PK, _} = libsodium_crypto_sign_ed25519:seed_keypair(SecretKey),
PK.
ed25519_sign(Message, SecretKey) ->
libsodium_crypto_sign_ed25519:detached(Message, SecretKey).
ed25519_verify(Signature, Message, PublicKey) ->
try libsodium_crypto_sign_ed25519:verify_detached(Signature, Message, PublicKey) of
0 ->
true;
_ ->
false
catch
_:_:_ ->
false
end.
ed25519ctx_sign(Message, SecretKey, Context) ->
?FALLBACK_MOD:ed25519ctx_sign(Message, SecretKey, Context).
ed25519ctx_verify(Signature, Message, PublicKey, Context) ->
?FALLBACK_MOD:ed25519ctx_verify(Signature, Message, PublicKey, Context).
ed25519ph_sign(Message, SecretKey) ->
State0 = libsodium_crypto_sign_ed25519ph:init(),
State1 = libsodium_crypto_sign_ed25519ph:update(State0, Message),
libsodium_crypto_sign_ed25519ph:final_create(State1, SecretKey).
ed25519ph_sign(Message, SecretKey, Context) ->
?FALLBACK_MOD:ed25519ph_sign(Message, SecretKey, Context).
ed25519ph_verify(Signature, Message, PublicKey) ->
State0 = libsodium_crypto_sign_ed25519ph:init(),
State1 = libsodium_crypto_sign_ed25519ph:update(State0, Message),
try libsodium_crypto_sign_ed25519ph:final_verify(State1, Signature, PublicKey) of
0 ->
true;
_ ->
false
catch
_:_:_ ->
false
end.
ed25519ph_verify(Signature, Message, PublicKey, Context) ->
?FALLBACK_MOD:ed25519ph_verify(Signature, Message, PublicKey, Context).
x25519_keypair() ->
libsodium_crypto_box_curve25519xchacha20poly1305:keypair().
x25519_keypair(SK = << _:32/binary >>) ->
PK = x25519_secret_to_public(SK),
{PK, SK}.
x25519_secret_to_public(SecretKey) ->
libsodium_crypto_scalarmult_curve25519:base(SecretKey).
x25519_shared_secret(MySecretKey, YourPublicKey) ->
libsodium_crypto_scalarmult_curve25519:crypto_scalarmult_curve25519(MySecretKey, YourPublicKey).
|
089dee0fb1e4a02d1dfa6b30c561fd41c4f007a705f4dbac239e531377a1b137 | aligusnet/mltool | NeuralNetworkTest.hs | module MachineLearning.NeuralNetworkTest
(
tests
)
where
import Test.Framework (testGroup)
import Test.Framework.Providers.HUnit
import Test.HUnit
import Test.HUnit.Approx
import Test.HUnit.Plus
import MachineLearning.DataSets (dataset2)
import qualified Control.Monad.Random as RndM
import qualified Numeric.LinearAlgebra as LA
import qualified MachineLearning as ML
import qualified MachineLearning.Optimization as Opt
import MachineLearning.Model
import MachineLearning.NeuralNetwork
import qualified MachineLearning.NeuralNetwork.TopologyMaker as TM
(x, y) = ML.splitToXY dataset2
gradientCheckingEps = 0.1
checkGradientTest eps activation loss lambda = do
let nnt = TM.makeTopology activation loss (LA.cols x) 2 [10]
model = NeuralNetwork nnt
thetas = initializeTheta 1511197 nnt
diffs = take 5 $ map (\e -> Opt.checkGradient model lambda x y thetas e) [0.005, 0.0051 ..]
diff = minimum $ filter (not . isNaN) diffs
assertApproxEqual (show thetas) eps 0 diff
xPredict = LA.matrix 2 [ -0.5, 0.5
, 0.2, -0.2
, 1, 1
, 1, 0
, 0, 0]
yExpected = LA.vector [1, 1, 0, 0, 1]
learnTest activation loss minMethod nIters =
let lambda = L2 $ 0.5 / (fromIntegral $ LA.rows x)
x1 = ML.mapFeatures 2 x
nnt = TM.makeTopology activation loss (LA.cols x1) 2 [10]
model = NeuralNetwork nnt
xPredict1 = ML.mapFeatures 2 xPredict
initTheta = initializeTheta 5191711 nnt
(theta, optPath) = Opt.minimize minMethod model 1e-7 nIters lambda x1 y initTheta
yPredicted = hypothesis model xPredict1 theta
js = (LA.toColumns optPath) !! 1
in do
assertVector (show js) 0.01 yExpected yPredicted
tests = [ testGroup "gradient checking" [
testCase "Sigmoid - Logistic: non-zero lambda" $ checkGradientTest 0.1 TM.ASigmoid TM.LLogistic (L2 0.01)
, testCase "Sigmoid - Logistic: zero lambda" $ checkGradientTest 0.1 TM.ASigmoid TM.LLogistic (L2 0)
, testCase "ReLU - Softmax: non-zero lambda" $ checkGradientTest 0.1 TM.ARelu TM.LSoftmax (L2 0.01)
, testCase "ReLU - Softmax: zero lambda" $ checkGradientTest 0.1 TM.ARelu TM.LSoftmax (L2 0)
, testCase "Tanh - MultiSvm: non-zero lambda" $ checkGradientTest 0.1 TM.ATanh TM.LMultiSvm (L2 0.01)
, testCase "Tanh - MultiSvm: zero lambda" $ checkGradientTest 0.1 TM.ATanh TM.LMultiSvm (L2 0)
, testCase "Tanh - MultiSvm: no reg" $ checkGradientTest 0.1 TM.ATanh TM.LMultiSvm RegNone
]
, testGroup "learn" [
testCase "Sigmoid - Logistic: BFGS" $ learnTest TM.ASigmoid TM.LLogistic (Opt.BFGS2 0.01 0.7) 50
, testCase "ReLU - Softmax: BFGS" $ learnTest TM.ARelu TM.LSoftmax (Opt.BFGS2 0.1 0.1) 50
, testCase "Tanh - MultiSvm: BFGS" $ learnTest TM.ATanh TM.LMultiSvm (Opt.BFGS2 0.1 0.1) 50
]
]
| null | https://raw.githubusercontent.com/aligusnet/mltool/92d74c4cc79221bfdcfb76aa058a2e8992ecfe2b/test/MachineLearning/NeuralNetworkTest.hs | haskell | module MachineLearning.NeuralNetworkTest
(
tests
)
where
import Test.Framework (testGroup)
import Test.Framework.Providers.HUnit
import Test.HUnit
import Test.HUnit.Approx
import Test.HUnit.Plus
import MachineLearning.DataSets (dataset2)
import qualified Control.Monad.Random as RndM
import qualified Numeric.LinearAlgebra as LA
import qualified MachineLearning as ML
import qualified MachineLearning.Optimization as Opt
import MachineLearning.Model
import MachineLearning.NeuralNetwork
import qualified MachineLearning.NeuralNetwork.TopologyMaker as TM
(x, y) = ML.splitToXY dataset2
gradientCheckingEps = 0.1
checkGradientTest eps activation loss lambda = do
let nnt = TM.makeTopology activation loss (LA.cols x) 2 [10]
model = NeuralNetwork nnt
thetas = initializeTheta 1511197 nnt
diffs = take 5 $ map (\e -> Opt.checkGradient model lambda x y thetas e) [0.005, 0.0051 ..]
diff = minimum $ filter (not . isNaN) diffs
assertApproxEqual (show thetas) eps 0 diff
xPredict = LA.matrix 2 [ -0.5, 0.5
, 0.2, -0.2
, 1, 1
, 1, 0
, 0, 0]
yExpected = LA.vector [1, 1, 0, 0, 1]
learnTest activation loss minMethod nIters =
let lambda = L2 $ 0.5 / (fromIntegral $ LA.rows x)
x1 = ML.mapFeatures 2 x
nnt = TM.makeTopology activation loss (LA.cols x1) 2 [10]
model = NeuralNetwork nnt
xPredict1 = ML.mapFeatures 2 xPredict
initTheta = initializeTheta 5191711 nnt
(theta, optPath) = Opt.minimize minMethod model 1e-7 nIters lambda x1 y initTheta
yPredicted = hypothesis model xPredict1 theta
js = (LA.toColumns optPath) !! 1
in do
assertVector (show js) 0.01 yExpected yPredicted
tests = [ testGroup "gradient checking" [
testCase "Sigmoid - Logistic: non-zero lambda" $ checkGradientTest 0.1 TM.ASigmoid TM.LLogistic (L2 0.01)
, testCase "Sigmoid - Logistic: zero lambda" $ checkGradientTest 0.1 TM.ASigmoid TM.LLogistic (L2 0)
, testCase "ReLU - Softmax: non-zero lambda" $ checkGradientTest 0.1 TM.ARelu TM.LSoftmax (L2 0.01)
, testCase "ReLU - Softmax: zero lambda" $ checkGradientTest 0.1 TM.ARelu TM.LSoftmax (L2 0)
, testCase "Tanh - MultiSvm: non-zero lambda" $ checkGradientTest 0.1 TM.ATanh TM.LMultiSvm (L2 0.01)
, testCase "Tanh - MultiSvm: zero lambda" $ checkGradientTest 0.1 TM.ATanh TM.LMultiSvm (L2 0)
, testCase "Tanh - MultiSvm: no reg" $ checkGradientTest 0.1 TM.ATanh TM.LMultiSvm RegNone
]
, testGroup "learn" [
testCase "Sigmoid - Logistic: BFGS" $ learnTest TM.ASigmoid TM.LLogistic (Opt.BFGS2 0.01 0.7) 50
, testCase "ReLU - Softmax: BFGS" $ learnTest TM.ARelu TM.LSoftmax (Opt.BFGS2 0.1 0.1) 50
, testCase "Tanh - MultiSvm: BFGS" $ learnTest TM.ATanh TM.LMultiSvm (Opt.BFGS2 0.1 0.1) 50
]
]
| |
261d852f67016c2c7ccb471a1d962b324a0febe333db263994cfb0cacb3188f8 | keechma/keechma-toolbox | forms_test.cljs | (ns keechma.toolbox.forms-test
(:require [cljs.test :refer-macros [deftest testing is async]]
[keechma.toolbox.forms.core :as forms-core]
[keechma.toolbox.forms.controller :as forms-controller]
[keechma.toolbox.forms.mount-controller :as forms-mount-controller]
[keechma.app-state :as app-state]
[keechma.controller :as controller]
[keechma.toolbox.pipeline.core :as pp :refer-macros [pipeline!]]
[cljs.core.async :refer (timeout <!)]
[keechma.toolbox.forms.app :refer [install]]
[keechma.ui-component :as ui]
[keechma.toolbox.test-util :refer [make-container]])
(:require-macros [cljs.core.async.macros :refer [go]]))
(defrecord Form [])
(defmethod forms-core/get-data Form [this app-db form-props]
{:inited true})
(defmethod forms-core/on-mount Form [this app-db form-props]
(pipeline! [value app-db]
(pp/commit! (assoc-in app-db [:kv :on-mount] form-props))))
(defmethod forms-core/on-unmount Form [this app-db form-props]
(pipeline! [value app-db]
(pp/commit! (assoc-in app-db [:kv :on-unmount] form-props))))
(defmethod forms-core/call Form [this app-db form-props args]
(pipeline! [value app-db]
(pp/commit! (assoc-in app-db [:kv :call form-props] args))))
(defn add-mount-target-el []
(let [div (.createElement js/document "div")]
(.appendChild (.-body js/document) div)
div))
(deftest form-flow []
(async done
(let [target-el (add-mount-target-el)
app {:controllers (forms-controller/register {:form (->Form)})
:components {:main {:renderer (fn [ctx])}}
:html-element target-el}
app-state (app-state/start! app)
app-db (:app-db app-state)
form-controller (get-in @app-db [:internal :running-controllers forms-core/id-key])]
(go
(controller/execute form-controller :mount-form [:form :id])
(<! (timeout 1))
(is (= {:keechma.toolbox.forms.core/forms
{:order [[:form :id]]
:states {[:form :id] {:submit-attempted? false
:dirty-paths #{}
:cached-dirty-paths #{}
:data {:inited true}
:initial-data {:inited true}
:errors {}
:state {:type :mounted}}}}
:on-mount [:form :id]}
(:kv @app-db)))
(controller/execute form-controller :call [[:form :id] {:foo :bar}])
(<! (timeout 1))
(is (= {:keechma.toolbox.forms.core/forms
{:order [[:form :id]]
:states {[:form :id] {:submit-attempted? false
:dirty-paths #{}
:cached-dirty-paths #{}
:data {:inited true}
:initial-data {:inited true}
:errors {}
:state {:type :mounted}}}}
:call {[:form :id] {:foo :bar}}
:on-mount [:form :id]}
(:kv @app-db)))
(controller/execute form-controller :unmount-form [:form :id])
(<! (timeout 1))
(is (= {:keechma.toolbox.forms.core/forms {:order []
:states {}}
:call {[:form :id] {:foo :bar}}
:on-mount [:form :id]
:on-unmount [:form :id]}
(:kv @app-db)))
(done)))))
(deftest forms-for-route
(is (= [[:foo :bar] [:foo :baz] [:qux :foo-1]]
(forms-mount-controller/forms-for-route {} {} {:foo (fn [_ _] [:bar :baz])
:qux (fn [_ _] :foo-1)}))))
(defrecord Form1 [])
(def forms-install-forms {:form1 (->Form1)})
(def forms-install-forms-mount {:form1 (fn [_ _] :form)})
(deftest forms-install
(let [[c unmount] (make-container)
app (-> {:html-element c
:components {:main (ui/constructor {:renderer (fn [ctx] [:div])})}}
(install forms-install-forms forms-install-forms-mount))]
(is (= (keys (:controllers app))
[:keechma.toolbox.forms.core/forms :keechma.toolbox.forms.mount-controller/id]))
(is (= (keys (:subscriptions app))
[:keechma.toolbox.forms.core/forms]))
(doseq [[_ c] (:components app)]
(is (= [:keechma.toolbox.forms.core/forms]
(:subscription-deps c))))))
(deftest forms-install-without-mount-controller
(let [[c unmount] (make-container)
app (-> {:html-element c
:components {:main (ui/constructor {:renderer (fn [ctx] [:div])})}}
(install forms-install-forms))]
(is (= (keys (:controllers app))
[:keechma.toolbox.forms.core/forms]))
(is (= (keys (:subscriptions app))
[:keechma.toolbox.forms.core/forms]))
(doseq [[_ c] (:components app)]
(is (= [:keechma.toolbox.forms.core/forms]
(:subscription-deps c))))))
| null | https://raw.githubusercontent.com/keechma/keechma-toolbox/61bf68cbffc1540b56b7b1925fef5e0aa12d60e7/test/cljs/keechma/toolbox/forms_test.cljs | clojure | (ns keechma.toolbox.forms-test
(:require [cljs.test :refer-macros [deftest testing is async]]
[keechma.toolbox.forms.core :as forms-core]
[keechma.toolbox.forms.controller :as forms-controller]
[keechma.toolbox.forms.mount-controller :as forms-mount-controller]
[keechma.app-state :as app-state]
[keechma.controller :as controller]
[keechma.toolbox.pipeline.core :as pp :refer-macros [pipeline!]]
[cljs.core.async :refer (timeout <!)]
[keechma.toolbox.forms.app :refer [install]]
[keechma.ui-component :as ui]
[keechma.toolbox.test-util :refer [make-container]])
(:require-macros [cljs.core.async.macros :refer [go]]))
(defrecord Form [])
(defmethod forms-core/get-data Form [this app-db form-props]
{:inited true})
(defmethod forms-core/on-mount Form [this app-db form-props]
(pipeline! [value app-db]
(pp/commit! (assoc-in app-db [:kv :on-mount] form-props))))
(defmethod forms-core/on-unmount Form [this app-db form-props]
(pipeline! [value app-db]
(pp/commit! (assoc-in app-db [:kv :on-unmount] form-props))))
(defmethod forms-core/call Form [this app-db form-props args]
(pipeline! [value app-db]
(pp/commit! (assoc-in app-db [:kv :call form-props] args))))
(defn add-mount-target-el []
(let [div (.createElement js/document "div")]
(.appendChild (.-body js/document) div)
div))
(deftest form-flow []
(async done
(let [target-el (add-mount-target-el)
app {:controllers (forms-controller/register {:form (->Form)})
:components {:main {:renderer (fn [ctx])}}
:html-element target-el}
app-state (app-state/start! app)
app-db (:app-db app-state)
form-controller (get-in @app-db [:internal :running-controllers forms-core/id-key])]
(go
(controller/execute form-controller :mount-form [:form :id])
(<! (timeout 1))
(is (= {:keechma.toolbox.forms.core/forms
{:order [[:form :id]]
:states {[:form :id] {:submit-attempted? false
:dirty-paths #{}
:cached-dirty-paths #{}
:data {:inited true}
:initial-data {:inited true}
:errors {}
:state {:type :mounted}}}}
:on-mount [:form :id]}
(:kv @app-db)))
(controller/execute form-controller :call [[:form :id] {:foo :bar}])
(<! (timeout 1))
(is (= {:keechma.toolbox.forms.core/forms
{:order [[:form :id]]
:states {[:form :id] {:submit-attempted? false
:dirty-paths #{}
:cached-dirty-paths #{}
:data {:inited true}
:initial-data {:inited true}
:errors {}
:state {:type :mounted}}}}
:call {[:form :id] {:foo :bar}}
:on-mount [:form :id]}
(:kv @app-db)))
(controller/execute form-controller :unmount-form [:form :id])
(<! (timeout 1))
(is (= {:keechma.toolbox.forms.core/forms {:order []
:states {}}
:call {[:form :id] {:foo :bar}}
:on-mount [:form :id]
:on-unmount [:form :id]}
(:kv @app-db)))
(done)))))
(deftest forms-for-route
(is (= [[:foo :bar] [:foo :baz] [:qux :foo-1]]
(forms-mount-controller/forms-for-route {} {} {:foo (fn [_ _] [:bar :baz])
:qux (fn [_ _] :foo-1)}))))
(defrecord Form1 [])
(def forms-install-forms {:form1 (->Form1)})
(def forms-install-forms-mount {:form1 (fn [_ _] :form)})
(deftest forms-install
(let [[c unmount] (make-container)
app (-> {:html-element c
:components {:main (ui/constructor {:renderer (fn [ctx] [:div])})}}
(install forms-install-forms forms-install-forms-mount))]
(is (= (keys (:controllers app))
[:keechma.toolbox.forms.core/forms :keechma.toolbox.forms.mount-controller/id]))
(is (= (keys (:subscriptions app))
[:keechma.toolbox.forms.core/forms]))
(doseq [[_ c] (:components app)]
(is (= [:keechma.toolbox.forms.core/forms]
(:subscription-deps c))))))
(deftest forms-install-without-mount-controller
(let [[c unmount] (make-container)
app (-> {:html-element c
:components {:main (ui/constructor {:renderer (fn [ctx] [:div])})}}
(install forms-install-forms))]
(is (= (keys (:controllers app))
[:keechma.toolbox.forms.core/forms]))
(is (= (keys (:subscriptions app))
[:keechma.toolbox.forms.core/forms]))
(doseq [[_ c] (:components app)]
(is (= [:keechma.toolbox.forms.core/forms]
(:subscription-deps c))))))
| |
2c114da90d8d547e4f10108bc4c4d001975141ec1f179494d7f775351cff2e85 | ctford/goldberg | instrument.clj | (ns goldberg.instrument
(:use
[overtone.live]))
(definst harpsichord [freq 440]
(let [duration 1]
(*
(line:kr 1 1 duration FREE)
(pluck (* (white-noise) (env-gen (perc 0.001 5) :action FREE))
1
1
(/ 1 freq) (* duration 2) 0.25))))
| null | https://raw.githubusercontent.com/ctford/goldberg/9e312d0890393c9ee100f6e7486b33fe0102e5c1/src/goldberg/instrument.clj | clojure | (ns goldberg.instrument
(:use
[overtone.live]))
(definst harpsichord [freq 440]
(let [duration 1]
(*
(line:kr 1 1 duration FREE)
(pluck (* (white-noise) (env-gen (perc 0.001 5) :action FREE))
1
1
(/ 1 freq) (* duration 2) 0.25))))
| |
923ec2aef4ce6966aa6e7212aeab0525a52ab6d7429694671ab442c4bc01ffa2 | thelema/bench | single_write.ml | open Unix
let filename = "sw_temp.tmp"
let gen_data buf bs = for i = 0 to bs-1 do buf.[i] <- Char.chr (i land 0xff); done in
let bench_buf bs = (* n is buffer size *)
let buf = String.create bs in
let fd = openfile filename [O_WRONLY; O_CREAT; O_TRUNC] 0o700 in
for i = 0 to 100 do
gen_data buf bs;
ignore (write fd buf 0 bs);
done;
close fd
let () =
Bench.summarize (Bench.bench_throughput bench_buf [512; 1024; 2048; 4096; 8192; 16384; 32767; 65535])
| null | https://raw.githubusercontent.com/thelema/bench/a0e8231464195399a7e5b6699358d012ad9ba87e/examples/single_write.ml | ocaml | n is buffer size | open Unix
let filename = "sw_temp.tmp"
let gen_data buf bs = for i = 0 to bs-1 do buf.[i] <- Char.chr (i land 0xff); done in
let buf = String.create bs in
let fd = openfile filename [O_WRONLY; O_CREAT; O_TRUNC] 0o700 in
for i = 0 to 100 do
gen_data buf bs;
ignore (write fd buf 0 bs);
done;
close fd
let () =
Bench.summarize (Bench.bench_throughput bench_buf [512; 1024; 2048; 4096; 8192; 16384; 32767; 65535])
|
a092c98453f4b5aa34df622a38ded9b76606b7e8b1b41107e2cb9fb2dec9436a | msszczep/pequod-cljs | dep1ex66.clj | version -lfs.github.com/spec/v1
oid sha256:c18ba60c7efd8622270faf6f6d0b63b0e68bbce7312dbe5695a743187e5b7411
size 160903552
| null | https://media.githubusercontent.com/media/msszczep/pequod-cljs/986ad97fa39d5b83828c07daf80655460b27d2dd/src/clj/pequod_cljs/dep1ex66.clj | clojure | version -lfs.github.com/spec/v1
oid sha256:c18ba60c7efd8622270faf6f6d0b63b0e68bbce7312dbe5695a743187e5b7411
size 160903552
| |
91e66883968f3422185bff83e79a58a57bbbb303dc7746a55a16cb75bca0c1b2 | s-expressionists/Eclector | client.lisp | (cl:in-package #:eclector.parse-result.test)
(def-suite* :eclector.parse-result.client
:in :eclector.parse-result)
;;; Test annotating labeled object references
(defclass parse-result+annotation-client
(eclector.reader.test::label-reference-annotation-mixin
simple-result-client)
())
(test labeled-objects/annotation
"Test annotating labeled object references in parse results."
(let* ((client (make-instance 'parse-result+annotation-client))
(result (eclector.parse-result:read-from-string client "#1=(1 #1#)"))
(object (raw result)))
(is (typep result 'cons-result))
(is (eq 1 (raw (first-child result))))
(let ((reference (raw (first-child (rest-child result)))))
(is (eq :circular-reference (first reference)))
(is (eq object (second reference))))))
Combine custom labeled object representation with annotating
;;; labeled object references.
(defclass parse-result+custom-labeled-objects+annotation-client
(eclector.reader.test::label-reference-annotation-mixin
simple-result-client
eclector.reader.test::custom-labeled-objects-client)
())
(test labeled-objects/custom+annotation
"Test annotating references and custom labeled object in parse results."
(let* ((client (make-instance 'parse-result+custom-labeled-objects+annotation-client))
(result (eclector.parse-result:read-from-string client "#1=(1 #1#)"))
(object (raw result)))
(is (typep result 'cons-result))
(is (eq 1 (raw (first-child result))))
(let ((reference (raw (first-child (rest-child result)))))
(is (eq :circular-reference (first reference)))
(is (eq object (second reference))))))
| null | https://raw.githubusercontent.com/s-expressionists/Eclector/acd141db4efdbd88d57a8fe4f258ffc18cc47baa/test/parse-result/client.lisp | lisp | Test annotating labeled object references
labeled object references. | (cl:in-package #:eclector.parse-result.test)
(def-suite* :eclector.parse-result.client
:in :eclector.parse-result)
(defclass parse-result+annotation-client
(eclector.reader.test::label-reference-annotation-mixin
simple-result-client)
())
(test labeled-objects/annotation
"Test annotating labeled object references in parse results."
(let* ((client (make-instance 'parse-result+annotation-client))
(result (eclector.parse-result:read-from-string client "#1=(1 #1#)"))
(object (raw result)))
(is (typep result 'cons-result))
(is (eq 1 (raw (first-child result))))
(let ((reference (raw (first-child (rest-child result)))))
(is (eq :circular-reference (first reference)))
(is (eq object (second reference))))))
Combine custom labeled object representation with annotating
(defclass parse-result+custom-labeled-objects+annotation-client
(eclector.reader.test::label-reference-annotation-mixin
simple-result-client
eclector.reader.test::custom-labeled-objects-client)
())
(test labeled-objects/custom+annotation
"Test annotating references and custom labeled object in parse results."
(let* ((client (make-instance 'parse-result+custom-labeled-objects+annotation-client))
(result (eclector.parse-result:read-from-string client "#1=(1 #1#)"))
(object (raw result)))
(is (typep result 'cons-result))
(is (eq 1 (raw (first-child result))))
(let ((reference (raw (first-child (rest-child result)))))
(is (eq :circular-reference (first reference)))
(is (eq object (second reference))))))
|
a49bdb9a4712b039d4d70178da63e4bf1eeb3a5cca5fb2c0cb16b6c24e596747 | archhaskell/cblrepo | ConvertDB.hs |
- Copyright 2011 - 2014 Per
-
- Licensed under the Apache License , Version 2.0 ( the " License " ) ;
- you may not use this file except in compliance with the License .
- You may obtain a copy of the License at
-
- -2.0
-
- Unless required by applicable law or agreed to in writing , software
- distributed under the License is distributed on an " AS IS " BASIS ,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
- See the License for the specific language governing permissions and
- limitations under the License .
- Copyright 2011-2014 Per Magnus Therning
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- -2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module ConvertDB where
import Util.Misc
import qualified OldPkgDB as ODB
import qualified PkgDB as NDB
import Control.Monad.Reader
import System.Directory
convertDb :: Command ()
convertDb = do
inDbFn <- asks $ inDbFile . optsCmd . fst
outDbFn <- asks $ outDbFile . optsCmd . fst
dbExist <- liftIO $ doesFileExist inDbFn
when dbExist $ do
newDb <- fmap doConvertDB (liftIO $ ODB.readDb inDbFn)
liftIO $ NDB.saveDb newDb outDbFn
doConvertDB :: ODB.CblDB -> NDB.CblDB
doConvertDB = map doConvert
where
doConvert o
| ODB.isGhcPkg o = NDB.createGhcPkg n v
| ODB.isDistroPkg o = NDB.createDistroPkg n v x r
| ODB.isRepoPkg o = NDB.createRepoPkg n v x d f r
| otherwise = error ""
where
n = ODB.pkgName o
v = ODB.pkgVersion o
x = ODB.pkgXRev o
d = ODB.pkgDeps o
f = ODB.pkgFlags o
r = ODB.pkgRelease o
| null | https://raw.githubusercontent.com/archhaskell/cblrepo/83316afca397b1e5e526a69d360efd9cb260921b/src/ConvertDB.hs | haskell |
- Copyright 2011 - 2014 Per
-
- Licensed under the Apache License , Version 2.0 ( the " License " ) ;
- you may not use this file except in compliance with the License .
- You may obtain a copy of the License at
-
- -2.0
-
- Unless required by applicable law or agreed to in writing , software
- distributed under the License is distributed on an " AS IS " BASIS ,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
- See the License for the specific language governing permissions and
- limitations under the License .
- Copyright 2011-2014 Per Magnus Therning
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- -2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module ConvertDB where
import Util.Misc
import qualified OldPkgDB as ODB
import qualified PkgDB as NDB
import Control.Monad.Reader
import System.Directory
convertDb :: Command ()
convertDb = do
inDbFn <- asks $ inDbFile . optsCmd . fst
outDbFn <- asks $ outDbFile . optsCmd . fst
dbExist <- liftIO $ doesFileExist inDbFn
when dbExist $ do
newDb <- fmap doConvertDB (liftIO $ ODB.readDb inDbFn)
liftIO $ NDB.saveDb newDb outDbFn
doConvertDB :: ODB.CblDB -> NDB.CblDB
doConvertDB = map doConvert
where
doConvert o
| ODB.isGhcPkg o = NDB.createGhcPkg n v
| ODB.isDistroPkg o = NDB.createDistroPkg n v x r
| ODB.isRepoPkg o = NDB.createRepoPkg n v x d f r
| otherwise = error ""
where
n = ODB.pkgName o
v = ODB.pkgVersion o
x = ODB.pkgXRev o
d = ODB.pkgDeps o
f = ODB.pkgFlags o
r = ODB.pkgRelease o
| |
e0b6ceeca284bcf591fa9e441f369c0fc50b3935eef07e6bae5d718927765072 | ghc/ghc | Utils.hs |
# LANGUAGE DeriveFunctor #
# LANGUAGE TypeFamilies #
# OPTIONS_GHC -Wno - incomplete - uni - patterns #
( c ) The University of Glasgow 2006
( c ) The GRASP / AQUA Project , Glasgow University , 1992 - 1999
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1999
-}
-- | Analysis functions over data types. Specifically, detecting recursive types.
--
-- This stuff is only used for source-code decls; it's recorded in interface
-- files for imported data types.
module GHC.Tc.TyCl.Utils(
RolesInfo,
inferRoles,
checkSynCycles,
checkClassCycles,
-- * Implicits
addTyConsToGblEnv, mkDefaultMethodType,
-- * Record selectors
tcRecSelBinds, mkRecSelBinds, mkOneRecordSelector
) where
import GHC.Prelude
import GHC.Tc.Errors.Types
import GHC.Tc.Utils.Monad
import GHC.Tc.Utils.Env
import GHC.Tc.Gen.Bind( tcValBinds )
import GHC.Tc.Utils.TcType
import GHC.Builtin.Types( unitTy )
import GHC.Builtin.Uniques ( mkBuiltinUnique )
import GHC.Hs
import GHC.Core.TyCo.Rep( Type(..), Coercion(..), MCoercion(..), UnivCoProvenance(..) )
import GHC.Core.Multiplicity
import GHC.Core.Predicate
import GHC.Core.Make( rEC_SEL_ERROR_ID )
import GHC.Core.Class
import GHC.Core.Type
import GHC.Core.TyCon
import GHC.Core.ConLike
import GHC.Core.DataCon
import GHC.Core.TyCon.Set
import GHC.Core.Coercion ( ltRole )
import GHC.Utils.Outputable
import GHC.Utils.Panic
import GHC.Utils.Panic.Plain
import GHC.Utils.Misc
import GHC.Utils.FV as FV
import GHC.Data.Maybe
import GHC.Data.Bag
import GHC.Data.FastString
import GHC.Unit.Module
import GHC.Types.Basic
import GHC.Types.Error
import GHC.Types.FieldLabel
import GHC.Types.SrcLoc
import GHC.Types.SourceFile
import GHC.Types.SourceText
import GHC.Types.Name
import GHC.Types.Name.Env
import GHC.Types.Name.Reader ( mkVarUnqual )
import GHC.Types.Id
import GHC.Types.Id.Info
import GHC.Types.Var.Env
import GHC.Types.Var.Set
import GHC.Types.Unique.Set
import GHC.Types.TyThing
import qualified GHC.LanguageExtensions as LangExt
import Language.Haskell.Syntax.Basic (FieldLabelString(..))
import Control.Monad
{-
************************************************************************
* *
Cycles in type synonym declarations
* *
************************************************************************
-}
synonymTyConsOfType :: Type -> [TyCon]
-- Does not look through type synonyms at all.
-- Returns a list of synonym tycons in nondeterministic order.
-- Keep this synchronized with 'expandTypeSynonyms'
synonymTyConsOfType ty
= nonDetNameEnvElts (go ty)
where
The NameEnv does duplicate elim
go (TyConApp tc tys) = go_tc tc `plusNameEnv` go_s tys
go (LitTy _) = emptyNameEnv
go (TyVarTy _) = emptyNameEnv
go (AppTy a b) = go a `plusNameEnv` go b
go (FunTy _ w a b) = go w `plusNameEnv` go a `plusNameEnv` go b
go (ForAllTy _ ty) = go ty
go (CastTy ty co) = go ty `plusNameEnv` go_co co
go (CoercionTy co) = go_co co
-- Note [TyCon cycles through coercions?!]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Although, in principle, it's possible for a type synonym loop
-- could go through a coercion (since a coercion can refer to
-- a TyCon or Type), it doesn't seem possible to actually construct
a Haskell program which tickles this case . Here is an example
-- program which causes a coercion:
--
-- type family Star where
-- Star = Type
--
-- data T :: Star -> Type
-- data S :: forall (a :: Type). T a -> Type
--
Here , the application ' T a ' must first coerce a : : Type to a : : Star ,
-- witnessed by the type family. But if we now try to make Type refer
to a type synonym which in turn refers to Star , we 'll run into
-- trouble: we're trying to define and use the type constructor
-- in the same recursive group. Possibly this restriction will be
-- lifted in the future but for now, this code is "just for completeness
-- sake".
go_mco MRefl = emptyNameEnv
go_mco (MCo co) = go_co co
go_co (Refl ty) = go ty
go_co (GRefl _ ty mco) = go ty `plusNameEnv` go_mco mco
go_co (TyConAppCo _ tc cs) = go_tc tc `plusNameEnv` go_co_s cs
go_co (AppCo co co') = go_co co `plusNameEnv` go_co co'
go_co (ForAllCo _ co co') = go_co co `plusNameEnv` go_co co'
go_co (FunCo { fco_mult = m, fco_arg = a, fco_res = r })
= go_co m `plusNameEnv` go_co a `plusNameEnv` go_co r
go_co (CoVarCo _) = emptyNameEnv
go_co (HoleCo {}) = emptyNameEnv
go_co (AxiomInstCo _ _ cs) = go_co_s cs
go_co (UnivCo p _ ty ty') = go_prov p `plusNameEnv` go ty `plusNameEnv` go ty'
go_co (SymCo co) = go_co co
go_co (TransCo co co') = go_co co `plusNameEnv` go_co co'
go_co (SelCo _ co) = go_co co
go_co (LRCo _ co) = go_co co
go_co (InstCo co co') = go_co co `plusNameEnv` go_co co'
go_co (KindCo co) = go_co co
go_co (SubCo co) = go_co co
go_co (AxiomRuleCo _ cs) = go_co_s cs
go_prov (PhantomProv co) = go_co co
go_prov (ProofIrrelProv co) = go_co co
go_prov (PluginProv _) = emptyNameEnv
go_prov (CorePrepProv _) = emptyNameEnv
go_tc tc | isTypeSynonymTyCon tc = unitNameEnv (tyConName tc) tc
| otherwise = emptyNameEnv
go_s tys = foldr (plusNameEnv . go) emptyNameEnv tys
go_co_s cos = foldr (plusNameEnv . go_co) emptyNameEnv cos
-- | A monad for type synonym cycle checking, which keeps
track of the TyCons which are known to be acyclic , or
-- a failure message reporting that a cycle was found.
newtype SynCycleM a = SynCycleM {
runSynCycleM :: SynCycleState -> Either (SrcSpan, SDoc) (a, SynCycleState) }
deriving (Functor)
TODO : TyConSet is implemented as IntMap over uniques .
But we could get away with something based on
-- since we only check membership, but never extract the
-- elements.
type SynCycleState = TyConSet
instance Applicative SynCycleM where
pure x = SynCycleM $ \state -> Right (x, state)
(<*>) = ap
instance Monad SynCycleM where
m >>= f = SynCycleM $ \state ->
case runSynCycleM m state of
Right (x, state') ->
runSynCycleM (f x) state'
Left err -> Left err
failSynCycleM :: SrcSpan -> SDoc -> SynCycleM ()
failSynCycleM loc err = SynCycleM $ \_ -> Left (loc, err)
-- | Test if a 'Name' is acyclic, short-circuiting if we've
-- seen it already.
checkTyConIsAcyclic :: TyCon -> SynCycleM () -> SynCycleM ()
checkTyConIsAcyclic tc m = SynCycleM $ \s ->
if tc `elemTyConSet` s
then Right ((), s) -- short circuit
else case runSynCycleM m s of
Right ((), s') -> Right ((), extendTyConSet s' tc)
Left err -> Left err
| Checks if any of the passed in ' 's have cycles .
-- Takes the 'Unit' of the home package (as we can avoid
checking those TyCons : cycles never go through foreign packages ) and
the corresponding for each ' ' , so we
-- can give better error messages.
checkSynCycles :: Unit -> [TyCon] -> [LTyClDecl GhcRn] -> TcM ()
checkSynCycles this_uid tcs tyclds =
case runSynCycleM (mapM_ (go emptyTyConSet []) tcs) emptyTyConSet of
Left (loc, err) -> setSrcSpan loc $ failWithTc (mkTcRnUnknownMessage $ mkPlainError noHints err)
Right _ -> return ()
where
-- Try our best to print the LTyClDecl for locally defined things
lcl_decls = mkNameEnv (zip (map tyConName tcs) tyclds)
-- Short circuit if we've already seen this Name and concluded
-- it was acyclic.
go :: TyConSet -> [TyCon] -> TyCon -> SynCycleM ()
go so_far seen_tcs tc =
checkTyConIsAcyclic tc $ go' so_far seen_tcs tc
-- Expand type synonyms, complaining if you find the same
type synonym a second time .
go' :: TyConSet -> [TyCon] -> TyCon -> SynCycleM ()
go' so_far seen_tcs tc
| tc `elemTyConSet` so_far
= failSynCycleM (getSrcSpan (head seen_tcs)) $
sep [ text "Cycle in type synonym declarations:"
, nest 2 (vcat (map ppr_decl seen_tcs)) ]
-- Optimization: we don't allow cycles through external packages,
-- so once we find a non-local name we are guaranteed to not
-- have a cycle.
--
This wo n't hold once we get recursive packages with Backpack ,
-- but for now it's fine.
| not (isHoleModule mod ||
moduleUnit mod == this_uid ||
isInteractiveModule mod)
= return ()
| Just ty <- synTyConRhs_maybe tc =
go_ty (extendTyConSet so_far tc) (tc:seen_tcs) ty
| otherwise = return ()
where
n = tyConName tc
mod = nameModule n
ppr_decl tc =
case lookupNameEnv lcl_decls n of
Just (L loc decl) -> ppr (locA loc) <> colon <+> ppr decl
Nothing -> ppr (getSrcSpan n) <> colon <+> ppr n
<+> text "from external module"
where
n = tyConName tc
go_ty :: TyConSet -> [TyCon] -> Type -> SynCycleM ()
go_ty so_far seen_tcs ty =
mapM_ (go so_far seen_tcs) (synonymTyConsOfType ty)
Note [ Superclass cycle check ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The superclass cycle check for C decides if we can statically
guarantee that expanding C 's superclass cycles transitively is
guaranteed to terminate . This is a Haskell98 requirement ,
but one that we lift with -XUndecidableSuperClasses .
The worry is that a superclass cycle could make the type checker loop .
More precisely , with a constraint ( Given or Wanted )
C .. tyn
one approach is to instantiate all of C 's superclasses , transitively .
We can only do so if that set is finite .
This potential loop occurs only through superclasses . This , for
example , is fine
class C a where
op : : C b = > a - > b - > b
even though C 's full definition uses C.
Making the check static also makes it conservative . Eg
type family F a
class F a = > C a
Here an instance of ( F a ) might mention C :
type instance F [ a ] = C a
and now we 'd have a loop .
The static check works like this , starting with C
* Look at C 's superclass predicates
* If any is a type - function application ,
or is headed by a type variable , fail
* If any has C at the head , fail
* If any has a type class D at the head ,
make the same test with D
A tricky point is : what if there is a type variable at the head ?
Consider this :
class f ( C f ) = > C f
class c = > I d c
and now expand superclasses for constraint ( C I d ):
C I d
-- > I d ( C I d )
-- > C I d
-- > ....
Each step expands superclasses one layer , and clearly does not terminate .
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The superclass cycle check for C decides if we can statically
guarantee that expanding C's superclass cycles transitively is
guaranteed to terminate. This is a Haskell98 requirement,
but one that we lift with -XUndecidableSuperClasses.
The worry is that a superclass cycle could make the type checker loop.
More precisely, with a constraint (Given or Wanted)
C ty1 .. tyn
one approach is to instantiate all of C's superclasses, transitively.
We can only do so if that set is finite.
This potential loop occurs only through superclasses. This, for
example, is fine
class C a where
op :: C b => a -> b -> b
even though C's full definition uses C.
Making the check static also makes it conservative. Eg
type family F a
class F a => C a
Here an instance of (F a) might mention C:
type instance F [a] = C a
and now we'd have a loop.
The static check works like this, starting with C
* Look at C's superclass predicates
* If any is a type-function application,
or is headed by a type variable, fail
* If any has C at the head, fail
* If any has a type class D at the head,
make the same test with D
A tricky point is: what if there is a type variable at the head?
Consider this:
class f (C f) => C f
class c => Id c
and now expand superclasses for constraint (C Id):
C Id
--> Id (C Id)
--> C Id
--> ....
Each step expands superclasses one layer, and clearly does not terminate.
-}
type ClassSet = UniqSet Class

checkClassCycles :: Class -> Maybe SDoc
-- Nothing  <=> ok
-- Just err <=> possible cycle error, to be reported to the user
checkClassCycles cls
  = do { (definite_cycle, err) <- go (unitUniqSet cls)
                                     cls (mkTyVarTys (classTyVars cls))
       ; let herald | definite_cycle = text "Superclass cycle for"
                    | otherwise      = text "Potential superclass cycle for"
       ; return (vcat [ herald <+> quotes (ppr cls)
                      , nest 2 err, hint]) }
  where
    hint = text "Use UndecidableSuperClasses to accept this"

    -- Expand superclasses starting with (C a b), complaining
    -- if you find the same class a second time, or a type function
    -- or predicate headed by a type variable
    --
    -- NB: this code duplicates TcType.transSuperClasses, but
    -- with more error message generation clobber
    -- Make sure the two stay in sync.
    go :: ClassSet -> Class -> [Type] -> Maybe (Bool, SDoc)
    go so_far cls tys = firstJusts $
                        map (go_pred so_far) $
                        immSuperClasses cls tys

    go_pred :: ClassSet -> PredType -> Maybe (Bool, SDoc)
       -- Nothing        <=> ok
       -- Just (True, err)  <=> definite cycle
       -- Just (False, err) <=> possible cycle
    go_pred so_far pred  -- NB: tcSplitTyConApp looks through synonyms
       | Just (tc, tys) <- tcSplitTyConApp_maybe pred
       = go_tc so_far pred tc tys
       | hasTyVarHead pred
       = Just (False, hang (text "one of whose superclass constraints is headed by a type variable:")
                         2 (quotes (ppr pred)))
       | otherwise
       = Nothing

    go_tc :: ClassSet -> PredType -> TyCon -> [Type] -> Maybe (Bool, SDoc)
    go_tc so_far pred tc tys
      | isFamilyTyCon tc
      = Just (False, hang (text "one of whose superclass constraints is headed by a type family:")
                        2 (quotes (ppr pred)))
      | Just cls <- tyConClass_maybe tc
      = go_cls so_far cls tys
      | otherwise   -- Equality predicate, for example
      = Nothing

    go_cls :: ClassSet -> Class -> [Type] -> Maybe (Bool, SDoc)
    go_cls so_far cls tys
       | cls `elementOfUniqSet` so_far
       = Just (True, text "one of whose superclasses is" <+> quotes (ppr cls))
       | isCTupleClass cls
       = go so_far cls tys
       | otherwise
       = do { (b,err) <- go  (so_far `addOneToUniqSet` cls) cls tys
          ; return (b, text "one of whose superclasses is" <+> quotes (ppr cls)
                       $$ err) }
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
Role inference
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Note [ Role inference ]
~~~~~~~~~~~~~~~~~~~~~
The role inference algorithm datatype definitions to infer the roles on the
parameters . Although these roles are stored in the tycons , we can perform this
algorithm on the built tycons , as long as we do n't peek at an as - yet - unknown
roles field ! Ah , the magic of laziness .
First , we choose appropriate initial roles . For families and classes , roles
( including initial roles ) are For datatypes , we start with the role in the
role annotation ( if any ) , or otherwise use Phantom . This is done in
initialRoleEnv1 .
The function irGroup then propagates role information until it reaches a
fixpoint , preferring N over ( R or P ) and R over P. To aid in this , we have a
monad RoleM , which is a combination reader and state monad . In its state are
the current RoleEnv , which gets updated by role propagation , and an update
bit , which we use to know whether or not we 've reached the fixpoint . The
environment of RoleM contains the tycon whose parameters we are inferring , and
a VarEnv from parameters to their positions , so we can update the RoleEnv .
Between tycons , this reader information is missing ; it is added by
addRoleInferenceInfo .
There are two kinds of tycons to consider : algebraic ones ( excluding classes )
and type synonyms . ( Remember , families do n't participate -- all their parameters
are ) An algebraic tycon processes each of its datacons , in turn . Note that
a datacon 's universally quantified parameters might be different from the parent
's parameters , so we use the datacon 's univ parameters in the mapping from
vars to positions . Note also that we do n't want to infer roles for existentials
( they 're all at N , too ) , so we put them in the set of local variables . As an
optimisation , we skip any tycons whose roles are already all , as there
nowhere else for them to go . For synonyms , we just analyse their right - hand sides .
irType walks through a type , looking for uses of a variable of interest and
propagating role information . Because anything used under a phantom position
is at phantom and anything used under a nominal position is at nominal , the
irType function can assume that anything it sees is at representational . ( The
other possibilities are pruned when they 're encountered . )
The rest of the code is just plumbing .
How do we know that this algorithm is correct ? It should meet the following
specification :
Let Z be a role context -- a mapping from variables to roles . The following
rules define the property ( Z |- t : r ) , where t is a type and r is a role :
Z(a ) = r ' r ' < = r
------------------------- RCVar
Z |- a : r
----------
Z |- T : r -- T is a type constructor
Z |- t1 : r
Z |- t2 : N
-------------- RCApp
Z |- t1 t2 : r
forall i<=n . ( r_i is R or N ) implies Z : r_i
roles(T ) = r_1 .. r_n
---------------------------------------------------- RCDApp
Z t_n : R
Z , a : N |- t : r
---------------------- RCAll
Z |- forall a : k.t : r
We also have the following rules :
For all datacon_i in type T , where a_1 .. a_n are universally quantified
and .. b_m are existentially quantified , and the arguments are t_1 .. t_p ,
then if forall j<=p , a_1 : r_1 .. a_n : r_n , : N .. b_m : N |- t_j : R ,
then roles(T ) = r_1 .. r_n
roles(- > ) = R , R
roles(~ # ) = N , N
With -dcore - lint on , the output of this algorithm is checked in checkValidRoles ,
called from checkValidTycon .
Note [ Role - checking data constructor arguments ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T a where
MkT : : Eq b = > F a - > ( a->a ) - > T ( G a )
Then we want to check the roles at which ' a ' is used
in MkT 's type . We want to work on the user - written type ,
so we need to take into account
* the arguments : ( F a ) and ( a->a )
* the context : C a b
* the result type : ( G a ) -- this is in the eq_spec
Note [ Coercions in role inference ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Is ( t | > co1 ) representationally equal to ( t | > co2 ) ? Of course they are ! Changing
the kind of a type is totally irrelevant to the representation of that type . So ,
we want to totally ignore coercions when doing role inference . This includes omitting
any type variables that appear in nominal positions but only within coercions .
************************************************************************
* *
Role inference
* *
************************************************************************
Note [Role inference]
~~~~~~~~~~~~~~~~~~~~~
The role inference algorithm datatype definitions to infer the roles on the
parameters. Although these roles are stored in the tycons, we can perform this
algorithm on the built tycons, as long as we don't peek at an as-yet-unknown
roles field! Ah, the magic of laziness.
First, we choose appropriate initial roles. For families and classes, roles
(including initial roles) are N. For datatypes, we start with the role in the
role annotation (if any), or otherwise use Phantom. This is done in
initialRoleEnv1.
The function irGroup then propagates role information until it reaches a
fixpoint, preferring N over (R or P) and R over P. To aid in this, we have a
monad RoleM, which is a combination reader and state monad. In its state are
the current RoleEnv, which gets updated by role propagation, and an update
bit, which we use to know whether or not we've reached the fixpoint. The
environment of RoleM contains the tycon whose parameters we are inferring, and
a VarEnv from parameters to their positions, so we can update the RoleEnv.
Between tycons, this reader information is missing; it is added by
addRoleInferenceInfo.
There are two kinds of tycons to consider: algebraic ones (excluding classes)
and type synonyms. (Remember, families don't participate -- all their parameters
are N.) An algebraic tycon processes each of its datacons, in turn. Note that
a datacon's universally quantified parameters might be different from the parent
tycon's parameters, so we use the datacon's univ parameters in the mapping from
vars to positions. Note also that we don't want to infer roles for existentials
(they're all at N, too), so we put them in the set of local variables. As an
optimisation, we skip any tycons whose roles are already all Nominal, as there
nowhere else for them to go. For synonyms, we just analyse their right-hand sides.
irType walks through a type, looking for uses of a variable of interest and
propagating role information. Because anything used under a phantom position
is at phantom and anything used under a nominal position is at nominal, the
irType function can assume that anything it sees is at representational. (The
other possibilities are pruned when they're encountered.)
The rest of the code is just plumbing.
How do we know that this algorithm is correct? It should meet the following
specification:
Let Z be a role context -- a mapping from variables to roles. The following
rules define the property (Z |- t : r), where t is a type and r is a role:
Z(a) = r' r' <= r
------------------------- RCVar
Z |- a : r
---------- RCConst
Z |- T : r -- T is a type constructor
Z |- t1 : r
Z |- t2 : N
-------------- RCApp
Z |- t1 t2 : r
forall i<=n. (r_i is R or N) implies Z |- t_i : r_i
roles(T) = r_1 .. r_n
---------------------------------------------------- RCDApp
Z |- T t_1 .. t_n : R
Z, a:N |- t : r
---------------------- RCAll
Z |- forall a:k.t : r
We also have the following rules:
For all datacon_i in type T, where a_1 .. a_n are universally quantified
and b_1 .. b_m are existentially quantified, and the arguments are t_1 .. t_p,
then if forall j<=p, a_1 : r_1 .. a_n : r_n, b_1 : N .. b_m : N |- t_j : R,
then roles(T) = r_1 .. r_n
roles(->) = R, R
roles(~#) = N, N
With -dcore-lint on, the output of this algorithm is checked in checkValidRoles,
called from checkValidTycon.
Note [Role-checking data constructor arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T a where
MkT :: Eq b => F a -> (a->a) -> T (G a)
Then we want to check the roles at which 'a' is used
in MkT's type. We want to work on the user-written type,
so we need to take into account
* the arguments: (F a) and (a->a)
* the context: C a b
* the result type: (G a) -- this is in the eq_spec
Note [Coercions in role inference]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Is (t |> co1) representationally equal to (t |> co2)? Of course they are! Changing
the kind of a type is totally irrelevant to the representation of that type. So,
we want to totally ignore coercions when doing role inference. This includes omitting
any type variables that appear in nominal positions but only within coercions.
-}
type RolesInfo = Name -> [Role]

type RoleEnv = NameEnv [Role]  -- from tycon names to roles

-- This, and any of the functions it calls, must *not* look at the roles
-- field of a tycon we are inferring roles about!
-- See Note [Role inference]
inferRoles :: HscSource -> RoleAnnotEnv -> [TyCon] -> Name -> [Role]
inferRoles hsc_src annots tycons = get_roles
  where
    -- Iterate role propagation to a fixpoint, starting from the
    -- initial (annotated or defaulted) role assignment.
    final_env = irGroup (initialRoleEnv hsc_src annots tycons) tycons

    get_roles name = case lookupNameEnv final_env name of
                       Just roles -> roles
                       Nothing    -> pprPanic "inferRoles" (ppr name)
-- | Build the starting role environment: one entry per tycon in the group,
-- seeded from role annotations or language defaults (see 'initialRoleEnv1').
initialRoleEnv :: HscSource -> RoleAnnotEnv -> [TyCon] -> RoleEnv
initialRoleEnv hsc_src annots tcs
  = extendNameEnvList emptyNameEnv
      [ initialRoleEnv1 hsc_src annots tc | tc <- tcs ]
-- | Compute the initial role assignment for one tycon:
-- families get all-Nominal; algebraic tycons and synonyms take the
-- user annotation where present and a default role otherwise.
initialRoleEnv1 :: HscSource -> RoleAnnotEnv -> TyCon -> (Name, [Role])
initialRoleEnv1 hsc_src annots_env tc
  | isFamilyTyCon tc      = (name, map (const Nominal) bndrs)
  | isAlgTyCon tc         = (name, default_roles)
  | isTypeSynonymTyCon tc = (name, default_roles)
  | otherwise             = pprPanic "initialRoleEnv1" (ppr tc)
  where name         = tyConName tc
        bndrs        = tyConBinders tc
        argflags     = map tyConBinderForAllTyFlag bndrs
        num_exps     = count isVisibleForAllTyFlag argflags

          -- if the number of annotations in the role annotation
          -- is wrong, just ignore it. We check this in the validity check.
        role_annots
          = case lookupRoleAnnot annots_env name of
              Just (L _ (RoleAnnotDecl _ _ annots))
                | annots `lengthIs` num_exps -> map unLoc annots
              _                              -> replicate num_exps Nothing
        default_roles = build_default_roles argflags role_annots

        build_default_roles (argf : argfs) (m_annot : ras)
          | isVisibleForAllTyFlag argf
          = (m_annot `orElse` default_role) : build_default_roles argfs ras
        build_default_roles (_argf : argfs) ras
          = Nominal : build_default_roles argfs ras
        build_default_roles [] [] = []
        build_default_roles _ _ = pprPanic "initialRoleEnv1 (2)"
                                           (vcat [ppr tc, ppr role_annots])

        default_role
          | isClassTyCon tc = Nominal
          -- See Note [Default roles for abstract TyCons in hs-boot/hsig]
          | HsBootFile <- hsc_src
          , isAbstractTyCon tc = Representational
          | HsigFile <- hsc_src
          , isAbstractTyCon tc = Nominal
          | otherwise          = Phantom
Note [ Default roles for abstract TyCons in hs - boot / hsig ]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
What should the default role for an abstract be ?
--
-- Originally, we inferred phantom role for abstract TyCons
-- in hs-boot files, because the type variables were never used.
--
This was silly , because the role of the abstract TyCon
-- was required to match the implementation, and the roles of
data types are almost never phantom . Thus , in ticket # 9204 ,
-- the default was changed so be representational (the most common case). If
-- the implementing data type was actually nominal, you'd get an easy
-- to understand error, and add the role annotation yourself.
--
-- Then Backpack was added, and with it we added role *subtyping*
the matching judgment : if an abstract has a nominal
-- parameter, it's OK to implement it with a representational
-- parameter. But now, the representational default is not a good
-- one, because you should *only* request representational if
-- you're planning to do coercions. To be maximally flexible
-- with what data types you will accept, you want the default
-- for hsig files is nominal. We don't allow role subtyping
-- with hs-boot files (it's good practice to give an exactly
-- accurate role here, because any types that use the abstract
-- type will propagate the role information.)
-- | Propagate role information over the whole group until no role
-- changes in a pass (a fixpoint).
irGroup :: RoleEnv -> [TyCon] -> RoleEnv
irGroup env tcs
  | changed   = irGroup env' tcs   -- something moved; iterate again
  | otherwise = env'
  where
    (env', changed) = runRoleM env (mapM_ irTyCon tcs)
-- | Infer roles contributed by a single tycon: walk each datacon of an
-- algebraic tycon, or the right-hand side of a synonym.
irTyCon :: TyCon -> RoleM ()
irTyCon tc
  | isAlgTyCon tc
  = do { old_roles <- lookupRoles tc
       ; unless (all (== Nominal) old_roles) $  -- also catches data families,
                                                -- which don't want or need role inference
         irTcTyVars tc $
         do { mapM_ (irType emptyVarSet) (tyConStupidTheta tc)  -- See #8958
            ; whenIsJust (tyConClass_maybe tc) irClass
            ; mapM_ irDataCon (visibleDataCons $ algTyConRhs tc) }}

  | Just ty <- synTyConRhs_maybe tc
  = irTcTyVars tc $
    irType emptyVarSet ty

  | otherwise
  = return ()
-- any type variable used in an associated type must be Nominal
irClass :: Class -> RoleM ()
irClass cls = mapM_ mark_assoc_ty (classATs cls)
  where
    class_tv_set = mkVarSet (classTyVars cls)

    -- For one associated tycon, pin every class variable it mentions
    -- to Nominal.
    mark_assoc_ty at_tc
      = mapM_ (updateRole Nominal)
              (filter (`elemVarSet` class_tv_set) (tyConTyVars at_tc))
-- See Note [Role inference]
irDataCon :: DataCon -> RoleM ()
irDataCon datacon
  = setRoleInferenceVars univ_tvs $
    irExTyVars ex_tvs $ \ ex_var_set ->
    do mapM_ (irType ex_var_set) (eqSpecPreds eq_spec ++ theta ++ map scaledThing arg_tys)
       mapM_ (markNominal ex_var_set) (map scaledMult arg_tys)  -- Field multiplicities are nominal (#18799)
      -- See Note [Role-checking data constructor arguments]
  where
    (univ_tvs, ex_tvs, eq_spec, theta, arg_tys, _res_ty)
      = dataConFullSig datacon
-- | Walk a type, propagating role information for every free variable
-- not in the local (existential) set. See Note [Role inference].
irType :: VarSet -> Type -> RoleM ()
irType = go
  where
    go lcls ty | Just ty' <- coreView ty -- #14101
               = go lcls ty'
    go lcls (TyVarTy tv)      = unless (tv `elemVarSet` lcls) $
                                updateRole Representational tv
    go lcls (AppTy t1 t2)     = go lcls t1 >> markNominal lcls t2
    go lcls (TyConApp tc tys) = do { roles <- lookupRolesX tc
                                   ; zipWithM_ (go_app lcls) roles tys }
    go lcls (ForAllTy tvb ty) = do { let tv = binderVar tvb
                                         lcls' = extendVarSet lcls tv
                                   ; markNominal lcls (tyVarKind tv)
                                   ; go lcls' ty }
    go lcls (FunTy _ w arg res) = markNominal lcls w >> go lcls arg >> go lcls res
    go _    (LitTy {})        = return ()
      -- See Note [Coercions in role inference]
    go lcls (CastTy ty _)     = go lcls ty
    go _    (CoercionTy _)    = return ()

    go_app _ Phantom _ = return ()                 -- nothing to do here
    go_app lcls Nominal ty = markNominal lcls ty   -- all vars below here are N
    go_app lcls Representational ty = go lcls ty
-- | Install @tc@'s name and its type variables (in order) in the RoleM
-- environment, marking each variable's kind Nominal, then run @thing@.
irTcTyVars :: TyCon -> RoleM a -> RoleM a
irTcTyVars tc thing
  = setRoleInferenceTc (tyConName tc) $
    foldr add_one_tv thing (tyConTyVars tc)
  where
    add_one_tv tv rest = do { markNominal emptyVarSet (tyVarKind tv)
                            ; addRoleInferenceVar tv rest }
-- | Accumulate the existential variables into a set (marking each kind
-- Nominal along the way) and hand the finished set to the continuation.
irExTyVars :: [TyVar] -> (TyVarSet -> RoleM a) -> RoleM a
irExTyVars orig_tvs thing = loop emptyVarSet orig_tvs
  where
    loop acc []       = thing acc
    loop acc (tv:tvs) = do { markNominal acc (tyVarKind tv)
                           ; loop (extendVarSet acc tv) tvs }
-- | Mark every type variable free in @ty@ (excluding the local set) as
-- Nominal. Coercions are skipped entirely.
markNominal :: TyVarSet      -- local variables
            -> Type -> RoleM ()
markNominal lcls ty = let nvars = fvVarList (FV.delFVs lcls $ get_ty_vars ty) in
                      mapM_ (updateRole Nominal) nvars
  where
     -- get_ty_vars gets all the tyvars (no covars!) from a type *without*
     -- recurring into coercions. Recall: coercions are totally ignored during
     -- role inference. See [Coercions in role inference]
    get_ty_vars :: Type -> FV
    get_ty_vars t | Just t' <- coreView t -- #20999
                  = get_ty_vars t'
    get_ty_vars (TyVarTy tv)      = unitFV tv
    get_ty_vars (AppTy t1 t2)     = get_ty_vars t1 `unionFV` get_ty_vars t2
    get_ty_vars (FunTy _ w t1 t2) = get_ty_vars w `unionFV` get_ty_vars t1 `unionFV` get_ty_vars t2
    get_ty_vars (TyConApp _ tys)  = mapUnionFV get_ty_vars tys
    get_ty_vars (ForAllTy tvb ty) = tyCoFVsBndr tvb (get_ty_vars ty)
    get_ty_vars (LitTy {})        = emptyFV
    get_ty_vars (CastTy ty _)     = get_ty_vars ty
    get_ty_vars (CoercionTy _)    = emptyFV
-- like lookupRoles, but with Nominal tags at the end for oversaturated TyConApps
lookupRolesX :: TyCon -> RoleM [Role]
lookupRolesX tc
  = do { roles <- lookupRoles tc
       ; return $ roles ++ repeat Nominal }
-- gets the roles either from the environment or the tycon
lookupRoles :: TyCon -> RoleM [Role]
lookupRoles tc
  = do { env <- getRoleEnv
         -- In-progress group members live in the env; anything else
         -- falls back to the roles already stored on the tycon.
       ; return (lookupNameEnv env (tyConName tc) `orElse` tyConRoles tc) }
-- tries to update a role; won't ever update a role "downwards"
updateRole :: Role -> TyVar -> RoleM ()
updateRole role tv
  = do { var_ns  <- getVarNs
       ; tc_name <- getTyConName
       ; case lookupVarEnv var_ns tv of
           Just n  -> updateRoleEnv tc_name n role
           Nothing -> pprPanic "updateRole" (ppr tc_name $$ ppr tv $$ ppr var_ns) }
-- the state in the RoleM monad
data RoleInferenceState = RIS { role_env  :: RoleEnv
                              , update    :: Bool }

-- the environment in the RoleM monad
type VarPositions = VarEnv Int

-- See [Role inference]
newtype RoleM a = RM { unRM :: Maybe Name -- of the tycon
                            -> VarPositions
                            -> Int -- size of VarPositions
                            -> RoleInferenceState
                            -> (a, RoleInferenceState) }
  deriving (Functor)
-- The reader components (tycon name, variable positions, count) are
-- threaded through unchanged; the state flows left-to-right.
instance Applicative RoleM where
    pure x = RM $ \_ _ _ state -> (x, state)
    (<*>) = ap

instance Monad RoleM where
  a >>= f  = RM $ \m_info vps nvps state ->
                  let (a', state') = unRM a m_info vps nvps state in
                  unRM (f a') m_info vps nvps state'
-- | Run one pass of role inference over the given environment, returning
-- the updated environment and whether anything changed in this pass.
runRoleM :: RoleEnv -> RoleM () -> (RoleEnv, Bool)
runRoleM env thing = (final_env, did_update)
  where
    init_state = RIS { role_env = env, update = False }
    RIS { role_env = final_env, update = did_update }
      = snd (unRM thing Nothing emptyVarEnv 0 init_state)
-- | Run @thing@ with @name@ installed as the tycon under inference.
-- The assertions check we are not nested inside another tycon's
-- inference (no name set, no variables registered yet).
setRoleInferenceTc :: Name -> RoleM a -> RoleM a
setRoleInferenceTc name thing = RM $ \m_name vps nvps state ->
                                assert (isNothing m_name) $
                                assert (isEmptyVarEnv vps) $
                                assert (nvps == 0) $
                                unRM thing (Just name) vps nvps state
-- | Register one more type variable at the next position index for the
-- tycon currently under inference, then run @thing@.
addRoleInferenceVar :: TyVar -> RoleM a -> RoleM a
addRoleInferenceVar tv thing
  = RM $ \m_name vps nvps state ->
    assert (isJust m_name) $  -- must be inside setRoleInferenceTc
    unRM thing m_name (extendVarEnv vps tv nvps) (nvps+1) state
-- | Replace the variable/position mapping wholesale (used for a datacon's
-- universal variables). The count is deliberately a panic: nothing may
-- extend the mapping after this point.
setRoleInferenceVars :: [TyVar] -> RoleM a -> RoleM a
setRoleInferenceVars tvs thing
  = RM $ \m_name _vps _nvps state ->
    assert (isJust m_name) $  -- must be inside setRoleInferenceTc
    unRM thing m_name (mkVarEnv (zip tvs [0..])) (panic "setRoleInferenceVars")
         state
-- | Read the current role environment.
getRoleEnv :: RoleM RoleEnv
getRoleEnv = RM $ \_ _ _ state -> (role_env state, state)

-- | Read the variable/position mapping for the current tycon.
getVarNs :: RoleM VarPositions
getVarNs = RM $ \_ vps _ state -> (vps, state)

-- | Read the name of the tycon under inference; panics outside
-- 'setRoleInferenceTc'.
getTyConName :: RoleM Name
getTyConName = RM $ \m_name _ _ state ->
               case m_name of
                 Just name -> (name, state)
                 Nothing   -> panic "getTyConName"
-- | Lower the role of parameter @n@ of tycon @name@ to @role@, but only
-- if @role@ is strictly below the stored role ('ltRole'); in that case
-- also set the update flag so 'irGroup' iterates again. Otherwise no-op.
updateRoleEnv :: Name -> Int -> Role -> RoleM ()
updateRoleEnv name n role
  = RM $ \_ _ _ state@(RIS { role_env = role_env }) -> ((),
         case lookupNameEnv role_env name of
           Nothing -> pprPanic "updateRoleEnv" (ppr name)
           -- partial pattern is safe: n is a valid parameter index
           Just roles -> let (before, old_role : after) = splitAt n roles in
                         if role `ltRole` old_role
                         then let roles' = before ++ role : after
                                  role_env' = extendNameEnv role_env name roles' in
                              RIS { role_env = role_env', update = True }
                         else state )
{- *********************************************************************
* *
Building implicits
* *
********************************************************************* -}
addTyConsToGblEnv :: [TyCon] -> TcM (TcGblEnv, ThBindEnv)
-- Given a [TyCon], add to the TcGblEnv
--   * extend the TypeEnv with the tycons
--   * extend the TypeEnv with their implicitTyThings
--   * extend the TypeEnv with any default method Ids
--   * add bindings for record selectors
-- Return separately the TH levels of these bindings,
-- to be added to a LclEnv later.
addTyConsToGblEnv tyclss
  = tcExtendTyConEnv tyclss                    $
    tcExtendGlobalEnvImplicit implicit_things  $
    tcExtendGlobalValEnv def_meth_ids          $
    do { traceTc "tcAddTyCons" $ vcat
            [ text "tycons" <+> ppr tyclss
            , text "implicits" <+> ppr implicit_things ]
       ; gbl_env <- tcRecSelBinds (mkRecSelBinds tyclss)
       ; th_bndrs <- tcTyThBinders implicit_things
       ; return (gbl_env, th_bndrs)
       }
  where
    implicit_things = concatMap implicitTyConThings tyclss
    def_meth_ids    = mkDefaultMethodIds tyclss
mkDefaultMethodIds :: [TyCon] -> [Id]
-- We want to put the default-method Ids (both vanilla and generic)
-- into the type environment so that they are found when we typecheck
-- the filled-in default methods of each instance declaration
-- See Note [Default method Ids and Template Haskell]
mkDefaultMethodIds tycons
  = [ mkExportedVanillaId dm_name (mkDefaultMethodType cls sel_id dm_spec)
    | tc <- tycons
    , Just cls <- [tyConClass_maybe tc]
    , (sel_id, Just (dm_name, dm_spec)) <- classOpItems cls ]
mkDefaultMethodType :: Class -> Id -> DefMethSpec Type -> Type
-- Returns the top-level type of the default method
mkDefaultMethodType _ sel_id VanillaDM        = idType sel_id
mkDefaultMethodType cls _   (GenericDM dm_ty) = mkSigmaTy tv_bndrs [pred] dm_ty
  where
    pred      = mkClassPred cls (mkTyVarTys (binderVars cls_bndrs))
    cls_bndrs = tyConBinders (classTyCon cls)
    tv_bndrs  = tyVarSpecToBinders $ tyConInvisTVBinders cls_bndrs
    -- NB: the Class doesn't have TyConBinders; we reach into its
    --     TyCon to get those.  We /do/ need the TyConBinders because
    --     we need the correct visibility: these default methods are
    --     used in code generated by the fill-in for missing
    --     methods in instances (GHC.Tc.TyCl.Instance.mkDefMethBind), and
    --     then typechecked.  So we need the right visibility info
    --     (#13998)
{-
************************************************************************
* *
Building record selectors
* *
************************************************************************
-}
Note [ Default method Ids and Template Haskell ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this ( # 4169 ):
class Numeric a where
fromIntegerNum : : a
fromIntegerNum = ...
ast : : Q [ Dec ]
ast = [ d| instance Numeric Int | ]
When we typecheck ' ast ' we have done the first pass over the class decl
( in tcTyClDecls ) , but we have not yet typechecked the default - method
declarations ( because they can mention value declarations ) . So we
must bring the default method Ids into scope first ( so they can be seen
when typechecking the [ d| .. | ] quote , and them later .
Note [Default method Ids and Template Haskell]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this (#4169):
class Numeric a where
fromIntegerNum :: a
fromIntegerNum = ...
ast :: Q [Dec]
ast = [d| instance Numeric Int |]
When we typecheck 'ast' we have done the first pass over the class decl
(in tcTyClDecls), but we have not yet typechecked the default-method
declarations (because they can mention value declarations). So we
must bring the default method Ids into scope first (so they can be seen
when typechecking the [d| .. |] quote, and typecheck them later.
-}
{-
************************************************************************
* *
Building record selectors
* *
************************************************************************
-}
-- | Typecheck the given record-selector bindings (built un-typechecked by
-- 'mkRecSelBinds') and add the resulting binds to the global environment.
-- Warnings are discarded: these bindings are compiler-generated.
tcRecSelBinds :: [(Id, LHsBind GhcRn)] -> TcM TcGblEnv
tcRecSelBinds sel_bind_prs
  = tcExtendGlobalValEnv [sel_id | (L _ (XSig (IdSig sel_id))) <- sigs] $
    do { (rec_sel_binds, tcg_env) <- discardWarnings $
           -- See Note [Impredicative record selectors]
           setXOptM LangExt.ImpredicativeTypes $
           tcValBinds TopLevel binds sigs getGblEnv
       ; return (tcg_env `addTypecheckedBinds` map snd rec_sel_binds) }
  where
    -- Each selector gets an IdSig so it is checked against its
    -- precomputed type rather than inferred.
    sigs = [ L (noAnnSrcSpan loc) (XSig $ IdSig sel_id)
           | (sel_id, _) <- sel_bind_prs
           , let loc = getSrcSpan sel_id ]
    binds = [(NonRecursive, unitBag bind) | (_, bind) <- sel_bind_prs]
mkRecSelBinds :: [TyCon] -> [(Id, LHsBind GhcRn)]
-- NB We produce *un-typechecked* bindings, rather like 'deriving'
--    This makes life easier, because the later type checking will add
--    all necessary type abstractions and applications
mkRecSelBinds tycons
  = map mkRecSelBind [ (tc,fld) | tc <- tycons
                                , fld <- tyConFieldLabels tc ]
-- | Build the (un-typechecked) selector binding for one field of a
-- data-declaration tycon.
mkRecSelBind :: (TyCon, FieldLabel) -> (Id, LHsBind GhcRn)
mkRecSelBind (tycon, fl)
  = mkOneRecordSelector data_cons (RecSelData tycon) fl
      FieldSelectors -- See Note [NoFieldSelectors and naughty record selectors]
  where
    data_cons = map RealDataCon (tyConDataCons tycon)
mkOneRecordSelector :: [ConLike] -> RecSelParent -> FieldLabel -> FieldSelectors
                    -> (Id, LHsBind GhcRn)
mkOneRecordSelector all_cons idDetails fl has_sel
  = (sel_id, L (noAnnSrcSpan loc) sel_bind)
  where
    loc      = getSrcSpan sel_name
    loc'     = noAnnSrcSpan loc
    locn     = noAnnSrcSpan loc
    locc     = noAnnSrcSpan loc
    lbl      = flLabel fl
    sel_name = flSelector fl
    sel_id   = mkExportedLocalId rec_details sel_name sel_ty
    rec_details = RecSelId { sel_tycon = idDetails, sel_naughty = is_naughty }

    -- Find a representative constructor, con1
    cons_w_field = conLikesWithFields all_cons [lbl]
    con1 = assert (not (null cons_w_field)) $ head cons_w_field

    -- Selector type; Note [Polymorphic selectors]
    (univ_tvs, _, _, _, req_theta, _, data_ty) = conLikeFullSig con1

    field_ty     = conLikeFieldType con1 lbl
    field_ty_tvs = tyCoVarsOfType field_ty
    data_ty_tvs  = tyCoVarsOfType data_ty
    sel_tvs      = field_ty_tvs `unionVarSet` data_ty_tvs
    sel_tvbs     = filter (\tvb -> binderVar tvb `elemVarSet` sel_tvs) $
                   conLikeUserTyVarBinders con1

    -- is_naughty: see Note [Naughty record selectors]
    is_naughty = not ok_scoping || no_selectors
    ok_scoping = case con1 of
                   RealDataCon {} -> field_ty_tvs `subVarSet` data_ty_tvs
                   PatSynCon {}   -> field_ty_tvs `subVarSet` mkVarSet univ_tvs
       -- In the PatSynCon case, the selector type is (data_ty -> field_ty), but
       -- fvs(data_ty) are all universals (see Note [Pattern synonym result type] in
       -- GHC.Core.PatSyn, so no need to check them.

    no_selectors = has_sel == NoFieldSelectors  -- No field selectors => all are naughty
                                                -- thus suppressing making a binding
                                                -- A slight hack!

    sel_ty | is_naughty = unitTy  -- See Note [Naughty record selectors]
           | otherwise  = mkForAllTys (tyVarSpecToBinders sel_tvbs)  $
                          -- Urgh! See Note [The stupid context] in GHC.Core.DataCon
                          mkPhiTy (conLikeStupidTheta con1)          $
                          -- req_theta is empty for normal DataCon
                          mkPhiTy req_theta                          $
                          mkVisFunTyMany data_ty                     $
                          -- Record selectors are always typed with Many. We
                          -- could improve on it in the case where all the
                          -- fields in all the constructor have multiplicity Many.
                          field_ty

    -- make the binding: sel (C2 { fld = x }) = x
    --                   sel (C7 { fld = x }) = x
    -- where cons_w_field = [C2,C7]
    sel_bind = mkTopFunBind Generated sel_lname alts
      where
        alts | is_naughty = [mkSimpleMatch (mkPrefixFunRhs sel_lname)
                                           [] unit_rhs]
             | otherwise  = map mk_match cons_w_field ++ deflt

    mk_match con = mkSimpleMatch (mkPrefixFunRhs sel_lname)
                                 [L loc' (mk_sel_pat con)]
                                 (L loc' (HsVar noExtField (L locn field_var)))
    mk_sel_pat con = ConPat NoExtField (L locn (getName con)) (RecCon rec_fields)
    rec_fields = HsRecFields { rec_flds = [rec_field], rec_dotdot = Nothing }
    rec_field  = noLocA (HsFieldBind
                          { hfbAnn = noAnn
                          , hfbLHS
                             = L locc (FieldOcc sel_name
                                        (L locn $ mkVarUnqual (field_label lbl)))
                          , hfbRHS
                             = L loc' (VarPat noExtField (L locn field_var))
                          , hfbPun = False })
    sel_lname = L locn sel_name
    field_var = mkInternalName (mkBuiltinUnique 1) (getOccName sel_name) loc

    -- Add catch-all default case unless the case is exhaustive
    -- We do this explicitly so that we get a nice error message that
    -- mentions this particular record selector
    deflt | all dealt_with all_cons = []
          | otherwise = [mkSimpleMatch CaseAlt
                            [L loc' (WildPat noExtField)]
                            (mkHsApp (L loc' (HsVar noExtField
                                                (L locn (getName rEC_SEL_ERROR_ID))))
                                     (L loc' (HsLit noComments msg_lit)))]

    -- Do not add a default case unless there are unmatched
    -- constructors. We must take account of GADTs, else we
    -- get overlap warning messages from the pattern-match checker
    -- NB: we need to pass type args for the *representation* TyCon
    --     to dataConCannotMatch, hence the calculation of inst_tys
    --     This matters in data families
    --        data instance T Int a where
    --           A :: { fld :: Int } -> T Int Bool
    --           B :: { fld :: Int } -> T Int Char
    dealt_with :: ConLike -> Bool
    dealt_with (PatSynCon _) = False -- We can't predict overlap
    dealt_with con@(RealDataCon dc)
      = con `elem` cons_w_field || dataConCannotMatch inst_tys dc
      where
        inst_tys = dataConResRepTyArgs dc

    unit_rhs = mkLHsTupleExpr [] noExtField
    msg_lit = HsStringPrim NoSourceText (bytesFS (field_label lbl))
Note [ Polymorphic selectors ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We take care to build the type of a polymorphic selector in the right
order , so that visible type application works according to the specification in
the GHC User 's Guide ( see the " Field selectors and TypeApplications " section ) .
We wo n't bother rehashing the entire specification in this Note , but the tricky
part is dealing with GADT constructor fields . Here is an appropriately tricky
example to illustrate the challenges :
{ - # LANGUAGE PolyKinds #
Note [Polymorphic selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We take care to build the type of a polymorphic selector in the right
order, so that visible type application works according to the specification in
the GHC User's Guide (see the "Field selectors and TypeApplications" section).
We won't bother rehashing the entire specification in this Note, but the tricky
part is dealing with GADT constructor fields. Here is an appropriately tricky
example to illustrate the challenges:
{-# LANGUAGE PolyKinds #-}
data T a b where
MkT :: forall b a x.
{ field1 :: forall c. (Num a, Show c) => (Either a c, Proxy b)
, field2 :: x
}
-> T a b
Our goal is to obtain the following type for `field1`:
field1 :: forall {k} (b :: k) a.
T a b -> forall c. (Num a, Show c) => (Either a c, Proxy b)
(`field2` is naughty, per Note [Naughty record selectors], so we cannot turn
it into a top-level field selector.)
Some potential gotchas, inspired by #18023:
1. Since the user wrote `forall b a x.` in the type of `MkT`, we want the `b`
to appear before the `a` when quantified in the type of `field1`.
2. On the other hand, we *don't* want to quantify `x` in the type of `field1`.
This is because `x` does not appear in the GADT return type, so it is not
needed in the selector type.
3. Because of PolyKinds, the kind of `b` is generalized to `k`. Moreover, since
this `k` is not written in the source code, it is inferred (i.e., not
available for explicit type applications) and thus written as {k} in the type
of `field1`.
In order to address these gotchas, we start by looking at the
conLikeUserTyVarBinders, which gives the order and specificity of each binder.
This effectively solves (1) and (3). To solve (2), we filter the binders to
leave only those that are needed for the selector type.
Note [Naughty record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A "naughty" field is one for which we can't define a record
selector, because an existential type variable would escape. For example:
data T = forall a. MkT { x,y::a }
We obviously can't define
x (MkT v _) = v
Nevertheless we *do* put a RecSelId into the type environment
so that if the user tries to use 'x' as a selector we can bleat
helpfully, rather than saying unhelpfully that 'x' is not in scope.
Hence the sel_naughty flag, to identify record selectors that don't really exist.
For naughty selectors we make a dummy binding
sel = ()
so that the later type-check will add them to the environment, and they'll be
exported. The function is never called, because the typechecker spots the
sel_naughty field.
To determine naughtiness we distingish two cases:
* For RealDataCons, a field is "naughty" if its type mentions a
type variable that isn't in the (original, user-written) result type
of the constructor. Note that this *allows* GADT record selectors
(Note [GADT record selectors]) whose types may look like sel :: T [a] -> a
* For a PatSynCon, a field is "naughty" if its type mentions a type variable
that isn't in the universal type variables.
This is a bit subtle. Consider test patsyn/should_run/records_run:
pattern ReadP :: forall a. ReadP a => a -> String
pattern ReadP {fld} <- (read -> readp)
The selector is defined like this:
$selReadPfld :: forall a. ReadP a => String -> a
$selReadPfld @a (d::ReadP a) s = readp @a d s
Perfectly fine! The (ReadP a) constraint lets us contruct a value of type
'a' from a bare String.
Another curious case (#23038):
pattern N :: forall a. () => forall. () => a -> Any
pattern N { fld } <- ( unsafeCoerce -> fld1 ) where N = unsafeCoerce
The selector looks like this
$selNfld :: forall a. Any -> a
$selNfld @a x = unsafeCoerce @Any @a x
Pretty strange (but used in the `cleff` package).
TL;DR for pattern synonyms, the selector is OK if the field type mentions only
the universal type variables of the pattern synonym.
Note [NoFieldSelectors and naughty record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Under NoFieldSelectors (see Note [NoFieldSelectors] in GHC.Rename.Env), record
selectors will not be in scope in the renamer. However, for normal datatype
declarations we still generate the underlying selector functions, so they can be
used for constructing the dictionaries for HasField constraints (as described by
Note [HasField instances] in GHC.Tc.Instance.Class). Hence the call to
mkOneRecordSelector in mkRecSelBind always uses FieldSelectors.
However, record pattern synonyms are not used with HasField, so when
NoFieldSelectors is used we do not need to generate selector functions. Thus
mkPatSynRecSelBinds passes the current state of the FieldSelectors extension to
mkOneRecordSelector, and in the NoFieldSelectors case it will treat them as
"naughty" fields (see Note [Naughty record selectors]).
Why generate a naughty binding, rather than no binding at all? Because when
type-checking a record update, we need to look up Ids for the fields. In
particular, disambiguateRecordBinds calls lookupParents which needs to look up
the RecSelIds to determine the sel_tycon.
Note [GADT record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For GADTs, we require that all constructors with a common field 'f' have the same
result type (modulo alpha conversion). [Checked in GHC.Tc.TyCl.checkValidTyCon]
E.g.
data T where
T1 { f :: Maybe a } :: T [a]
T2 { f :: Maybe a, y :: b } :: T [a]
T3 :: T Int
and now the selector takes that result type as its argument:
f :: forall a. T [a] -> Maybe a
Details: the "real" types of T1,T2 are:
T1 :: forall r a. (r~[a]) => a -> T r
T2 :: forall r a b. (r~[a]) => a -> b -> T r
So the selector loooks like this:
f :: forall a. T [a] -> Maybe a
f (a:*) (t:T [a])
= case t of
T1 c (g:[a]~[c]) (v:Maybe c) -> v `cast` Maybe (right (sym g))
T2 c d (g:[a]~[c]) (v:Maybe c) (w:d) -> v `cast` Maybe (right (sym g))
T3 -> error "T3 does not have field f"
Note the forall'd tyvars of the selector are just the free tyvars
of the result type; there may be other tyvars in the constructor's
type (e.g. 'b' in T2).
Note the need for casts in the result!
Note [Selector running example]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's OK to combine GADTs and type families. Here's a running example:
data instance T [a] where
T1 { fld :: b } :: T [Maybe b]
The representation type looks like this
data :R7T a where
T1 { fld :: b } :: :R7T (Maybe b)
and there's coercion from the family type to the representation type
:CoR7T a :: T [a] ~ :R7T a
The selector we want for fld looks like this:
fld :: forall b. T [Maybe b] -> b
fld = /\b. \(d::T [Maybe b]).
case d `cast` :CoR7T (Maybe b) of
T1 (x::b) -> x
The scrutinee of the case has type :R7T (Maybe b), which can be
gotten by applying the eq_spec to the univ_tvs of the data con.
Note [Impredicative record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are situations where generating code for record selectors requires the
use of ImpredicativeTypes. Here is one example (adapted from #18005):
type S = (forall b. b -> b) -> Int
data T = MkT {unT :: S}
| Dummy
We want to generate HsBinds for unT that look something like this:
unT :: S
unT (MkT x) = x
unT _ = recSelError "unT"#
Note that the type of recSelError is `forall r (a :: TYPE r). Addr# -> a`.
Therefore, when used in the right-hand side of `unT`, GHC attempts to
instantiate `a` with `(forall b. b -> b) -> Int`, which is impredicative.
To make sure that GHC is OK with this, we enable ImpredicativeTypes internally
when typechecking these HsBinds so that the user does not have to.
-}
| null | https://raw.githubusercontent.com/ghc/ghc/86f240ca956f633c20a61872ec44de9e21266624/compiler/GHC/Tc/TyCl/Utils.hs | haskell | | Analysis functions over data types. Specifically, detecting recursive types.
This stuff is only used for source-code decls; it's recorded in interface
files for imported data types.
* Implicits
* Record selectors
************************************************************************
* *
Cycles in type synonym declarations
* *
************************************************************************
Does not look through type synonyms at all.
Returns a list of synonym tycons in nondeterministic order.
Keep this synchronized with 'expandTypeSynonyms'
Note [TyCon cycles through coercions?!]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Although, in principle, it's possible for a type synonym loop
to go through a coercion (since a coercion can refer to
a TyCon or Type), it doesn't seem possible to actually construct
a Haskell program which tickles this case. Here is an example
program which causes a coercion:

    type family Star where
        Star = Type

    data T :: Star -> Type
    data S :: forall (a :: Type). T a -> Type

Here, the application 'T a' must first coerce a :: Type to a :: Star,
witnessed by the type family. But if we now try to make Type refer
to a type synonym which in turn refers to Star, we'll run into
trouble: we're trying to define and use the type constructor
in the same recursive group. Possibly this restriction will be
lifted in the future but for now, this code is "just for completeness
sake".
| A monad for type synonym cycle checking, which keeps
a failure message reporting that a cycle was found.
since we only check membership, but never extract the
elements.
| Test if a 'Name' is acyclic, short-circuiting if we've
seen it already.
short circuit
Takes the 'Unit' of the home package (as we can avoid
can give better error messages.
Try our best to print the LTyClDecl for locally defined things
Short circuit if we've already seen this Name and concluded
it was acyclic.
Expand type synonyms, complaining if you find the same
Optimization: we don't allow cycles through external packages,
so once we find a non-local name we are guaranteed to not
have a cycle.
but for now it's fine.
> I d ( C I d )
> C I d
> ....
> Id (C Id)
> C Id
> ....
Nothing <=> ok
Just err <=> possible cycle error
Expand superclasses starting with (C a b), complaining
or predicate headed by a type variable
with more error message generation clobber
Nothing <=> ok
Just (True, err) <=> definite cycle
Just (False, err) <=> possible cycle
Equality predicate, for example
all their parameters
a mapping from variables to roles . The following
----------------------- RCVar
--------
T is a type constructor
------------ RCApp
-------------------------------------------------- RCDApp
-------------------- RCAll
this is in the eq_spec
all their parameters
a mapping from variables to roles. The following
----------------------- RCVar
-------- RCConst
T is a type constructor
------------ RCApp
-------------------------------------------------- RCDApp
-------------------- RCAll
this is in the eq_spec
from tycon names to roles
This, and any of the functions it calls, must *not* look at the roles
field of a tycon we are inferring roles about!
See Note [Role inference]
is wrong, just ignore it. We check this in the validity check.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Originally, we inferred phantom role for abstract TyCons
in hs-boot files, because the type variables were never used.
was required to match the implementation, and the roles of
the default was changed so be representational (the most common case). If
the implementing data type was actually nominal, you'd get an easy
to understand error, and add the role annotation yourself.
Then Backpack was added, and with it we added role *subtyping*
parameter, it's OK to implement it with a representational
parameter. But now, the representational default is not a good
one, because you should *only* request representational if
you're planning to do coercions. To be maximally flexible
with what data types you will accept, you want the default
for hsig files is nominal. We don't allow role subtyping
with hs-boot files (it's good practice to give an exactly
accurate role here, because any types that use the abstract
type will propagate the role information.)
also catches data families,
which don't want or need role inference
any type variable used in an associated type must be Nominal
See Note [Role inference]
See Note [Role-checking data constructor arguments]
See Note [Coercions in role inference]
nothing to do here
all vars below here are N
local variables
get_ty_vars gets all the tyvars (no covars!) from a type *without*
recurring into coercions. Recall: coercions are totally ignored during
role inference. See [Coercions in role inference]
gets the roles either from the environment or the tycon
tries to update a role; won't ever update a role "downwards"
See [Role inference]
of the tycon
*********************************************************************
* *
Building implicits
* *
*********************************************************************
* extend the TypeEnv with the tycons
* extend the TypeEnv with their implicitTyThings
* extend the TypeEnv with any default method Ids
* add bindings for record selectors
the filled-in default methods of each instance declaration
See Note [Default method Ids and Template Haskell]
Returns the top-level type of the default method
NB: the Class doesn't have TyConBinders; we reach into its
TyCon to get those. We /do/ need the TyConBinders because
we need the correct visibility: these default methods are
used in code generated by the fill-in for missing
then typechecked. So we need the right visibility info
************************************************************************
* *
Building record selectors
* *
************************************************************************
************************************************************************
* *
Building record selectors
* *
************************************************************************
See Note [Impredicative record selectors]
This makes life easier, because the later type checking will add
all necessary type abstractions and applications
See Note [NoFieldSelectors and naughty record selectors]
Find a representative constructor, con1
Selector type; Note [Polymorphic selectors]
is_naughty: see Note [Naughty record selectors]
fvs(data_ty) are all universals (see Note [Pattern synonym result type] in
GHC.Core.PatSyn, so no need to check them.
No field selectors => all are naughty
thus suppressing making a binding
A slight hack!
See Note [Naughty record selectors]
Record selectors are always typed with Many. We
could improve on it in the case where all the
fields in all the constructor have multiplicity Many.
make the binding: sel (C2 { fld = x }) = x
where cons_w_field = [C2,C7]
Add catch-all default case unless the case is exhaustive
We do this explicitly so that we get a nice error message that
mentions this particular record selector
Do not add a default case unless there are unmatched
get overlap warning messages from the pattern-match checker
to dataConCannotMatch, hence the calculation of inst_tys
This matters in data families
data instance T Int a where
A :: { fld :: Int } -> T Int Bool
B :: { fld :: Int } -> T Int Char
We can't predict overlap
# LANGUAGE PolyKinds # |
# LANGUAGE DeriveFunctor #
# LANGUAGE TypeFamilies #
# OPTIONS_GHC -Wno - incomplete - uni - patterns #
( c ) The University of Glasgow 2006
( c ) The GRASP / AQUA Project , Glasgow University , 1992 - 1999
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1999
-}
module GHC.Tc.TyCl.Utils(
RolesInfo,
inferRoles,
checkSynCycles,
checkClassCycles,
addTyConsToGblEnv, mkDefaultMethodType,
tcRecSelBinds, mkRecSelBinds, mkOneRecordSelector
) where
import GHC.Prelude
import GHC.Tc.Errors.Types
import GHC.Tc.Utils.Monad
import GHC.Tc.Utils.Env
import GHC.Tc.Gen.Bind( tcValBinds )
import GHC.Tc.Utils.TcType
import GHC.Builtin.Types( unitTy )
import GHC.Builtin.Uniques ( mkBuiltinUnique )
import GHC.Hs
import GHC.Core.TyCo.Rep( Type(..), Coercion(..), MCoercion(..), UnivCoProvenance(..) )
import GHC.Core.Multiplicity
import GHC.Core.Predicate
import GHC.Core.Make( rEC_SEL_ERROR_ID )
import GHC.Core.Class
import GHC.Core.Type
import GHC.Core.TyCon
import GHC.Core.ConLike
import GHC.Core.DataCon
import GHC.Core.TyCon.Set
import GHC.Core.Coercion ( ltRole )
import GHC.Utils.Outputable
import GHC.Utils.Panic
import GHC.Utils.Panic.Plain
import GHC.Utils.Misc
import GHC.Utils.FV as FV
import GHC.Data.Maybe
import GHC.Data.Bag
import GHC.Data.FastString
import GHC.Unit.Module
import GHC.Types.Basic
import GHC.Types.Error
import GHC.Types.FieldLabel
import GHC.Types.SrcLoc
import GHC.Types.SourceFile
import GHC.Types.SourceText
import GHC.Types.Name
import GHC.Types.Name.Env
import GHC.Types.Name.Reader ( mkVarUnqual )
import GHC.Types.Id
import GHC.Types.Id.Info
import GHC.Types.Var.Env
import GHC.Types.Var.Set
import GHC.Types.Unique.Set
import GHC.Types.TyThing
import qualified GHC.LanguageExtensions as LangExt
import Language.Haskell.Syntax.Basic (FieldLabelString(..))
import Control.Monad
-- | Find the type synonym 'TyCon's mentioned anywhere in a type,
-- returned in nondeterministic order.  Does not look through the
-- synonyms themselves at all.
-- Keep this synchronized with 'expandTypeSynonyms'.
synonymTyConsOfType :: Type -> [TyCon]
synonymTyConsOfType ty
  = nonDetNameEnvElts (go ty)
  where
    -- The NameEnv does duplicate elimination
    go (TyConApp tc tys)  = go_tc tc `plusNameEnv` go_s tys
    go (LitTy _)          = emptyNameEnv
    go (TyVarTy _)        = emptyNameEnv
    go (AppTy a b)        = go a `plusNameEnv` go b
    go (FunTy _ w a b)    = go w `plusNameEnv` go a `plusNameEnv` go b
    go (ForAllTy _ ty')   = go ty'
    go (CastTy ty' co)    = go ty' `plusNameEnv` go_co co
    go (CoercionTy co)    = go_co co
    -- See Note [TyCon cycles through coercions?!] for why we must
    -- walk coercions here at all.

    go_mco MRefl    = emptyNameEnv
    go_mco (MCo co) = go_co co

    go_co (Refl ty')           = go ty'
    go_co (GRefl _ ty' mco)    = go ty' `plusNameEnv` go_mco mco
    go_co (TyConAppCo _ tc cs) = go_tc tc `plusNameEnv` go_co_s cs
    go_co (AppCo co co')       = go_co co `plusNameEnv` go_co co'
    go_co (ForAllCo _ co co')  = go_co co `plusNameEnv` go_co co'
    go_co (FunCo { fco_mult = m, fco_arg = a, fco_res = r })
      = go_co m `plusNameEnv` go_co a `plusNameEnv` go_co r
    go_co (CoVarCo _)          = emptyNameEnv
    go_co (HoleCo {})          = emptyNameEnv
    go_co (AxiomInstCo _ _ cs) = go_co_s cs
    go_co (UnivCo p _ t1 t2)   = go_prov p `plusNameEnv` go t1 `plusNameEnv` go t2
    go_co (SymCo co)           = go_co co
    go_co (TransCo co co')     = go_co co `plusNameEnv` go_co co'
    go_co (SelCo _ co)         = go_co co
    go_co (LRCo _ co)          = go_co co
    go_co (InstCo co co')      = go_co co `plusNameEnv` go_co co'
    go_co (KindCo co)          = go_co co
    go_co (SubCo co)           = go_co co
    go_co (AxiomRuleCo _ cs)   = go_co_s cs

    go_prov (PhantomProv co)    = go_co co
    go_prov (ProofIrrelProv co) = go_co co
    go_prov (PluginProv _)      = emptyNameEnv
    go_prov (CorePrepProv _)    = emptyNameEnv

    -- Only synonyms are collected; other TyCons are ignored
    go_tc tc | isTypeSynonymTyCon tc = unitNameEnv (tyConName tc) tc
             | otherwise             = emptyNameEnv
    go_s tys    = foldr (plusNameEnv . go)    emptyNameEnv tys
    go_co_s cos = foldr (plusNameEnv . go_co) emptyNameEnv cos
-- | A monad for type synonym cycle checking, which keeps track
-- of the TyCons which are known to be acyclic, or retains a
-- failure message reporting that a cycle was found.
newtype SynCycleM a = SynCycleM {
    runSynCycleM :: SynCycleState -> Either (SrcSpan, SDoc) (a, SynCycleState) }
  deriving (Functor)

-- TODO: TyConSet is implemented as IntMap over uniques.
-- But we could get away with something based on membership only,
-- since we only check membership, but never extract the elements.
type SynCycleState = TyConSet

instance Applicative SynCycleM where
  pure x = SynCycleM $ \state -> Right (x, state)
  (<*>) = ap

instance Monad SynCycleM where
  m >>= f = SynCycleM $ \state ->
    case runSynCycleM m state of
      Right (x, state') -> runSynCycleM (f x) state'
      Left err          -> Left err

-- | Fail the cycle check, reporting the given error at the given location.
failSynCycleM :: SrcSpan -> SDoc -> SynCycleM ()
failSynCycleM loc err = SynCycleM $ \_ -> Left (loc, err)

-- | Test if a 'TyCon' is acyclic, short-circuiting if we've
-- seen it already.
checkTyConIsAcyclic :: TyCon -> SynCycleM () -> SynCycleM ()
checkTyConIsAcyclic tc m = SynCycleM $ \s ->
  if tc `elemTyConSet` s
    then Right ((), s) -- short circuit
    else case runSynCycleM m s of
           Right ((), s') -> Right ((), extendTyConSet s' tc)
           Left err       -> Left err
-- | Checks if any of the passed-in 'TyCon's have cycles.
-- Takes the 'Unit' of the home package (as we can avoid checking
-- those TyCons: cycles never go through foreign packages) and the
-- corresponding @LTyClDecl GhcRn@ for each 'TyCon', so we can give
-- better error messages.
checkSynCycles :: Unit -> [TyCon] -> [LTyClDecl GhcRn] -> TcM ()
checkSynCycles this_uid tcs tyclds =
  case runSynCycleM (mapM_ (go emptyTyConSet []) tcs) emptyTyConSet of
    Left (loc, err) -> setSrcSpan loc $ failWithTc (mkTcRnUnknownMessage $ mkPlainError noHints err)
    Right _         -> return ()
  where
    lcl_decls = mkNameEnv (zip (map tyConName tcs) tyclds)

    go :: TyConSet -> [TyCon] -> TyCon -> SynCycleM ()
    go so_far seen_tcs tc =
      -- Short circuit if we've already seen this Name and concluded
      -- it was acyclic.
      checkTyConIsAcyclic tc $ go' so_far seen_tcs tc

    -- Expand type synonyms, complaining if you find the same
    -- type synonym a second time.
    go' :: TyConSet -> [TyCon] -> TyCon -> SynCycleM ()
    go' so_far seen_tcs tc
      | tc `elemTyConSet` so_far
      = failSynCycleM (getSrcSpan (head seen_tcs)) $
          sep [ text "Cycle in type synonym declarations:"
              , nest 2 (vcat (map ppr_decl seen_tcs)) ]
      -- Optimization: we don't allow cycles through external packages,
      -- so once we find a non-local name we are guaranteed to not
      -- have a cycle.  (This won't hold once we get recursive packages
      -- with Backpack, but for now it's fine.)
      | not (isHoleModule mod ||
             moduleUnit mod == this_uid ||
             isInteractiveModule mod)
      = return ()
      | Just ty <- synTyConRhs_maybe tc
      = go_ty (extendTyConSet so_far tc) (tc:seen_tcs) ty
      | otherwise = return ()
      where
        n   = tyConName tc
        mod = nameModule n

    -- Try our best to print the LTyClDecl for locally defined things
    ppr_decl tc =
      case lookupNameEnv lcl_decls n of
        Just (L loc decl) -> ppr (locA loc) <> colon <+> ppr decl
        Nothing           -> ppr (getSrcSpan n) <> colon <+> ppr n
                               <+> text "from external module"
      where
        n = tyConName tc

    go_ty :: TyConSet -> [TyCon] -> Type -> SynCycleM ()
    go_ty so_far seen_tcs ty =
      mapM_ (go so_far seen_tcs) (synonymTyConsOfType ty)
Note [ Superclass cycle check ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The superclass cycle check for C decides if we can statically
guarantee that expanding C 's superclass cycles transitively is
guaranteed to terminate . This is a Haskell98 requirement ,
but one that we lift with -XUndecidableSuperClasses .
The worry is that a superclass cycle could make the type checker loop .
More precisely , with a constraint ( Given or Wanted )
C .. tyn
one approach is to instantiate all of C 's superclasses , transitively .
We can only do so if that set is finite .
This potential loop occurs only through superclasses . This , for
example , is fine
class C a where
op : : C b = > a - > b - > b
even though C 's full definition uses C.
Making the check static also makes it conservative . Eg
type family F a
class F a = > C a
Here an instance of ( F a ) might mention C :
type instance F [ a ] = C a
and now we 'd have a loop .
The static check works like this , starting with C
* Look at C 's superclass predicates
* If any is a type - function application ,
or is headed by a type variable , fail
* If any has C at the head , fail
* If any has a type class D at the head ,
make the same test with D
A tricky point is : what if there is a type variable at the head ?
Consider this :
class f ( C f ) = > C f
class c = > I d c
and now expand superclasses for constraint ( C I d ):
C I d
Each step expands superclasses one layer , and clearly does not terminate .
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The superclass cycle check for C decides if we can statically
guarantee that expanding C's superclass cycles transitively is
guaranteed to terminate. This is a Haskell98 requirement,
but one that we lift with -XUndecidableSuperClasses.
The worry is that a superclass cycle could make the type checker loop.
More precisely, with a constraint (Given or Wanted)
C ty1 .. tyn
one approach is to instantiate all of C's superclasses, transitively.
We can only do so if that set is finite.
This potential loop occurs only through superclasses. This, for
example, is fine
class C a where
op :: C b => a -> b -> b
even though C's full definition uses C.
Making the check static also makes it conservative. Eg
type family F a
class F a => C a
Here an instance of (F a) might mention C:
type instance F [a] = C a
and now we'd have a loop.
The static check works like this, starting with C
* Look at C's superclass predicates
* If any is a type-function application,
or is headed by a type variable, fail
* If any has C at the head, fail
* If any has a type class D at the head,
make the same test with D
A tricky point is: what if there is a type variable at the head?
Consider this:
class f (C f) => C f
class c => Id c
and now expand superclasses for constraint (C Id):
C Id
Each step expands superclasses one layer, and clearly does not terminate.
-}
type ClassSet = UniqSet Class

-- | Implements the static superclass cycle check.
-- See Note [Superclass cycle check].
--
--   Nothing   <=> ok
--   Just err  <=> possible cycle error
checkClassCycles :: Class -> Maybe SDoc
checkClassCycles cls
  = do { (definite_cycle, err) <- go (unitUniqSet cls)
                                     cls (mkTyVarTys (classTyVars cls))
       ; let herald | definite_cycle = text "Superclass cycle for"
                    | otherwise      = text "Potential superclass cycle for"
       ; return (vcat [ herald <+> quotes (ppr cls)
                      , nest 2 err, hint]) }
  where
    hint = text "Use UndecidableSuperClasses to accept this"

    -- Expand superclasses starting with (C a b), complaining
    -- if you find the same class a second time, or a type function
    -- or predicate headed by a type variable.
    --
    -- NB: this code duplicates TcType.transSuperClasses, but
    -- with more error message generation clobber.
    -- Make sure the two stay in sync.
    --
    --   Nothing           <=> ok
    --   Just (True, err)  <=> definite cycle
    --   Just (False, err) <=> possible cycle
    go :: ClassSet -> Class -> [Type] -> Maybe (Bool, SDoc)
    go so_far cls tys = firstJusts $
                        map (go_pred so_far) $
                        immSuperClasses cls tys

    go_pred :: ClassSet -> PredType -> Maybe (Bool, SDoc)
    -- NB: tcSplitTyConApp looks through synonyms
    go_pred so_far pred
      | Just (tc, tys) <- tcSplitTyConApp_maybe pred
      = go_tc so_far pred tc tys
      | hasTyVarHead pred
      = Just (False, hang (text "one of whose superclass constraints is headed by a type variable:")
                        2 (quotes (ppr pred)))
      | otherwise
      = Nothing

    go_tc :: ClassSet -> PredType -> TyCon -> [Type] -> Maybe (Bool, SDoc)
    go_tc so_far pred tc tys
      | isFamilyTyCon tc
      = Just (False, hang (text "one of whose superclass constraints is headed by a type family:")
                        2 (quotes (ppr pred)))
      | Just cls <- tyConClass_maybe tc
      = go_cls so_far cls tys
      | otherwise   -- Equality predicate, for example
      = Nothing

    go_cls :: ClassSet -> Class -> [Type] -> Maybe (Bool, SDoc)
    go_cls so_far cls tys
      | cls `elementOfUniqSet` so_far
      = Just (True, text "one of whose superclasses is" <+> quotes (ppr cls))
      | isCTupleClass cls
      = go so_far cls tys
      | otherwise
      = do { (b, err) <- go (so_far `addOneToUniqSet` cls) cls tys
           ; return (b, text "one of whose superclasses is" <+> quotes (ppr cls)
                        $$ err) }
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
Role inference
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Note [ Role inference ]
~~~~~~~~~~~~~~~~~~~~~
The role inference algorithm datatype definitions to infer the roles on the
parameters . Although these roles are stored in the tycons , we can perform this
algorithm on the built tycons , as long as we do n't peek at an as - yet - unknown
roles field ! Ah , the magic of laziness .
First , we choose appropriate initial roles . For families and classes , roles
( including initial roles ) are For datatypes , we start with the role in the
role annotation ( if any ) , or otherwise use Phantom . This is done in
initialRoleEnv1 .
The function irGroup then propagates role information until it reaches a
fixpoint , preferring N over ( R or P ) and R over P. To aid in this , we have a
monad RoleM , which is a combination reader and state monad . In its state are
the current RoleEnv , which gets updated by role propagation , and an update
bit , which we use to know whether or not we 've reached the fixpoint . The
environment of RoleM contains the tycon whose parameters we are inferring , and
a VarEnv from parameters to their positions , so we can update the RoleEnv .
Between tycons , this reader information is missing ; it is added by
addRoleInferenceInfo .
There are two kinds of tycons to consider : algebraic ones ( excluding classes )
are ) An algebraic tycon processes each of its datacons , in turn . Note that
a datacon 's universally quantified parameters might be different from the parent
's parameters , so we use the datacon 's univ parameters in the mapping from
vars to positions . Note also that we do n't want to infer roles for existentials
( they 're all at N , too ) , so we put them in the set of local variables . As an
optimisation , we skip any tycons whose roles are already all , as there
nowhere else for them to go . For synonyms , we just analyse their right - hand sides .
irType walks through a type , looking for uses of a variable of interest and
propagating role information . Because anything used under a phantom position
is at phantom and anything used under a nominal position is at nominal , the
irType function can assume that anything it sees is at representational . ( The
other possibilities are pruned when they 're encountered . )
The rest of the code is just plumbing .
How do we know that this algorithm is correct ? It should meet the following
specification :
rules define the property ( Z |- t : r ) , where t is a type and r is a role :
Z(a ) = r ' r ' < = r
Z |- a : r
Z |- t1 : r
Z |- t2 : N
Z |- t1 t2 : r
forall i<=n . ( r_i is R or N ) implies Z : r_i
roles(T ) = r_1 .. r_n
Z t_n : R
Z , a : N |- t : r
Z |- forall a : k.t : r
We also have the following rules :
For all datacon_i in type T , where a_1 .. a_n are universally quantified
and .. b_m are existentially quantified , and the arguments are t_1 .. t_p ,
then if forall j<=p , a_1 : r_1 .. a_n : r_n , : N .. b_m : N |- t_j : R ,
then roles(T ) = r_1 .. r_n
roles(- > ) = R , R
roles(~ # ) = N , N
With -dcore - lint on , the output of this algorithm is checked in checkValidRoles ,
called from checkValidTycon .
Note [ Role - checking data constructor arguments ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T a where
MkT : : Eq b = > F a - > ( a->a ) - > T ( G a )
Then we want to check the roles at which ' a ' is used
in MkT 's type . We want to work on the user - written type ,
so we need to take into account
* the arguments : ( F a ) and ( a->a )
* the context : C a b
Note [ Coercions in role inference ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Is ( t | > co1 ) representationally equal to ( t | > co2 ) ? Of course they are ! Changing
the kind of a type is totally irrelevant to the representation of that type . So ,
we want to totally ignore coercions when doing role inference . This includes omitting
any type variables that appear in nominal positions but only within coercions .
************************************************************************
* *
Role inference
* *
************************************************************************
Note [Role inference]
~~~~~~~~~~~~~~~~~~~~~
The role inference algorithm datatype definitions to infer the roles on the
parameters. Although these roles are stored in the tycons, we can perform this
algorithm on the built tycons, as long as we don't peek at an as-yet-unknown
roles field! Ah, the magic of laziness.
First, we choose appropriate initial roles. For families and classes, roles
(including initial roles) are N. For datatypes, we start with the role in the
role annotation (if any), or otherwise use Phantom. This is done in
initialRoleEnv1.
The function irGroup then propagates role information until it reaches a
fixpoint, preferring N over (R or P) and R over P. To aid in this, we have a
monad RoleM, which is a combination reader and state monad. In its state are
the current RoleEnv, which gets updated by role propagation, and an update
bit, which we use to know whether or not we've reached the fixpoint. The
environment of RoleM contains the tycon whose parameters we are inferring, and
a VarEnv from parameters to their positions, so we can update the RoleEnv.
Between tycons, this reader information is missing; it is added by
addRoleInferenceInfo.
There are two kinds of tycons to consider: algebraic ones (excluding classes)
are N.) An algebraic tycon processes each of its datacons, in turn. Note that
a datacon's universally quantified parameters might be different from the parent
tycon's parameters, so we use the datacon's univ parameters in the mapping from
vars to positions. Note also that we don't want to infer roles for existentials
(they're all at N, too), so we put them in the set of local variables. As an
optimisation, we skip any tycons whose roles are already all Nominal, as there
nowhere else for them to go. For synonyms, we just analyse their right-hand sides.
irType walks through a type, looking for uses of a variable of interest and
propagating role information. Because anything used under a phantom position
is at phantom and anything used under a nominal position is at nominal, the
irType function can assume that anything it sees is at representational. (The
other possibilities are pruned when they're encountered.)
The rest of the code is just plumbing.
How do we know that this algorithm is correct? It should meet the following
specification:
rules define the property (Z |- t : r), where t is a type and r is a role:
Z(a) = r' r' <= r
Z |- a : r
Z |- t1 : r
Z |- t2 : N
Z |- t1 t2 : r
forall i<=n. (r_i is R or N) implies Z |- t_i : r_i
roles(T) = r_1 .. r_n
Z |- T t_1 .. t_n : R
Z, a:N |- t : r
Z |- forall a:k.t : r
We also have the following rules:
For all datacon_i in type T, where a_1 .. a_n are universally quantified
and b_1 .. b_m are existentially quantified, and the arguments are t_1 .. t_p,
then if forall j<=p, a_1 : r_1 .. a_n : r_n, b_1 : N .. b_m : N |- t_j : R,
then roles(T) = r_1 .. r_n
roles(->) = R, R
roles(~#) = N, N
With -dcore-lint on, the output of this algorithm is checked in checkValidRoles,
called from checkValidTycon.
Note [Role-checking data constructor arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T a where
MkT :: Eq b => F a -> (a->a) -> T (G a)
Then we want to check the roles at which 'a' is used
in MkT's type. We want to work on the user-written type,
so we need to take into account
* the arguments: (F a) and (a->a)
* the context: C a b
Note [Coercions in role inference]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Is (t |> co1) representationally equal to (t |> co2)? Of course they are! Changing
the kind of a type is totally irrelevant to the representation of that type. So,
we want to totally ignore coercions when doing role inference. This includes omitting
any type variables that appear in nominal positions but only within coercions.
-}
type RolesInfo = Name -> [Role]
inferRoles :: HscSource -> RoleAnnotEnv -> [TyCon] -> Name -> [Role]
inferRoles hsc_src annots tycons
= let role_env = initialRoleEnv hsc_src annots tycons
role_env' = irGroup role_env tycons in
\name -> case lookupNameEnv role_env' name of
Just roles -> roles
Nothing -> pprPanic "inferRoles" (ppr name)
initialRoleEnv :: HscSource -> RoleAnnotEnv -> [TyCon] -> RoleEnv
initialRoleEnv hsc_src annots = extendNameEnvList emptyNameEnv .
map (initialRoleEnv1 hsc_src annots)
initialRoleEnv1 :: HscSource -> RoleAnnotEnv -> TyCon -> (Name, [Role])
initialRoleEnv1 hsc_src annots_env tc
| isFamilyTyCon tc = (name, map (const Nominal) bndrs)
| isAlgTyCon tc = (name, default_roles)
| isTypeSynonymTyCon tc = (name, default_roles)
| otherwise = pprPanic "initialRoleEnv1" (ppr tc)
where name = tyConName tc
bndrs = tyConBinders tc
argflags = map tyConBinderForAllTyFlag bndrs
num_exps = count isVisibleForAllTyFlag argflags
if the number of annotations in the role annotation
role_annots
= case lookupRoleAnnot annots_env name of
Just (L _ (RoleAnnotDecl _ _ annots))
| annots `lengthIs` num_exps -> map unLoc annots
_ -> replicate num_exps Nothing
default_roles = build_default_roles argflags role_annots
build_default_roles (argf : argfs) (m_annot : ras)
| isVisibleForAllTyFlag argf
= (m_annot `orElse` default_role) : build_default_roles argfs ras
build_default_roles (_argf : argfs) ras
= Nominal : build_default_roles argfs ras
build_default_roles [] [] = []
build_default_roles _ _ = pprPanic "initialRoleEnv1 (2)"
(vcat [ppr tc, ppr role_annots])
default_role
| isClassTyCon tc = Nominal
Note [ Default roles for abstract TyCons in hs - boot / hsig ]
| HsBootFile <- hsc_src
, isAbstractTyCon tc = Representational
| HsigFile <- hsc_src
, isAbstractTyCon tc = Nominal
| otherwise = Phantom
Note [ Default roles for abstract TyCons in hs - boot / hsig ]
What should the default role for an abstract be ?
This was silly , because the role of the abstract TyCon
data types are almost never phantom . Thus , in ticket # 9204 ,
the matching judgment : if an abstract has a nominal
irGroup :: RoleEnv -> [TyCon] -> RoleEnv
irGroup env tcs
= let (env', update) = runRoleM env $ mapM_ irTyCon tcs in
if update
then irGroup env' tcs
else env'
irTyCon :: TyCon -> RoleM ()
irTyCon tc
| isAlgTyCon tc
= do { old_roles <- lookupRoles tc
irTcTyVars tc $
See # 8958
; whenIsJust (tyConClass_maybe tc) irClass
; mapM_ irDataCon (visibleDataCons $ algTyConRhs tc) }}
| Just ty <- synTyConRhs_maybe tc
= irTcTyVars tc $
irType emptyVarSet ty
| otherwise
= return ()
irClass :: Class -> RoleM ()
irClass cls
= mapM_ ir_at (classATs cls)
where
cls_tvs = classTyVars cls
cls_tv_set = mkVarSet cls_tvs
ir_at at_tc
= mapM_ (updateRole Nominal) nvars
where nvars = filter (`elemVarSet` cls_tv_set) $ tyConTyVars at_tc
irDataCon :: DataCon -> RoleM ()
irDataCon datacon
= setRoleInferenceVars univ_tvs $
irExTyVars ex_tvs $ \ ex_var_set ->
do mapM_ (irType ex_var_set) (eqSpecPreds eq_spec ++ theta ++ map scaledThing arg_tys)
Field multiplicities are nominal ( # 18799 )
where
(univ_tvs, ex_tvs, eq_spec, theta, arg_tys, _res_ty)
= dataConFullSig datacon
irType :: VarSet -> Type -> RoleM ()
irType = go
where
# 14101
= go lcls ty'
go lcls (TyVarTy tv) = unless (tv `elemVarSet` lcls) $
updateRole Representational tv
go lcls (AppTy t1 t2) = go lcls t1 >> markNominal lcls t2
go lcls (TyConApp tc tys) = do { roles <- lookupRolesX tc
; zipWithM_ (go_app lcls) roles tys }
go lcls (ForAllTy tvb ty) = do { let tv = binderVar tvb
lcls' = extendVarSet lcls tv
; markNominal lcls (tyVarKind tv)
; go lcls' ty }
go lcls (FunTy _ w arg res) = markNominal lcls w >> go lcls arg >> go lcls res
go _ (LitTy {}) = return ()
go lcls (CastTy ty _) = go lcls ty
go _ (CoercionTy _) = return ()
go_app lcls Representational ty = go lcls ty
irTcTyVars :: TyCon -> RoleM a -> RoleM a
irTcTyVars tc thing
= setRoleInferenceTc (tyConName tc) $ go (tyConTyVars tc)
where
go [] = thing
go (tv:tvs) = do { markNominal emptyVarSet (tyVarKind tv)
; addRoleInferenceVar tv $ go tvs }
irExTyVars :: [TyVar] -> (TyVarSet -> RoleM a) -> RoleM a
irExTyVars orig_tvs thing = go emptyVarSet orig_tvs
where
go lcls [] = thing lcls
go lcls (tv:tvs) = do { markNominal lcls (tyVarKind tv)
; go (extendVarSet lcls tv) tvs }
-> Type -> RoleM ()
markNominal lcls ty = let nvars = fvVarList (FV.delFVs lcls $ get_ty_vars ty) in
mapM_ (updateRole Nominal) nvars
where
get_ty_vars :: Type -> FV
# 20999
= get_ty_vars t'
get_ty_vars (TyVarTy tv) = unitFV tv
get_ty_vars (AppTy t1 t2) = get_ty_vars t1 `unionFV` get_ty_vars t2
get_ty_vars (FunTy _ w t1 t2) = get_ty_vars w `unionFV` get_ty_vars t1 `unionFV` get_ty_vars t2
get_ty_vars (TyConApp _ tys) = mapUnionFV get_ty_vars tys
get_ty_vars (ForAllTy tvb ty) = tyCoFVsBndr tvb (get_ty_vars ty)
get_ty_vars (LitTy {}) = emptyFV
get_ty_vars (CastTy ty _) = get_ty_vars ty
get_ty_vars (CoercionTy _) = emptyFV
like lookupRoles , but with Nominal tags at the end for oversaturated TyConApps
lookupRolesX :: TyCon -> RoleM [Role]
lookupRolesX tc
= do { roles <- lookupRoles tc
; return $ roles ++ repeat Nominal }
lookupRoles :: TyCon -> RoleM [Role]
lookupRoles tc
= do { env <- getRoleEnv
; case lookupNameEnv env (tyConName tc) of
Just roles -> return roles
Nothing -> return $ tyConRoles tc }
updateRole :: Role -> TyVar -> RoleM ()
updateRole role tv
= do { var_ns <- getVarNs
; name <- getTyConName
; case lookupVarEnv var_ns tv of
Nothing -> pprPanic "updateRole" (ppr name $$ ppr tv $$ ppr var_ns)
Just n -> updateRoleEnv name n role }
the state in the RoleM monad
data RoleInferenceState = RIS { role_env :: RoleEnv
, update :: Bool }
the environment in the RoleM monad
type VarPositions = VarEnv Int
-> VarPositions
size of VarPositions
-> RoleInferenceState
-> (a, RoleInferenceState) }
deriving (Functor)
instance Applicative RoleM where
pure x = RM $ \_ _ _ state -> (x, state)
(<*>) = ap
instance Monad RoleM where
a >>= f = RM $ \m_info vps nvps state ->
let (a', state') = unRM a m_info vps nvps state in
unRM (f a') m_info vps nvps state'
runRoleM :: RoleEnv -> RoleM () -> (RoleEnv, Bool)
runRoleM env thing = (env', update)
where RIS { role_env = env', update = update }
= snd $ unRM thing Nothing emptyVarEnv 0 state
state = RIS { role_env = env
, update = False }
setRoleInferenceTc :: Name -> RoleM a -> RoleM a
setRoleInferenceTc name thing = RM $ \m_name vps nvps state ->
assert (isNothing m_name) $
assert (isEmptyVarEnv vps) $
assert (nvps == 0) $
unRM thing (Just name) vps nvps state
addRoleInferenceVar :: TyVar -> RoleM a -> RoleM a
addRoleInferenceVar tv thing
= RM $ \m_name vps nvps state ->
assert (isJust m_name) $
unRM thing m_name (extendVarEnv vps tv nvps) (nvps+1) state
setRoleInferenceVars :: [TyVar] -> RoleM a -> RoleM a
setRoleInferenceVars tvs thing
= RM $ \m_name _vps _nvps state ->
assert (isJust m_name) $
unRM thing m_name (mkVarEnv (zip tvs [0..])) (panic "setRoleInferenceVars")
state
getRoleEnv :: RoleM RoleEnv
getRoleEnv = RM $ \_ _ _ state@(RIS { role_env = env }) -> (env, state)
getVarNs :: RoleM VarPositions
getVarNs = RM $ \_ vps _ state -> (vps, state)
getTyConName :: RoleM Name
getTyConName = RM $ \m_name _ _ state ->
case m_name of
Nothing -> panic "getTyConName"
Just name -> (name, state)
updateRoleEnv :: Name -> Int -> Role -> RoleM ()
updateRoleEnv name n role
= RM $ \_ _ _ state@(RIS { role_env = role_env }) -> ((),
case lookupNameEnv role_env name of
Nothing -> pprPanic "updateRoleEnv" (ppr name)
Just roles -> let (before, old_role : after) = splitAt n roles in
if role `ltRole` old_role
then let roles' = before ++ role : after
role_env' = extendNameEnv role_env name roles' in
RIS { role_env = role_env', update = True }
else state )
addTyConsToGblEnv :: [TyCon] -> TcM (TcGblEnv, ThBindEnv)
Given a [ TyCon ] , add to the TcGblEnv
Return separately the TH levels of these bindings ,
to be added to a LclEnv later .
addTyConsToGblEnv tyclss
= tcExtendTyConEnv tyclss $
tcExtendGlobalEnvImplicit implicit_things $
tcExtendGlobalValEnv def_meth_ids $
do { traceTc "tcAddTyCons" $ vcat
[ text "tycons" <+> ppr tyclss
, text "implicits" <+> ppr implicit_things ]
; gbl_env <- tcRecSelBinds (mkRecSelBinds tyclss)
; th_bndrs <- tcTyThBinders implicit_things
; return (gbl_env, th_bndrs)
}
where
implicit_things = concatMap implicitTyConThings tyclss
def_meth_ids = mkDefaultMethodIds tyclss
mkDefaultMethodIds :: [TyCon] -> [Id]
We want to put the default - method Ids ( both vanilla and generic )
into the type environment so that they are found when we
mkDefaultMethodIds tycons
= [ mkExportedVanillaId dm_name (mkDefaultMethodType cls sel_id dm_spec)
| tc <- tycons
, Just cls <- [tyConClass_maybe tc]
, (sel_id, Just (dm_name, dm_spec)) <- classOpItems cls ]
mkDefaultMethodType :: Class -> Id -> DefMethSpec Type -> Type
mkDefaultMethodType _ sel_id VanillaDM = idType sel_id
mkDefaultMethodType cls _ (GenericDM dm_ty) = mkSigmaTy tv_bndrs [pred] dm_ty
where
pred = mkClassPred cls (mkTyVarTys (binderVars cls_bndrs))
cls_bndrs = tyConBinders (classTyCon cls)
tv_bndrs = tyVarSpecToBinders $ tyConInvisTVBinders cls_bndrs
methods in instances ( . TyCl . Instance.mkDefMethBind ) , and
( # 13998 )
Note [ Default method Ids and Template Haskell ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this ( # 4169 ):
class Numeric a where
fromIntegerNum : : a
fromIntegerNum = ...
ast : : Q [ Dec ]
ast = [ d| instance Numeric Int | ]
When we typecheck ' ast ' we have done the first pass over the class decl
( in tcTyClDecls ) , but we have not yet typechecked the default - method
declarations ( because they can mention value declarations ) . So we
must bring the default method Ids into scope first ( so they can be seen
when typechecking the [ d| .. | ] quote , and them later .
Note [Default method Ids and Template Haskell]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this (#4169):
class Numeric a where
fromIntegerNum :: a
fromIntegerNum = ...
ast :: Q [Dec]
ast = [d| instance Numeric Int |]
When we typecheck 'ast' we have done the first pass over the class decl
(in tcTyClDecls), but we have not yet typechecked the default-method
declarations (because they can mention value declarations). So we
must bring the default method Ids into scope first (so they can be seen
when typechecking the [d| .. |] quote, and typecheck them later.
-}
tcRecSelBinds :: [(Id, LHsBind GhcRn)] -> TcM TcGblEnv
tcRecSelBinds sel_bind_prs
= tcExtendGlobalValEnv [sel_id | (L _ (XSig (IdSig sel_id))) <- sigs] $
do { (rec_sel_binds, tcg_env) <- discardWarnings $
setXOptM LangExt.ImpredicativeTypes $
tcValBinds TopLevel binds sigs getGblEnv
; return (tcg_env `addTypecheckedBinds` map snd rec_sel_binds) }
where
sigs = [ L (noAnnSrcSpan loc) (XSig $ IdSig sel_id)
| (sel_id, _) <- sel_bind_prs
, let loc = getSrcSpan sel_id ]
binds = [(NonRecursive, unitBag bind) | (_, bind) <- sel_bind_prs]
mkRecSelBinds :: [TyCon] -> [(Id, LHsBind GhcRn)]
NB We produce * un - typechecked * bindings , rather like ' deriving '
mkRecSelBinds tycons
= map mkRecSelBind [ (tc,fld) | tc <- tycons
, fld <- tyConFieldLabels tc ]
mkRecSelBind :: (TyCon, FieldLabel) -> (Id, LHsBind GhcRn)
mkRecSelBind (tycon, fl)
= mkOneRecordSelector all_cons (RecSelData tycon) fl
where
all_cons = map RealDataCon (tyConDataCons tycon)
mkOneRecordSelector :: [ConLike] -> RecSelParent -> FieldLabel -> FieldSelectors
-> (Id, LHsBind GhcRn)
mkOneRecordSelector all_cons idDetails fl has_sel
= (sel_id, L (noAnnSrcSpan loc) sel_bind)
where
loc = getSrcSpan sel_name
loc' = noAnnSrcSpan loc
locn = noAnnSrcSpan loc
locc = noAnnSrcSpan loc
lbl = flLabel fl
sel_name = flSelector fl
sel_id = mkExportedLocalId rec_details sel_name sel_ty
rec_details = RecSelId { sel_tycon = idDetails, sel_naughty = is_naughty }
cons_w_field = conLikesWithFields all_cons [lbl]
con1 = assert (not (null cons_w_field)) $ head cons_w_field
(univ_tvs, _, _, _, req_theta, _, data_ty) = conLikeFullSig con1
field_ty = conLikeFieldType con1 lbl
field_ty_tvs = tyCoVarsOfType field_ty
data_ty_tvs = tyCoVarsOfType data_ty
sel_tvs = field_ty_tvs `unionVarSet` data_ty_tvs
sel_tvbs = filter (\tvb -> binderVar tvb `elemVarSet` sel_tvs) $
conLikeUserTyVarBinders con1
is_naughty = not ok_scoping || no_selectors
ok_scoping = case con1 of
RealDataCon {} -> field_ty_tvs `subVarSet` data_ty_tvs
PatSynCon {} -> field_ty_tvs `subVarSet` mkVarSet univ_tvs
In the PatSynCon case , the selector type is ( data_ty - > field_ty ) , but
| otherwise = mkForAllTys (tyVarSpecToBinders sel_tvbs) $
Urgh ! See Note [ The stupid context ] in GHC.Core . DataCon
mkPhiTy (conLikeStupidTheta con1) $
req_theta is empty for normal DataCon
mkPhiTy req_theta $
mkVisFunTyMany data_ty $
field_ty
sel ( C7 { fld = x } ) = x
sel_bind = mkTopFunBind Generated sel_lname alts
where
alts | is_naughty = [mkSimpleMatch (mkPrefixFunRhs sel_lname)
[] unit_rhs]
| otherwise = map mk_match cons_w_field ++ deflt
mk_match con = mkSimpleMatch (mkPrefixFunRhs sel_lname)
[L loc' (mk_sel_pat con)]
(L loc' (HsVar noExtField (L locn field_var)))
mk_sel_pat con = ConPat NoExtField (L locn (getName con)) (RecCon rec_fields)
rec_fields = HsRecFields { rec_flds = [rec_field], rec_dotdot = Nothing }
rec_field = noLocA (HsFieldBind
{ hfbAnn = noAnn
, hfbLHS
= L locc (FieldOcc sel_name
(L locn $ mkVarUnqual (field_label lbl)))
, hfbRHS
= L loc' (VarPat noExtField (L locn field_var))
, hfbPun = False })
sel_lname = L locn sel_name
field_var = mkInternalName (mkBuiltinUnique 1) (getOccName sel_name) loc
deflt | all dealt_with all_cons = []
| otherwise = [mkSimpleMatch CaseAlt
[L loc' (WildPat noExtField)]
(mkHsApp (L loc' (HsVar noExtField
(L locn (getName rEC_SEL_ERROR_ID))))
(L loc' (HsLit noComments msg_lit)))]
constructors . We must take account of GADTs , else we
NB : we need to pass type args for the * representation * TyCon
dealt_with :: ConLike -> Bool
dealt_with con@(RealDataCon dc)
= con `elem` cons_w_field || dataConCannotMatch inst_tys dc
where
inst_tys = dataConResRepTyArgs dc
unit_rhs = mkLHsTupleExpr [] noExtField
msg_lit = HsStringPrim NoSourceText (bytesFS (field_label lbl))
Note [ Polymorphic selectors ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We take care to build the type of a polymorphic selector in the right
order , so that visible type application works according to the specification in
the GHC User 's Guide ( see the " Field selectors and TypeApplications " section ) .
We wo n't bother rehashing the entire specification in this Note , but the tricky
part is dealing with GADT constructor fields . Here is an appropriately tricky
example to illustrate the challenges :
{ - # LANGUAGE PolyKinds #
Note [Polymorphic selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We take care to build the type of a polymorphic selector in the right
order, so that visible type application works according to the specification in
the GHC User's Guide (see the "Field selectors and TypeApplications" section).
We won't bother rehashing the entire specification in this Note, but the tricky
part is dealing with GADT constructor fields. Here is an appropriately tricky
example to illustrate the challenges:
data T a b where
MkT :: forall b a x.
{ field1 :: forall c. (Num a, Show c) => (Either a c, Proxy b)
, field2 :: x
}
-> T a b
Our goal is to obtain the following type for `field1`:
field1 :: forall {k} (b :: k) a.
T a b -> forall c. (Num a, Show c) => (Either a c, Proxy b)
(`field2` is naughty, per Note [Naughty record selectors], so we cannot turn
it into a top-level field selector.)
Some potential gotchas, inspired by #18023:
1. Since the user wrote `forall b a x.` in the type of `MkT`, we want the `b`
to appear before the `a` when quantified in the type of `field1`.
2. On the other hand, we *don't* want to quantify `x` in the type of `field1`.
This is because `x` does not appear in the GADT return type, so it is not
needed in the selector type.
3. Because of PolyKinds, the kind of `b` is generalized to `k`. Moreover, since
this `k` is not written in the source code, it is inferred (i.e., not
available for explicit type applications) and thus written as {k} in the type
of `field1`.
In order to address these gotchas, we start by looking at the
conLikeUserTyVarBinders, which gives the order and specificity of each binder.
This effectively solves (1) and (3). To solve (2), we filter the binders to
leave only those that are needed for the selector type.
Note [Naughty record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A "naughty" field is one for which we can't define a record
selector, because an existential type variable would escape. For example:
data T = forall a. MkT { x,y::a }
We obviously can't define
x (MkT v _) = v
Nevertheless we *do* put a RecSelId into the type environment
so that if the user tries to use 'x' as a selector we can bleat
helpfully, rather than saying unhelpfully that 'x' is not in scope.
Hence the sel_naughty flag, to identify record selectors that don't really exist.
For naughty selectors we make a dummy binding
sel = ()
so that the later type-check will add them to the environment, and they'll be
exported. The function is never called, because the typechecker spots the
sel_naughty field.
To determine naughtiness we distingish two cases:
* For RealDataCons, a field is "naughty" if its type mentions a
type variable that isn't in the (original, user-written) result type
of the constructor. Note that this *allows* GADT record selectors
(Note [GADT record selectors]) whose types may look like sel :: T [a] -> a
* For a PatSynCon, a field is "naughty" if its type mentions a type variable
that isn't in the universal type variables.
This is a bit subtle. Consider test patsyn/should_run/records_run:
pattern ReadP :: forall a. ReadP a => a -> String
pattern ReadP {fld} <- (read -> readp)
The selector is defined like this:
$selReadPfld :: forall a. ReadP a => String -> a
$selReadPfld @a (d::ReadP a) s = readp @a d s
Perfectly fine! The (ReadP a) constraint lets us contruct a value of type
'a' from a bare String.
Another curious case (#23038):
pattern N :: forall a. () => forall. () => a -> Any
pattern N { fld } <- ( unsafeCoerce -> fld1 ) where N = unsafeCoerce
The selector looks like this
$selNfld :: forall a. Any -> a
$selNfld @a x = unsafeCoerce @Any @a x
Pretty strange (but used in the `cleff` package).
TL;DR for pattern synonyms, the selector is OK if the field type mentions only
the universal type variables of the pattern synonym.
Note [NoFieldSelectors and naughty record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Under NoFieldSelectors (see Note [NoFieldSelectors] in GHC.Rename.Env), record
selectors will not be in scope in the renamer. However, for normal datatype
declarations we still generate the underlying selector functions, so they can be
used for constructing the dictionaries for HasField constraints (as described by
Note [HasField instances] in GHC.Tc.Instance.Class). Hence the call to
mkOneRecordSelector in mkRecSelBind always uses FieldSelectors.
However, record pattern synonyms are not used with HasField, so when
NoFieldSelectors is used we do not need to generate selector functions. Thus
mkPatSynRecSelBinds passes the current state of the FieldSelectors extension to
mkOneRecordSelector, and in the NoFieldSelectors case it will treat them as
"naughty" fields (see Note [Naughty record selectors]).
Why generate a naughty binding, rather than no binding at all? Because when
type-checking a record update, we need to look up Ids for the fields. In
particular, disambiguateRecordBinds calls lookupParents which needs to look up
the RecSelIds to determine the sel_tycon.
Note [GADT record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For GADTs, we require that all constructors with a common field 'f' have the same
result type (modulo alpha conversion). [Checked in GHC.Tc.TyCl.checkValidTyCon]
E.g.
data T where
T1 { f :: Maybe a } :: T [a]
T2 { f :: Maybe a, y :: b } :: T [a]
T3 :: T Int
and now the selector takes that result type as its argument:
f :: forall a. T [a] -> Maybe a
Details: the "real" types of T1,T2 are:
T1 :: forall r a. (r~[a]) => a -> T r
T2 :: forall r a b. (r~[a]) => a -> b -> T r
So the selector loooks like this:
f :: forall a. T [a] -> Maybe a
f (a:*) (t:T [a])
= case t of
T1 c (g:[a]~[c]) (v:Maybe c) -> v `cast` Maybe (right (sym g))
T2 c d (g:[a]~[c]) (v:Maybe c) (w:d) -> v `cast` Maybe (right (sym g))
T3 -> error "T3 does not have field f"
Note the forall'd tyvars of the selector are just the free tyvars
of the result type; there may be other tyvars in the constructor's
type (e.g. 'b' in T2).
Note the need for casts in the result!
Note [Selector running example]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's OK to combine GADTs and type families. Here's a running example:
data instance T [a] where
T1 { fld :: b } :: T [Maybe b]
The representation type looks like this
data :R7T a where
T1 { fld :: b } :: :R7T (Maybe b)
and there's coercion from the family type to the representation type
:CoR7T a :: T [a] ~ :R7T a
The selector we want for fld looks like this:
fld :: forall b. T [Maybe b] -> b
fld = /\b. \(d::T [Maybe b]).
case d `cast` :CoR7T (Maybe b) of
T1 (x::b) -> x
The scrutinee of the case has type :R7T (Maybe b), which can be
gotten by applying the eq_spec to the univ_tvs of the data con.
Note [Impredicative record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are situations where generating code for record selectors requires the
use of ImpredicativeTypes. Here is one example (adapted from #18005):
type S = (forall b. b -> b) -> Int
data T = MkT {unT :: S}
| Dummy
We want to generate HsBinds for unT that look something like this:
unT :: S
unT (MkT x) = x
unT _ = recSelError "unT"#
Note that the type of recSelError is `forall r (a :: TYPE r). Addr# -> a`.
Therefore, when used in the right-hand side of `unT`, GHC attempts to
instantiate `a` with `(forall b. b -> b) -> Int`, which is impredicative.
To make sure that GHC is OK with this, we enable ImpredicativeTypes internally
when typechecking these HsBinds so that the user does not have to.
-}
|
41529b888d5ee5e2f43c1f68c8f0ff46d6ed6810be82f9fa4611ea9b0290e38a | noinia/hgeometry | DualDT.hs | # LANGUAGE ScopedTypeVariables #
module Algorithms.Geometry.VoronoiDiagram.DualDT where
import Algorithms.Geometry.DelaunayTriangulation.DivideAndConquer
import Algorithms.Geometry.DelaunayTriangulation.Types
import Control.Lens
import Data.Ext
import Geometry.Ball
import Geometry.Box
import Geometry.HalfLine
import Geometry.PlanarSubdivision
import Geometry.Point
import Geometry.Vector
import qualified Data.List.NonEmpty as NonEmpty
import Data.PlanarGraph (FaceId)
import Data.PlaneGraph (PlaneGraph)
import qualified Data.PlaneGraph as PG
import Data.Proxy
import qualified Data.Vector as V
--------------------------------------------------------------------------------
type UnboundedVoronoiEdges s e r = V.Vector (VertexId' s, HalfLine 2 r :+ e)
data VertexType v = BoundingBoxVertex | VoronoiVertex !v deriving (Show,Eq,Ord)
data SiteData f r = SiteData !(Point 2 r) !f deriving (Show,Eq)
data VoronoiDiagram s v e f r = VoronoiDiagram {
_boundedDiagram :: !(PlanarSubdivision s (VertexType v) e (SiteData f r) r)
, _boundedArea :: !(Rectangle () r)
, _unboundedIntersections :: !(UnboundedVoronoiEdges s e r)
} deriving (Show,Eq)
voronoiDiagram :: (Ord r, Fractional r)
=> proxy s -> NonEmpty.NonEmpty (Point 2 r :+ p)
-> VoronoiDiagram s () () p r
voronoiDiagram px pts = VoronoiDiagram diag bBox unb'
where
oid = PG.outerFaceId dt
dt ' : : PlaneGraph s Primal _ p ( ) ( ) r
dt' = toPlaneGraph px $ delaunayTriangulation pts
dt = dt'&PG.faceData .~ (fmap (toVDVertex dt') . PG.faces' $ dt')
bBox = grow 1 . boundingBoxList' . V.mapMaybe snd . PG.faces $ dt
diag = undefined
unb = unBoundedEdge dt <$> PG.boundary oid dt
-- this gives the unboundededges from their actual voronoi vertices
unb' = undefined
-- | Computes the unbounded edge corresponding to this dart of the
-- convex hull.
--
-- running time: \(O(1)\)
unBoundedEdge :: Fractional r
=> PlaneGraph s v e (Maybe (Point 2 r)) r -- ^ the delaunaytriangulation
-> Dart s
-> (FaceId' s, HalfLine 2 r :+ ())
unBoundedEdge dt d = let (p,q) = over both (^.location) $ dt^.PG.endPointsOf d
fi = PG.leftFace d dt
Just v = dt^.dataOf fi
-- the face to the left of this dart should be a triangle
-- and thus have a
in (fi, ext $ unboundedEdgeFrom v p q)
| Given an edge of the convex hull ( specifiekd by two adjacent
vertices ) and the vertex in the diagram that is incident to
these two sites , computes the halfine representing their unbouded
-- edge of the VD.
unboundedEdgeFrom :: Fractional r
=> Point 2 r -- ^ The starting point of the unbounded edge
-> Point 2 r -- ^ vertex of the CH
-> Point 2 r -- ^ adjacent vertex of the CH
-> HalfLine 2 r
unboundedEdgeFrom v p q = HalfLine v (midPoint p q .-. v)
where
midPoint a b = a .+^ ((b^.vector) ^/ 2)
| Computes the location of a Voronoi vertex
--
toVDVertex :: (Fractional r, Eq r)
=> PlaneGraph s v e f r -> FaceId' s
-> Maybe (Point 2 r)
toVDVertex dt fi
| V.length bvs /= 3 = Nothing
| otherwise = disk a b c ^?_Just.center.core
where
bvs = PG.boundaryVertices fi dt
[a,b,c] = map (\v -> dt^.PG.locationOf v) . V.toList $ bvs
| null | https://raw.githubusercontent.com/noinia/hgeometry/89cd3d3109ec68f877bf8e34dc34b6df337a4ec1/hgeometry/src/Algorithms/Geometry/VoronoiDiagram/DualDT.hs | haskell | ------------------------------------------------------------------------------
this gives the unboundededges from their actual voronoi vertices
| Computes the unbounded edge corresponding to this dart of the
convex hull.
running time: \(O(1)\)
^ the delaunaytriangulation
the face to the left of this dart should be a triangle
and thus have a
edge of the VD.
^ The starting point of the unbounded edge
^ vertex of the CH
^ adjacent vertex of the CH
| # LANGUAGE ScopedTypeVariables #
module Algorithms.Geometry.VoronoiDiagram.DualDT where
import Algorithms.Geometry.DelaunayTriangulation.DivideAndConquer
import Algorithms.Geometry.DelaunayTriangulation.Types
import Control.Lens
import Data.Ext
import Geometry.Ball
import Geometry.Box
import Geometry.HalfLine
import Geometry.PlanarSubdivision
import Geometry.Point
import Geometry.Vector
import qualified Data.List.NonEmpty as NonEmpty
import Data.PlanarGraph (FaceId)
import Data.PlaneGraph (PlaneGraph)
import qualified Data.PlaneGraph as PG
import Data.Proxy
import qualified Data.Vector as V
type UnboundedVoronoiEdges s e r = V.Vector (VertexId' s, HalfLine 2 r :+ e)
data VertexType v = BoundingBoxVertex | VoronoiVertex !v deriving (Show,Eq,Ord)
data SiteData f r = SiteData !(Point 2 r) !f deriving (Show,Eq)
data VoronoiDiagram s v e f r = VoronoiDiagram {
_boundedDiagram :: !(PlanarSubdivision s (VertexType v) e (SiteData f r) r)
, _boundedArea :: !(Rectangle () r)
, _unboundedIntersections :: !(UnboundedVoronoiEdges s e r)
} deriving (Show,Eq)
voronoiDiagram :: (Ord r, Fractional r)
=> proxy s -> NonEmpty.NonEmpty (Point 2 r :+ p)
-> VoronoiDiagram s () () p r
voronoiDiagram px pts = VoronoiDiagram diag bBox unb'
where
oid = PG.outerFaceId dt
dt ' : : PlaneGraph s Primal _ p ( ) ( ) r
dt' = toPlaneGraph px $ delaunayTriangulation pts
dt = dt'&PG.faceData .~ (fmap (toVDVertex dt') . PG.faces' $ dt')
bBox = grow 1 . boundingBoxList' . V.mapMaybe snd . PG.faces $ dt
diag = undefined
unb = unBoundedEdge dt <$> PG.boundary oid dt
unb' = undefined
unBoundedEdge :: Fractional r
-> Dart s
-> (FaceId' s, HalfLine 2 r :+ ())
unBoundedEdge dt d = let (p,q) = over both (^.location) $ dt^.PG.endPointsOf d
fi = PG.leftFace d dt
Just v = dt^.dataOf fi
in (fi, ext $ unboundedEdgeFrom v p q)
| Given an edge of the convex hull ( specifiekd by two adjacent
vertices ) and the vertex in the diagram that is incident to
these two sites , computes the halfine representing their unbouded
unboundedEdgeFrom :: Fractional r
-> HalfLine 2 r
unboundedEdgeFrom v p q = HalfLine v (midPoint p q .-. v)
where
midPoint a b = a .+^ ((b^.vector) ^/ 2)
| Computes the location of a Voronoi vertex
toVDVertex :: (Fractional r, Eq r)
=> PlaneGraph s v e f r -> FaceId' s
-> Maybe (Point 2 r)
toVDVertex dt fi
| V.length bvs /= 3 = Nothing
| otherwise = disk a b c ^?_Just.center.core
where
bvs = PG.boundaryVertices fi dt
[a,b,c] = map (\v -> dt^.PG.locationOf v) . V.toList $ bvs
|
14e6879a6748aa63d8f58f1ac146e0e1f4e0d7f0bb56375d3b090277368e3333 | ucsd-progsys/liquidhaskell | Bare.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE NoMonomorphismRestriction #
{-# LANGUAGE ScopedTypeVariables #-}
# LANGUAGE TupleSections #
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
-- | This module contains the functions that convert /from/ descriptions of
-- symbols, names and types (over freshly parsed /bare/ Strings),
/to/ representations connected to GHC ' Var 's , ' Name 's , and ' Type 's .
-- The actual /representations/ of bare and real (refinement) types are all
-- in 'RefType' -- they are different instances of 'RType'.
module Language.Haskell.Liquid.Bare (
* Creating a TargetSpec
-- $creatingTargetSpecs
makeTargetSpec
-- * Loading and Saving lifted specs from/to disk
, loadLiftedSpec
, saveLiftedSpec
) where
import Prelude hiding (error)
import Optics
import Control.Monad (forM)
import Control.Applicative ((<|>))
import qualified Control.Exception as Ex
import qualified Data.Binary as B
import qualified Data.Maybe as Mb
import qualified Data.List as L
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as S
import Text.PrettyPrint.HughesPJ hiding (first, (<>)) -- (text, (<+>))
import System.FilePath (dropExtension)
import System.Directory (doesFileExist)
import System.Console.CmdArgs.Verbosity (whenLoud)
import Language.Fixpoint.Utils.Files as Files
import Language.Fixpoint.Misc as Misc
import Language.Fixpoint.Types hiding (dcFields, DataDecl, Error, panic)
import qualified Language.Fixpoint.Types as F
import qualified Language.Haskell.Liquid.Misc as Misc -- (nubHashOn)
import qualified Liquid.GHC.Misc as GM
import qualified Liquid.GHC.API as Ghc
import Liquid.GHC.Types (StableName)
import Language.Haskell.Liquid.Types
import Language.Haskell.Liquid.WiredIn
import qualified Language.Haskell.Liquid.Measure as Ms
import qualified Language.Haskell.Liquid.Bare.Types as Bare
import qualified Language.Haskell.Liquid.Bare.Resolve as Bare
import qualified Language.Haskell.Liquid.Bare.DataType as Bare
import Language.Haskell.Liquid.Bare.Elaborate
import qualified Language.Haskell.Liquid.Bare.Expand as Bare
import qualified Language.Haskell.Liquid.Bare.Measure as Bare
import qualified Language.Haskell.Liquid.Bare.Plugged as Bare
import qualified Language.Haskell.Liquid.Bare.Axiom as Bare
import qualified Language.Haskell.Liquid.Bare.ToBare as Bare
import qualified Language.Haskell.Liquid.Bare.Class as Bare
import qualified Language.Haskell.Liquid.Bare.Check as Bare
import qualified Language.Haskell.Liquid.Bare.Laws as Bare
import qualified Language.Haskell.Liquid.Bare.Typeclass as Bare
import qualified Language.Haskell.Liquid.Transforms.CoreToLogic as CoreToLogic
import Control.Arrow (second)
import Data.Hashable (Hashable)
import qualified Language.Haskell.Liquid.Bare.Slice as Dg
--------------------------------------------------------------------------------
-- | De/Serializing Spec files
--------------------------------------------------------------------------------
-- | Load the serialised lifted spec for the given source file, if one exists.
--   Returns 'Nothing' when lifted imports are disabled by the 'Config' or
--   when no binary spec file is present on disk.
loadLiftedSpec :: Config -> FilePath -> IO (Maybe Ms.BareSpec)
loadLiftedSpec cfg srcF =
  if noLiftedImport cfg
    then putStrLn "No LIFTED Import" >> return Nothing
    else do
      let specF = extFileName BinSpec srcF
      hasSpec <- doesFileExist specF
      whenLoud $ putStrLn $ "Loading Binary Lifted Spec: " ++ specF ++ " " ++ "for source-file: " ++ show srcF ++ " " ++ show hasSpec
      loaded  <- if hasSpec
                   then Just <$> B.decodeFile specF
                   else return Nothing
      -- force the lazily-decoded value so decoding errors surface here
      Ex.evaluate loaded
-- warnMissingLiftedSpec :: FilePath -> FilePath -> IO ()
-- warnMissingLiftedSpec srcF specF = do
-- incDir <- Misc.getIncludeDir
-- unless (Misc.isIncludeFile incDir srcF)
-- $ Ex.throw (errMissingSpec srcF specF)
-- | Serialise the lifted spec for @srcF@ to its binary spec file, creating
--   any missing directories first.
saveLiftedSpec :: FilePath -> Ms.BareSpec -> IO ()
saveLiftedSpec srcF lspec = do
  let specF = extFileName BinSpec srcF
  ensurePath specF
  B.encodeFile specF lspec
{- $creatingTargetSpecs

/Liquid Haskell/ operates on 'TargetSpec's, so this module provides a single function called
'makeTargetSpec' to produce a 'TargetSpec', alongside the 'LiftedSpec'. The former will be used by
functions like 'liquid' or 'liquidOne' to verify our program is correct, the latter will be serialised
to disk so that we can retrieve it later without having to re-check the relevant Haskell file.
-}
-- | 'makeTargetSpec' constructs the 'TargetSpec' and then validates it. Upon success, the 'TargetSpec'
-- and the 'LiftedSpec' are returned. We perform error checking in \"two phases\": during the first phase,
-- we check for errors and warnings in the input 'BareSpec' and the dependencies. During this phase we ideally
-- want to short-circuit in case the validation failure is found in one of the dependencies (to avoid
-- printing potentially endless failures).
-- The second phase involves creating the 'TargetSpec', and returning either the full list of diagnostics
-- (errors and warnings) in case things went wrong, or the final 'TargetSpec' and 'LiftedSpec' together
-- with a list of 'Warning's, which shouldn't abort the compilation (modulo explicit request from the user,
-- to treat warnings and errors).
makeTargetSpec :: Config
               -> LogicMap
               -> TargetSrc
               -> BareSpec
               -> TargetDependencies
               -> Ghc.TcRn (Either Diagnostics ([Warning], TargetSpec, LiftedSpec))
makeTargetSpec cfg lmap targetSrc bareSpec dependencies = do
  -- Phase 1: check the target source, every dependency spec, and the input
  -- bare spec; only proceed to spec construction if no errors were found.
  let targDiagnostics     = Bare.checkTargetSrc cfg targetSrc
  let depsDiagnostics     = mapM (uncurry Bare.checkBareSpec) legacyDependencies
  let bareSpecDiagnostics = Bare.checkBareSpec (giTargetMod targetSrc) legacyBareSpec
  case targDiagnostics >> depsDiagnostics >> bareSpecDiagnostics of
    Left d | noErrors d -> secondPhase (allWarnings d)
    Left d              -> return $ Left d
    Right ()            -> secondPhase mempty
  where
    -- Phase 2: build the 'GhcSpec' and split it into target/lifted parts,
    -- carrying the phase-1 warnings through.
    secondPhase :: [Warning] -> Ghc.TcRn (Either Diagnostics ([Warning], TargetSpec, LiftedSpec))
    secondPhase phaseOneWarns = do
      -- we should be able to setContext regardless of whether
      -- we use the ghc api. However, ghc will complain
      -- if the filename does not match the module name
      -- when (...) $ do
      --   Ghc.setContext [iimport | (modName, _) <- allSpecs legacyBareSpec,
      --                   let iimport = if isTarget modName
      --                                   then Ghc.IIModule (getModName modName)
      --                                   else Ghc.IIDecl (Ghc.simpleImportDecl (getModName modName))]
      --   void $ Ghc.execStmt
      --     "let {infixr 1 ==>; True ==> False = False; _ ==> _ = True}"
      --     Ghc.execOptions
      --   void $ Ghc.execStmt
      --     "let {infixr 1 <=>; True <=> False = False; _ <=> _ = True}"
      --     Ghc.execOptions
      --   void $ Ghc.execStmt
      --     "let {infix 4 ==; (==) :: a -> a -> Bool; _ == _ = undefined}"
      --     Ghc.execOptions
      --   void $ Ghc.execStmt
      --     "let {infix 4 /=; (/=) :: a -> a -> Bool; _ /= _ = undefined}"
      --     Ghc.execOptions
      --   void $ Ghc.execStmt
      --     "let {infixl 7 /; (/) :: a -> a -> a; _ / _ = undefined}"
      --     Ghc.execOptions
      --   void $ Ghc.execStmt
      --     "let {len :: [a] -> Int; len _ = undefined}"
      --     Ghc.execOptions
      diagOrSpec <- makeGhcSpec cfg (review targetSrcIso targetSrc) lmap (allSpecs legacyBareSpec)
      return $ do
        (warns, ghcSpec) <- diagOrSpec
        let (targetSpec, liftedSpec) = view targetSpecGetter ghcSpec
        pure (phaseOneWarns <> warns, targetSpec, liftedSpec)

    -- Adapt a serialised dependency to the legacy (ModName, BareSpec) shape.
    toLegacyDep :: (Ghc.StableModule, LiftedSpec) -> (ModName, Ms.BareSpec)
    toLegacyDep (sm, ls) = (ModName SrcImport (Ghc.moduleName . Ghc.unStableModule $ sm), unsafeFromLiftedSpec ls)

    toLegacyTarget :: Ms.BareSpec -> (ModName, Ms.BareSpec)
    toLegacyTarget validatedSpec = (giTargetMod targetSrc, validatedSpec)

    legacyDependencies :: [(ModName, Ms.BareSpec)]
    legacyDependencies = map toLegacyDep . M.toList . getDependencies $ dependencies

    -- The target spec always comes first, followed by all dependencies.
    allSpecs :: Ms.BareSpec -> [(ModName, Ms.BareSpec)]
    allSpecs validSpec = toLegacyTarget validSpec : legacyDependencies

    legacyBareSpec :: Spec LocBareType F.LocSymbol
    legacyBareSpec = review bareSpecIso bareSpec
-------------------------------------------------------------------------------------
-- | Invokes @makeGhcSpec0@ to construct the @GhcSpec@ and then
--   validates it using @checkGhcSpec@.
-------------------------------------------------------------------------------------
makeGhcSpec :: Config
            -> GhcSrc
            -> LogicMap
            -> [(ModName, Ms.BareSpec)]
            -> Ghc.TcRn (Either Diagnostics ([Warning], GhcSpec))
-------------------------------------------------------------------------------------
makeGhcSpec cfg src lmap validatedSpecs = do
  -- construct the spec, collecting construction-time diagnostics (dg0)
  (dg0, sp) <- makeGhcSpec0 cfg src lmap validatedSpecs
  -- validate the assembled spec against the source and its sort environment
  let diagnostics = Bare.checkTargetSpec (map snd validatedSpecs)
                                         (view targetSrcIso src)
                                         (ghcSpecEnv sp)
                                         (_giCbs src)
                                         (fst . view targetSpecGetter $ sp)
  -- construction errors take precedence over validation diagnostics;
  -- validation warnings alone do not abort
  pure $ if not (noErrors dg0) then Left dg0 else
    case diagnostics of
      Left dg1
        | noErrors dg1 -> pure (allWarnings dg1, sp)
        | otherwise    -> Left dg1
      Right ()         -> pure (mempty, sp)
-- | Build the sort environment used to validate the final spec: it binds
--   measures, data constructors, reflected binders, constructor-like free
--   symbols, wired-in symbols, and imported signatures to their sorts.
ghcSpecEnv :: GhcSpec -> SEnv SortedReft
ghcSpecEnv sp = F.notracepp "RENV" $ fromListSEnv binds
  where
    emb   = gsTcEmbeds (_gsName sp)
    binds = F.notracepp "binds" $ concat
      [ [(x,        rSort t) | (x, Loc _ _ t) <- gsMeas     (_gsData sp)]
      , [(symbol v, rSort t) | (v, Loc _ _ t) <- gsCtors    (_gsData sp)]
      , [(symbol v, vSort v) | v              <- gsReflects (_gsRefl sp)]
      -- only constructor-like free symbols are bound here
      , [(x,        vSort v) | (x, v)         <- gsFreeSyms (_gsName sp), Ghc.isConLikeId v ]
      , [(x, RR s mempty)    | (x, s)         <- wiredSortedSyms ]
      , [(x, RR s mempty)    | (x, s)         <- _gsImps sp ]
      ]
    -- sort of a GHC Var, adjusting its type for the typeclass encoding first
    vSort = rSort . classRFInfoType (typeclass $ getConfig sp) .
              (ofType :: Ghc.Type -> SpecType) . Ghc.varType
    rSort = rTypeSortedReft emb
-------------------------------------------------------------------------------------
-- | @makeGhcSpec0@ slurps up all the relevant information needed to generate
--   constraints for a target module and packages them into a @GhcSpec@.
--   See [NOTE]: LIFTING-STAGES to see why we split into lSpec0, lSpec1, etc.,
--   essentially, to get to the `BareRTEnv` as soon as possible, as that's what
--   lets us use aliases inside data-constructor definitions.
-------------------------------------------------------------------------------------
makeGhcSpec0 :: Config -> GhcSrc -> LogicMap -> [(ModName, Ms.BareSpec)] ->
                Ghc.TcRn (Diagnostics, GhcSpec)
makeGhcSpec0 cfg src lmap mspecsNoCls = do
  -- build up environments
  -- NOTE: the where-clause below is lazily knot-tied (e.g. mySpec0 depends on
  -- mySpec0NoCls which depends on splitSpecs); binding order there is not
  -- evaluation order.
  tycEnv <- makeTycEnv1 name env (tycEnv0, datacons) coreToLg simplifier
  let tyi      = Bare.tcTyConMap tycEnv
  let sigEnv   = makeSigEnv embs tyi (_gsExports src) rtEnv
  let lSpec1   = lSpec0 <> makeLiftedSpec1 cfg src tycEnv lmap mySpec1
  let mySpec   = mySpec2 <> lSpec1
  let specs    = M.insert name mySpec iSpecs2
  let myRTE    = myRTEnv src env sigEnv rtEnv
  let (dg5, measEnv) = withDiagnostics $ makeMeasEnv env tycEnv sigEnv specs
  let (dg4, sig)     = withDiagnostics $ makeSpecSig cfg name specs env sigEnv tycEnv measEnv (_giCbs src)
  -- with typeclasses enabled, elaborate the signatures through GHC's
  -- typechecker; otherwise use them as-is
  elaboratedSig <-
    if allowTC then Bare.makeClassAuxTypes (elaborateSpecType coreToLg simplifier) datacons instMethods
                      >>= elaborateSig sig
               else pure sig
  let qual   = makeSpecQual cfg env tycEnv measEnv rtEnv specs
  let sData  = makeSpecData src env sigEnv measEnv elaboratedSig specs
  let (dg1, spcVars) = withDiagnostics $ makeSpecVars cfg src mySpec env measEnv
  let (dg2, spcTerm) = withDiagnostics $ makeSpecTerm cfg mySpec env name
  let (dg3, refl)    = withDiagnostics $ makeSpecRefl cfg src measEnv specs env name elaboratedSig tycEnv
  let laws   = makeSpecLaws env sigEnv (gsTySigs elaboratedSig ++ gsAsmSigs elaboratedSig) measEnv specs
  let finalLiftedSpec = makeLiftedSpec name src env refl sData elaboratedSig qual myRTE lSpec1
  let diags  = mconcat [dg0, dg1, dg2, dg3, dg4, dg5]
  pure (diags, SP
    { _gsConfig = cfg
    , _gsImps   = makeImports mspecs
    , _gsSig    = addReflSigs env name rtEnv refl elaboratedSig
    , _gsRefl   = refl
    , _gsLaws   = laws
    , _gsData   = sData
    , _gsQual   = qual
    , _gsName   = makeSpecName env tycEnv measEnv name
    , _gsVars   = spcVars
    , _gsTerm   = spcTerm
    , _gsLSpec  = finalLiftedSpec
        { impSigs   = makeImports mspecs
        , expSigs   = [ (F.symbol v, F.sr_sort $ Bare.varSortedReft embs v) | v <- gsReflects refl ]
        , dataDecls = Bare.dataDeclSize mySpec $ dataDecls mySpec
        , measures  = Ms.measures mySpec
          -- We want to export measures in a 'LiftedSpec', especially if they are
          -- required to check termination of some 'liftedSigs' we export. Due to the fact
          -- that 'lSpec1' doesn't contain the measures that we compute via 'makeHaskellMeasures',
          -- we take them from 'mySpec', which has those.
        , asmSigs = Ms.asmSigs finalLiftedSpec ++ Ms.asmSigs mySpec
          -- Export all the assumptions (not just the ones created out of reflection) in
          -- a 'LiftedSpec'.
        , imeasures = Ms.imeasures finalLiftedSpec ++ Ms.imeasures mySpec
          -- Preserve user-defined 'imeasures'.
        , dvariance = Ms.dvariance finalLiftedSpec ++ Ms.dvariance mySpec
          -- Preserve user-defined 'dvariance'.
        , rinstance = Ms.rinstance finalLiftedSpec ++ Ms.rinstance mySpec
          -- Preserve rinstances.
        }
    })
  where
    -- typeclass elaboration

    -- translate a Core expression to the refinement logic, crashing on failure
    coreToLg ce =
      case CoreToLogic.runToLogic
             embs
             lmap
             dm
             (\x -> todo Nothing ("coreToLogic not working " ++ x))
             (CoreToLogic.coreToLogic allowTC ce) of
        Left  msg -> panic Nothing (F.showpp msg)
        Right e   -> e

    -- elaborate all type signatures (except those from GHC.Real) and append
    -- the auxiliary class signatures
    elaborateSig si auxsig = do
      tySigs <-
        forM (gsTySigs si) $ \(x, t) ->
          if GM.isFromGHCReal x then
            pure (x, t)
          else do t' <- traverse (elaborateSpecType coreToLg simplifier) t
                  pure (x, t')
      -- things like len breaks the code
      -- asmsigs should be elaborated only if they are from the current module
      -- asmSigs <- forM (gsAsmSigs si) $ \(x, t) -> do
      --   t' <- traverse (elaborateSpecType (pure ()) coreToLg) t
      --   pure (x, fst <$> t')
      pure
        si
          { gsTySigs = F.notracepp ("asmSigs" ++ F.showpp (gsAsmSigs si)) tySigs ++ auxsig }

    simplifier :: Ghc.CoreExpr -> Ghc.TcRn Ghc.CoreExpr
    simplifier = pure -- no simplification
    allowTC    = typeclass cfg
    mySpec2    = Bare.qualifyExpand env name rtEnv l [] mySpec1    where l = F.dummyPos "expand-mySpec2"
    iSpecs2    = Bare.qualifyExpand env name rtEnv l [] iSpecs0    where l = F.dummyPos "expand-iSpecs2"
    rtEnv      = Bare.makeRTEnv env name mySpec1 iSpecs0 lmap
    mspecs     = if allowTC then M.toList $ M.insert name mySpec0 iSpecs0 else mspecsNoCls
    -- with typeclasses enabled, class declarations are compiled away first
    (mySpec0, instMethods) = if allowTC
                               then Bare.compileClasses src env (name, mySpec0NoCls) (M.toList iSpecs0)
                               else (mySpec0NoCls, [])
    mySpec1    = mySpec0 <> lSpec0
    lSpec0     = makeLiftedSpec0 cfg src embs lmap mySpec0
    embs       = makeEmbeds src env ((name, mySpec0) : M.toList iSpecs0)
    dm         = Bare.tcDataConMap tycEnv0
    (dg0, datacons, tycEnv0) = makeTycEnv0 cfg name env embs mySpec2 iSpecs2
    -- extract name and specs
    env        = Bare.makeEnv cfg src lmap mspecsNoCls
    (mySpec0NoCls, iSpecs0) = splitSpecs name src mspecsNoCls
    -- check
    name       = F.notracepp ("ALL-SPECS" ++ zzz) $ _giTargetMod src
    zzz        = F.showpp (fst <$> mspecs)
-- | Separate the target module's spec fragments (merged into one spec) from
--   the import specs, then slice the imports down to those relevant to the
--   target and group them per module.
splitSpecs :: ModName -> GhcSrc -> [(ModName, Ms.BareSpec)] -> (Ms.BareSpec, Bare.ModSpecs)
splitSpecs name src specs = (targetSpec, importMap)
  where
    (targetFrags, otherSpecs) = L.partition (\(m, _) -> m == name) specs
    targetSpec                = mconcat (map snd targetFrags)
    slicedImports             = Dg.sliceSpecs src targetSpec otherSpecs
    importMap                 = fmap mconcat (Misc.group slicedImports)
-- | Collect the exported signatures of every source-imported spec.
makeImports :: [(ModName, Ms.BareSpec)] -> [(F.Symbol, F.Sort)]
makeImports specs =
  [ sig | (m, sp) <- specs, isSrcImport m, sig <- expSigs sp ]
-- | Union the per-module tycon embeddings and extend them with the
--   class/family-instance embeddings taken from the source.
makeEmbeds :: GhcSrc -> Bare.Env -> [(ModName, Ms.BareSpec)] -> F.TCEmb Ghc.TyCon
makeEmbeds src env specs =
  Bare.addClassEmbeds (_gsCls src) (_gsFiTcs src)
    (mconcat (map (makeTyConEmbeds env) specs))

-- | Resolve the symbolic tycon names in one spec's @embed@ annotations to
--   actual 'Ghc.TyCon's; names that do not resolve are silently dropped.
makeTyConEmbeds :: Bare.Env -> (ModName, Ms.BareSpec) -> F.TCEmb Ghc.TyCon
makeTyConEmbeds env (name, spec) = F.tceFromList resolved
  where
    resolved = [ (tc, t) | (c, t) <- F.tceToList (Ms.embeds spec), tc <- symTc c ]
    symTc c  = Mb.maybeToList (Bare.maybeResolveSym env name "embed-tycon" c)
--------------------------------------------------------------------------------
-- | [NOTE]: REFLECT-IMPORTS
--
-- 1. MAKE the full LiftedSpec, which will eventually, contain:
--      makeHaskell{Inlines, Measures, Axioms, Bounds}
-- 2. SAVE the LiftedSpec, which will be reloaded
--
-- This step creates the aliases and inlines etc. It must be done BEFORE
-- we compute the `SpecType` for (all, including the reflected binders),
-- as we need the inlines and aliases to properly `expand` the SpecTypes.
--------------------------------------------------------------------------------
-- | Stage-1 lifting: only the Haskell-defined measures.
--   (See [NOTE]: LIFTING-STAGES for why lifting is split into stages.)
makeLiftedSpec1 :: Config -> GhcSrc -> Bare.TycEnv -> LogicMap -> Ms.BareSpec
                -> Ms.BareSpec
makeLiftedSpec1 config src tycEnv lmap mySpec =
  mempty { Ms.measures = liftedMeasures }
  where
    liftedMeasures = Bare.makeHaskellMeasures (typeclass config) src tycEnv lmap mySpec
--------------------------------------------------------------------------------
-- | [NOTE]: LIFTING-STAGES
--
-- We split the lifting up into stage:
-- 0. Where we only lift inlines,
-- 1. Where we lift reflects, measures, and normalized tySigs
--
-- This is because we need the inlines to build the @BareRTEnv@ which then
-- does the alias @expand@ business, that in turn, lets us build the DataConP,
-- i.e. the refined datatypes and their associate selectors, projectors etc,
-- that are needed for subsequent stages of the lifting.
--------------------------------------------------------------------------------
-- | Stage-0 lifting: inlines (as expression aliases), reflect annotations,
--   lifted data declarations, plus the spec's embeds and class measures.
--   (See [NOTE]: LIFTING-STAGES.)
makeLiftedSpec0 :: Config -> GhcSrc -> F.TCEmb Ghc.TyCon -> LogicMap -> Ms.BareSpec
                -> Ms.BareSpec
makeLiftedSpec0 cfg src embs lmap mySpec = mempty
  { Ms.ealiases  = lmapEAlias . snd <$> Bare.makeHaskellInlines (typeclass cfg) src embs lmap mySpec
  , Ms.reflects  = Ms.reflects mySpec
  , Ms.dataDecls = Bare.makeHaskellDataDecls cfg name mySpec tcs
  , Ms.embeds    = Ms.embeds mySpec
  -- We do want 'embeds' to survive and to be present into the final 'LiftedSpec'. The
  -- caveat is to decide which format is more appropriate. We obviously cannot store
  -- them as a 'TCEmb TyCon' as serialising a 'TyCon' would be fairly expensive. This
  -- needs more thinking.
  , Ms.cmeasures = Ms.cmeasures mySpec
  -- We do want 'cmeasures' to survive and to be present into the final 'LiftedSpec'. The
  -- caveat is to decide which format is more appropriate. This needs more thinking.
  }
  where
    tcs    = uniqNub (_gsTcs src ++ refTcs)
    refTcs = reflectedTyCons cfg embs cbs mySpec
    cbs    = _giCbs src
    name   = _giTargetMod src
-- | Deduplicate a list by each element's GHC 'Unique', keeping one
--   representative per key.
uniqNub :: (Ghc.Uniquable a) => [a] -> [a]
uniqNub xs = M.elems keyed
  where
    keyed = M.fromList [ (key x, x) | x <- xs ]
    key   = Ghc.getKey . Ghc.getUnique
-- | 'reflectedTyCons' returns the list of `[TyCon]` that must be reflected but
-- which are defined *outside* the current module e.g. in Base or somewhere
-- that we don't have access to the code.
reflectedTyCons :: Config -> TCEmb Ghc.TyCon -> [Ghc.CoreBind] -> Ms.BareSpec -> [Ghc.TyCon]
reflectedTyCons cfg embs cbs spec
  | exactDCFlag cfg =
      -- tycons reachable from reflected or lifted-measure binders,
      -- minus those that are embedded (see comment below on why)
      let liftedVs = reflectedVars spec cbs ++ measureVars spec cbs
      in  [ tc | tc <- concatMap varTyCons liftedVs, not (isEmbedded embs tc) ]
  | otherwise = []
-- | We cannot reflect embedded tycons (e.g. Bool) as that gives you a sort
--   conflict: e.g. what is the type of is-True? does it take a GHC.Types.Bool
--   or its embedding, a bool?
-- | Is this tycon given an embedding into a logic sort?
isEmbedded :: TCEmb Ghc.TyCon -> Ghc.TyCon -> Bool
isEmbedded embs c = c `F.tceMember` embs

-- | All tycons appearing in the (refined) type of a variable.
varTyCons :: Ghc.Var -> [Ghc.TyCon]
varTyCons v = specTypeCons (ofType (Ghc.varType v))

-- | Collect every tycon applied anywhere inside a 'SpecType'.
specTypeCons :: SpecType -> [Ghc.TyCon]
specTypeCons t0 = foldRType collect [] t0
  where
    collect acc t = case t of
      RApp {} -> rtc_tc (rt_tycon t) : acc
      _       -> acc
-- | The Core binders named by the spec's @reflect@ annotations.
reflectedVars :: Ms.BareSpec -> [Ghc.CoreBind] -> [Ghc.Var]
reflectedVars spec cbs = map fst defs
  where
    defs  = Mb.mapMaybe (`GM.findVarDef` cbs) names
    names = map val (S.toList (Ms.reflects spec))

-- | The Core binders named by the spec's Haskell-measure annotations.
measureVars :: Ms.BareSpec -> [Ghc.CoreBind] -> [Ghc.Var]
measureVars spec cbs = map fst defs
  where
    defs  = Mb.mapMaybe (`GM.findVarDef` cbs) names
    names = map val (S.toList (Ms.hmeas spec))
------------------------------------------------------------------------------------------
-- | Resolve the spec's variable-level annotations: the binders to check
--   (from the command line), the binders to ignore, the @lvars@ binders,
--   and the class methods collected in the measure environment.
makeSpecVars :: Config -> GhcSrc -> Ms.BareSpec -> Bare.Env -> Bare.MeasEnv
             -> Bare.Lookup GhcSpecVars
------------------------------------------------------------------------------------------
makeSpecVars cfg src mySpec env measEnv = do
  tgtVars <- mapM (resolveStringVar env name) (checks cfg)
  igVars  <- sMapM (Bare.lookupGhcVar env name "gs-ignores") (Ms.ignores mySpec)
  lVars   <- sMapM (Bare.lookupGhcVar env name "gs-lvars"  ) (Ms.lvars   mySpec)
  return (SpVar tgtVars igVars lVars cMethods)
  where
    name     = _giTargetMod src
    cMethods = snd3 <$> Bare.meMethods measEnv
-- | 'mapM' over a 'HashSet', rebuilding a set from the results.
sMapM :: (Monad m, Eq b, Hashable b) => (a -> m b) -> S.HashSet a -> m (S.HashSet b)
sMapM f xSet = S.fromList <$> mapM f (S.toList xSet)

-- | Argument-flipped 'sMapM'.
sForM :: (Monad m, Eq b, Hashable b) => S.HashSet a -> (a -> m b) -> m (S.HashSet b)
sForM = flip sMapM

-- | Prefix a symbol with a module name, e.g. @Mod.sym@.
qualifySymbolic :: (F.Symbolic a) => ModName -> a -> F.Symbol
qualifySymbolic name s = GM.qualifySymbol (F.symbol name) (F.symbol s)
-- | Resolve a command-line binder name, qualified by the target module,
--   to its GHC 'Var'.
resolveStringVar :: Bare.Env -> ModName -> String -> Bare.Lookup Ghc.Var
resolveStringVar env name s =
  Bare.lookupGhcVar env name "resolve-string-var" located
  where
    located = dummyLoc (qualifySymbolic name s)
------------------------------------------------------------------------------------------
makeSpecQual :: Config -> Bare.Env -> Bare.TycEnv -> Bare.MeasEnv -> BareRTEnv -> Bare.ModSpecs
             -> GhcSpecQual
------------------------------------------------------------------------------------------
-- | Collect the qualifiers of every spec, keeping only those whose free
--   symbols are all in scope (measure symbols or wired-in symbols).
makeSpecQual _cfg env tycEnv measEnv _rtEnv specs = SpQual
  { gsQualifiers = filter okQual quals
  -- makeSpecRTAliases env rtEnv -- TODO-REBARE
  }
  where
    quals    = concatMap (makeQualifiers env tycEnv) (M.toList specs)
    -- mSyms = F.tracepp "MSYMS" $ M.fromList (Bare.meSyms measEnv ++ Bare.meClassSyms measEnv)
    okQual q = F.notracepp ("okQual: " ++ F.showpp q)
                 $ all (`S.member` mSyms) (F.syms q)
    mSyms    = F.notracepp "MSYMS" . S.fromList
                 $  (fst <$> wiredSortedSyms)
                 ++ (fst <$> Bare.meSyms measEnv)
                 ++ (fst <$> Bare.meClassSyms measEnv)
-- | Extract one module's qualifiers, resolving their parameter sorts and
--   qualifying their names by that module; unresolvable ones are dropped.
makeQualifiers :: Bare.Env -> Bare.TycEnv -> (ModName, Ms.Spec ty bndr) -> [F.Qualifier]
makeQualifiers env tycEnv (modn, spec) =
  [ Bare.qualifyTopDummy env modn q
  | q <- Mb.mapMaybe (resolveQParams env tycEnv modn) (Ms.qualifiers spec)
  ]
-- | @resolveQualParams@ converts the sorts of parameters from, e.g.
-- 'Int' ===> 'GHC.Types.Int' or
-- 'Ptr' ===> 'GHC.Ptr.Ptr'
-- It would not be required if _all_ qualifiers are scraped from
-- function specs, but we're keeping it around for backwards compatibility.
resolveQParams :: Bare.Env -> Bare.TycEnv -> ModName -> F.Qualifier -> Maybe F.Qualifier
resolveQParams env tycEnv name q = do
  params <- traverse fixParam (F.qParams q)
  pure q { F.qParams = params }
  where
    -- qualify one parameter's sort, failing if any tycon cannot be resolved
    fixParam qp = do
      s <- fixSort (F.qpSort qp)
      pure qp { F.qpSort = s }

    -- recurse through the sort structure, qualifying each tycon leaf
    fixSort :: F.Sort -> Maybe F.Sort
    fixSort (FAbs i s)    = FAbs i <$> fixSort s
    fixSort (FFunc s1 s2) = FFunc  <$> fixSort s1 <*> fixSort s2
    fixSort (FApp  s1 s2) = FApp   <$> fixSort s1 <*> fixSort s2
    fixSort (FTC c)       = qualifyFTycon env tycEnv name c
    fixSort s             = Just s
qualifyFTycon :: Bare.Env -> Bare.TycEnv -> ModName -> F.FTycon -> Maybe F.Sort
qualifyFTycon env tycEnv name c
  | isPrim    = Just (FTC c)                               -- primitive sorts pass through untouched
  | otherwise = fmap (tyConSort embs . F.atLoc tcs) mbTc   -- otherwise resolve and embed
  where
    tcs    = F.fTyconSymbol c
    isPrim = F.val tcs `elem` F.prims
    mbTc   = Bare.maybeResolveSym env name "qualify-FTycon" tcs
    embs   = Bare.tcEmbs tycEnv
-- | The sort of a tycon: its embedding if one is declared, otherwise the
--   raw sort built from its (qualified) name.
tyConSort :: F.TCEmb Ghc.TyCon -> F.Located Ghc.TyCon -> F.Sort
tyConSort embs lc =
  case F.tceLookup (F.val lc) embs of
    Just (s, _) -> s
    Nothing     -> tyConSortRaw lc

tyConSortRaw :: F.Located Ghc.TyCon -> F.Sort
tyConSortRaw ltc = FTC (F.symbolFTycon (fmap F.symbol ltc))
------------------------------------------------------------------------------------------
-- | Resolve the spec's termination-related annotations: size functions,
--   lazy binders, auto-sized tycons, decreasing-argument hints and
--   expected-failure binders.
makeSpecTerm :: Config -> Ms.BareSpec -> Bare.Env -> ModName ->
                Bare.Lookup GhcSpecTerm
------------------------------------------------------------------------------------------
makeSpecTerm cfg mySpec env name = do
  -- with structural termination there is no need for size functions
  sizes  <- if structuralTerm cfg then pure mempty else makeSize env name mySpec
  lazies <- makeLazy     env name mySpec
  autos  <- makeAutoSize env name mySpec
  decr   <- makeDecrs env name mySpec
  gfail  <- makeFail  env name mySpec
  return $ SpTerm
    -- binders with size functions are also excluded from termination checking
    { gsLazy       = S.insert dictionaryVar (lazies `mappend` sizes)
    , gsFail       = gfail
    , gsStTerm     = sizes
    , gsAutosize   = autos
    , gsDecr       = decr
    , gsNonStTerm  = mempty
    }
-- formerly, makeHints
-- | Resolve the @decrease@ annotations: each binder paired with the indices
--   of its decreasing arguments.
makeDecrs :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup [(Ghc.Var, [Int])]
makeDecrs env name mySpec = mapM resolve (Ms.decr mySpec)
  where
    resolve (lx, z) = do
      v <- Bare.lookupGhcVar env name "decreasing" lx
      pure (v, z)
-- | Resolve relational signatures: each pair of binders is looked up and its
--   two bare types are cooked into SpecTypes; the relational pre/post
--   expressions are passed through unchanged.
makeRelation :: Bare.Env -> ModName -> Bare.SigEnv ->
                [(LocSymbol, LocSymbol, LocBareType, LocBareType, RelExpr, RelExpr)] -> Bare.Lookup [(Ghc.Var, Ghc.Var, LocSpecType, LocSpecType, RelExpr, RelExpr)]
makeRelation env name sigEnv = mapM go
  where
    go (x, y, tx, ty, a, e) = do
      vx <- Bare.lookupGhcVar env name "Var" x
      vy <- Bare.lookupGhcVar env name "Var" y
      return
        ( vx
        , vy
        , Bare.cookSpecType env sigEnv name (Bare.HsTV vx) tx
        , Bare.cookSpecType env sigEnv name (Bare.HsTV vy) ty
        , a
        , e
        )
-- | Resolve the @lazy@ binders (excluded from termination checking).
makeLazy :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup (S.HashSet Ghc.Var)
makeLazy env name spec = sMapM resolve (Ms.lazy spec)
  where
    resolve = Bare.lookupGhcVar env name "Var"

-- | Resolve the binders marked with @fail@, keeping their source locations.
makeFail :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup (S.HashSet (Located Ghc.Var))
makeFail env name spec = sForM (Ms.fails spec) resolveLoc
  where
    resolveLoc x = do
      vx <- Bare.lookupGhcVar env name "Var" x
      pure x { val = vx }

-- | Resolve the binders marked as rewrite rules, keeping their locations.
makeRewrite :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup (S.HashSet (Located Ghc.Var))
makeRewrite env name spec = sForM (Ms.rewrites spec) resolveLoc
  where
    resolveLoc x = do
      vx <- Bare.lookupGhcVar env name "Var" x
      pure x { val = vx }

-- | Resolve the @rewriteWith@ map: a binder mapped to the rewrite rules
--   that should be applied when checking it.
makeRewriteWith :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup (M.HashMap Ghc.Var [Ghc.Var])
makeRewriteWith env name spec = M.fromList <$> makeRewriteWith' env name spec

makeRewriteWith' :: Bare.Env -> ModName -> Spec ty bndr -> Bare.Lookup [(Ghc.Var, [Ghc.Var])]
makeRewriteWith' env name spec = mapM resolvePair (M.toList (Ms.rewriteWith spec))
  where
    resolvePair (x, xs) = do
      xv  <- Bare.lookupGhcVar env name "Var1" x
      xvs <- mapM (Bare.lookupGhcVar env name "Var2") xs
      pure (xv, xvs)
-- | Resolve the @autosize@ tycons (those that get an automatic size measure).
makeAutoSize :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup (S.HashSet Ghc.TyCon)
makeAutoSize env name spec = do
  tcs <- mapM (Bare.lookupGhcTyCon env name "TyCon") (S.toList (Ms.autosize spec))
  pure (S.fromList tcs)

-- | Resolve the user-declared size functions of all data declarations.
makeSize :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup (S.HashSet Ghc.Var)
makeSize env name spec = do
  vs <- mapM (Bare.lookupGhcVar env name "Var") sizeSyms
  pure (S.fromList vs)
  where
    sizeSyms = Mb.mapMaybe getSizeFuns (Ms.dataDecls spec)
-- | The symbolic size function of a data declaration, if it has one.
getSizeFuns :: DataDecl -> Maybe LocSymbol
getSizeFuns decl =
  case tycSFun decl of
    Just (SymSizeFun f) -> Just f
    _                   -> Nothing
------------------------------------------------------------------------------------------
-- | Assemble the law definitions (from the measure environment) and the
--   law instances (resolved from the specs).
makeSpecLaws :: Bare.Env -> Bare.SigEnv -> [(Ghc.Var,LocSpecType)] -> Bare.MeasEnv -> Bare.ModSpecs
             -> GhcSpecLaws
------------------------------------------------------------------------------------------
makeSpecLaws env sigEnv sigs menv specs = SpLaws
  { gsLawDefs = [ (cls, [ (x, y) | (_, x, y) <- defs ]) | (cls, defs) <- Bare.meCLaws menv ]
  , gsLawInst = Bare.makeInstanceLaws env sigEnv sigs specs
  }
------------------------------------------------------------------------------------------
-- | Build the reflection part of the spec: resolve auto-instantiation and
--   rewrite annotations, generate the reflection axioms for the target
--   module's reflected binders, and collect everything that counts as
--   "reflected" (law methods, reflected signatures, wired-in reflects).
makeSpecRefl :: Config -> GhcSrc -> Bare.MeasEnv -> Bare.ModSpecs -> Bare.Env -> ModName -> GhcSpecSig -> Bare.TycEnv
             -> Bare.Lookup GhcSpecRefl
------------------------------------------------------------------------------------------
makeSpecRefl cfg src menv specs env name sig tycEnv = do
  autoInst <- makeAutoInst env name mySpec
  rwr      <- makeRewrite env name mySpec
  rwrWith  <- makeRewriteWith env name mySpec
  wRefls   <- Bare.wiredReflects cfg env name sig
  -- (Var, type, equation) triples for each reflected binder
  xtes     <- Bare.makeHaskellAxioms cfg src env tycEnv name lmap sig mySpec
  let myAxioms =
        [ Bare.qualifyTop
            env
            name
            (F.loc lt)
            -- mark the equation recursive if its own name occurs in its body
            e {eqName = s, eqRec = S.member s (exprSymbolsSet (eqBody e))}
        | (x, lt, e) <- xtes
        , let s = symbol x
        ]
  let sigVars = F.notracepp "SIGVARS" $ (fst3 <$> xtes)            -- reflects
                                     ++ (fst <$> gsAsmSigs sig)    -- assumes
                                     ++ (fst <$> gsRefSigs sig)
  return SpRefl
    { gsLogicMap     = lmap
    , gsAutoInst     = autoInst
    , gsImpAxioms    = concatMap (Ms.axeqs . snd) (M.toList specs)
    , gsMyAxioms     = F.notracepp "gsMyAxioms" myAxioms
    , gsReflects     = F.notracepp "gsReflects" (lawMethods ++ filter (isReflectVar rflSyms) sigVars ++ wRefls)
    , gsHAxioms      = F.notracepp "gsHAxioms" xtes
    , gsWiredReft    = wRefls
    , gsRewrites     = rwr
    , gsRewritesWith = rwrWith
    }
  where
    lawMethods = F.notracepp "Law Methods" $ concatMap Ghc.classMethods (fst <$> Bare.meCLaws menv)
    mySpec     = M.lookupDefault mempty name specs
    rflSyms    = S.fromList (getReflects specs)
    lmap       = Bare.reLMap env
-- | Is this variable's (module-stripped) name among the reflected symbols?
isReflectVar :: S.HashSet F.Symbol -> Ghc.Var -> Bool
isReflectVar reflSyms v = GM.dropModuleNames (symbol v) `S.member` reflSyms

-- | All symbols that any spec reflects, inlines, or lifts as a measure.
getReflects :: Bare.ModSpecs -> [Symbol]
getReflects specs = map val (S.toList allNames)
  where
    allNames = S.unions (map (names . snd) (M.toList specs))
    names z  = S.unions [ Ms.reflects z, Ms.inlines z, Ms.hmeas z ]
------------------------------------------------------------------------------------------
-- | @updateReflSpecSig@ uses the information about reflected functions to update the
-- "assumed" signatures.
------------------------------------------------------------------------------------------
addReflSigs :: Bare.Env -> ModName -> BareRTEnv -> GhcSpecRefl -> GhcSpecSig -> GhcSpecSig
------------------------------------------------------------------------------------------
addReflSigs env name rtEnv refl sig =
  sig { gsRefSigs = F.notracepp ("gsRefSigs for " ++ F.showpp name) $ map expandReflectedSignature reflSigs
      , gsAsmSigs = F.notracepp ("gsAsmSigs for " ++ F.showpp name) (wreflSigs ++ filter notReflected (gsAsmSigs sig))
      }
  where
    -- See T1738. We need to expand and qualify any reflected signature /here/, after any
    -- relevant binder has been detected and \"promoted\". The problem stems from the fact that any input
    -- 'BareSpec' will have a 'reflects' list of binders to reflect under the form of an opaque 'Var', that
    -- qualifyExpand can't touch when we do a first pass in 'makeGhcSpec0'. However, once we reflected all
    -- the functions, we are left with a pair (Var, LocSpecType). The latter /needs/ to be qualified and
    -- expanded again, for example in case it has expression aliases derived from 'inlines'.
    expandReflectedSignature :: (Ghc.Var, LocSpecType) -> (Ghc.Var, LocSpecType)
    expandReflectedSignature = fmap (Bare.qualifyExpand env name rtEnv (F.dummyPos "expand-refSigs") [])

    -- split the axiom signatures into wired-in reflects and the rest
    (wreflSigs, reflSigs) = L.partition ((`elem` gsWiredReft refl) . fst)
                              [ (x, t) | (x, t, _) <- gsHAxioms refl ]

    -- reflected binders must not keep a (stale) assumed signature
    reflected       = fst <$> (wreflSigs ++ reflSigs)
    notReflected xt = fst xt `notElem` reflected
-- | Resolve the @automatic-instances@ binders to their GHC Vars, keeping the
--   optional fuel bound attached to each.
makeAutoInst :: Bare.Env -> ModName -> Ms.BareSpec ->
                Bare.Lookup (M.HashMap Ghc.Var (Maybe Int))
makeAutoInst env name spec = do
  pairs <- mapM resolveKey (M.toList (Ms.autois spec))
  pure (M.fromList pairs)
  where
    resolveKey (k, fuel) = do
      vk <- Bare.lookupGhcVar env name "Var" k
      pure (vk, fuel)
----------------------------------------------------------------------------------------
-- | Assemble the signature part of the spec: user type signatures
--   (strengthened against method/inline signatures), assumed signatures,
--   dictionaries, newtypes, termination expressions and relational sigs.
makeSpecSig :: Config -> ModName -> Bare.ModSpecs -> Bare.Env -> Bare.SigEnv -> Bare.TycEnv -> Bare.MeasEnv -> [Ghc.CoreBind]
            -> Bare.Lookup GhcSpecSig
----------------------------------------------------------------------------------------
makeSpecSig cfg name specs env sigEnv tycEnv measEnv cbs = do
  mySigs <- makeTySigs env sigEnv name mySpec
  aSigs  <- F.notracepp ("makeSpecSig aSigs " ++ F.showpp name) $ makeAsmSigs env sigEnv name specs
  let asmSigs =  Bare.tcSelVars tycEnv
              ++ aSigs
              ++ [ (x,t) | (_, x, t) <- concatMap snd (Bare.meCLaws measEnv) ]
  let tySigs  = strengthenSigs . concat $
                  [ [(v, (0, t)) | (v, t,_) <- mySigs                         ]   -- NOTE: these weights are to priortize
                  , [(v, (1, t)) | (v, t  ) <- makeMthSigs measEnv            ]   -- user defined sigs OVER auto-generated
                  , [(v, (2, t)) | (v, t  ) <- makeInlSigs env rtEnv allSpecs ]   -- during the strengthening, i.e. to KEEP
                  ]                                                               -- the binders used in USER-defined sigs
                                                                                  -- as they appear in termination metrics
  newTys   <- makeNewTypes env sigEnv allSpecs
  relation <- makeRelation env name sigEnv (Ms.relational mySpec)
  asmRel   <- makeRelation env name sigEnv (Ms.asmRel mySpec)
  return SpSig
    { gsTySigs   = tySigs
    , gsAsmSigs  = asmSigs
    , gsRefSigs  = []
    , gsDicts    = dicts
    , gsMethods  = if noclasscheck cfg then [] else Bare.makeMethodTypes (typeclass cfg) dicts (Bare.meClasses measEnv) cbs
    , gsInSigs   = mempty
    , gsNewTypes = newTys
    , gsTexprs   = [ (v, t, es) | (v, t, Just es) <- mySigs ]
    , gsRelation = relation
    , gsAsmRel   = asmRel
    }
  where
    dicts    = Bare.makeSpecDictionaries env sigEnv specs
    mySpec   = M.lookupDefault mempty name specs
    allSpecs = M.toList specs
    rtEnv    = Bare.sigRTEnv sigEnv
    -- hmeas = makeHMeas env allSpecs
-- | Collapse multiple (weighted) signatures for the same binder into one by
--   meeting them; lower weights take priority (they end up as the base of
--   the fold, see the weights assigned in 'makeSpecSig').
strengthenSigs :: [(Ghc.Var, (Int, LocSpecType))] ->[(Ghc.Var, LocSpecType)]
strengthenSigs sigs = mkSig <$> Misc.groupList sigs
  where
    mkSig (v, ixs) =
      (v, L.foldl1' (flip meetLoc) (F.notracepp ("STRENGTHEN-SIGS: " ++ F.showpp v) (byPriority ixs)))
    byPriority = fmap snd . Misc.sortOn fst

-- | Meet two located spec types, keeping the first one's location.
meetLoc :: LocSpecType -> LocSpecType -> LocSpecType
meetLoc t1 t2 = t1 {val = val t1 `F.meet` val t2}
-- | The method signatures recorded in the measure environment.
makeMthSigs :: Bare.MeasEnv -> [(Ghc.Var, LocSpecType)]
makeMthSigs measEnv = map (\(_, v, t) -> (v, t)) (Bare.meMethods measEnv)

-- | Lifted signatures for all @inline@ binders.
makeInlSigs :: Bare.Env -> BareRTEnv -> [(ModName, Ms.BareSpec)] -> [(Ghc.Var, LocSpecType)]
makeInlSigs env rtEnv specs =
  makeLiftedSigs rtEnv liftType (makeFromSet "hinlines" Ms.inlines env specs)
  where
    liftType = CoreToLogic.inlineSpecType (typeclass (getConfig env))

-- | Lifted signatures for all Haskell-measure binders.
--   NOTE(review): this uses 'inlineSpecType', the same lifting as
--   'makeInlSigs', even though it collects 'hmeas' binders — confirm upstream
--   that this is intended rather than a measure-specific lifting.
makeMsrSigs :: Bare.Env -> BareRTEnv -> [(ModName, Ms.BareSpec)] -> [(Ghc.Var, LocSpecType)]
makeMsrSigs env rtEnv specs =
  makeLiftedSigs rtEnv liftType (makeFromSet "hmeas" Ms.hmeas env specs)
  where
    liftType = CoreToLogic.inlineSpecType (typeclass (getConfig env))

-- | Pair each binder with its lifted, alias-expanded signature.
makeLiftedSigs :: BareRTEnv -> (Ghc.Var -> SpecType) -> [Ghc.Var] -> [(Ghc.Var, LocSpecType)]
makeLiftedSigs rtEnv f xs =
  [ (x, expand (lx { val = f x }))
  | x <- xs
  , let lx = GM.locNamedThing x
  ]
  where
    expand = Bare.specExpandType rtEnv
-- | Resolve, per module, the binders named by some spec field (selected by
--   @f@); names that fail to resolve are dropped.
makeFromSet :: String -> (Ms.BareSpec -> S.HashSet LocSymbol) -> Bare.Env -> [(ModName, Ms.BareSpec)]
            -> [Ghc.Var]
makeFromSet msg f env specs =
  concat [ resolveAll n (S.toList (f s)) | (n, s) <- specs ]
  where
    resolveAll name = Mb.mapMaybe (Bare.maybeResolveSym env name msg)
-- | Resolve the target module's type signatures: look up each binder, attach
--   its termination expressions, resolve local binds, and cook the bare
--   types into SpecTypes.
makeTySigs :: Bare.Env -> Bare.SigEnv -> ModName -> Ms.BareSpec
           -> Bare.Lookup [(Ghc.Var, LocSpecType, Maybe [Located F.Expr])]
makeTySigs env sigEnv name spec = do
  bareSigs   <- bareTySigs env name spec
  -- pair each signature with its termination expressions (if any)
  expSigs    <- makeTExpr env name bareSigs rtEnv spec
  let rawSigs = Bare.resolveLocalBinds env expSigs
  return [ (x, cook x bt, z) | (x, bt, z) <- rawSigs ]
  where
    rtEnv     = Bare.sigRTEnv sigEnv
    cook x bt = Bare.cookSpecType env sigEnv name (Bare.HsTV x) bt
-- | Resolve the raw (bare) signatures of a spec to their 'Ghc.Var's and
--   reject duplicates via 'checkDuplicateSigs'.
bareTySigs :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup [(Ghc.Var, LocBareType)]
bareTySigs env name spec = checkDuplicateSigs <$> vts
  where
    vts = forM ( Ms.sigs spec ++ Ms.localSigs spec ) $ \ (x, t) -> do
            v <- F.notracepp "LOOKUP-GHC-VAR" $ Bare.lookupGhcVar env name "rawTySigs" x
            return (v, t)
-- checkDuplicateSigs :: [(Ghc.Var, LocSpecType)] -> [(Ghc.Var, LocSpecType)]
-- | Abort with a duplicate-specification error if any symbol is given more
--   than one signature; otherwise return the input unchanged.
checkDuplicateSigs :: (Symbolic x) => [(x, F.Located t)] -> [(x, F.Located t)]
checkDuplicateSigs xts = case Misc.uniqueByKey symXs of
  Left (k, ls) -> uError (errDupSpecs (pprint k) (GM.sourcePosSrcSpan <$> ls))
  Right _      -> xts
  where
    symXs = [ (F.symbol x, F.loc t) | (x, t) <- xts ]
-- | Cooked \"assume\" signatures: resolve the raw assumptions, then elaborate
--   each bare type in the module it came from.
makeAsmSigs :: Bare.Env -> Bare.SigEnv -> ModName -> Bare.ModSpecs -> Bare.Lookup [(Ghc.Var, LocSpecType)]
makeAsmSigs env sigEnv myName specs = do
  raSigs <- rawAsmSigs env myName specs
  return [ (x, t) | (name, x, bt) <- raSigs, let t = Bare.cookSpecType env sigEnv name (Bare.LqTV x) bt ]
-- | Select, for each variable, the single winning assumption among all specs
--   (via 'myAsmSig'), remembering which module it came from.
rawAsmSigs :: Bare.Env -> ModName -> Bare.ModSpecs -> Bare.Lookup [(ModName, Ghc.Var, LocBareType)]
rawAsmSigs env myName specs = do
  aSigs <- allAsmSigs env myName specs
  return [ (m, v, t) | (v, sigs) <- aSigs, let (m, t) = myAsmSig v sigs ]
-- | Pick the single assumption for a variable: a home-module signature
--   (flag 'True') wins; otherwise the imported signature from the module whose
--   name is closest ('nameDistance'). Multiple candidates in the winning group
--   raise a duplicate-specs error via 'takeUnique'.
myAsmSig :: Ghc.Var -> [(Bool, ModName, LocBareType)] -> (ModName, LocBareType)
myAsmSig v sigs = Mb.fromMaybe errImp (Misc.firstMaybes [mbHome, mbImp])
  where
    mbHome   = takeUnique mkErr sigsHome
    mbImp    = takeUnique mkErr (Misc.firstGroup sigsImp) -- see [NOTE:Prioritize-Home-Spec]
    sigsHome = [(m, t) | (True, m, t) <- sigs ]
    sigsImp  = F.notracepp ("SIGS-IMP: " ++ F.showpp v)
                 [(d, (m, t)) | (False, m, t) <- sigs, let d = nameDistance vName m]
    mkErr ts = ErrDupSpecs (Ghc.getSrcSpan v) (F.pprint v) (GM.sourcePosSrcSpan . F.loc . snd <$> ts) :: UserError
    -- caller only invokes this with a non-empty signature list
    errImp   = impossible Nothing "myAsmSig: cannot happen as sigs is non-null"
    vName    = GM.takeModuleNames (F.symbol v)
-- | Attach the (qualified) termination expressions of the spec to each signed
--   binder; binders without a @termexprs@ entry get 'Nothing'.
makeTExpr :: Bare.Env -> ModName -> [(Ghc.Var, LocBareType)] -> BareRTEnv -> Ms.BareSpec
          -> Bare.Lookup [(Ghc.Var, LocBareType, Maybe [Located F.Expr])]
makeTExpr env name tySigs rtEnv spec = do
  vExprs        <- M.fromList <$> makeVarTExprs env name spec
  let vSigExprs  = Misc.hashMapMapWithKey (\v t -> (t, M.lookup v vExprs)) vSigs
  return [ (v, t, qual t <$> es) | (v, (t, es)) <- M.toList vSigExprs ]
  where
    -- qualify each expression against the binders of the signature it refines
    qual t es = qualifyTermExpr env name rtEnv t <$> es
    vSigs     = M.fromList tySigs
-- | Qualify and alias-expand a termination expression, treating the binders
--   of the accompanying signature as locally bound names.
qualifyTermExpr :: Bare.Env -> ModName -> BareRTEnv -> LocBareType -> Located F.Expr
                -> Located F.Expr
qualifyTermExpr env name rtEnv t le
        = F.atLoc le (Bare.qualifyExpand env name rtEnv l bs e)
  where
    l   = F.loc le
    e   = F.val le
    bs  = ty_binds . toRTypeRep . val $ t
-- | Resolve the binder of each @termexprs@ annotation to its 'Ghc.Var'.
makeVarTExprs :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup [(Ghc.Var, [Located F.Expr])]
makeVarTExprs env name spec =
  forM (Ms.termexprs spec) $ \(x, es) -> do
    vx <- Bare.lookupGhcVar env name "Var" x
    return (vx, es)
----------------------------------------------------------------------------------------
-- [NOTE:Prioritize-Home-Spec] Prioritize spec for THING defined in
-- `Foo.Bar.Baz.Quux.x` over any other specification, IF GHC's
-- fully qualified name for THING is `Foo.Bar.Baz.Quux.x`.
--
-- For example, see tests/names/neg/T1078.hs for example,
-- which assumes a spec for `head` defined in both
--
--   (1) Data/ByteString.spec
--   (2) Data/ByteString/Char8.spec
--
-- We end up resolving the `head` in (1) to the @Var@ `Data.ByteString.Char8.head`
-- even though there is no exact match, just to account for re-exports of "internal"
-- modules and such (see `Resolve.matchMod`). However, we should pick the closer name
-- if its available.
----------------------------------------------------------------------------------------
-- | Distance between a variable's module-qualified name and a candidate
--   module: 0 when they coincide, 1 otherwise.
nameDistance :: F.Symbol -> ModName -> Int
nameDistance vName tName =
  if vName == F.symbol tName then 0 else 1
-- | Return the sole element of a list, if any; for two or more elements,
--   throw the exception built (by @mkE@) from the full list.
takeUnique :: Ex.Exception e => ([a] -> e) -> [a] -> Maybe a
takeUnique mkE xs = case xs of
  []  -> Nothing
  [x] -> Just x
  _   -> Ex.throw (mkE xs)
-- | Gather every \"assume\" signature across all specs, resolve each binder
--   (unresolvable imported ones are silently dropped), and group the surviving
--   candidates by variable.
allAsmSigs :: Bare.Env -> ModName -> Bare.ModSpecs ->
              Bare.Lookup [(Ghc.Var, [(Bool, ModName, LocBareType)])]
allAsmSigs env myName specs = do
  let aSigs = [ (name, must, x, t) | (name, spec) <- M.toList specs
                                   , (must, x, t) <- getAsmSigs myName name spec ]
  vSigs <- forM aSigs $ \(name, must, x, t) -> do
             vMb <- resolveAsmVar env name must x
             return (vMb, (must, name, t))
  return $ Misc.groupList [ (v, z) | (Just v, z) <- vSigs ]
-- | Resolve an assumed binder. When the 'Bool' is 'True' (home module) the
--   lookup must succeed or error; otherwise failure yields 'Nothing', after
--   also trying the auxiliary-variable fallback.
resolveAsmVar :: Bare.Env -> ModName -> Bool -> LocSymbol -> Bare.Lookup (Maybe Ghc.Var)
resolveAsmVar env name True  lx = Just <$> Bare.lookupGhcVar env name "resolveAsmVar-True" lx
resolveAsmVar env name False lx = return $ Bare.maybeResolveSym env name "resolveAsmVar-False" lx <|> GM.maybeAuxVar (F.val lx)
-- | The \"assume\" signatures contributed by one spec. Home-module entries are
--   flagged 'True' (they MUST resolve later); imported entries are qualified
--   with their source module and flagged 'False' (they MAY fail to resolve).
--   NOTE: the @let x' = ...@ binding was lost to comment-extraction in this
--   copy and is restored here.
getAsmSigs :: ModName -> ModName -> Ms.BareSpec -> [(Bool, LocSymbol, LocBareType)]
getAsmSigs myName name spec
  | myName == name = [ (True,  x,  t) | (x, t) <- Ms.asmSigs spec ]    -- MUST resolve, or error
  | otherwise      = [ (False, x', t) | (x, t) <- Ms.asmSigs spec
                                                  ++ Ms.sigs spec
                     , let x' = qSym x ]                               -- MAY-NOT resolve
  where
    qSym = fmap (GM.qualifySymbol ns)
    ns   = F.symbol name
-- TODO-REBARE: grepClassAssumes
-- | Collect the assumed ('RIAssumed') method signatures of class instances,
--   prefixing each method name with @.$c@. Currently unused (leading
--   underscore).
_grepClassAssumes :: [RInstance t] -> [(Located F.Symbol, t)]
_grepClassAssumes = concatMap go
  where
    go xts                 = Mb.mapMaybe goOne (risigs xts)
    goOne (x, RIAssumed t) = Just (fmap (F.symbol . (".$c" ++ ) . F.symbolString) x, t)
    goOne (_, RISig _)     = Nothing
-- | Assemble the signature-elaboration environment from its four components.
makeSigEnv :: F.TCEmb Ghc.TyCon -> Bare.TyConMap -> S.HashSet StableName -> BareRTEnv -> Bare.SigEnv
makeSigEnv embs tyi exports rtEnv = Bare.SigEnv
  { sigEmbs     = embs
  , sigTyRTyMap = tyi
  , sigExports  = exports
  , sigRTEnv    = rtEnv
  }
-- | Elaborate every @newtype@ declaration of every spec via 'makeNewType'.
makeNewTypes :: Bare.Env -> Bare.SigEnv -> [(ModName, Ms.BareSpec)] ->
                Bare.Lookup [(Ghc.TyCon, LocSpecType)]
makeNewTypes env sigEnv specs = do
  fmap concat $
    forM nameDecls $ uncurry (makeNewType env sigEnv)
  where
    nameDecls = [(name, d) | (name, spec) <- specs, d <- Ms.newtyDecls spec]
-- | Elaborate one @newtype@ declaration: resolve its TyCon (unresolvable ones
--   are skipped) and cook the type of its single single-field constructor.
--   Any other constructor shape throws a \"Bad new type declaration\" error.
makeNewType :: Bare.Env -> Bare.SigEnv -> ModName -> DataDecl ->
               Bare.Lookup [(Ghc.TyCon, LocSpecType)]
makeNewType env sigEnv name d = do
  tcMb <- Bare.lookupGhcDnTyCon env name "makeNewType" tcName
  case tcMb of
    Just tc -> return [(tc, lst)]
    _       -> return []
  where
    tcName = tycName d
    lst    = Bare.cookSpecType env sigEnv name Bare.GenTV bt
    bt     = getTy tcName (tycSrcPos d) (Mb.fromMaybe [] (tycDCons d))
    -- only a single constructor with exactly one field is a valid newtype
    getTy _ l [c]
      | [(_, t)] <- dcFields c = Loc l l t
    getTy n l _ = Ex.throw (mkErr n l)
    mkErr n l = ErrOther (GM.sourcePosSrcSpan l) ("Bad new type declaration:" <+> F.pprint n) :: UserError
------------------------------------------------------------------------------------------
makeSpecData :: GhcSrc -> Bare.Env -> Bare.SigEnv -> Bare.MeasEnv -> GhcSpecSig -> Bare.ModSpecs
             -> GhcSpecData
------------------------------------------------------------------------------------------
-- | Assemble the data-related part of the spec: constructor types, measures,
--   invariants, invariant aliases and unsorted expressions.
makeSpecData src env sigEnv measEnv sig specs = SpData
  { gsCtors      = F.notracepp "GS-CTORS"
                     [ (x, if allowTC then t else tt)
                     | (x, t) <- Bare.meDataCons measEnv
                     -- plug holes in the ctor type unless typeclass elaboration is on
                     , let tt = Bare.plugHoles (typeclass $ getConfig env) sigEnv name (Bare.LqTV x) t
                     ]
  , gsMeas       = [ (F.symbol x, uRType <$> t) | (x, t) <- measVars ]
  , gsMeasures   = Bare.qualifyTopDummy env name <$> (ms1 ++ ms2)
  , gsInvariants = Misc.nubHashOn (F.loc . snd) invs
  , gsIaliases   = concatMap (makeIAliases env sigEnv) (M.toList specs)
  , gsUnsorted   = usI ++ concatMap msUnSorted (concatMap measures specs)
  }
  where
    allowTC     = typeclass (getConfig env)
    measVars    = Bare.meSyms      measEnv -- ms'
               ++ Bare.meClassSyms measEnv -- cms'
               ++ Bare.varMeasures env
    measuresSp  = Bare.meMeasureSpec measEnv
    ms1         = M.elems (Ms.measMap measuresSp)
    ms2         = Ms.imeas measuresSp
    mySpec      = M.lookupDefault mempty name specs
    name        = _giTargetMod src
    -- invariants derived from measures, plus those written in the specs
    (minvs,usI) = makeMeasureInvariants env name sig mySpec
    invs        = minvs ++ concatMap (makeInvariants env sigEnv) (M.toList specs)
-- | Invariant aliases of a spec, with both sides cooked into 'SpecType';
--   pairs whose elaboration fails ('Left') are silently dropped.
makeIAliases :: Bare.Env -> Bare.SigEnv -> (ModName, Ms.BareSpec) -> [(LocSpecType, LocSpecType)]
makeIAliases env sigEnv (name, spec)
  = [ z | Right z <- mkIA <$> Ms.ialiases spec ]
  where
    -- mkIA :: (LocBareType, LocBareType) -> Either _ (LocSpecType, LocSpecType)
    mkIA (t1, t2) = (,) <$> mkI' t1 <*> mkI' t2
    mkI'          = Bare.cookSpecTypeE env sigEnv name Bare.GenTV
-- | Invariants written in a spec: plain @invariant@ clauses plus invariants
--   derived from @data size@ ('dsize') declarations. Clauses mentioning types
--   unknown to GHC are skipped.
makeInvariants :: Bare.Env -> Bare.SigEnv -> (ModName, Ms.BareSpec) -> [(Maybe Ghc.Var, Located SpecType)]
makeInvariants env sigEnv (name, spec) =
  [ (Nothing, t)
  | (_, bt) <- Ms.invariants spec
  , Bare.knownGhcType env name bt
  , let t = Bare.cookSpecType env sigEnv name Bare.GenTV bt
  ] ++
  concat [ (Nothing,) . makeSizeInv l <$> ts
         | (bts, l) <- Ms.dsize spec
         , all (Bare.knownGhcType env name) bts
         , let ts = Bare.cookSpecType env sigEnv name Bare.GenTV <$> bts
         ]
-- | Strengthen a type with the non-negativity of its size measure @s@:
--   the refinement @0 <= s v@ is met onto the head application, descending
--   through leading universal quantifiers.
makeSizeInv :: F.LocSymbol -> Located SpecType -> Located SpecType
makeSizeInv s lst = lst{val = go (val lst)}
  where go (RApp c ts rs r) = RApp c ts rs (r `meet` nat)
        go (RAllT a t r)    = RAllT a (go t) r
        go t                = t
        nat = MkUReft (Reft (vv_, PAtom Le (ECon $ I 0) (EApp (EVar $ val s) (eVar vv_))))
                      mempty
-- | Turn each @measure@-annotated function whose symbol matches a signed
--   variable into an invariant (plus any unsorted side-expressions).
makeMeasureInvariants :: Bare.Env -> ModName -> GhcSpecSig -> Ms.BareSpec
                      -> ([(Maybe Ghc.Var, LocSpecType)], [UnSortedExpr])
makeMeasureInvariants env name sig mySpec
  = mapSnd Mb.catMaybes $
      unzip (measureTypeToInv env name <$> [(x, (y, ty)) | x <- xs, (y, ty) <- sigs
                                                         , isSymbolOfVar (val x) y ])
  where
    sigs = gsTySigs sig
    xs   = S.toList (Ms.hmeas  mySpec)
-- | Does the (unqualified) symbol name this variable? Module qualifiers on
--   the variable's name are dropped before comparing.
isSymbolOfVar :: Symbol -> Ghc.Var -> Bool
isSymbolOfVar x v = x == symbol' v
  where
    symbol' :: Ghc.Var -> Symbol
    symbol' = GM.dropModuleNames . symbol . Ghc.getName
-- | Convert a measure's signature into an invariant on its last argument
--   type. Errors out if the measure takes no arguments; returns an unsorted
--   expression instead of a refinement when the argument is not a simple ADT.
measureTypeToInv :: Bare.Env -> ModName -> (LocSymbol, (Ghc.Var, LocSpecType)) -> ((Maybe Ghc.Var, LocSpecType), Maybe UnSortedExpr)
measureTypeToInv env name (x, (v, t))
  = notracepp "measureTypeToInv" ((Just v, t {val = Bare.qualifyTop env name (F.loc x) mtype}), usorted)
  where
    trep = toRTypeRep (val t)
    rts  = ty_args  trep
    args = ty_binds trep
    res  = ty_res   trep
    -- the measure's last (scrutinee) binder and its type
    z    = last args
    tz   = last rts
    usorted = if isSimpleADT tz then Nothing else mapFst (:[]) <$> mkReft (dummyLoc $ F.symbol v) z tz res
    mtype
      | null rts
      = uError $ ErrHMeas (GM.sourcePosSrcSpan $ loc t) (pprint x) "Measure has no arguments!"
      | otherwise
      = mkInvariant x z tz res
    -- an application whose arguments are all type variables
    isSimpleADT (RApp _ ts _ _) = all isRVar ts
    isSimpleADT _               = False
-- | Strengthen (a refinement-erased copy of) @t@ with the refinement derived
--   from the measure's result type, if one exists.
mkInvariant :: LocSymbol -> Symbol -> SpecType -> SpecType -> SpecType
mkInvariant x z t tr = strengthen (top <$> t) (MkUReft reft' mempty)
  where
    reft' = Mb.maybe mempty Reft mreft
    mreft = mkReft x z t tr
-- | Build the (binder, predicate) refinement for a measure invariant from the
--   measure's result refinement: substitute the measure application @x v@ for
--   the refinement value and the last argument binder @z@.
--   NOTE: the @p'@ line below is a historical, commented-out filter whose
--   @--@ marker was lost in this copy; restored as a comment.
mkReft :: LocSymbol -> Symbol -> SpecType -> SpecType -> Maybe (Symbol, Expr)
mkReft x z _t tr
  | Just q <- stripRTypeBase tr
  = let Reft (v, p) = toReft q
        su          = mkSubst [(v, mkEApp x [EVar v]), (z, EVar v)]
        -- p'       = filter (\e -> z `notElem` syms e) $ conjuncts p
    in  Just (v, subst su p)
mkReft _ _ _ _
  = Nothing
-- REBARE: formerly, makeGhcSpec3
-------------------------------------------------------------------------------------------
makeSpecName :: Bare.Env -> Bare.TycEnv -> Bare.MeasEnv -> ModName -> GhcSpecNames
-------------------------------------------------------------------------------------------
-- | Package the name-related parts of the spec: free symbols, refined data
--   and type constructors, embeddings and the ADT/TyCon maps.
--   NOTE: the @gsLits@ field below was a commented-out leftover whose @--@
--   marker (and RHS) were lost in this copy; restored as a comment.
makeSpecName env tycEnv measEnv name = SpNames
  { gsFreeSyms = Bare.reSyms env
  , gsDconsP   = [ F.atLoc dc (dcpCon dc) | dc <- datacons ++ cls ]
  , gsTconsP   = Bare.qualifyTopDummy env name <$> tycons
  -- , gsLits  = mempty -- TODO-REBARE, redundant with gsMeas
  , gsTcEmbeds = Bare.tcEmbs     tycEnv
  , gsADTs     = Bare.tcAdts     tycEnv
  , gsTyconEnv = Bare.tcTyConMap tycEnv
  }
  where
    datacons, cls :: [DataConP]
    datacons = Bare.tcDataCons tycEnv
    cls      = F.notracepp "meClasses" $ Bare.meClasses measEnv
    tycons   = Bare.tcTyCons tycEnv
-- REBARE: formerly, makeGhcCHOP1
-- split into two to break circular dependency. we need dataconmap for core2logic
-------------------------------------------------------------------------------------------
-- | First stage of the TyCon environment: refined ty/data-cons, data decls,
--   selector measures and the data-constructor map. Selector variables and
--   cooked datacons are filled in later by 'makeTycEnv1'.
--   NOTE: the second @datacons@ line below was a commented-out alternative
--   whose @--@ marker was lost in this copy (leaving a duplicate binding);
--   restored as a comment.
makeTycEnv0 :: Config -> ModName -> Bare.Env -> TCEmb Ghc.TyCon -> Ms.BareSpec -> Bare.ModSpecs
            -> (Diagnostics, [Located DataConP], Bare.TycEnv)
-------------------------------------------------------------------------------------------
makeTycEnv0 cfg myName env embs mySpec iSpecs = (diag0 <> diag1, datacons, Bare.TycEnv
  { tcTyCons      = tycons
  , tcDataCons    = mempty -- val <$> datacons
  , tcSelMeasures = dcSelectors
  , tcSelVars     = mempty -- recSelectors
  , tcTyConMap    = tyi
  , tcAdts        = adts
  , tcDataConMap  = dm
  , tcEmbs        = embs
  , tcName        = myName
  })
  where
    (tcDds, dcs)    = conTys
    (diag0, conTys) = withDiagnostics $ Bare.makeConTypes myName env specs
    specs           = (myName, mySpec) : M.toList iSpecs
    tcs             = Misc.snd3 <$> tcDds
    tyi             = Bare.qualifyTopDummy env myName (makeTyConInfo embs fiTcs tycons)
    -- tycons   = F.tracepp "TYCONS" $ Misc.replaceWith tcpCon tcs wiredTyCons
    -- datacons = Bare.makePluggedDataCons embs tyi (Misc.replaceWith (dcpCon . val) (F.tracepp "DATACONS" $ concat dcs) wiredDataCons)
    tycons          = tcs ++ knownWiredTyCons env myName
    datacons        = Bare.makePluggedDataCon (typeclass cfg) embs tyi <$> (concat dcs ++ knownWiredDataCons env myName)
    tds             = [(name, tcpCon tcp, dd) | (name, tcp, Just dd) <- tcDds]
    (diag1, adts)   = Bare.makeDataDecls cfg embs myName tds datacons
    dm              = Bare.dataConMap adts
    dcSelectors     = concatMap (Bare.makeMeasureSelectors cfg dm) (if reflection cfg then charDataCon:datacons else datacons)
    fiTcs           = _gsFiTcs (Bare.reSrc env)
-- | Second stage of the TyCon environment: elaborate class data-cons (for
--   typeclass support) and generate record-selector signatures, filling in
--   the fields left 'mempty' by 'makeTycEnv0'.
makeTycEnv1 ::
     ModName
  -> Bare.Env
  -> (Bare.TycEnv, [Located DataConP])
  -> (Ghc.CoreExpr -> F.Expr)
  -> (Ghc.CoreExpr -> Ghc.TcRn Ghc.CoreExpr)
  -> Ghc.TcRn Bare.TycEnv
makeTycEnv1 myName env (tycEnv, datacons) coreToLg simplifier = do
  -- fst for selector generation, snd for dataconsig generation
  lclassdcs <- forM classdcs $ traverse (Bare.elaborateClassDcp coreToLg simplifier)
  let recSelectors = Bare.makeRecordSelectorSigs env myName (dcs ++ (fmap . fmap) snd lclassdcs)
  pure $
    tycEnv {Bare.tcSelVars = recSelectors, Bare.tcDataCons = F.val <$> ((fmap . fmap) fst lclassdcs ++ dcs )}
  where
    -- split class data-cons (need elaboration) from ordinary ones
    (classdcs, dcs) =
      L.partition
        (Ghc.isClassTyCon . Ghc.dataConTyCon . dcpCon . F.val) datacons
-- | The wired-in data constructors that GHC actually knows about in this
--   environment (unknown ones are filtered out).
knownWiredDataCons :: Bare.Env -> ModName -> [Located DataConP]
knownWiredDataCons env name = filter isKnown wiredDataCons
  where
    isKnown = Bare.knownGhcDataCon env name . GM.namedLocSymbol . dcpCon . val
-- | The wired-in type constructors that GHC actually knows about in this
--   environment (unknown ones are filtered out).
knownWiredTyCons :: Bare.Env -> ModName -> [TyConP]
knownWiredTyCons env name = filter isKnown wiredTyCons
  where
    isKnown = Bare.knownGhcTyCon env name . GM.namedLocSymbol . tcpCon
-- REBARE: formerly, makeGhcCHOP2
-------------------------------------------------------------------------------------------
makeMeasEnv :: Bare.Env -> Bare.TycEnv -> Bare.SigEnv -> Bare.ModSpecs ->
               Bare.Lookup Bare.MeasEnv
-------------------------------------------------------------------------------------------
-- | Build the measure environment: class laws, classes and their (default)
--   methods, all measures (including generated selector measures), and the
--   data-constructor types met with their measure-derived refinements.
makeMeasEnv env tycEnv sigEnv specs = do
  laws       <- Bare.makeCLaws env sigEnv name specs
  (cls, mts) <- Bare.makeClasses env sigEnv name specs
  let dms     = Bare.makeDefaultMethods env mts
  measures0  <- mapM (Bare.makeMeasureSpec env sigEnv name) (M.toList specs)
  let measures = mconcat (Ms.mkMSpec' dcSelectors : measures0)
  let (cs, ms) = Bare.makeMeasureSpec' (typeclass $ getConfig env) measures
  let cms      = Bare.makeClassMeasureSpec measures
  let cms'     = [ (x, Loc l l' $ cSort t) | (Loc l l' x, t) <- cms ]
  -- plain measures, minus those shadowed by a class measure of the same name
  let ms'      = [ (F.val lx, F.atLoc lx t) | (lx, t) <- ms
                 , Mb.isNothing (lookup (val lx) cms') ]
  let cs'      = [ (v, txRefs v t) | (v, t) <- Bare.meetDataConSpec (typeclass (getConfig env)) embs cs (datacons ++ cls)]
  return Bare.MeasEnv
    { meMeasureSpec = measures
    , meClassSyms   = cms'
    , meSyms        = ms'
    , meDataCons    = cs'
    , meClasses     = cls
    , meMethods     = mts ++ dms
    , meCLaws       = laws
    }
  where
    -- re-sort the refinements of a datacon type at the con's own location
    txRefs v t  = Bare.txRefSort tyi embs (t <$ GM.locNamedThing v)
    tyi         = Bare.tcTyConMap    tycEnv
    dcSelectors = Bare.tcSelMeasures tycEnv
    datacons    = Bare.tcDataCons    tycEnv
    embs        = Bare.tcEmbs        tycEnv
    name        = Bare.tcName        tycEnv
-----------------------------------------------------------------------------------------
-- | @makeLiftedSpec@ is used to generate the BareSpec object that should be serialized
-- so that downstream files that import this target can access the lifted definitions,
-- e.g. for measures, reflected functions etc.
-----------------------------------------------------------------------------------------
makeLiftedSpec :: ModName -> GhcSrc -> Bare.Env
               -> GhcSpecRefl -> GhcSpecData -> GhcSpecSig -> GhcSpecQual -> BareRTEnv
               -> Ms.BareSpec -> Ms.BareSpec
-----------------------------------------------------------------------------------------
makeLiftedSpec name src _env refl sData sig qual myRTE lSpec0 = lSpec0
  { Ms.asmSigs    = F.notracepp ("makeLiftedSpec : ASSUMED-SIGS " ++ F.showpp name ) (xbs ++ myDCs)
  , Ms.reflSigs   = F.notracepp "REFL-SIGS" xbs
  , Ms.sigs       = F.notracepp ("makeLiftedSpec : LIFTED-SIGS " ++ F.showpp name ) $ mkSigs (gsTySigs sig)
  -- keep only invariants located in (or alongside) the target file
  , Ms.invariants = [ (varLocSym <$> x, Bare.specToBare <$> t)
                    | (x, t) <- gsInvariants sData
                    , isLocInFile srcF t
                    ]
  , Ms.axeqs      = gsMyAxioms refl
  , Ms.aliases    = F.notracepp "MY-ALIASES" $ M.elems . typeAliases $ myRTE
  , Ms.ealiases   = M.elems . exprAliases $ myRTE
  , Ms.qualifiers = filter (isLocInFile srcF) (gsQualifiers qual)
  }
  where
    -- constructor sigs belonging to this module (by qualified-name prefix)
    myDCs         = [(x,t) | (x,t) <- mkSigs (gsCtors sData)
                           , F.symbol name == fst (GM.splitModuleName $ val x)]
    -- only exported, non-reflected binders are worth serializing
    mkSigs xts    = [ toBare (x, t) | (x, t) <- xts
                    , S.member x sigVars && isExportedVar (view targetSrcIso src) x
                    ]
    toBare (x, t) = (varLocSym x, Bare.specToBare <$> t)
    xbs           = toBare <$> reflTySigs
    sigVars       = S.difference defVars reflVars
    defVars       = S.fromList (_giDefVars src)
    reflTySigs    = [(x, t) | (x,t,_) <- gsHAxioms refl, x `notElem` gsWiredReft refl]
    reflVars      = S.fromList (fst <$> reflTySigs)
    -- myAliases fld = M.elems . fld $ myRTE
    srcF          = _giTarget src
-- | Returns 'True' if the input determines a location within the input file. Due to the fact we might have
--   Haskell sources which have "companion" specs defined alongside them, we also need to account for this
--   case, by stripping out the extensions and checking that the LHS is a source and the RHS a spec file.
-- | Is the located thing in the given file, either exactly or as a companion
--   spec (same path modulo extension, source on the left, spec on the right)?
isLocInFile :: (F.Loc a) => FilePath -> a -> Bool
isLocInFile f lx = sameFile || companionSpec
  where
    specFile      = locFile lx
    sameFile      = f == specFile
    companionSpec =
      dropExtension f == dropExtension specFile
        && isExtFile Hs f
        && isExtFile Files.Spec specFile
-- | The file component of a located thing's starting source position.
locFile :: (F.Loc a) => a -> FilePath
locFile = Misc.fst3 . F.sourcePosElts . F.sp_start . F.srcSpan
-- | A variable's symbol, located at the variable's own source position.
varLocSym :: Ghc.Var -> LocSymbol
varLocSym v = F.symbol <$> GM.locNamedThing v
-- makeSpecRTAliases :: Bare.Env -> BareRTEnv -> [Located SpecRTAlias]
-- makeSpecRTAliases _env _rtEnv = [] -- TODO-REBARE
--------------------------------------------------------------------------------
-- | @myRTEnv@ slices out the part of RTEnv that was generated by aliases defined
--   in the _target_ file, "cooks" the aliases (by conversion to SpecType), and
--   then saves them back as BareType.
--------------------------------------------------------------------------------
myRTEnv :: GhcSrc -> Bare.Env -> Bare.SigEnv -> BareRTEnv -> BareRTEnv
myRTEnv src env sigEnv rtEnv = mkRTE tAs' eAs
  where
    -- normalize only the type aliases; expression aliases pass through as-is
    tAs' = normalizeBareAlias env sigEnv name <$> tAs
    tAs  = myAliases typeAliases
    eAs  = myAliases exprAliases
    -- keep only aliases located in (or alongside) the target file
    myAliases fld = filter (isLocInFile srcF) . M.elems . fld $ rtEnv
    srcF = _giTarget    src
    name = _giTargetMod src
-- | Assemble an 'RTEnv' from type and expression aliases, indexed by name.
mkRTE :: [Located (RTAlias x a)] -> [Located (RTAlias F.Symbol F.Expr)] -> RTEnv x a
mkRTE tAs eAs = RTE
  { typeAliases = byName tAs
  , exprAliases = byName eAs
  }
  where
    byName as = M.fromList [ (rtName (F.val a), a) | a <- as ]
-- | Normalize an alias body by round-tripping it through 'cookSpecType' and
--   back to bare form, and re-symbolizing its type-variable arguments.
normalizeBareAlias :: Bare.Env -> Bare.SigEnv -> ModName -> Located BareRTAlias
                   -> Located BareRTAlias
normalizeBareAlias env sigEnv name lx = fixRTA <$> lx
  where
    fixRTA  :: BareRTAlias -> BareRTAlias
    fixRTA  = mapRTAVars fixArg . fmap fixBody
    fixArg  :: Symbol -> Symbol
    fixArg  = F.symbol . GM.symbolTyVar
    -- cook at the alias's own location, then strip back down to a BareType
    fixBody :: BareType -> BareType
    fixBody = Bare.specToBare
            . F.val
            . Bare.cookSpecType env sigEnv name Bare.RawTV
            . F.atLoc lx
-- | Turn a 'Bare.Lookup' into diagnostics plus a (possibly 'mempty') payload:
--   errors become a non-empty 'Diagnostics' with an empty result.
withDiagnostics :: (Monoid a) => Bare.Lookup a -> (Diagnostics, a)
withDiagnostics r = case r of
  Left es -> (mkDiagnostics [] es, mempty)
  Right v -> (emptyDiagnostics, v)
| null | https://raw.githubusercontent.com/ucsd-progsys/liquidhaskell/afa10475b1bbf3c65f4e1874a367dc65dec57791/src/Language/Haskell/Liquid/Bare.hs | haskell | # LANGUAGE ScopedTypeVariables #
# LANGUAGE PartialTypeSignatures #
# LANGUAGE OverloadedStrings #
| This module contains the functions that convert /from/ descriptions of
symbols, names and types (over freshly parsed /bare/ Strings),
The actual /representations/ of bare and real (refinement) types are all
in 'RefType' -- they are different instances of 'RType'.
$creatingTargetSpecs
* Loading and Saving lifted specs from/to disk
(text, (<+>))
(nubHashOn)
------------------------------------------------------------------------------
| De/Serializing Spec files
------------------------------------------------------------------------------
warnMissingLiftedSpec srcF specF >>
warnMissingLiftedSpec :: FilePath -> FilePath -> IO ()
warnMissingLiftedSpec srcF specF = do
incDir <- Misc.getIncludeDir
unless (Misc.isIncludeFile incDir srcF)
$ Ex.throw (errMissingSpec srcF specF)
print (errorP "DIE" "HERE" :: String)
want to short-circuit in case the validation failure is found in one of the dependencies (to avoid
printing potentially endless failures).
with a list of 'Warning's, which shouldn't abort the compilation (modulo explicit request from the user,
to treat warnings and errors).
if the filename does not match the module name
Ghc.setContext [iimport |(modName, _) <- allSpecs legacyBareSpec,
Ghc.execOptions
Ghc.execOptions
Ghc.execOptions
Ghc.execOptions
Ghc.execOptions
"let {len :: [a] -> Int; len _ = undefined}"
Ghc.execOptions
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
| @makeGhcSpec0@ slurps up all the relevant information needed to generate
constraints for a target module and packages them into a @GhcSpec@
essentially, to get to the `BareRTEnv` as soon as possible, as thats what
lets us use aliases inside data-constructor definitions.
-----------------------------------------------------------------------------------
build up environments
required to check termination of some 'liftedSigs' we export. Due to the fact
that 'lSpec1' doesn't contain the measures that we compute via 'makeHaskellMeasures',
we take them from 'mySpec', which has those.
Export all the assumptions (not just the ones created out of reflection) in
Preserve user-defined 'imeasures'.
Preserve user-defined 'dvariance'.
Preserve rinstances.
typeclass elaboration
things like len breaks the code
asmsigs should be elaborated only if they are from the current module
t' <- traverse (elaborateSpecType (pure ()) coreToLg) t
pure (x, fst <$> t')
no simplification
extract name and specs
------------------------------------------------------------------------------
| [NOTE]: REFLECT-IMPORTS
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| [NOTE]: LIFTING-STAGES
We split the lifting up into stage:
does the alias @expand@ business, that in turn, lets us build the DataConP,
i.e. the refined datatypes and their associate selectors, projectors etc,
that are needed for subsequent stages of the lifting.
------------------------------------------------------------------------------
caveat is to decide which format is more appropriate. We obviously cannot store
needs more thinking.
caveat is to decide which format is more appropriate. This needs more thinking.
| 'reflectedTyCons' returns the list of `[TyCon]` that must be reflected but
which are defined *outside* the current module e.g. in Base or somewhere
that we don't have access to the code.
or its embedding, a bool?
----------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------
TODO - REBARE
mSyms = F.tracepp "MSYMS" $ M.fromList (Bare.meSyms measEnv ++ Bare.meClassSyms measEnv)
| @resolveQualParams@ converts the sorts of parameters from, e.g.
'Int' ===> 'GHC.Types.Int' or
'Ptr' ===> 'GHC.Ptr.Ptr'
It would not be required if _all_ qualifiers are scraped from
function specs, but we're keeping it around for backwards compatibility.
----------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------
reflects
assumes
----------------------------------------------------------------------------------------
| @updateReflSpecSig@ uses the information about reflected functions to update the
"assumed" signatures.
----------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------
relevant binder has been detected and \"promoted\". The problem stems from the fact that any input
--------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------
NOTE: these weights are to priortize
user defined sigs OVER auto-generated
during the strengthening, i.e. to KEEP
as they appear in termination metrics
hmeas = makeHMeas env allSpecs
see [NOTE:Prioritize-Home-Spec]
--------------------------------------------------------------------------------------
[NOTE:Prioritize-Home-Spec] Prioritize spec for THING defined in
For example, see tests/names/neg/T1078.hs for example,
which assumes a spec for `head` defined in both
even though there is no exact match, just to account for re-exports of "internal"
modules and such (see `Resolve.matchMod`). However, we should pick the closer name
if its available.
--------------------------------------------------------------------------------------
MUST resolve, or error
----------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------
ms'
cms'
-----------------------------------------------------------------------------------------
-----------------------------------------------------------------------------------------
TODO - REBARE , redundant with gsMeas
REBARE: formerly, makeGhcCHOP1
-----------------------------------------------------------------------------------------
-----------------------------------------------------------------------------------------
val <$> datacons
recSelectors
tycons = F.tracepp "TYCONS" $ Misc.replaceWith tcpCon tcs wiredTyCons
REBARE: formerly, makeGhcCHOP2
-----------------------------------------------------------------------------------------
-----------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------
so that downstream files that import this target can access the lifted definitions,
e.g. for measures, reflected functions etc.
---------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------
myAliases fld = M.elems . fld $ myRTE
| Returns 'True' if the input determines a location within the input file. Due to the fact we might have
makeSpecRTAliases :: Bare.Env -> BareRTEnv -> [Located SpecRTAlias]
makeSpecRTAliases _env _rtEnv = [] -- TODO-REBARE
------------------------------------------------------------------------------
| @myRTEnv@ slices out the part of RTEnv that was generated by aliases defined
------------------------------------------------------------------------------ | # LANGUAGE FlexibleContexts #
# LANGUAGE NoMonomorphismRestriction #
# LANGUAGE TupleSections #
/to/ representations connected to GHC ' Var 's , ' Name 's , and ' Type 's .
module Language.Haskell.Liquid.Bare (
* Creating a TargetSpec
makeTargetSpec
, loadLiftedSpec
, saveLiftedSpec
) where
import Prelude hiding (error)
import Optics
import Control.Monad (forM)
import Control.Applicative ((<|>))
import qualified Control.Exception as Ex
import qualified Data.Binary as B
import qualified Data.Maybe as Mb
import qualified Data.List as L
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as S
import System.FilePath (dropExtension)
import System.Directory (doesFileExist)
import System.Console.CmdArgs.Verbosity (whenLoud)
import Language.Fixpoint.Utils.Files as Files
import Language.Fixpoint.Misc as Misc
import Language.Fixpoint.Types hiding (dcFields, DataDecl, Error, panic)
import qualified Language.Fixpoint.Types as F
import qualified Liquid.GHC.Misc as GM
import qualified Liquid.GHC.API as Ghc
import Liquid.GHC.Types (StableName)
import Language.Haskell.Liquid.Types
import Language.Haskell.Liquid.WiredIn
import qualified Language.Haskell.Liquid.Measure as Ms
import qualified Language.Haskell.Liquid.Bare.Types as Bare
import qualified Language.Haskell.Liquid.Bare.Resolve as Bare
import qualified Language.Haskell.Liquid.Bare.DataType as Bare
import Language.Haskell.Liquid.Bare.Elaborate
import qualified Language.Haskell.Liquid.Bare.Expand as Bare
import qualified Language.Haskell.Liquid.Bare.Measure as Bare
import qualified Language.Haskell.Liquid.Bare.Plugged as Bare
import qualified Language.Haskell.Liquid.Bare.Axiom as Bare
import qualified Language.Haskell.Liquid.Bare.ToBare as Bare
import qualified Language.Haskell.Liquid.Bare.Class as Bare
import qualified Language.Haskell.Liquid.Bare.Check as Bare
import qualified Language.Haskell.Liquid.Bare.Laws as Bare
import qualified Language.Haskell.Liquid.Bare.Typeclass as Bare
import qualified Language.Haskell.Liquid.Transforms.CoreToLogic as CoreToLogic
import Control.Arrow (second)
import Data.Hashable (Hashable)
import qualified Language.Haskell.Liquid.Bare.Slice as Dg
loadLiftedSpec :: Config -> FilePath -> IO (Maybe Ms.BareSpec)
loadLiftedSpec cfg srcF
| noLiftedImport cfg = putStrLn "No LIFTED Import" >> return Nothing
| otherwise = do
let specF = extFileName BinSpec srcF
ex <- doesFileExist specF
whenLoud $ putStrLn $ "Loading Binary Lifted Spec: " ++ specF ++ " " ++ "for source-file: " ++ show srcF ++ " " ++ show ex
lSp <- if ex
then Just <$> B.decodeFile specF
Ex.evaluate lSp
saveLiftedSpec :: FilePath -> Ms.BareSpec -> IO ()
saveLiftedSpec srcF lspec = do
ensurePath specF
B.encodeFile specF lspec
where
specF = extFileName BinSpec srcF
$ creatingTargetSpecs
/Liquid Haskell/ operates on ' TargetSpec 's , so this module provides a single function called
' makeTargetSpec ' to produce a ' TargetSpec ' , alongside the ' LiftedSpec ' . The former will be used by
functions like ' liquid ' or ' liquidOne ' to verify our program is correct , the latter will be serialised
to disk so that we can retrieve it later without having to re - check the relevant file .
/Liquid Haskell/ operates on 'TargetSpec's, so this module provides a single function called
'makeTargetSpec' to produce a 'TargetSpec', alongside the 'LiftedSpec'. The former will be used by
functions like 'liquid' or 'liquidOne' to verify our program is correct, the latter will be serialised
to disk so that we can retrieve it later without having to re-check the relevant Haskell file.
-}
| ' makeTargetSpec ' constructs the ' TargetSpec ' and then validates it . Upon success , the ' TargetSpec '
and the ' LiftedSpec ' are returned . We perform error checking in \"two phases\ " : during the first phase ,
we check for errors and warnings in the input ' BareSpec ' and the dependencies . During this phase we ideally
The second phase involves creating the ' TargetSpec ' , and returning either the full list of diagnostics
( errors and warnings ) in case things went wrong , or the final ' TargetSpec ' and ' LiftedSpec ' together
makeTargetSpec :: Config
-> LogicMap
-> TargetSrc
-> BareSpec
-> TargetDependencies
-> Ghc.TcRn (Either Diagnostics ([Warning], TargetSpec, LiftedSpec))
makeTargetSpec cfg lmap targetSrc bareSpec dependencies = do
let targDiagnostics = Bare.checkTargetSrc cfg targetSrc
let depsDiagnostics = mapM (uncurry Bare.checkBareSpec) legacyDependencies
let bareSpecDiagnostics = Bare.checkBareSpec (giTargetMod targetSrc) legacyBareSpec
case targDiagnostics >> depsDiagnostics >> bareSpecDiagnostics of
Left d | noErrors d -> secondPhase (allWarnings d)
Left d -> return $ Left d
Right () -> secondPhase mempty
where
secondPhase :: [Warning] -> Ghc.TcRn (Either Diagnostics ([Warning], TargetSpec, LiftedSpec))
secondPhase phaseOneWarns = do
we should be able to setContext regardless of whether
we use the ghc api . However , ghc will complain
when ( ) $ do
let = if isTarget modName
then Ghc . IIModule ( getModName )
else Ghc . IIDecl ( Ghc.simpleImportDecl ( getModName ) ) ]
void $ Ghc.execStmt
" let { infixr 1 = = > ; True = = > False = False ; _ = = > _ = True } "
void $ Ghc.execStmt
" let { infixr 1 < = > ; True < = > False = False ; _ < = > _ = True } "
void $ Ghc.execStmt
" let { infix 4 = = ; (= =) : : a - > a - > Bool ; _ = = _ = undefined } "
void $ Ghc.execStmt
" let { infix 4 /= ; ( /= ) : : a - > a - > Bool ; _ /= _ = undefined } "
void $ Ghc.execStmt
" let { infixl 7 / ; ( / ) : : a = > a - > a - > a ; _ / _ = undefined } "
void $ Ghc.execStmt
diagOrSpec <- makeGhcSpec cfg (review targetSrcIso targetSrc) lmap (allSpecs legacyBareSpec)
return $ do
(warns, ghcSpec) <- diagOrSpec
let (targetSpec, liftedSpec) = view targetSpecGetter ghcSpec
pure (phaseOneWarns <> warns, targetSpec, liftedSpec)
toLegacyDep :: (Ghc.StableModule, LiftedSpec) -> (ModName, Ms.BareSpec)
toLegacyDep (sm, ls) = (ModName SrcImport (Ghc.moduleName . Ghc.unStableModule $ sm), unsafeFromLiftedSpec ls)
toLegacyTarget :: Ms.BareSpec -> (ModName, Ms.BareSpec)
toLegacyTarget validatedSpec = (giTargetMod targetSrc, validatedSpec)
legacyDependencies :: [(ModName, Ms.BareSpec)]
legacyDependencies = map toLegacyDep . M.toList . getDependencies $ dependencies
allSpecs :: Ms.BareSpec -> [(ModName, Ms.BareSpec)]
allSpecs validSpec = toLegacyTarget validSpec : legacyDependencies
legacyBareSpec :: Spec LocBareType F.LocSymbol
legacyBareSpec = review bareSpecIso bareSpec
-- | Invokes @makeGhcSpec0@ to construct the @GhcSpec@ and then
--   validates it using @checkGhcSpec@.
-- | Build the 'GhcSpec' via 'makeGhcSpec0' and validate it with
--   'Bare.checkTargetSpec'. Construction errors ('dg0') take priority over
--   validation diagnostics; warnings from validation are forwarded on success.
makeGhcSpec :: Config
            -> GhcSrc
            -> LogicMap
            -> [(ModName, Ms.BareSpec)]
            -> Ghc.TcRn (Either Diagnostics ([Warning], GhcSpec))
makeGhcSpec cfg src lmap validatedSpecs = do
  (dg0, sp) <- makeGhcSpec0 cfg src lmap validatedSpecs
  let diagnostics = Bare.checkTargetSpec (map snd validatedSpecs)
                                         (view targetSrcIso src)
                                         (ghcSpecEnv sp)
                                         (_giCbs src)
                                         (fst . view targetSpecGetter $ sp)
  -- Errors from spec construction short-circuit; otherwise warnings-only
  -- diagnostics are tolerated and returned alongside the spec.
  pure $ if not (noErrors dg0) then Left dg0 else
    case diagnostics of
      Left dg1
        | noErrors dg1 -> pure (allWarnings dg1, sp)
        | otherwise    -> Left dg1
      Right ()         -> pure (mempty, sp)
-- | The sort environment used when sanity-checking the final spec: one
--   binding per measure, constructor, reflected binder, free conlike symbol,
--   wired-in symbol and imported signature.
ghcSpecEnv :: GhcSpec -> SEnv SortedReft
ghcSpecEnv sp = F.notracepp "RENV" $ fromListSEnv binds
  where
    emb   = gsTcEmbeds (_gsName sp)
    binds = F.notracepp "binds" $ concat
      [ [(x,        rSort t) | (x, Loc _ _ t) <- gsMeas     (_gsData sp)]
      , [(symbol v, rSort t) | (v, Loc _ _ t) <- gsCtors    (_gsData sp)]
      , [(symbol v, vSort v) | v              <- gsReflects (_gsRefl sp)]
        -- only constructor-like free symbols contribute a sort
      , [(x,        vSort v) | (x, v)         <- gsFreeSyms (_gsName sp), Ghc.isConLikeId v ]
      , [(x, RR s mempty)    | (x, s)         <- wiredSortedSyms ]
      , [(x, RR s mempty)    | (x, s)         <- _gsImps sp ]
      ]
    -- sort of a Var, going through its (class-adjusted) SpecType
    vSort = rSort . classRFInfoType (typeclass $ getConfig sp) .
            (ofType :: Ghc.Type -> SpecType) . Ghc.varType
    rSort = rTypeSortedReft emb
-- See [NOTE] LIFTING-STAGES to see why we split into lSpec0, lSpec1, etc.
makeGhcSpec0 :: Config -> GhcSrc -> LogicMap -> [(ModName, Ms.BareSpec)] ->
Ghc.TcRn (Diagnostics, GhcSpec)
makeGhcSpec0 cfg src lmap mspecsNoCls = do
tycEnv <- makeTycEnv1 name env (tycEnv0, datacons) coreToLg simplifier
let tyi = Bare.tcTyConMap tycEnv
let sigEnv = makeSigEnv embs tyi (_gsExports src) rtEnv
let lSpec1 = lSpec0 <> makeLiftedSpec1 cfg src tycEnv lmap mySpec1
let mySpec = mySpec2 <> lSpec1
let specs = M.insert name mySpec iSpecs2
let myRTE = myRTEnv src env sigEnv rtEnv
let (dg5, measEnv) = withDiagnostics $ makeMeasEnv env tycEnv sigEnv specs
let (dg4, sig) = withDiagnostics $ makeSpecSig cfg name specs env sigEnv tycEnv measEnv (_giCbs src)
elaboratedSig <-
if allowTC then Bare.makeClassAuxTypes (elaborateSpecType coreToLg simplifier) datacons instMethods
>>= elaborateSig sig
else pure sig
let qual = makeSpecQual cfg env tycEnv measEnv rtEnv specs
let sData = makeSpecData src env sigEnv measEnv elaboratedSig specs
let (dg1, spcVars) = withDiagnostics $ makeSpecVars cfg src mySpec env measEnv
let (dg2, spcTerm) = withDiagnostics $ makeSpecTerm cfg mySpec env name
let (dg3, refl) = withDiagnostics $ makeSpecRefl cfg src measEnv specs env name elaboratedSig tycEnv
let laws = makeSpecLaws env sigEnv (gsTySigs elaboratedSig ++ gsAsmSigs elaboratedSig) measEnv specs
let finalLiftedSpec = makeLiftedSpec name src env refl sData elaboratedSig qual myRTE lSpec1
let diags = mconcat [dg0, dg1, dg2, dg3, dg4, dg5]
pure (diags, SP
{ _gsConfig = cfg
, _gsImps = makeImports mspecs
, _gsSig = addReflSigs env name rtEnv refl elaboratedSig
, _gsRefl = refl
, _gsLaws = laws
, _gsData = sData
, _gsQual = qual
, _gsName = makeSpecName env tycEnv measEnv name
, _gsVars = spcVars
, _gsTerm = spcTerm
, _gsLSpec = finalLiftedSpec
{ impSigs = makeImports mspecs
, expSigs = [ (F.symbol v, F.sr_sort $ Bare.varSortedReft embs v) | v <- gsReflects refl ]
, dataDecls = Bare.dataDeclSize mySpec $ dataDecls mySpec
, measures = Ms.measures mySpec
We want to export measures in a ' LiftedSpec ' , especially if they are
, asmSigs = Ms.asmSigs finalLiftedSpec ++ Ms.asmSigs mySpec
a ' LiftedSpec ' .
, imeasures = Ms.imeasures finalLiftedSpec ++ Ms.imeasures mySpec
, dvariance = Ms.dvariance finalLiftedSpec ++ Ms.dvariance mySpec
, rinstance = Ms.rinstance finalLiftedSpec ++ Ms.rinstance mySpec
}
})
where
coreToLg ce =
case CoreToLogic.runToLogic
embs
lmap
dm
(\x -> todo Nothing ("coreToLogic not working " ++ x))
(CoreToLogic.coreToLogic allowTC ce) of
Left msg -> panic Nothing (F.showpp msg)
Right e -> e
elaborateSig si auxsig = do
tySigs <-
forM (gsTySigs si) $ \(x, t) ->
if GM.isFromGHCReal x then
pure (x, t)
else do t' <- traverse (elaborateSpecType coreToLg simplifier) t
pure (x, t')
asmSigs < - forM ( gsAsmSigs si ) $ \(x , t ) - > do
pure
si
{ gsTySigs = F.notracepp ("asmSigs" ++ F.showpp (gsAsmSigs si)) tySigs ++ auxsig }
simplifier :: Ghc.CoreExpr -> Ghc.TcRn Ghc.CoreExpr
allowTC = typeclass cfg
mySpec2 = Bare.qualifyExpand env name rtEnv l [] mySpec1 where l = F.dummyPos "expand-mySpec2"
iSpecs2 = Bare.qualifyExpand env name rtEnv l [] iSpecs0 where l = F.dummyPos "expand-iSpecs2"
rtEnv = Bare.makeRTEnv env name mySpec1 iSpecs0 lmap
mspecs = if allowTC then M.toList $ M.insert name mySpec0 iSpecs0 else mspecsNoCls
(mySpec0, instMethods) = if allowTC
then Bare.compileClasses src env (name, mySpec0NoCls) (M.toList iSpecs0)
else (mySpec0NoCls, [])
mySpec1 = mySpec0 <> lSpec0
lSpec0 = makeLiftedSpec0 cfg src embs lmap mySpec0
embs = makeEmbeds src env ((name, mySpec0) : M.toList iSpecs0)
dm = Bare.tcDataConMap tycEnv0
(dg0, datacons, tycEnv0) = makeTycEnv0 cfg name env embs mySpec2 iSpecs2
env = Bare.makeEnv cfg src lmap mspecsNoCls
(mySpec0NoCls, iSpecs0) = splitSpecs name src mspecsNoCls
check
name = F.notracepp ("ALL-SPECS" ++ zzz) $ _giTargetMod src
zzz = F.showpp (fst <$> mspecs)
-- | Separate the target module's spec (mconcat of all its fragments) from
--   the imported specs, slicing the imported ones down to what the target
--   actually depends on and grouping them per module.
splitSpecs :: ModName -> GhcSrc -> [(ModName, Ms.BareSpec)] -> (Ms.BareSpec, Bare.ModSpecs)
splitSpecs name src specs = (targetSpec, importedSpecs)
  where
    (targets, others) = L.partition (\(m, _) -> m == name) specs
    targetSpec        = mconcat (map snd targets)
    importedSpecs     = mconcat <$> Misc.group (Dg.sliceSpecs src targetSpec others)
-- | Exported signatures of all source-imported specs.
makeImports :: [(ModName, Ms.BareSpec)] -> [(F.Symbol, F.Sort)]
makeImports specs = concat [ expSigs sp | (mn, sp) <- specs, isSrcImport mn ]
-- | Union of all per-spec tycon embeddings, extended with the class and
--   family-instance embeddings from the source.
makeEmbeds :: GhcSrc -> Bare.Env -> [(ModName, Ms.BareSpec)] -> F.TCEmb Ghc.TyCon
makeEmbeds src env specs =
  Bare.addClassEmbeds (_gsCls src) (_gsFiTcs src)
    (mconcat [ makeTyConEmbeds env sp | sp <- specs ])
-- | Resolve the symbols of one spec's @embed@ annotations to GHC tycons;
--   unresolvable symbols are silently dropped.
makeTyConEmbeds :: Bare.Env -> (ModName, Ms.BareSpec) -> F.TCEmb Ghc.TyCon
makeTyConEmbeds env (name, spec) =
  F.tceFromList
    [ (tc, srt)
    | (c, srt) <- F.tceToList (Ms.embeds spec)
    , tc       <- Mb.maybeToList (Bare.maybeResolveSym env name "embed-tycon" c)
    ]
-- 1. MAKE the full LiftedSpec, which will eventually contain:
--      makeHaskell{Inlines, Measures, Axioms, Bounds}
-- 2. SAVE the LiftedSpec, which will be reloaded.
-- This step creates the aliases and inlines etc. It must be done BEFORE
-- we compute the `SpecType` for (all, including the reflected) binders,
-- as we need the inlines and aliases to properly `expand` the SpecTypes.
-- | Stage-1 lifting: lift the Haskell definitions marked as measures into
--   an (otherwise empty) spec fragment.
makeLiftedSpec1 :: Config -> GhcSrc -> Bare.TycEnv -> LogicMap -> Ms.BareSpec
                -> Ms.BareSpec
makeLiftedSpec1 config src tycEnv lmap mySpec =
  mempty { Ms.measures = liftedMeasures }
  where
    liftedMeasures = Bare.makeHaskellMeasures (typeclass config) src tycEnv lmap mySpec
-- Lifting is split into stages:
--   0. where we only lift inlines,
--   1. where we lift reflects, measures, and normalized tySigs.
-- This is because we need the inlines to build the @BareRTEnv@ which then
-- lets us expand the remaining signatures.
-- | Stage-0 lifting: inlines (as expression aliases), reflects, data
--   declarations, embeds and class measures.
makeLiftedSpec0 :: Config -> GhcSrc -> F.TCEmb Ghc.TyCon -> LogicMap -> Ms.BareSpec
                -> Ms.BareSpec
makeLiftedSpec0 cfg src embs lmap mySpec = mempty
  { Ms.ealiases  = lmapEAlias . snd <$> Bare.makeHaskellInlines (typeclass cfg) src embs lmap mySpec
  , Ms.reflects  = Ms.reflects mySpec
  , Ms.dataDecls = Bare.makeHaskellDataDecls cfg name mySpec tcs
  , Ms.embeds    = Ms.embeds mySpec
  -- We do want 'embeds' to survive and to be present into the final 'LiftedSpec'. The
  -- [...] them as a 'TCEmb TyCon' as serialising a 'TyCon' would be fairly expensive. This
  -- (comment partially lost in extraction)
  , Ms.cmeasures = Ms.cmeasures mySpec
  -- We do want 'cmeasures' to survive and to be present into the final 'LiftedSpec'. The
  -- (comment partially lost in extraction)
  }
  where
    tcs    = uniqNub (_gsTcs src ++ refTcs)
    refTcs = reflectedTyCons cfg embs cbs mySpec
    cbs    = _giCbs src
    name   = _giTargetMod src
-- | Deduplicate by GHC 'Unique' key. When two elements share a key the later
--   one wins ('M.fromList' is right-biased); result order is unspecified
--   (hash-map order), not input order.
uniqNub :: (Ghc.Uniquable a) => [a] -> [a]
uniqNub = M.elems . M.fromList . map (\x -> (uniqueKey x, x))
  where
    uniqueKey = Ghc.getKey . Ghc.getUnique
-- | TyCons mentioned in the types of reflected or measured binders, minus
--   those the user embedded; empty unless exact-data-constructor checking
--   is on.
reflectedTyCons :: Config -> TCEmb Ghc.TyCon -> [Ghc.CoreBind] -> Ms.BareSpec -> [Ghc.TyCon]
reflectedTyCons cfg embs cbs spec
  | exactDCFlag cfg =
      [ tc | v  <- reflectedVars spec cbs ++ measureVars spec cbs
           , tc <- varTyCons v
           , not (isEmbedded embs tc)
      ]
  | otherwise       = []
-- | We cannot reflect embedded tycons (e.g. Bool) as that gives you a sort
--   conflict: e.g. what is the type of is-True? Does it take a GHC.Types.Bool?
-- | Is this tycon covered by a user @embed@ annotation?
isEmbedded :: TCEmb Ghc.TyCon -> Ghc.TyCon -> Bool
isEmbedded embs tc = tc `F.tceMember` embs
-- | All tycons appearing in a binder's (refined) type.
varTyCons :: Ghc.Var -> [Ghc.TyCon]
varTyCons v = specTypeCons (ofType (Ghc.varType v))
-- | Collect every applied tycon in a 'SpecType' (accumulator fold).
specTypeCons :: SpecType -> [Ghc.TyCon]
specTypeCons = foldRType collect []
  where
    collect acc (RApp c _ _ _) = rtc_tc c : acc
    collect acc _              = acc
-- | Binders named by @reflect@ annotations that have a definition in the
--   core bindings; symbols without a definition are dropped.
reflectedVars :: Ms.BareSpec -> [Ghc.CoreBind] -> [Ghc.Var]
reflectedVars spec cbs =
  [ v | sym <- val <$> S.toList (Ms.reflects spec)
      , Just (v, _) <- [GM.findVarDef sym cbs]
  ]
-- | Binders named by @measure@ ('hmeas') annotations that have a definition
--   in the core bindings; symbols without a definition are dropped.
measureVars :: Ms.BareSpec -> [Ghc.CoreBind] -> [Ghc.Var]
measureVars spec cbs =
  [ v | sym <- val <$> S.toList (Ms.hmeas spec)
      , Just (v, _) <- [GM.findVarDef sym cbs]
  ]
-- | Resolve the binder names listed by the configuration's @checks@ and the
--   spec's @ignore@ / @lvars@ annotations into GHC 'Var's, plus the class
--   methods from the measure environment.
makeSpecVars :: Config -> GhcSrc -> Ms.BareSpec -> Bare.Env -> Bare.MeasEnv
             -> Bare.Lookup GhcSpecVars
makeSpecVars cfg src mySpec env measEnv = do
  tgtVars <- mapM (resolveStringVar env name) (checks cfg)
  igVars  <- sMapM (Bare.lookupGhcVar env name "gs-ignores") (Ms.ignores mySpec)
  lVars   <- sMapM (Bare.lookupGhcVar env name "gs-lvars"  ) (Ms.lvars   mySpec)
  return (SpVar tgtVars igVars lVars cMethods)
  where
    name     = _giTargetMod src
    cMethods = snd3 <$> Bare.meMethods measEnv
-- | Monadic map over the elements of a hash set, rebuilding a set from
--   the results.
sMapM :: (Monad m, Eq b, Hashable b) => (a -> m b) -> S.HashSet a -> m (S.HashSet b)
sMapM f = fmap S.fromList . mapM f . S.toList
-- | 'sMapM' with its arguments flipped, for readability at call sites.
sForM :: (Monad m, Eq b, Hashable b) => S.HashSet a -> (a -> m b) -> m (S.HashSet b)
sForM = flip sMapM
-- | Qualify any symbolic value with the given module's name.
qualifySymbolic :: (F.Symbolic a) => ModName -> a -> F.Symbol
qualifySymbolic mn = GM.qualifySymbol (F.symbol mn) . F.symbol
-- | Resolve a raw string (module-qualified first) to a GHC 'Var'.
resolveStringVar :: Bare.Env -> ModName -> String -> Bare.Lookup Ghc.Var
resolveStringVar env name s =
  Bare.lookupGhcVar env name "resolve-string-var" (dummyLoc (qualifySymbolic name s))
-- | Collect the qualifiers of all specs, keeping only those whose every
--   symbol is in scope (wired-in, measure or class-measure symbols).
makeSpecQual :: Config -> Bare.Env -> Bare.TycEnv -> Bare.MeasEnv -> BareRTEnv -> Bare.ModSpecs
             -> GhcSpecQual
makeSpecQual _cfg env tycEnv measEnv _rtEnv specs = SpQual
  { gsQualifiers = filter okQual quals
  }
  where
    quals    = concatMap (makeQualifiers env tycEnv) (M.toList specs)
    -- keep a qualifier only if all its free symbols are known
    okQual q = F.notracepp ("okQual: " ++ F.showpp q)
             $ all (`S.member` mSyms) (F.syms q)
    mSyms    = F.notracepp "MSYMS" . S.fromList
             $  (fst <$> wiredSortedSyms)
             ++ (fst <$> Bare.meSyms measEnv)
             ++ (fst <$> Bare.meClassSyms measEnv)
-- | One spec's qualifiers with their parameter sorts resolved and the
--   qualifier itself qualified to the module; unresolvable ones are dropped.
makeQualifiers :: Bare.Env -> Bare.TycEnv -> (ModName, Ms.Spec ty bndr) -> [F.Qualifier]
makeQualifiers env tycEnv (modn, spec) =
  [ Bare.qualifyTopDummy env modn q'
  | q  <- Ms.qualifiers spec
  , q' <- Mb.maybeToList (resolveQParams env tycEnv modn q)
  ]
-- | Resolve every tycon occurring in a qualifier's parameter sorts;
--   'Nothing' if any tycon cannot be resolved.
resolveQParams :: Bare.Env -> Bare.TycEnv -> ModName -> F.Qualifier -> Maybe F.Qualifier
resolveQParams env tycEnv name q = do
  qps <- mapM resolveQP (F.qParams q)
  return q { F.qParams = qps }
  where
    resolveQP qp              = (\s -> qp { F.qpSort = s }) <$> resolveSort (F.qpSort qp)
    -- structural recursion over a sort, rewriting each FTC leaf
    resolveSort (FAbs i s)    = FAbs i <$> resolveSort s
    resolveSort (FFunc s1 s2) = FFunc <$> resolveSort s1 <*> resolveSort s2
    resolveSort (FApp s1 s2)  = FApp  <$> resolveSort s1 <*> resolveSort s2
    resolveSort (FTC c)       = qualifyFTycon env tycEnv name c
    resolveSort s             = Just s
-- | Resolve a fixpoint tycon to its (possibly embedded) sort; primitive
--   tycons pass through unchanged, unknown tycons yield 'Nothing'.
qualifyFTycon :: Bare.Env -> Bare.TycEnv -> ModName -> F.FTycon -> Maybe F.Sort
qualifyFTycon env tycEnv name c
  | F.val tcSym `elem` F.prims = Just (FTC c)
  | otherwise                  = tyConSort embs . F.atLoc tcSym <$> resolvedTc
  where
    resolvedTc = Bare.maybeResolveSym env name "qualify-FTycon" tcSym
    tcSym      = F.fTyconSymbol c
    embs       = Bare.tcEmbs tycEnv
-- | Prefer the user-embedded sort for a tycon; fall back to its raw sort.
tyConSort :: F.TCEmb Ghc.TyCon -> F.Located Ghc.TyCon -> F.Sort
tyConSort embs lc =
  case F.tceLookup (F.val lc) embs of
    Just (srt, _) -> srt
    Nothing       -> tyConSortRaw lc
-- | The un-embedded sort of a tycon: its name as a fixpoint tycon.
tyConSortRaw :: F.Located Ghc.TyCon -> F.Sort
tyConSortRaw lc = FTC (F.symbolFTycon (F.symbol <$> lc))
-- | Termination-related part of the spec: lazy binders, size functions,
--   autosize tycons, decreasing-argument positions and expected-failure
--   binders. Size functions are skipped under structural termination.
makeSpecTerm :: Config -> Ms.BareSpec -> Bare.Env -> ModName ->
                Bare.Lookup GhcSpecTerm
makeSpecTerm cfg mySpec env name = do
  sizes  <- if structuralTerm cfg then pure mempty else makeSize env name mySpec
  lazies <- makeLazy     env name mySpec
  autos  <- makeAutoSize env name mySpec
  decr   <- makeDecrs    env name mySpec
  gfail  <- makeFail     env name mySpec
  return $ SpTerm
    { gsLazy      = S.insert dictionaryVar (lazies `mappend` sizes)
    , gsFail      = gfail
    , gsStTerm    = sizes
    , gsAutosize  = autos
    , gsDecr      = decr
    , gsNonStTerm = mempty
    }
formerly ,
-- | Resolve each @decrease@ annotation to its 'Var' with its list of
--   decreasing argument positions.
makeDecrs :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup [(Ghc.Var, [Int])]
makeDecrs env name mySpec =
  forM (Ms.decr mySpec) $ \(lx, positions) ->
    (\v -> (v, positions)) <$> Bare.lookupGhcVar env name "decreasing" lx
-- | Resolve each relational signature: look up both binders and cook both
--   bare types in the scope of their respective variables.
makeRelation :: Bare.Env -> ModName -> Bare.SigEnv ->
  [(LocSymbol, LocSymbol, LocBareType, LocBareType, RelExpr, RelExpr)] -> Bare.Lookup [(Ghc.Var, Ghc.Var, LocSpecType, LocSpecType, RelExpr, RelExpr)]
makeRelation env name sigEnv = mapM go
  where
    go (x, y, tx, ty, a, e) = do
      vx <- Bare.lookupGhcVar env name "Var" x
      vy <- Bare.lookupGhcVar env name "Var" y
      return
        ( vx
        , vy
        , Bare.cookSpecType env sigEnv name (Bare.HsTV vx) tx
        , Bare.cookSpecType env sigEnv name (Bare.HsTV vy) ty
        , a
        , e
        )
-- | Resolve the binders marked @lazy@ (exempt from termination checking).
makeLazy :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup (S.HashSet Ghc.Var)
makeLazy env name spec =
  sForM (Ms.lazy spec) (Bare.lookupGhcVar env name "Var")
-- | Resolve the binders marked @fail@, keeping each symbol's location.
makeFail :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup (S.HashSet (Located Ghc.Var))
makeFail env name spec =
  sMapM (\lx -> (\v -> lx { val = v }) <$> Bare.lookupGhcVar env name "Var" lx)
        (Ms.fails spec)
-- | Resolve the binders marked @rewrite@, keeping each symbol's location.
makeRewrite :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup (S.HashSet (Located Ghc.Var))
makeRewrite env name spec =
  sMapM (\lx -> (\v -> lx { val = v }) <$> Bare.lookupGhcVar env name "Var" lx)
        (Ms.rewrites spec)
-- | Map each binder to the rewrite rules that apply to it.
makeRewriteWith :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup (M.HashMap Ghc.Var [Ghc.Var])
makeRewriteWith env name spec = fmap M.fromList (makeRewriteWith' env name spec)
-- | Resolve the @rewriteWith@ associations: a binder and the binders whose
--   equations may be used to rewrite it.
makeRewriteWith' :: Bare.Env -> ModName -> Spec ty bndr -> Bare.Lookup [(Ghc.Var, [Ghc.Var])]
makeRewriteWith' env name spec =
  forM (M.toList (Ms.rewriteWith spec)) $ \(x, xs) ->
    (,) <$> Bare.lookupGhcVar env name "Var1" x
        <*> mapM (Bare.lookupGhcVar env name "Var2") xs
-- | Resolve the tycons marked @autosize@.
makeAutoSize :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup (S.HashSet Ghc.TyCon)
makeAutoSize env name spec =
  S.fromList <$> mapM (Bare.lookupGhcTyCon env name "TyCon")
                      (S.toList (Ms.autosize spec))
-- | Resolve the size functions named by the data declarations of a spec.
makeSize :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup (S.HashSet Ghc.Var)
makeSize env name spec =
  S.fromList <$> mapM (Bare.lookupGhcVar env name "Var") sizeFunSyms
  where
    sizeFunSyms = Mb.mapMaybe getSizeFuns (Ms.dataDecls spec)
-- | The symbolic size function of a data declaration, if it has one.
getSizeFuns :: DataDecl -> Maybe LocSymbol
getSizeFuns decl =
  case tycSFun decl of
    Just (SymSizeFun f) -> Just f
    _                   -> Nothing
-- | The laws component of the spec: class-law definitions (from the measure
--   environment) and instance laws resolved against the given signatures.
makeSpecLaws :: Bare.Env -> Bare.SigEnv -> [(Ghc.Var,LocSpecType)] -> Bare.MeasEnv -> Bare.ModSpecs
             -> GhcSpecLaws
makeSpecLaws env sigEnv sigs menv specs = SpLaws
  { gsLawDefs = second (map (\(_,x,y) -> (x,y))) <$> Bare.meCLaws menv
  , gsLawInst = Bare.makeInstanceLaws env sigEnv sigs specs
  }
makeSpecRefl :: Config -> GhcSrc -> Bare.MeasEnv -> Bare.ModSpecs -> Bare.Env -> ModName -> GhcSpecSig -> Bare.TycEnv
-> Bare.Lookup GhcSpecRefl
makeSpecRefl cfg src menv specs env name sig tycEnv = do
autoInst <- makeAutoInst env name mySpec
rwr <- makeRewrite env name mySpec
rwrWith <- makeRewriteWith env name mySpec
wRefls <- Bare.wiredReflects cfg env name sig
xtes <- Bare.makeHaskellAxioms cfg src env tycEnv name lmap sig mySpec
let myAxioms =
[ Bare.qualifyTop
env
name
(F.loc lt)
e {eqName = s, eqRec = S.member s (exprSymbolsSet (eqBody e))}
| (x, lt, e) <- xtes
, let s = symbol x
]
++ (fst <$> gsRefSigs sig)
return SpRefl
{ gsLogicMap = lmap
, gsAutoInst = autoInst
, gsImpAxioms = concatMap (Ms.axeqs . snd) (M.toList specs)
, gsMyAxioms = F.notracepp "gsMyAxioms" myAxioms
, gsReflects = F.notracepp "gsReflects" (lawMethods ++ filter (isReflectVar rflSyms) sigVars ++ wRefls)
, gsHAxioms = F.notracepp "gsHAxioms" xtes
, gsWiredReft = wRefls
, gsRewrites = rwr
, gsRewritesWith = rwrWith
}
where
lawMethods = F.notracepp "Law Methods" $ concatMap Ghc.classMethods (fst <$> Bare.meCLaws menv)
mySpec = M.lookupDefault mempty name specs
rflSyms = S.fromList (getReflects specs)
lmap = Bare.reLMap env
-- | Is this binder's (module-stripped) name in the set of reflected symbols?
isReflectVar :: S.HashSet F.Symbol -> Ghc.Var -> Bool
isReflectVar reflSyms v = GM.dropModuleNames (symbol v) `S.member` reflSyms
-- | All symbols lifted to logic anywhere: reflects, inlines and measures,
--   across every module's spec.
getReflects :: Bare.ModSpecs -> [Symbol]
getReflects = map val . S.toList . S.unions . map liftedNames . M.elems
  where
    liftedNames sp = S.unions [ Ms.reflects sp, Ms.inlines sp, Ms.hmeas sp ]
-- | Install the reflected signatures into the spec: axiom-derived signatures
--   become 'gsRefSigs' (expanded/qualified below), wired reflects are moved
--   into 'gsAsmSigs', and already-reflected assumed signatures are dropped.
addReflSigs :: Bare.Env -> ModName -> BareRTEnv -> GhcSpecRefl -> GhcSpecSig -> GhcSpecSig
addReflSigs env name rtEnv refl sig =
  sig { gsRefSigs = F.notracepp ("gsRefSigs for " ++ F.showpp name) $ map expandReflectedSignature reflSigs
      , gsAsmSigs = F.notracepp ("gsAsmSigs for " ++ F.showpp name) (wreflSigs ++ filter notReflected (gsAsmSigs sig))
      }
  where
    -- See T1738. We need to expand and qualify any reflected signature /here/, after any [...]
    -- 'BareSpec' will have a 'reflects' list of binders to reflect under the form of an opaque 'Var', that
    -- qualifyExpand can't touch when we do a first pass in 'makeGhcSpec0'. However, once we reflected all
    -- the functions, we are left with a pair (Var, LocSpecType). The latter /needs/ to be qualified and
    -- expanded again, for example in case it has expression aliases derived from 'inlines'.
    expandReflectedSignature :: (Ghc.Var, LocSpecType) -> (Ghc.Var, LocSpecType)
    expandReflectedSignature = fmap (Bare.qualifyExpand env name rtEnv (F.dummyPos "expand-refSigs") [])

    (wreflSigs, reflSigs) = L.partition ((`elem` gsWiredReft refl) . fst)
                              [ (x, t) | (x, t, _) <- gsHAxioms refl ]
    reflected             = fst <$> (wreflSigs ++ reflSigs)
    notReflected xt       = fst xt `notElem` reflected
-- | Resolve the @automatic-instances@ annotations: each binder mapped to an
--   optional fuel/depth bound.
makeAutoInst :: Bare.Env -> ModName -> Ms.BareSpec ->
                Bare.Lookup (M.HashMap Ghc.Var (Maybe Int))
makeAutoInst env name spec = M.fromList <$> resolved
  where
    resolved = forM (M.toList (Ms.autois spec)) $ \(k, mbDepth) -> do
      vk <- Bare.lookupGhcVar env name "Var" k
      pure (vk, mbDepth)
makeSpecSig :: Config -> ModName -> Bare.ModSpecs -> Bare.Env -> Bare.SigEnv -> Bare.TycEnv -> Bare.MeasEnv -> [Ghc.CoreBind]
-> Bare.Lookup GhcSpecSig
makeSpecSig cfg name specs env sigEnv tycEnv measEnv cbs = do
mySigs <- makeTySigs env sigEnv name mySpec
aSigs <- F.notracepp ("makeSpecSig aSigs " ++ F.showpp name) $ makeAsmSigs env sigEnv name specs
let asmSigs = Bare.tcSelVars tycEnv
++ aSigs
++ [ (x,t) | (_, x, t) <- concatMap snd (Bare.meCLaws measEnv) ]
let tySigs = strengthenSigs . concat $
the binders used in USER - defined sigs
newTys <- makeNewTypes env sigEnv allSpecs
relation <- makeRelation env name sigEnv (Ms.relational mySpec)
asmRel <- makeRelation env name sigEnv (Ms.asmRel mySpec)
return SpSig
{ gsTySigs = tySigs
, gsAsmSigs = asmSigs
, gsRefSigs = []
, gsDicts = dicts
, gsMethods = if then [ ] else dicts ( Bare.meClasses measEnv ) cbs
, gsMethods = if noclasscheck cfg then [] else Bare.makeMethodTypes (typeclass cfg) dicts (Bare.meClasses measEnv) cbs
, gsInSigs = mempty
, gsNewTypes = newTys
, gsTexprs = [ (v, t, es) | (v, t, Just es) <- mySigs ]
, gsRelation = relation
, gsAsmRel = asmRel
}
where
dicts = Bare.makeSpecDictionaries env sigEnv specs
mySpec = M.lookupDefault mempty name specs
allSpecs = M.toList specs
rtEnv = Bare.sigRTEnv sigEnv
-- | When a binder has several signatures, 'meet' them into one per binder;
--   the Int priority orders which side of each pairwise meet dominates.
strengthenSigs :: [(Ghc.Var, (Int, LocSpecType))] ->[(Ghc.Var, LocSpecType)]
strengthenSigs sigs = go <$> Misc.groupList sigs
  where
    go (v, ixs) = (v,) $ L.foldl1' (flip meetLoc) (F.notracepp ("STRENGTHEN-SIGS: " ++ F.showpp v) (prio ixs))
    prio        = fmap snd . Misc.sortOn fst
-- | Meet the payloads of two located types, keeping the first's location.
meetLoc :: LocSpecType -> LocSpecType -> LocSpecType
meetLoc t1 t2 = t1 { val = F.meet (val t1) (val t2) }
-- | The method signatures recorded in the measure environment.
makeMthSigs :: Bare.MeasEnv -> [(Ghc.Var, LocSpecType)]
makeMthSigs measEnv = map (\(_, v, t) -> (v, t)) (Bare.meMethods measEnv)
-- | Lift the binders marked @inline@ into signatures built from their
--   definitions.
makeInlSigs :: Bare.Env -> BareRTEnv -> [(ModName, Ms.BareSpec)] -> [(Ghc.Var, LocSpecType)]
makeInlSigs env rtEnv
  = makeLiftedSigs rtEnv (CoreToLogic.inlineSpecType (typeclass (getConfig env)))
  . makeFromSet "hinlines" Ms.inlines env
-- | Lift the binders marked @measure@ ('hmeas') into signatures.
--   NOTE(review): this uses 'CoreToLogic.inlineSpecType', the same type
--   builder as 'makeInlSigs' — confirm a measure-specific builder is not
--   intended here.
makeMsrSigs :: Bare.Env -> BareRTEnv -> [(ModName, Ms.BareSpec)] -> [(Ghc.Var, LocSpecType)]
makeMsrSigs env rtEnv
  = makeLiftedSigs rtEnv (CoreToLogic.inlineSpecType (typeclass (getConfig env)))
  . makeFromSet "hmeas" Ms.hmeas env
-- | Build a located, alias-expanded signature for each binder using the
--   given type builder; locations come from the binders themselves.
makeLiftedSigs :: BareRTEnv -> (Ghc.Var -> SpecType) -> [Ghc.Var] -> [(Ghc.Var, LocSpecType)]
makeLiftedSigs rtEnv mkType vars =
  [ (x, Bare.specExpandType rtEnv (lx { val = mkType x }))
  | x <- vars
  , let lx = GM.locNamedThing x
  ]
-- | Resolve the symbols selected from each spec to GHC 'Var's, dropping
--   those that do not resolve; @msg@ tags the lookup for diagnostics.
makeFromSet :: String -> (Ms.BareSpec -> S.HashSet LocSymbol) -> Bare.Env -> [(ModName, Ms.BareSpec)]
            -> [Ghc.Var]
makeFromSet msg select env specs =
  concat [ Mb.mapMaybe (Bare.maybeResolveSym env n msg) (S.toList (select sp))
         | (n, sp) <- specs ]
-- | The user-written type signatures of the target spec: resolved to 'Var's,
--   paired with their termination expressions, local binds resolved, and
--   each bare type cooked into a 'SpecType'.
makeTySigs :: Bare.Env -> Bare.SigEnv -> ModName -> Ms.BareSpec
           -> Bare.Lookup [(Ghc.Var, LocSpecType, Maybe [Located F.Expr])]
makeTySigs env sigEnv name spec = do
  bareSigs   <- bareTySigs env name spec
  expSigs    <- makeTExpr env name bareSigs rtEnv spec
  let rawSigs = Bare.resolveLocalBinds env expSigs
  return [ (x, cook x bt, z) | (x, bt, z) <- rawSigs ]
  where
    rtEnv     = Bare.sigRTEnv sigEnv
    cook x bt = Bare.cookSpecType env sigEnv name (Bare.HsTV x) bt
-- | Resolve every (global and local) signature's symbol to its 'Var',
--   rejecting duplicate signatures for the same binder.
bareTySigs :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup [(Ghc.Var, LocBareType)]
bareTySigs env name spec = checkDuplicateSigs <$> vts
  where
    vts = forM ( Ms.sigs spec ++ Ms.localSigs spec ) $ \ (x, t) -> do
      v <- F.notracepp "LOOKUP-GHC-VAR" $ Bare.lookupGhcVar env name "rawTySigs" x
      return (v, t)
-- checkDuplicateSigs :: [(Ghc.Var, LocSpecType)] -> [(Ghc.Var, LocSpecType)]
-- | Reject (with a 'UserError') any binder that carries more than one
--   signature; otherwise return the input unchanged.
checkDuplicateSigs :: (Symbolic x) => [(x, F.Located t)] -> [(x, F.Located t)]
checkDuplicateSigs xts =
  case Misc.uniqueByKey [ (F.symbol x, F.loc t) | (x, t) <- xts ] of
    Left (k, ls) -> uError (errDupSpecs (pprint k) (GM.sourcePosSrcSpan <$> ls))
    Right _      -> xts
-- | Cook the raw assumed signatures (one per binder, chosen by priority)
--   into 'SpecType's.
makeAsmSigs :: Bare.Env -> Bare.SigEnv -> ModName -> Bare.ModSpecs -> Bare.Lookup [(Ghc.Var, LocSpecType)]
makeAsmSigs env sigEnv myName specs = do
  raSigs <- rawAsmSigs env myName specs
  return [ (x, Bare.cookSpecType env sigEnv name (Bare.LqTV x) bt)
         | (name, x, bt) <- raSigs ]
-- | For every binder with assumed signatures, pick the single winning
--   (module, type) pair via 'myAsmSig'.
rawAsmSigs :: Bare.Env -> ModName -> Bare.ModSpecs -> Bare.Lookup [(ModName, Ghc.Var, LocBareType)]
rawAsmSigs env myName specs = map pick <$> allAsmSigs env myName specs
  where
    pick (v, sigs) = let (m, t) = myAsmSig v sigs in (m, v, t)
-- | Pick the single assumed signature for a binder: a home-module ('True')
--   signature wins (duplicates are an error); otherwise fall back to an
--   imported one.
--   NOTE(review): 'mbImp' is referenced but its binding is not visible in
--   this extract — presumably it selects from 'sigsImp' by smallest
--   'nameDistance'; restore from the upstream source.
myAsmSig :: Ghc.Var -> [(Bool, ModName, LocBareType)] -> (ModName, LocBareType)
myAsmSig v sigs = Mb.fromMaybe errImp (Misc.firstMaybes [mbHome, mbImp])
  where
    mbHome   = takeUnique mkErr sigsHome
    sigsHome = [(m, t) | (True, m, t) <- sigs ]
    sigsImp  = F.notracepp ("SIGS-IMP: " ++ F.showpp v)
               [(d, (m, t)) | (False, m, t) <- sigs, let d = nameDistance vName m]
    mkErr ts = ErrDupSpecs (Ghc.getSrcSpan v) (F.pprint v) (GM.sourcePosSrcSpan . F.loc . snd <$> ts) :: UserError
    errImp   = impossible Nothing "myAsmSig: cannot happen as sigs is non-null"
    vName    = GM.takeModuleNames (F.symbol v)
-- | Attach termination expressions to the signatures they belong to, with
--   each expression qualified/expanded in the scope of its signature's
--   binders.
makeTExpr :: Bare.Env -> ModName -> [(Ghc.Var, LocBareType)] -> BareRTEnv -> Ms.BareSpec
          -> Bare.Lookup [(Ghc.Var, LocBareType, Maybe [Located F.Expr])]
makeTExpr env name tySigs rtEnv spec = do
  vExprs <- M.fromList <$> makeVarTExprs env name spec
  let vSigExprs = Misc.hashMapMapWithKey (\v t -> (t, M.lookup v vExprs)) vSigs
  return [ (v, t, qual t <$> es) | (v, (t, es)) <- M.toList vSigExprs ]
  where
    qual t es = qualifyTermExpr env name rtEnv t <$> es
    vSigs     = M.fromList tySigs
-- | Qualify and expand a termination expression in the scope of its
--   signature's argument binders, preserving the expression's location.
qualifyTermExpr :: Bare.Env -> ModName -> BareRTEnv -> LocBareType -> Located F.Expr
                -> Located F.Expr
qualifyTermExpr env name rtEnv t le =
  F.atLoc le (Bare.qualifyExpand env name rtEnv (F.loc le) binders (F.val le))
  where
    binders = ty_binds (toRTypeRep (val t))
-- | Resolve each @termination-expression@ annotation to its 'Var'.
makeVarTExprs :: Bare.Env -> ModName -> Ms.BareSpec -> Bare.Lookup [(Ghc.Var, [Located F.Expr])]
makeVarTExprs env name spec =
  forM (Ms.termexprs spec) $ \(x, es) ->
    (\vx -> (vx, es)) <$> Bare.lookupGhcVar env name "Var" x
-- We prefer the specification `Foo.Bar.Baz.Quux.x` over any other, IF GHC's
-- fully qualified name for THING is `Foo.Bar.Baz.Quux`. For example, given
--   (1) Data/ByteString.spec
--   (2) Data/ByteString/Char8.spec
-- we end up resolving the `head` in (1) to the @Var@ `Data.ByteString.Char8.head`.
-- | Distance between a binder's module qualifier and a module name:
--   0 when they coincide, 1 otherwise (used to prioritise home specs).
nameDistance :: F.Symbol -> ModName -> Int
nameDistance vName tName = if vName == F.symbol tName then 0 else 1
-- | 'Nothing' for an empty list, 'Just' the sole element of a singleton;
--   for anything longer, throw the exception built by the first argument.
takeUnique :: Ex.Exception e => ([a] -> e) -> [a] -> Maybe a
takeUnique mkE xs = case xs of
  []  -> Nothing
  [x] -> Just x
  _   -> Ex.throw (mkE xs)
-- | Gather, per binder, every assumed signature found across all specs;
--   symbols that fail to resolve leniently ('Nothing') are dropped.
allAsmSigs :: Bare.Env -> ModName -> Bare.ModSpecs ->
              Bare.Lookup [(Ghc.Var, [(Bool, ModName, LocBareType)])]
allAsmSigs env myName specs = do
  let aSigs = [ (name, must, x, t) | (name, spec) <- M.toList specs
                                   , (must, x, t) <- getAsmSigs myName name spec ]
  vSigs <- forM aSigs $ \(name, must, x, t) -> do
    vMb <- resolveAsmVar env name must x
    return (vMb, (must, name, t))
  return $ Misc.groupList [ (v, z) | (Just v, z) <- vSigs ]
-- | Resolve an assumed-signature symbol: home-module signatures ('True')
--   MUST resolve (errors propagate); imported ones resolve leniently,
--   falling back to an auxiliary variable if available.
resolveAsmVar :: Bare.Env -> ModName -> Bool -> LocSymbol -> Bare.Lookup (Maybe Ghc.Var)
resolveAsmVar env name True  lx =
  Just <$> Bare.lookupGhcVar env name "resolveAsmVar-True" lx
resolveAsmVar env name False lx =
  pure (Bare.maybeResolveSym env name "resolveAsmVar-False" lx <|> GM.maybeAuxVar (F.val lx))
-- | Tag each assumed signature of a spec with whether it MUST resolve.
--   NOTE(review): the home-module guard (@myName == name@) and the binding
--   of @x'@ (presumably @let x' = qSym x@) are missing from this extract,
--   as is the comprehension's closing bracket — restore from the upstream
--   source before building.
getAsmSigs :: ModName -> ModName -> Ms.BareSpec -> [(Bool, LocSymbol, LocBareType)]
getAsmSigs myName name spec
  | otherwise = [ (False, x', t) | (x, t) <- Ms.asmSigs spec
                                             ++ Ms.sigs spec
  -- MAY-NOT resolve
  where
    qSym = fmap (GM.qualifySymbol ns)
    ns   = F.symbol name
-- TODO-REBARE: grepClassAssumes
-- | Extract the assumed instance signatures, renaming each method symbol
--   to its dictionary-selector form (prefixed with @.$c@).
_grepClassAssumes :: [RInstance t] -> [(Located F.Symbol, t)]
_grepClassAssumes = concatMap (Mb.mapMaybe assumed . risigs)
  where
    assumed (x, RIAssumed t) = Just ((F.symbol . (".$c" ++ ) . F.symbolString) <$> x, t)
    assumed (_, RISig _)     = Nothing
-- | Package the pieces needed to cook bare signatures into a 'Bare.SigEnv'.
makeSigEnv :: F.TCEmb Ghc.TyCon -> Bare.TyConMap -> S.HashSet StableName -> BareRTEnv -> Bare.SigEnv
makeSigEnv tcEmbeds tyconMap exportedNames rtAliasEnv =
  Bare.SigEnv
    { sigEmbs     = tcEmbeds
    , sigTyRTyMap = tyconMap
    , sigExports  = exportedNames
    , sigRTEnv    = rtAliasEnv
    }
-- | Process every @newtype@ declaration of every spec into (TyCon, type)
--   pairs.
makeNewTypes :: Bare.Env -> Bare.SigEnv -> [(ModName, Ms.BareSpec)] ->
                Bare.Lookup [(Ghc.TyCon, LocSpecType)]
makeNewTypes env sigEnv specs =
  concat <$> sequence
    [ makeNewType env sigEnv n d | (n, sp) <- specs, d <- Ms.newtyDecls sp ]
-- | Turn one @newtype@ data declaration into a (TyCon, SpecType) pair.
--   The declaration must have exactly one constructor with exactly one
--   field, otherwise a 'UserError' is thrown; unknown tycons yield nothing.
makeNewType :: Bare.Env -> Bare.SigEnv -> ModName -> DataDecl ->
               Bare.Lookup [(Ghc.TyCon, LocSpecType)]
makeNewType env sigEnv name d = do
  tcMb <- Bare.lookupGhcDnTyCon env name "makeNewType" tcName
  case tcMb of
    Just tc -> return [(tc, lst)]
    _       -> return []
  where
    tcName    = tycName d
    lst       = Bare.cookSpecType env sigEnv name Bare.GenTV bt
    bt        = getTy tcName (tycSrcPos d) (Mb.fromMaybe [] (tycDCons d))
    -- single constructor with a single field: use the field's type
    getTy _ l [c]
      | [(_, t)] <- dcFields c = Loc l l t
    getTy n l _ = Ex.throw (mkErr n l)
    mkErr n l   = ErrOther (GM.sourcePosSrcSpan l) ("Bad new type declaration:" <+> F.pprint n) :: UserError
makeSpecData :: GhcSrc -> Bare.Env -> Bare.SigEnv -> Bare.MeasEnv -> GhcSpecSig -> Bare.ModSpecs
-> GhcSpecData
makeSpecData src env sigEnv measEnv sig specs = SpData
{ gsCtors = F.notracepp "GS-CTORS"
[ (x, if allowTC then t else tt)
| (x, t) <- Bare.meDataCons measEnv
, let tt = Bare.plugHoles (typeclass $ getConfig env) sigEnv name (Bare.LqTV x) t
]
, gsMeas = [ (F.symbol x, uRType <$> t) | (x, t) <- measVars ]
, gsMeasures = Bare.qualifyTopDummy env name <$> (ms1 ++ ms2)
, gsInvariants = Misc.nubHashOn (F.loc . snd) invs
, gsIaliases = concatMap (makeIAliases env sigEnv) (M.toList specs)
, gsUnsorted = usI ++ concatMap msUnSorted (concatMap measures specs)
}
where
allowTC = typeclass (getConfig env)
++ Bare.varMeasures env
measuresSp = Bare.meMeasureSpec measEnv
ms1 = M.elems (Ms.measMap measuresSp)
ms2 = Ms.imeas measuresSp
mySpec = M.lookupDefault mempty name specs
name = _giTargetMod src
(minvs,usI) = makeMeasureInvariants env name sig mySpec
invs = minvs ++ concatMap (makeInvariants env sigEnv) (M.toList specs)
-- | The invariant-alias pairs of one spec, keeping only those whose both
--   sides cook successfully into 'SpecType's.
makeIAliases :: Bare.Env -> Bare.SigEnv -> (ModName, Ms.BareSpec) -> [(LocSpecType, LocSpecType)]
makeIAliases env sigEnv (name, spec)
  = [ z | Right z <- mkIA <$> Ms.ialiases spec ]
  where
    -- mkIA :: (LocBareType, LocBareType) -> Either _ (LocSpecType, LocSpecType)
    mkIA (t1, t2) = (,) <$> mkI' t1 <*> mkI' t2
    mkI'          = Bare.cookSpecTypeE env sigEnv name Bare.GenTV
-- | Invariants of one spec: the explicit @invariant@ annotations plus the
--   size-function non-negativity invariants from @dsize@ declarations; only
--   types known to GHC are considered.
makeInvariants :: Bare.Env -> Bare.SigEnv -> (ModName, Ms.BareSpec) -> [(Maybe Ghc.Var, Located SpecType)]
makeInvariants env sigEnv (name, spec) =
  [ (Nothing, t)
  | (_, bt) <- Ms.invariants spec
  , Bare.knownGhcType env name bt
  , let t = Bare.cookSpecType env sigEnv name Bare.GenTV bt
  ] ++
  concat [ (Nothing,) . makeSizeInv l <$> ts
         | (bts, l) <- Ms.dsize spec
         , all (Bare.knownGhcType env name) bts
         , let ts = Bare.cookSpecType env sigEnv name Bare.GenTV <$> bts
         ]
-- | Strengthen a type's top-level application refinement with
--   non-negativity of the given size function: @0 <= s v@; descends under
--   universal quantifiers.
makeSizeInv :: F.LocSymbol -> Located SpecType -> Located SpecType
makeSizeInv s lst = lst{val = go (val lst)}
  where
    go (RApp c ts rs r) = RApp c ts rs (r `meet` nat)
    go (RAllT a t r)    = RAllT a (go t) r
    go t                = t
    nat = MkUReft (Reft (vv_, PAtom Le (ECon $ I 0) (EApp (EVar $ val s) (eVar vv_))))
                  mempty
-- | Invariants induced by the lifted ('hmeas') measures: each measure's
--   type, restricted to the signatures of the binders it names; unsorted
--   expressions are collected separately.
makeMeasureInvariants :: Bare.Env -> ModName -> GhcSpecSig -> Ms.BareSpec
                      -> ([(Maybe Ghc.Var, LocSpecType)], [UnSortedExpr])
makeMeasureInvariants env name sig mySpec
  = mapSnd Mb.catMaybes $
    unzip (measureTypeToInv env name <$> [ (x, (y, ty)) | x <- xs, (y, ty) <- sigs
                                                        , isSymbolOfVar (val x) y ])
  where
    sigs = gsTySigs sig
    xs   = S.toList (Ms.hmeas mySpec)
-- | Does the (module-stripped) symbol name the given binder?
isSymbolOfVar :: Symbol -> Ghc.Var -> Bool
isSymbolOfVar x v = x == GM.dropModuleNames (symbol (Ghc.getName v))
-- | Convert a measure's type into an invariant on its last argument type.
--   NOTE(review): 'z' and 'tz' use partial 'last'; the 'null rts' guard
--   raises for zero-argument measures, but 'usorted' also forces 'tz' —
--   mind the lazy-evaluation order if refactoring.
measureTypeToInv :: Bare.Env -> ModName -> (LocSymbol, (Ghc.Var, LocSpecType)) -> ((Maybe Ghc.Var, LocSpecType), Maybe UnSortedExpr)
measureTypeToInv env name (x, (v, t))
  = notracepp "measureTypeToInv" ((Just v, t {val = Bare.qualifyTop env name (F.loc x) mtype}), usorted)
  where
    trep = toRTypeRep (val t)
    rts  = ty_args  trep
    args = ty_binds trep
    res  = ty_res   trep
    z    = last args
    tz   = last rts
    -- simple ADTs get a sorted invariant only; others also yield an unsorted one
    usorted = if isSimpleADT tz then Nothing else mapFst (:[]) <$> mkReft (dummyLoc $ F.symbol v) z tz res
    mtype
      | null rts
      = uError $ ErrHMeas (GM.sourcePosSrcSpan $ loc t) (pprint x) "Measure has no arguments!"
      | otherwise
      = mkInvariant x z tz res
    isSimpleADT (RApp _ ts _ _) = all isRVar ts
    isSimpleADT _               = False
-- | Strengthen (the top of) a measure's argument type with the refinement
--   derived from the measure's result, when one exists.
mkInvariant :: LocSymbol -> Symbol -> SpecType -> SpecType -> SpecType
mkInvariant x z t tr = strengthen (fmap top t) (MkUReft reft' mempty)
  where
    reft' = case mkReft x z t tr of
              Just r  -> Reft r
              Nothing -> mempty
-- | Derive a refinement from the base reft of a measure's result type,
--   substituting the measure application for the value variable and the
--   argument binder for the value variable.
mkReft :: LocSymbol -> Symbol -> SpecType -> SpecType -> Maybe (Symbol, Expr)
mkReft x z _t tr
  | Just q <- stripRTypeBase tr
  = let Reft (v, p) = toReft q
        su          = mkSubst [(v, mkEApp x [EVar v]), (z,EVar v)]
        -- p' = filter (\e -> z `notElem` syms e) $ conjuncts p
    in  Just (v, subst su p)
mkReft _ _ _ _
  = Nothing
-- REBARE: formerly, makeGhcSpec3
-- | The name-related component of the spec: free symbols, (located) data
--   constructors, qualified tycons, embeddings, ADTs and the tycon map.
makeSpecName :: Bare.Env -> Bare.TycEnv -> Bare.MeasEnv -> ModName -> GhcSpecNames
makeSpecName env tycEnv measEnv name = SpNames
  { gsFreeSyms = Bare.reSyms env
  , gsDconsP   = [ F.atLoc dc (dcpCon dc) | dc <- datacons ++ cls ]
  , gsTconsP   = Bare.qualifyTopDummy env name <$> tycons
  , gsTcEmbeds = Bare.tcEmbs tycEnv
  , gsADTs     = Bare.tcAdts tycEnv
  , gsTyconEnv = Bare.tcTyConMap tycEnv
  }
  where
    datacons, cls :: [DataConP]
    datacons = Bare.tcDataCons tycEnv
    cls      = F.notracepp "meClasses" $ Bare.meClasses measEnv
    tycons   = Bare.tcTyCons tycEnv
split into two to break circular dependency . we need dataconmap for core2logic
makeTycEnv0 :: Config -> ModName -> Bare.Env -> TCEmb Ghc.TyCon -> Ms.BareSpec -> Bare.ModSpecs
-> (Diagnostics, [Located DataConP], Bare.TycEnv)
makeTycEnv0 cfg myName env embs mySpec iSpecs = (diag0 <> diag1, datacons, Bare.TycEnv
{ tcTyCons = tycons
, tcSelMeasures = dcSelectors
, tcTyConMap = tyi
, tcAdts = adts
, tcDataConMap = dm
, tcEmbs = embs
, tcName = myName
})
where
(tcDds, dcs) = conTys
(diag0, conTys) = withDiagnostics $ Bare.makeConTypes myName env specs
specs = (myName, mySpec) : M.toList iSpecs
tcs = Misc.snd3 <$> tcDds
tyi = Bare.qualifyTopDummy env myName (makeTyConInfo embs fiTcs tycons)
datacons = Bare.makePluggedDataCons embs tyi ( Misc.replaceWith ( dcpCon . ) ( F.tracepp " DATACONS " $ concat dcs ) wiredDataCons )
tycons = tcs ++ knownWiredTyCons env myName
datacons = Bare.makePluggedDataCon (typeclass cfg) embs tyi <$> (concat dcs ++ knownWiredDataCons env myName)
tds = [(name, tcpCon tcp, dd) | (name, tcp, Just dd) <- tcDds]
(diag1, adts) = Bare.makeDataDecls cfg embs myName tds datacons
dm = Bare.dataConMap adts
dcSelectors = concatMap (Bare.makeMeasureSelectors cfg dm) (if reflection cfg then charDataCon:datacons else datacons)
fiTcs = _gsFiTcs (Bare.reSrc env)
-- | Second phase of type-constructor environment construction: elaborates
--   class data constructors (which need the core-to-logic translator and
--   simplifier) and fills in the record-selector signatures.
makeTycEnv1 ::
     ModName
  -> Bare.Env
  -> (Bare.TycEnv, [Located DataConP])
  -> (Ghc.CoreExpr -> F.Expr)
  -> (Ghc.CoreExpr -> Ghc.TcRn Ghc.CoreExpr)
  -> Ghc.TcRn Bare.TycEnv
makeTycEnv1 myName env (tycEnv, datacons) coreToLg simplifier = do
  -- fst for selector generation, snd for dataconsig generation
  lclassdcs <- forM classdcs $ traverse (Bare.elaborateClassDcp coreToLg simplifier)
  let recSelectors = Bare.makeRecordSelectorSigs env myName (dcs ++ (fmap . fmap) snd lclassdcs)
  pure $
    tycEnv {Bare.tcSelVars = recSelectors, Bare.tcDataCons = F.val <$> ((fmap . fmap) fst lclassdcs ++ dcs )}
  where
    -- split class data constructors (need elaboration) from the plain ones
    (classdcs, dcs) =
      L.partition
        (Ghc.isClassTyCon . Ghc.dataConTyCon . dcpCon . F.val) datacons
-- | The subset of LH's wired-in data constructors that GHC can actually
--   resolve in the scope of the given module.
knownWiredDataCons :: Bare.Env -> ModName -> [Located DataConP]
knownWiredDataCons env name =
  [ dc | dc <- wiredDataCons, isKnown dc ]
  where
    isKnown dc = Bare.knownGhcDataCon env name (GM.namedLocSymbol (dcpCon (val dc)))
-- | The subset of LH's wired-in type constructors that GHC can actually
--   resolve in the scope of the given module.
knownWiredTyCons :: Bare.Env -> ModName -> [TyConP]
knownWiredTyCons env name =
  [ tc | tc <- wiredTyCons, isKnown tc ]
  where
    isKnown tc = Bare.knownGhcTyCon env name (GM.namedLocSymbol (tcpCon tc))
-- | Builds the measure environment ('Bare.MeasEnv'): class laws, class
--   instances, measure specifications and the data-constructor types refined
--   with those measures, from the per-module specs.
makeMeasEnv :: Bare.Env -> Bare.TycEnv -> Bare.SigEnv -> Bare.ModSpecs ->
               Bare.Lookup Bare.MeasEnv
makeMeasEnv env tycEnv sigEnv specs = do
  laws       <- Bare.makeCLaws env sigEnv name specs
  (cls, mts) <- Bare.makeClasses env sigEnv name specs
  let dms = Bare.makeDefaultMethods env mts
  measures0  <- mapM (Bare.makeMeasureSpec env sigEnv name) (M.toList specs)
  -- the selector measures from the TycEnv are merged in with the user measures
  let measures = mconcat (Ms.mkMSpec' dcSelectors : measures0)
  let (cs, ms) = Bare.makeMeasureSpec' (typeclass $ getConfig env) measures
  let cms = Bare.makeClassMeasureSpec measures
  let cms' = [ (x, Loc l l' $ cSort t) | (Loc l l' x, t) <- cms ]
  -- plain measures: drop any symbol that is already bound as a class measure
  let ms' = [ (F.val lx, F.atLoc lx t) | (lx, t) <- ms
            , Mb.isNothing (lookup (val lx) cms') ]
  -- data constructors (incl. class dictionaries) meeting their measure-derived types
  let cs' = [ (v, txRefs v t) | (v, t) <- Bare.meetDataConSpec (typeclass (getConfig env)) embs cs (datacons ++ cls)]
  return Bare.MeasEnv
    { meMeasureSpec = measures
    , meClassSyms   = cms'
    , meSyms        = ms'
    , meDataCons    = cs'
    , meClasses     = cls
    , meMethods     = mts ++ dms
    , meCLaws       = laws
    }
  where
    -- re-sort refinements against the TyCon map, keeping the source location of v
    txRefs v t  = Bare.txRefSort tyi embs (t <$ GM.locNamedThing v)
    tyi         = Bare.tcTyConMap tycEnv
    dcSelectors = Bare.tcSelMeasures tycEnv
    datacons    = Bare.tcDataCons tycEnv
    embs        = Bare.tcEmbs tycEnv
    name        = Bare.tcName tycEnv
-- | @makeLiftedSpec@ is used to generate the BareSpec object that should be
--   serialized so that clients of this module can load the lifted definitions.
makeLiftedSpec :: ModName -> GhcSrc -> Bare.Env
               -> GhcSpecRefl -> GhcSpecData -> GhcSpecSig -> GhcSpecQual -> BareRTEnv
               -> Ms.BareSpec -> Ms.BareSpec
makeLiftedSpec name src _env refl sData sig qual myRTE lSpec0 = lSpec0
  { Ms.asmSigs    = F.notracepp ("makeLiftedSpec : ASSUMED-SIGS " ++ F.showpp name ) (xbs ++ myDCs)
  , Ms.reflSigs   = F.notracepp "REFL-SIGS" xbs
  , Ms.sigs       = F.notracepp ("makeLiftedSpec : LIFTED-SIGS " ++ F.showpp name ) $ mkSigs (gsTySigs sig)
  , Ms.invariants = [ (varLocSym <$> x, Bare.specToBare <$> t)
                    | (x, t) <- gsInvariants sData
                    , isLocInFile srcF t
                    ]
  , Ms.axeqs      = gsMyAxioms refl
  , Ms.aliases    = F.notracepp "MY-ALIASES" $ M.elems . typeAliases $ myRTE
  , Ms.ealiases   = M.elems . exprAliases $ myRTE
  , Ms.qualifiers = filter (isLocInFile srcF) (gsQualifiers qual)
  }
  where
    -- data constructors defined in this very module
    myDCs      = [(x,t) | (x,t) <- mkSigs (gsCtors sData)
                        , F.symbol name == fst (GM.splitModuleName $ val x)]
    -- only signatures of exported, non-reflected binders are lifted
    mkSigs xts = [ toBare (x, t) | (x, t) <- xts
                                 , S.member x sigVars && isExportedVar (view targetSrcIso src) x
                 ]
    toBare (x, t) = (varLocSym x, Bare.specToBare <$> t)
    xbs        = toBare <$> reflTySigs
    sigVars    = S.difference defVars reflVars
    defVars    = S.fromList (_giDefVars src)
    reflTySigs = [(x, t) | (x,t,_) <- gsHAxioms refl, x `notElem` gsWiredReft refl]
    reflVars   = S.fromList (fst <$> reflTySigs)
    srcF       = _giTarget src
-- | Haskell sources which have "companion" specs defined alongside them also
--   need to be accounted for: strip out the extensions and check that the LHS
--   is a source file and the RHS a spec file.
isLocInFile :: (F.Loc a) => FilePath -> a -> Bool
isLocInFile f lx = f == lifted || isCompanion
  where
    lifted :: FilePath
    lifted = locFile lx

    isCompanion :: Bool
    isCompanion =
          (==) (dropExtension f) (dropExtension lifted)
      &&  isExtFile Hs f
      &&  isExtFile Files.Spec lifted
-- | The file component of a located thing's starting source position.
locFile :: (F.Loc a) => a -> FilePath
locFile x = Misc.fst3 (F.sourcePosElts (F.sp_start (F.srcSpan x)))
-- | A GHC variable as a located symbol.
varLocSym :: Ghc.Var -> LocSymbol
varLocSym v = fmap F.symbol (GM.locNamedThing v)
-- | Collects the aliases defined in the _target_ file, "cooks" them (by
--   conversion to SpecType), and then saves them back as BareType.
myRTEnv :: GhcSrc -> Bare.Env -> Bare.SigEnv -> BareRTEnv -> BareRTEnv
myRTEnv src env sigEnv rtEnv = mkRTE tAs' eAs
  where
    tAs'          = normalizeBareAlias env sigEnv name <$> tAs
    tAs           = myAliases typeAliases
    eAs           = myAliases exprAliases
    -- keep only the aliases whose definition site is the target file itself
    myAliases fld = filter (isLocInFile srcF) . M.elems . fld $ rtEnv
    srcF          = _giTarget src
    name          = _giTargetMod src
-- | Pack type- and expression-alias lists into an 'RTEnv', keyed by alias name.
mkRTE :: [Located (RTAlias x a)] -> [Located (RTAlias F.Symbol F.Expr)] -> RTEnv x a
mkRTE tAs eAs =
  RTE { typeAliases = byName tAs
      , exprAliases = byName eAs
      }
  where
    byName as = M.fromList [ (rtName (F.val a), a) | a <- as ]
-- | Re-cook a bare type alias through the spec-type pipeline so that its
--   type variables and body are normalized before being serialized.
normalizeBareAlias :: Bare.Env -> Bare.SigEnv -> ModName -> Located BareRTAlias
                   -> Located BareRTAlias
normalizeBareAlias env sigEnv name lx = fixRTA <$> lx
  where
    fixRTA  :: BareRTAlias -> BareRTAlias
    fixRTA  = mapRTAVars fixArg . fmap fixBody

    -- alias parameters are plain symbols; round-trip them via GHC tyvars
    fixArg  :: Symbol -> Symbol
    fixArg  = F.symbol . GM.symbolTyVar

    -- cook the alias body into a SpecType (at lx's location) and project back
    fixBody :: BareType -> BareType
    fixBody = Bare.specToBare
            . F.val
            . Bare.cookSpecType env sigEnv name Bare.RawTV
            . F.atLoc lx
-- | Convert a 'Bare.Lookup' into diagnostics plus a payload: errors become
--   error diagnostics with a 'mempty' payload, successes are passed through
--   with empty diagnostics.
withDiagnostics :: (Monoid a) => Bare.Lookup a -> (Diagnostics, a)
withDiagnostics r =
  case r of
    Left es -> (mkDiagnostics [] es, mempty)
    Right v -> (emptyDiagnostics, v)
|
de1e1a8661988d5cb5112d7231e3d89759fa549b78552930bbb2698011f57fe6 | readevalprintlove/black | cnv.scm | (define zip
(lambda (xs ys)
(cond
((or (null? xs) (null? ys))
'())
(else
(cons
(cons (car xs) (car ys))
(zip (cdr xs) (cdr ys)))))))
;; cnv2: zip two lists using an explicit CPS walk.
;; walk traverses xs, building up a continuation k; once xs is exhausted,
;; the base case hands ('() , ys) to k, which threads ys back out of the
;; recursion, pairing each x with the corresponding y.
;; Result: ((x1 . y1) (x2 . y2) ...).
;; NOTE(review): assumes ys is at least as long as xs - confirm with callers.
(define cnv2
  (lambda (xs ys)
    (define walk
      (lambda (xs k)
        (cond
          ((null? xs)
           ;; base case: empty result so far, all of ys still available
           (k '() ys))
          (else
           (walk (cdr xs)
                 ;; k gets the zipped tail r and the ys not yet consumed;
                 ;; cons the current pair on and pass the rest of ys upward
                 (lambda (r ys) (k (cons (cons (car xs) (car ys)) r)
                                   (cdr ys))))))))
    (walk xs (lambda (r ys) r))))
;; cnv3: direct-style version of cnv2.  Instead of a continuation, walk
;; returns a pair (zipped-tail . remaining-ys); each caller pairs its own
;; head element with the first remaining y.  This is cnv2 with the
;; continuation defunctionalized into the returned pair.
;; NOTE(review): assumes ys is at least as long as xs - confirm with callers.
(define cnv3
  (lambda (xs ys)
    (define walk
      (lambda (xs)
        (cond
          ((null? xs)
           ;; base case: nothing zipped yet, all of ys still available
           (cons '() ys))
          (else
           (let ((rys (walk (cdr xs))))
             (let ((r (car rys))
                   (ys (cdr rys)))
               ;; pair this x with the next available y, pass the rest up
               (cons (cons (cons (car xs) (car ys)) r)
                     (cdr ys))))))))
    (car (walk xs))))
#;
(begin
(zip '(1 2 3) '(a b c))
(cnv2 '(1 2 3) '(a b c))
(cnv3 '(1 2 3) '(a b c))
)
| null | https://raw.githubusercontent.com/readevalprintlove/black/a45193c98473004f76319a6dfe48867a11507103/examples/cnv.scm | scheme | (define zip
(lambda (xs ys)
(cond
((or (null? xs) (null? ys))
'())
(else
(cons
(cons (car xs) (car ys))
(zip (cdr xs) (cdr ys)))))))
(define cnv2
(lambda (xs ys)
(define walk
(lambda (xs k)
(cond
((null? xs)
(k '() ys))
(else
(walk (cdr xs)
(lambda (r ys) (k (cons (cons (car xs) (car ys)) r)
(cdr ys))))))))
(walk xs (lambda (r ys) r))))
(define cnv3
(lambda (xs ys)
(define walk
(lambda (xs)
(cond
((null? xs)
(cons '() ys))
(else
(let ((rys (walk (cdr xs))))
(let ((r (car rys))
(ys (cdr rys)))
(cons (cons (cons (car xs) (car ys)) r)
(cdr ys))))))))
(car (walk xs))))
(begin
(zip '(1 2 3) '(a b c))
(cnv2 '(1 2 3) '(a b c))
(cnv3 '(1 2 3) '(a b c))
)
| |
9c6f07bf68efcb27e5317eb67cf88cf501bad156959a4b2a9e311d18ff1beb43 | jeaye/orchestra | project.clj | (defproject orchestra "2021.01.01-1"
:description "Complete instrumentation for clojure.spec"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1" :scope "provided"]
[org.clojure/clojurescript "1.10.773" :scope "provided"]
[org.clojure/spec.alpha "0.2.187" :scope "provided"]]
:plugins [[lein-cloverage "1.1.2"]
[lein-cljsbuild "1.1.8"]
[lein-figwheel "0.5.20"]
[com.jakemccrary/lein-test-refresh "0.24.1"]
[lein-shell "0.5.0"]
[lein-auto "0.1.3"]]
:aliases {"test-cljs"
["do"
["cljsbuild" "once" "app"]
["shell" "node" "target/test.js"]]}
:global-vars {*warn-on-reflection* true}
:test-refresh {:quiet true
:focus-flag :refresh}
:source-paths ["src/clj/" "src/cljc/" "src/cljs/"]
:cljsbuild {:builds {:app
{:source-paths ["src/cljs/"]
:compiler
{:optimizations :none
:pretty-print false
:parallel-build true
:output-dir "target/test"
:output-to "target/test.js"}}}}
:profiles {:uberjar {:aot :all}
:dev {:dependencies [[expound "0.8.5"]
[lein-doo "0.1.11"]]
:source-paths ["test/clj/" "test/cljc/"]
:cljsbuild {:builds {:app
{:source-paths ["test/cljs/" "test/cljc/"]
:compiler
{:main orchestra-cljs.test
:target :nodejs}}}}}})
| null | https://raw.githubusercontent.com/jeaye/orchestra/81e5181f7b42e5e2763a2b37db17954f3be0314e/project.clj | clojure | (defproject orchestra "2021.01.01-1"
:description "Complete instrumentation for clojure.spec"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1" :scope "provided"]
[org.clojure/clojurescript "1.10.773" :scope "provided"]
[org.clojure/spec.alpha "0.2.187" :scope "provided"]]
:plugins [[lein-cloverage "1.1.2"]
[lein-cljsbuild "1.1.8"]
[lein-figwheel "0.5.20"]
[com.jakemccrary/lein-test-refresh "0.24.1"]
[lein-shell "0.5.0"]
[lein-auto "0.1.3"]]
:aliases {"test-cljs"
["do"
["cljsbuild" "once" "app"]
["shell" "node" "target/test.js"]]}
:global-vars {*warn-on-reflection* true}
:test-refresh {:quiet true
:focus-flag :refresh}
:source-paths ["src/clj/" "src/cljc/" "src/cljs/"]
:cljsbuild {:builds {:app
{:source-paths ["src/cljs/"]
:compiler
{:optimizations :none
:pretty-print false
:parallel-build true
:output-dir "target/test"
:output-to "target/test.js"}}}}
:profiles {:uberjar {:aot :all}
:dev {:dependencies [[expound "0.8.5"]
[lein-doo "0.1.11"]]
:source-paths ["test/clj/" "test/cljc/"]
:cljsbuild {:builds {:app
{:source-paths ["test/cljs/" "test/cljc/"]
:compiler
{:main orchestra-cljs.test
:target :nodejs}}}}}})
| |
e99d3f4e853816dc9e74532eef3a3ecc62f66b1e6c952effd274fcdc67bf3dc5 | eponai/sulolive | message.cljc | (ns eponai.client.parser.message
(:require [datascript.db :as db]
[datascript.core :as d]
[om.next :as om]
[eponai.common.database :as database]
[eponai.common.parser :as parser]
[taoensso.timbre :refer [debug warn error]])
#?(:clj (:import [datascript.db DB])))
for public API to make it easier to use .
(def pending? parser/pending?)
(def final? parser/final?)
(def success? parser/success?)
(def message parser/message)
;; -----------------------------
;; -- Mutation message public api
(defn mutation-message? [x]
(and (satisfies? parser/IMutationMessage x)
(some? (message x))))
;; ------------------------------------
;; -- Datascript mutation message storage implementation
(defn- new-message [db id mutation-message]
(d/db-with db [{:mutation-message/message (message mutation-message)
:mutation-message/history-id id
:mutation-message/mutation-key (:mutation-key mutation-message)
:mutation-message/message-type (:message-type mutation-message)}]))
(defn- update-message [db old-mm new-mm]
{:pre [(some? (:db/id old-mm))]}
(assert (pending? old-mm)
(str "Stored message was not pending. Can only update pending messages."
"Old message:" old-mm " new message: " new-mm))
(d/db-with db [{:db/id (:db/id old-mm)
:mutation-message/message (message new-mm)
:mutation-message/message-type (:message-type new-mm)}]))
(defn- entity->MutationMessage [entity]
{:pre [(:db/id entity)]}
(-> (parser/->MutationMessage (:mutation-message/mutation-key entity)
(:mutation-message/message entity)
(:mutation-message/message-type entity))
(assoc :db/id (:db/id entity))))
(defn- store-message-datascript [this history-id mutation-message]
(assert (some? history-id))
(assert (mutation-message? mutation-message)
(str "Mutation message was not a valid mutation-message. Was: " mutation-message))
(let [existing ((parser/get-message-fn this) history-id (:mutation-key mutation-message))]
(debug "Storing message with history-id: " [history-id mutation-message :existing? existing])
(if existing
(update-message this existing mutation-message)
(new-message this history-id mutation-message))))
(defn- get-message-fn-datascript [this]
(fn [history-id mutation-key]
(assert (some? history-id) (str "Called (get-message-fn ) with history-id nil."
" Needs history-id to look up messages. mutation-key: " mutation-key))
(when-let [[id tx] (first (database/find-with
this {:find '[?e ?tx]
:where '[[?e :mutation-message/history-id ?history-id ?tx]
[?e :mutation-message/mutation-key ?mutation-key]]
:symbols {'?history-id history-id
'?mutation-key mutation-key}}))]
(-> (d/entity this id)
(entity->MutationMessage)
(assoc :tx tx)))))
(defn- get-messages-datascript [this]
(some->> (database/find-with this {:find '[?e ?tx]
:where '[[?e :mutation-message/history-id _ ?tx]]})
(into []
(comp (map (fn [[id tx]] (into {:tx tx :db/id id}
(d/entity this id))))
(map entity->MutationMessage)))
(sort-by :tx)))
(extend-protocol parser/IStoreMessages
#?@(:clj [DB
(store-message [this history-id mutation-message]
(store-message-datascript this history-id mutation-message))
(get-message-fn [this]
(get-message-fn-datascript this))
(get-messages [this]
(get-messages-datascript this))]
:cljs [db/DB
(store-message [this history-id mutation-message]
(store-message-datascript this history-id mutation-message))
(get-message-fn [this]
(get-message-fn-datascript this))
(get-messages [this]
(get-messages-datascript this))]))
;;;;;;;;;;;;;;;; Developer facing API. ;;;;;;;;;;;;;;;;;;
;; Usage:
;; (require '[eponai.client.parser.message :as msg])
;;
;; Use `om-transact!` to perform mutation and get history-id.
;; (let [history-id (msg/om-transact! this '[(mutate/this) :read/that])]
;; (om/update-state! this assoc :pending-action {:id history-id :mutation 'mutate/this}))
;;
;; Use `find-message` to retrieve your mesage:
;; (let [{:keys [pending-action]} (om/get-state this)
;; message (msg/find-message this (:id pending-action) (:mutation pending-action))]
;; (if (msg/final? message)
;; ;; Do fancy stuff with either success or error message.
;; ;; Message pending, render spinner or something?
;; ))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Cache the assert so it doesn't run every time a component transacts.
;; The cache holds the set of components and the set of queries already
;; verified to contain :query/messages.
(let [cache (atom {})]
  (defn- assert-query
    "Makes sure component has :query/messages in its query, so it is
    refreshed when messages arrive."
    [component]
    (let [{:keys [components queries]} @cache
          query (om/get-query component)]
      (when-not (or (contains? components component)
                    (contains? queries query))
        (let [parser (om/parser {:read (fn [_ k _]
                                         (when (= k :query/messages)
                                           {:value true}))
                                 :mutate (constantly nil)})
              has-query-messages? (:query/messages (parser nil query))]
          (when-not has-query-messages?
            (error (str "Component did not have :query/messages in its query."
                        " Having :query/messages is needed for the component to"
                        " get refreshed when new messages are merged."
                        " Component: " (pr-str component))))
          ;; BUG FIX: the original threaded both keys through a single
          ;; (update :queries (fnil conj #{}) query :components ... component)
          ;; call, so :components, the fnil fn and the component were passed
          ;; as extra varargs to conj: the :queries set got polluted and
          ;; :components was never populated, defeating the memoization.
          ;; Update each key with its own (update ...) step instead.
          (swap! cache (fn [m]
                         (-> m
                             (update :queries (fnil conj #{}) query)
                             (update :components (fnil conj #{}) component)))))))))
(defn om-transact!
"Like om.next/transact! but it returns the history-id generated from the transaction."
[x tx]
{:pre [(or (om/component? x)
(om/reconciler? x))]}
(om/transact! x tx)
(let [history-id (parser/reconciler->history-id (cond-> x (om/component? x) (om/get-reconciler)))]
(when (om/component? x)
(assert-query x)
(om/update-state! x update ::component-messages
(fn [messages]
(let [mutation-keys (sequence (comp (filter coll?)
(map first)
(filter symbol?))
tx)]
(reduce (fn [m mutation-key]
(update m mutation-key (fnil conj []) history-id))
messages
mutation-keys)))))
history-id))
(defn find-message
"Takes a component, a history id and a mutation-key which was used in the mutation
and returns the message or nil if not found."
[x history-id mutation-key]
(let [db (database/to-db x)
msg-fn (parser/get-message-fn db)]
(msg-fn history-id mutation-key)))
(defn- message-ids-for-key [c k]
(get-in (om/get-state c) [::component-messages k]))
(defn all-messages
"Returns all messages for a component and key in order when the message's id was
transacted, i.e. when it became pending."
[component mutation-key]
(assert-query component)
(->> (message-ids-for-key component mutation-key)
(map (fn [id]
{:post [(or (nil? %) (number? (:tx %)))]}
(find-message component id mutation-key)))
(filter some?)
(sort-by :tx)
(into [])))
(defn any-messages?
"Returns true if there are any messages for the given mutation keys."
[component mutation-keys]
(not-empty (mapcat #(all-messages component %) mutation-keys)))
(defn last-message
"Returns the latest message for a component and key, where latest is defined by :tx order."
[component mutation-key]
(let [messages (all-messages component mutation-key)]
(if (vector? messages)
(peek messages)
(last messages))))
(defn clear-messages! [component mutation-key]
(om/update-state! component update ::component-messages dissoc mutation-key))
(defn clear-one-message! [component mutation-key]
(when (< 1 (count (message-ids-for-key component mutation-key)))
(warn "Found more than one message in call to clear-one-message for component: " (pr-str component)
" mutation-key: " mutation-key
". Will clear all messages."
". There is possibly a bug in your UI?"
" Use `clear-messages` if you want to clear multiple messages."))
(clear-messages! component mutation-key))
(defn message-status [component mutation-key & [not-found]]
(if-let [msg (last-message component mutation-key)]
(cond (pending? msg)
::pending
(final? msg)
(if (success? msg) ::success ::failure)
:else
(throw (ex-info "Message was neither success, error or failure."
{:message msg
:component (pr-str component)
:mutation-key mutation-key})))
(or not-found ::not-found)))
(defn message-data [component mutation-key]
(message (last-message component mutation-key)))
| null | https://raw.githubusercontent.com/eponai/sulolive/7a70701bbd3df6bbb92682679dcedb53f8822c18/src/eponai/client/parser/message.cljc | clojure | -----------------------------
-- Mutation message public api
------------------------------------
-- Datascript mutation message storage implementation
Developer facing API. ;;;;;;;;;;;;;;;;;;
Usage:
(require '[eponai.client.parser.message :as msg])
Use `om-transact!` to perform mutation and get history-id.
(let [history-id (msg/om-transact! this '[(mutate/this) :read/that])]
(om/update-state! this assoc :pending-action {:id history-id :mutation 'mutate/this}))
Use `find-message` to retrieve your mesage:
(let [{:keys [pending-action]} (om/get-state this)
message (msg/find-message this (:id pending-action) (:mutation pending-action))]
(if (msg/final? message)
;; Do fancy stuff with either success or error message.
;; Message pending, render spinner or something?
))
Cache the assert for preventing it to happen everytime it's called. | (ns eponai.client.parser.message
(:require [datascript.db :as db]
[datascript.core :as d]
[om.next :as om]
[eponai.common.database :as database]
[eponai.common.parser :as parser]
[taoensso.timbre :refer [debug warn error]])
#?(:clj (:import [datascript.db DB])))
for public API to make it easier to use .
(def pending? parser/pending?)
(def final? parser/final?)
(def success? parser/success?)
(def message parser/message)
(defn mutation-message? [x]
(and (satisfies? parser/IMutationMessage x)
(some? (message x))))
(defn- new-message [db id mutation-message]
(d/db-with db [{:mutation-message/message (message mutation-message)
:mutation-message/history-id id
:mutation-message/mutation-key (:mutation-key mutation-message)
:mutation-message/message-type (:message-type mutation-message)}]))
(defn- update-message [db old-mm new-mm]
{:pre [(some? (:db/id old-mm))]}
(assert (pending? old-mm)
(str "Stored message was not pending. Can only update pending messages."
"Old message:" old-mm " new message: " new-mm))
(d/db-with db [{:db/id (:db/id old-mm)
:mutation-message/message (message new-mm)
:mutation-message/message-type (:message-type new-mm)}]))
(defn- entity->MutationMessage [entity]
{:pre [(:db/id entity)]}
(-> (parser/->MutationMessage (:mutation-message/mutation-key entity)
(:mutation-message/message entity)
(:mutation-message/message-type entity))
(assoc :db/id (:db/id entity))))
(defn- store-message-datascript [this history-id mutation-message]
(assert (some? history-id))
(assert (mutation-message? mutation-message)
(str "Mutation message was not a valid mutation-message. Was: " mutation-message))
(let [existing ((parser/get-message-fn this) history-id (:mutation-key mutation-message))]
(debug "Storing message with history-id: " [history-id mutation-message :existing? existing])
(if existing
(update-message this existing mutation-message)
(new-message this history-id mutation-message))))
(defn- get-message-fn-datascript [this]
(fn [history-id mutation-key]
(assert (some? history-id) (str "Called (get-message-fn ) with history-id nil."
" Needs history-id to look up messages. mutation-key: " mutation-key))
(when-let [[id tx] (first (database/find-with
this {:find '[?e ?tx]
:where '[[?e :mutation-message/history-id ?history-id ?tx]
[?e :mutation-message/mutation-key ?mutation-key]]
:symbols {'?history-id history-id
'?mutation-key mutation-key}}))]
(-> (d/entity this id)
(entity->MutationMessage)
(assoc :tx tx)))))
(defn- get-messages-datascript [this]
(some->> (database/find-with this {:find '[?e ?tx]
:where '[[?e :mutation-message/history-id _ ?tx]]})
(into []
(comp (map (fn [[id tx]] (into {:tx tx :db/id id}
(d/entity this id))))
(map entity->MutationMessage)))
(sort-by :tx)))
(extend-protocol parser/IStoreMessages
#?@(:clj [DB
(store-message [this history-id mutation-message]
(store-message-datascript this history-id mutation-message))
(get-message-fn [this]
(get-message-fn-datascript this))
(get-messages [this]
(get-messages-datascript this))]
:cljs [db/DB
(store-message [this history-id mutation-message]
(store-message-datascript this history-id mutation-message))
(get-message-fn [this]
(get-message-fn-datascript this))
(get-messages [this]
(get-messages-datascript this))]))
(let [cache (atom {})]
(defn- assert-query
"Makes sure component has :query/messages in its query, so it is
refreshed when messages arrive."
[component]
(let [{:keys [components queries]} @cache
query (om/get-query component)]
(when-not (or (contains? components component)
(contains? queries (om/get-query component)))
(let [parser (om/parser {:read (fn [_ k _]
(when (= k :query/messages)
{:value true}))
:mutate (constantly nil)})
has-query-messages? (:query/messages (parser nil query))]
(when-not has-query-messages?
(error (str "Component did not have :query/messages in its query."
" Having :query/messages is needed for the component to"
" get refreshed when new messages are merged."
" Component: " (pr-str component))))
(swap! cache (fn [m] (-> m (update :queries (fnil conj #{}) query
:components (fnil conj #{}) component)))))))))
(defn om-transact!
"Like om.next/transact! but it returns the history-id generated from the transaction."
[x tx]
{:pre [(or (om/component? x)
(om/reconciler? x))]}
(om/transact! x tx)
(let [history-id (parser/reconciler->history-id (cond-> x (om/component? x) (om/get-reconciler)))]
(when (om/component? x)
(assert-query x)
(om/update-state! x update ::component-messages
(fn [messages]
(let [mutation-keys (sequence (comp (filter coll?)
(map first)
(filter symbol?))
tx)]
(reduce (fn [m mutation-key]
(update m mutation-key (fnil conj []) history-id))
messages
mutation-keys)))))
history-id))
(defn find-message
"Takes a component, a history id and a mutation-key which was used in the mutation
and returns the message or nil if not found."
[x history-id mutation-key]
(let [db (database/to-db x)
msg-fn (parser/get-message-fn db)]
(msg-fn history-id mutation-key)))
(defn- message-ids-for-key [c k]
(get-in (om/get-state c) [::component-messages k]))
(defn all-messages
"Returns all messages for a component and key in order when the message's id was
transacted, i.e. when it became pending."
[component mutation-key]
(assert-query component)
(->> (message-ids-for-key component mutation-key)
(map (fn [id]
{:post [(or (nil? %) (number? (:tx %)))]}
(find-message component id mutation-key)))
(filter some?)
(sort-by :tx)
(into [])))
(defn any-messages?
"Returns true if there are any messages for the given mutation keys."
[component mutation-keys]
(not-empty (mapcat #(all-messages component %) mutation-keys)))
(defn last-message
"Returns the latest message for a component and key, where latest is defined by :tx order."
[component mutation-key]
(let [messages (all-messages component mutation-key)]
(if (vector? messages)
(peek messages)
(last messages))))
(defn clear-messages! [component mutation-key]
(om/update-state! component update ::component-messages dissoc mutation-key))
(defn clear-one-message! [component mutation-key]
(when (< 1 (count (message-ids-for-key component mutation-key)))
(warn "Found more than one message in call to clear-one-message for component: " (pr-str component)
" mutation-key: " mutation-key
". Will clear all messages."
". There is possibly a bug in your UI?"
" Use `clear-messages` if you want to clear multiple messages."))
(clear-messages! component mutation-key))
(defn message-status [component mutation-key & [not-found]]
(if-let [msg (last-message component mutation-key)]
(cond (pending? msg)
::pending
(final? msg)
(if (success? msg) ::success ::failure)
:else
(throw (ex-info "Message was neither success, error or failure."
{:message msg
:component (pr-str component)
:mutation-key mutation-key})))
(or not-found ::not-found)))
(defn message-data [component mutation-key]
(message (last-message component mutation-key)))
|
ba65ae600a271f22d0305f37c72d105943822a2693a2d4c3a8dec11b37d51213 | gethop-dev/payments.stripe | charge_test.clj | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
;; file, You can obtain one at /
(ns dev.gethop.payments.stripe.charge-test
(:require [clojure.test :refer :all]
[dev.gethop.payments.core :as core]
[dev.gethop.payments.stripe]
[integrant.core :as ig])
(:import [java.util UUID]))
(def ^:const test-config {:api-key (System/getenv "STRIPE_TEST_API_KEY")})
(def ^:const test-charge-data {:amount (rand-int 3000)
:currency "eur"
:source "tok_mastercard"
:description "Charge for "})
(deftest ^:integration create-charge
(let [payments-adapter (ig/init-key :dev.gethop.payments/stripe test-config)]
(testing "Create a charge successfully"
(let [result (core/create-charge payments-adapter test-charge-data)]
(is (:success? result))
(is (map? (:charge result)))))))
(deftest ^:integration get-charge
(let [payments-adapter (ig/init-key :dev.gethop.payments/stripe test-config)]
(testing "Get charge successfully"
(let [charge-id (-> (core/create-charge payments-adapter test-charge-data) :charge :id)
result (core/get-charge payments-adapter charge-id)]
(is (:success? result))
(is (map? (:charge result)))))
(testing "Wrong charge-id"
(let [result (core/get-charge payments-adapter (str (UUID/randomUUID)))]
(is (not (:success? result)))
(is (= :not-found (:reason result)))))))
(deftest ^:integration get-all-charges
(let [payments-adapter (ig/init-key :dev.gethop.payments/stripe test-config)]
(testing "Get charges successfully"
(let [result (core/get-all-charges payments-adapter {})]
(is (:success? result))
(is (vector? (:charges result)))))))
| null | https://raw.githubusercontent.com/gethop-dev/payments.stripe/2379ba790123d7380ffc397cf4920cbe2936f840/test/dev/gethop/payments/stripe/charge_test.clj | clojure | file, You can obtain one at / | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
(ns dev.gethop.payments.stripe.charge-test
(:require [clojure.test :refer :all]
[dev.gethop.payments.core :as core]
[dev.gethop.payments.stripe]
[integrant.core :as ig])
(:import [java.util UUID]))
(def ^:const test-config {:api-key (System/getenv "STRIPE_TEST_API_KEY")})
(def ^:const test-charge-data {:amount (rand-int 3000)
:currency "eur"
:source "tok_mastercard"
:description "Charge for "})
(deftest ^:integration create-charge
(let [payments-adapter (ig/init-key :dev.gethop.payments/stripe test-config)]
(testing "Create a charge successfully"
(let [result (core/create-charge payments-adapter test-charge-data)]
(is (:success? result))
(is (map? (:charge result)))))))
(deftest ^:integration get-charge
(let [payments-adapter (ig/init-key :dev.gethop.payments/stripe test-config)]
(testing "Get charge successfully"
(let [charge-id (-> (core/create-charge payments-adapter test-charge-data) :charge :id)
result (core/get-charge payments-adapter charge-id)]
(is (:success? result))
(is (map? (:charge result)))))
(testing "Wrong charge-id"
(let [result (core/get-charge payments-adapter (str (UUID/randomUUID)))]
(is (not (:success? result)))
(is (= :not-found (:reason result)))))))
(deftest ^:integration get-all-charges
(let [payments-adapter (ig/init-key :dev.gethop.payments/stripe test-config)]
(testing "Get charges successfully"
(let [result (core/get-all-charges payments-adapter {})]
(is (:success? result))
(is (vector? (:charges result)))))))
|
7faa0d82d1686dfd47f7eb7a8013bf1c5d3b3ec9b74f952a9c478bd6a2c65519 | icicle-lang/x-ambiata | Merge.hs | # LANGUAGE NoImplicitPrelude #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE PatternGuards #
# LANGUAGE LambdaCase #
module X.Data.Vector.Stream.Merge
( MergePullFrom(..)
, mergePullOrd
, mergePullJoin
, mergePullJoinBy
, mergeList
, merge
) where
import qualified Data.Vector.Fusion.Stream.Monadic as VS
import P
-- | Which stream to pull from during a merge, and a single value to emit.
-- The value to emit will often be the read value - if pulling from left, emit the left, etc.
data MergePullFrom a
= MergePullLeft a
| MergePullRight a
| MergePullBoth a
| Merge two ascending streams with given instance .
-- Left-biased: when elements from both inputs are equal, pull from left.
mergePullOrd :: Ord b => (a -> b) -> a -> a -> MergePullFrom a
mergePullOrd f
= mergePullJoin (\l _ -> MergePullLeft l) f
# INLINE mergePullOrd #
| Merge two ascending streams , using given merge function when two elements are equal
mergePullJoin :: Ord b => (a -> a -> MergePullFrom a) -> (a -> b) -> a -> a -> MergePullFrom a
mergePullJoin f c
= mergePullJoinBy f (compare `on` c)
# INLINE mergePullJoin #
| Merge two ascending streams with Ordering function , use given merge function when two elements are equal
mergePullJoinBy :: (a -> a -> MergePullFrom a) -> (a -> a -> Ordering) -> a -> a -> MergePullFrom a
mergePullJoinBy f c l r
= case c l r of
LT -> MergePullLeft l
EQ -> f l r
GT -> MergePullRight r
# INLINE mergePullJoinBy #
| Merge two lists together .
If they are already sorted and unique , the result will be sorted and unique union of the two .
-- This is really just here as a specification.
mergeList :: (a -> a -> MergePullFrom a) -> [a] -> [a] -> [a]
mergeList f l r = go l r
where
go xs [] = xs
go [] ys = ys
go (x:xs) (y:ys)
= case f x y of
-- Note that this is effectively pushing back onto the top of 'ys'.
-- This 'peek' is important for the implementation over streams, below.
MergePullLeft x' -> x' : go xs (y:ys)
MergePullBoth v' -> v' : go xs ys
MergePullRight y' -> y' : go (x:xs) ys
| Merge two streams together .
If they are already sorted and unique , the result will be sorted and unique union of the two .
--
-- This is a fair bit more complicated than the mergeList implementation above, but the idea is the same.
-- The streams themselves have no way of peeking at the head of the stream or putting a value back,
-- so we need to implement that by hand.
-- This is not particularly hard, but it does explode the number of possible states.
merge
( Stream left - state ) ( Stream right - state )
State :
( Maybe left - state , Maybe right - state
, Maybe left - peek , Maybe right - peek )
If we have a * -peek value , we will not pull from * -state .
If * -state is Nothing , the left stream is finished , but a final value may be in * -peek .
Initial state is ( Just * -state ) , with empty peeks .
While there are sixteen possibilities of the Maybes , there are only six real state types :
merge :
When we have a value in both peeks , we can compare them together and emit something .
The peek that is used is thrown away .
read - L / read - R :
We do not have a value in left / right peek , but left / right stream is not finished .
Attempt to read from left / right stream .
fill - L / fill - R :
We have a value in left / right peek , and the other stream is finished .
We can emit this value as - is . Eventually the entire leftover stream will be emitted .
done :
Both streams are finished , and both peeks are finished .
Here is a picture showing the sixteen possibilities , and which action they relate to .
left - peek
Just | Nothing
left - state | right - state | | right - peek | |
| | Just | Nothing | Just | Nothing |
------------|-------------|---------|---------------|-------------|---------------|
| Just | merge | read - R | read - L | read - L |
Just | ------------|---------|---------------|-------------|---------------|
| Nothing | merge | fill - L | read - L | read - L |
------------|-------------|---------|---------------|-------------|---------------|
| Just | merge | read - R | fill - R | read - R |
Nothing | ------------|---------|---------------|-------------|---------------|
| Nothing | merge | fill - L | fill - R | done |
------------|-------------|---------|---------------|-------------|---------------|
merge
(Stream left-state) (Stream right-state)
State:
( Maybe left-state, Maybe right-state
, Maybe left-peek, Maybe right-peek )
If we have a *-peek value, we will not pull from *-state.
If *-state is Nothing, the left stream is finished, but a final value may be in *-peek.
Initial state is (Just *-state), with empty peeks.
While there are sixteen possibilities of the Maybes, there are only six real state types:
merge:
When we have a value in both peeks, we can compare them together and emit something.
The peek that is used is thrown away.
read-L / read-R:
We do not have a value in left/right peek, but left/right stream is not finished.
Attempt to read from left/right stream.
fill-L / fill-R:
We have a value in left/right peek, and the other stream is finished.
We can emit this value as-is. Eventually the entire leftover stream will be emitted.
done:
Both streams are finished, and both peeks are finished.
Here is a picture showing the sixteen possibilities, and which action they relate to.
left-peek
Just | Nothing
left-state | right-state | | right-peek | |
| | Just | Nothing | Just | Nothing |
------------|-------------|---------|---------------|-------------|---------------|
| Just | merge | read-R | read-L | read-L |
Just | ------------|---------|---------------|-------------|---------------|
| Nothing | merge | fill-L | read-L | read-L |
------------|-------------|---------|---------------|-------------|---------------|
| Just | merge | read-R | fill-R | read-R |
Nothing | ------------|---------|---------------|-------------|---------------|
| Nothing | merge | fill-L | fill-R | done |
------------|-------------|---------|---------------|-------------|---------------|
-}
merge :: Monad m => (a -> a -> MergePullFrom a) -> VS.Stream m a -> VS.Stream m a -> VS.Stream m a
merge f (VS.Stream l'step l'state) (VS.Stream r'step r'state)
= VS.Stream go'step (MergeState (Just l'state) (Just r'state) Nothing Nothing)
where
-- I originally had a somewhat neater version that matched against multiple things like:
--
> ( MergeResult ls rs ( Just lv ) ( Just rv ) ) = doMerge lv rv
> ... = rest ...
--
-- however, pattern match desugaring ends up producing something like
--
-- > fail = rest
> = case ... of
-- > Just lv -> case ... of
-- > Just rv -> doMerge
-- > Nothing -> fail
-- > Nothing -> fail
--
-- and since 'fail' is used multiple times, it won't be inlined into the result.
This does n't play nicely with SpecConstr , I guess because it does n't look through bindings ,
-- so the Maybes never get removed.
--
-- The lesson here is not to use compound pattern matching in stream transformers.
--
go'step m
= case peekL m of
Just lv
-> case peekR m of
Just rv
-> doMerge m lv rv
Nothing
-> case stateR m of
Just rs
-> doReadR m rs
Nothing
-> doFillL m lv
Nothing
-> case stateL m of
Just ls
-> doReadL m ls
Nothing
-> case peekR m of
Just rv
-> doFillR m rv
Nothing
-> case stateR m of
Just rs
-> doReadR m rs
Nothing
-> return $ VS.Done
# INLINE go'step #
doMerge m lv rv
= case f lv rv of
MergePullLeft a -> return $ VS.Yield a m { peekL = Nothing }
MergePullRight a -> return $ VS.Yield a m { peekR = Nothing }
MergePullBoth a -> return $ VS.Yield a m { peekL = Nothing, peekR = Nothing }
# INLINE doMerge #
doReadL m ls
= do step <- l'step ls
case step of
VS.Yield lv ls' -> return $ VS.Skip m { stateL = Just ls', peekL = Just lv }
VS.Skip ls' -> return $ VS.Skip m { stateL = Just ls', peekL = Nothing }
VS.Done -> return $ VS.Skip m { stateL = Nothing, peekL = Nothing }
# INLINE doReadL #
doReadR m rs
= do step <- r'step rs
case step of
VS.Yield rv rs' -> return $ VS.Skip m { stateR = Just rs', peekR = Just rv }
VS.Skip rs' -> return $ VS.Skip m { stateR = Just rs', peekR = Nothing }
VS.Done -> return $ VS.Skip m { stateR = Nothing, peekR = Nothing }
# INLINE doReadR #
doFillL m lv
= return $ VS.Yield lv m { peekL = Nothing }
# INLINE doFillL #
doFillR m rv
= return $ VS.Yield rv m { peekR = Nothing }
# INLINE doFillR #
# INLINE merge #
data MergeState l r a
= MergeState
{ stateL :: Maybe l
, stateR :: Maybe r
, peekL :: Maybe a
, peekR :: Maybe a }
| null | https://raw.githubusercontent.com/icicle-lang/x-ambiata/532f8473084b24fb9d8c90fda7fee9858b9fbe30/x-vector/src/X/Data/Vector/Stream/Merge.hs | haskell | | Which stream to pull from during a merge, and a single value to emit.
The value to emit will often be the read value - if pulling from left, emit the left, etc.
Left-biased: when elements from both inputs are equal, pull from left.
This is really just here as a specification.
Note that this is effectively pushing back onto the top of 'ys'.
This 'peek' is important for the implementation over streams, below.
This is a fair bit more complicated than the mergeList implementation above, but the idea is the same.
The streams themselves have no way of peeking at the head of the stream or putting a value back,
so we need to implement that by hand.
This is not particularly hard, but it does explode the number of possible states.
----------|-------------|---------|---------------|-------------|---------------|
----------|---------|---------------|-------------|---------------|
----------|-------------|---------|---------------|-------------|---------------|
----------|---------|---------------|-------------|---------------|
----------|-------------|---------|---------------|-------------|---------------|
----------|-------------|---------|---------------|-------------|---------------|
----------|---------|---------------|-------------|---------------|
----------|-------------|---------|---------------|-------------|---------------|
----------|---------|---------------|-------------|---------------|
----------|-------------|---------|---------------|-------------|---------------|
I originally had a somewhat neater version that matched against multiple things like:
however, pattern match desugaring ends up producing something like
> fail = rest
> Just lv -> case ... of
> Just rv -> doMerge
> Nothing -> fail
> Nothing -> fail
and since 'fail' is used multiple times, it won't be inlined into the result.
so the Maybes never get removed.
The lesson here is not to use compound pattern matching in stream transformers.
| # LANGUAGE NoImplicitPrelude #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE PatternGuards #
# LANGUAGE LambdaCase #
module X.Data.Vector.Stream.Merge
( MergePullFrom(..)
, mergePullOrd
, mergePullJoin
, mergePullJoinBy
, mergeList
, merge
) where
import qualified Data.Vector.Fusion.Stream.Monadic as VS
import P
data MergePullFrom a
= MergePullLeft a
| MergePullRight a
| MergePullBoth a
| Merge two ascending streams with given instance .
mergePullOrd :: Ord b => (a -> b) -> a -> a -> MergePullFrom a
mergePullOrd f
= mergePullJoin (\l _ -> MergePullLeft l) f
# INLINE mergePullOrd #
| Merge two ascending streams , using given merge function when two elements are equal
mergePullJoin :: Ord b => (a -> a -> MergePullFrom a) -> (a -> b) -> a -> a -> MergePullFrom a
mergePullJoin f c
= mergePullJoinBy f (compare `on` c)
# INLINE mergePullJoin #
| Merge two ascending streams with Ordering function , use given merge function when two elements are equal
mergePullJoinBy :: (a -> a -> MergePullFrom a) -> (a -> a -> Ordering) -> a -> a -> MergePullFrom a
mergePullJoinBy f c l r
= case c l r of
LT -> MergePullLeft l
EQ -> f l r
GT -> MergePullRight r
# INLINE mergePullJoinBy #
| Merge two lists together .
If they are already sorted and unique , the result will be sorted and unique union of the two .
mergeList :: (a -> a -> MergePullFrom a) -> [a] -> [a] -> [a]
mergeList f l r = go l r
where
go xs [] = xs
go [] ys = ys
go (x:xs) (y:ys)
= case f x y of
MergePullLeft x' -> x' : go xs (y:ys)
MergePullBoth v' -> v' : go xs ys
MergePullRight y' -> y' : go (x:xs) ys
| Merge two streams together .
If they are already sorted and unique , the result will be sorted and unique union of the two .
merge
( Stream left - state ) ( Stream right - state )
State :
( Maybe left - state , Maybe right - state
, Maybe left - peek , Maybe right - peek )
If we have a * -peek value , we will not pull from * -state .
If * -state is Nothing , the left stream is finished , but a final value may be in * -peek .
Initial state is ( Just * -state ) , with empty peeks .
While there are sixteen possibilities of the Maybes , there are only six real state types :
merge :
When we have a value in both peeks , we can compare them together and emit something .
The peek that is used is thrown away .
read - L / read - R :
We do not have a value in left / right peek , but left / right stream is not finished .
Attempt to read from left / right stream .
fill - L / fill - R :
We have a value in left / right peek , and the other stream is finished .
We can emit this value as - is . Eventually the entire leftover stream will be emitted .
done :
Both streams are finished , and both peeks are finished .
Here is a picture showing the sixteen possibilities , and which action they relate to .
left - peek
Just | Nothing
left - state | right - state | | right - peek | |
| | Just | Nothing | Just | Nothing |
| Just | merge | read - R | read - L | read - L |
| Nothing | merge | fill - L | read - L | read - L |
| Just | merge | read - R | fill - R | read - R |
| Nothing | merge | fill - L | fill - R | done |
merge
(Stream left-state) (Stream right-state)
State:
( Maybe left-state, Maybe right-state
, Maybe left-peek, Maybe right-peek )
If we have a *-peek value, we will not pull from *-state.
If *-state is Nothing, the left stream is finished, but a final value may be in *-peek.
Initial state is (Just *-state), with empty peeks.
While there are sixteen possibilities of the Maybes, there are only six real state types:
merge:
When we have a value in both peeks, we can compare them together and emit something.
The peek that is used is thrown away.
read-L / read-R:
We do not have a value in left/right peek, but left/right stream is not finished.
Attempt to read from left/right stream.
fill-L / fill-R:
We have a value in left/right peek, and the other stream is finished.
We can emit this value as-is. Eventually the entire leftover stream will be emitted.
done:
Both streams are finished, and both peeks are finished.
Here is a picture showing the sixteen possibilities, and which action they relate to.
left-peek
Just | Nothing
left-state | right-state | | right-peek | |
| | Just | Nothing | Just | Nothing |
| Just | merge | read-R | read-L | read-L |
| Nothing | merge | fill-L | read-L | read-L |
| Just | merge | read-R | fill-R | read-R |
| Nothing | merge | fill-L | fill-R | done |
-}
merge :: Monad m => (a -> a -> MergePullFrom a) -> VS.Stream m a -> VS.Stream m a -> VS.Stream m a
merge f (VS.Stream l'step l'state) (VS.Stream r'step r'state)
= VS.Stream go'step (MergeState (Just l'state) (Just r'state) Nothing Nothing)
where
> ( MergeResult ls rs ( Just lv ) ( Just rv ) ) = doMerge lv rv
> ... = rest ...
> = case ... of
This does n't play nicely with SpecConstr , I guess because it does n't look through bindings ,
go'step m
= case peekL m of
Just lv
-> case peekR m of
Just rv
-> doMerge m lv rv
Nothing
-> case stateR m of
Just rs
-> doReadR m rs
Nothing
-> doFillL m lv
Nothing
-> case stateL m of
Just ls
-> doReadL m ls
Nothing
-> case peekR m of
Just rv
-> doFillR m rv
Nothing
-> case stateR m of
Just rs
-> doReadR m rs
Nothing
-> return $ VS.Done
# INLINE go'step #
doMerge m lv rv
= case f lv rv of
MergePullLeft a -> return $ VS.Yield a m { peekL = Nothing }
MergePullRight a -> return $ VS.Yield a m { peekR = Nothing }
MergePullBoth a -> return $ VS.Yield a m { peekL = Nothing, peekR = Nothing }
# INLINE doMerge #
doReadL m ls
= do step <- l'step ls
case step of
VS.Yield lv ls' -> return $ VS.Skip m { stateL = Just ls', peekL = Just lv }
VS.Skip ls' -> return $ VS.Skip m { stateL = Just ls', peekL = Nothing }
VS.Done -> return $ VS.Skip m { stateL = Nothing, peekL = Nothing }
# INLINE doReadL #
doReadR m rs
= do step <- r'step rs
case step of
VS.Yield rv rs' -> return $ VS.Skip m { stateR = Just rs', peekR = Just rv }
VS.Skip rs' -> return $ VS.Skip m { stateR = Just rs', peekR = Nothing }
VS.Done -> return $ VS.Skip m { stateR = Nothing, peekR = Nothing }
# INLINE doReadR #
doFillL m lv
= return $ VS.Yield lv m { peekL = Nothing }
# INLINE doFillL #
doFillR m rv
= return $ VS.Yield rv m { peekR = Nothing }
# INLINE doFillR #
# INLINE merge #
data MergeState l r a
= MergeState
{ stateL :: Maybe l
, stateR :: Maybe r
, peekL :: Maybe a
, peekR :: Maybe a }
|
bfa8ff73307d9e18455374ebe6a0b731f6702c558d6d029407f5f5a995f9a083 | NorfairKing/really-safe-money | Account.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE LambdaCase #
# LANGUAGE ScopedTypeVariables #
-- === Importing this module
--
-- This module is designed to be imported as follows:
--
-- @
-- import Money.Account (Account)
-- import qualified Money.Account as Account
-- @
--
Or , if you have an @Account@ type already , maybe in a
--
-- @
-- import qualified Money.Account as Money (Account)
-- import qualified Money.Account as Account
-- @
module Money.Account
( Account (..),
fromMinimalQuantisations,
toMinimalQuantisations,
fromDouble,
toDouble,
fromRational,
toRational,
zero,
add,
sum,
subtract,
abs,
multiply,
distribute,
AccountDistribution (..),
fraction,
)
where
import Control.DeepSeq
import Control.Monad
import Data.Foldable hiding (sum)
import Data.Function
import Data.Int
import Data.Monoid
import Data.Ratio
import Data.Validity
import Data.Word
import GHC.Generics (Generic)
import Money.Amount (Amount (..))
import qualified Money.Amount as Amount
import Numeric.Natural
import Prelude hiding (abs, fromRational, subtract, sum, toRational)
import qualified Prelude
-- | An account of money. Like 'Amount' but can also be negative.
data Account
= Positive !Amount
| Negative !Amount
deriving (Show, Read, Generic)
instance Validity Account
instance NFData Account
instance Eq Account where
(==) = (==) `on` toMinimalQuantisations
instance Ord Account where
compare = compare `on` toMinimalQuantisations
-- | Turn a number of minimal quantisations into an account.
--
This will fail if the integer is not in the range @[- 2 ^ 64 .. 2 ^ 64]@
fromMinimalQuantisations :: Integer -> Maybe Account
fromMinimalQuantisations i =
let maxBoundI :: Integer
maxBoundI = (toInteger :: Word64 -> Integer) (maxBound :: Word64)
a :: Integer
a = (Prelude.abs :: Integer -> Integer) i
in if a > maxBoundI
then Nothing
else
let w :: Word64
w = (fromIntegral :: Integer -> Word64) a
amount :: Amount
amount = Amount.fromMinimalQuantisations w
in Just $
if i >= 0
then Positive amount
else Negative amount
-- | Turn an amount into a number of minimal quantisations.
--
-- === API Note
--
We return ' Integer ' because the result does not fit into a ' Word64 '
toMinimalQuantisations :: Account -> Integer
toMinimalQuantisations account =
let f = case account of
Positive _ -> id
Negative _ -> negate
in f $ (fromIntegral :: Word64 -> Integer) $ Amount.toMinimalQuantisations (abs account)
-- | Turn an amount of money into a 'Double'.
--
WARNING : the result will be infinite or NaN if the quantisation factor is @0@
toDouble :: Word32 -> Account -> Double
toDouble quantisationFactor account =
let f = case account of
Positive _ -> id
Negative _ -> negate
in f $ Amount.toDouble quantisationFactor (abs account)
-- | Turn a 'Double' into an amount of money.
--
-- This function will fail if the 'Double':
--
* is @NaN@
-- * is infinite
-- * does not represent an integral amount of minimal quantisations
--
-- WARNING: This function _does not_ roundtrip with toDouble because 'Account' contains more precision than 'Double' does.
fromDouble :: Word32 -> Double -> Maybe Account
fromDouble quantisationFactor d =
let d' = Prelude.abs d
f = if d >= 0 then Positive else Negative
in f <$> Amount.fromDouble quantisationFactor d'
-- | Turn an amount of money into a 'Rational'.
--
WARNING : that the result will be @Account : % 0@ if the quantisation factor is @0@.
toRational :: Word32 -> Account -> Rational
toRational quantisationFactor account =
let f = case account of
Positive _ -> id
Negative _ -> negate
in f $ Amount.toRational quantisationFactor (abs account)
-- | Turn a 'Rational' into an amount of money.
--
-- This function will fail if the 'Rational':
--
-- * Is NaN (0 :% 0)
-- * Is infinite (1 :% 0) or (-1 :% 0)
* Is non - normalised ( 5 : % 5 )
-- * Does represent an integer number of minimal quantisations.
fromRational :: Word32 -> Rational -> Maybe Account
fromRational quantisationFactor r =
let r' = Prelude.abs r
f = if r >= 0 then Positive else Negative
in f <$> Amount.fromRational quantisationFactor r'
-- | No money in the account
zero :: Account
zero = Positive Amount.zero
| Add two accounts of money .
--
-- This operation may fail when overflow over either bound occurs.
--
WARNING : This function can be used to accidentally add up two accounts of different currencies .
add :: Account -> Account -> Maybe Account
add (Positive a1) (Positive a2) = Positive <$> Amount.add a1 a2
add (Negative a1) (Negative a2) = Negative <$> Amount.add a1 a2
add a1 a2 =
let i1 :: Integer
i1 = toMinimalQuantisations a1
i2 :: Integer
i2 = toMinimalQuantisations a2
r :: Integer
r = i1 + i2
in fromMinimalQuantisations r
-- | Add a number of accounts of money together.
--
-- See 'add'
--
-- Note that this function will fail in the same ways that iteratively 'add' will fail.
sum :: forall f. Foldable f => f Account -> Maybe Account
sum = foldM add zero
| Add two accounts of money .
--
-- This operation may fail when overflow over either bound occurs.
--
WARNING : This function can be used to accidentally subtract two accounts of different currencies .
subtract :: Account -> Account -> Maybe Account
subtract (Positive a1) (Negative a2) = Positive <$> Amount.add a1 a2
subtract (Negative a1) (Positive a2) = Negative <$> Amount.add a1 a2
subtract a1 a2 =
let i1 :: Integer
i1 = toMinimalQuantisations a1
i2 :: Integer
i2 = toMinimalQuantisations a2
r :: Integer
r = i1 - i2
in fromMinimalQuantisations r
-- | The absolute value of the account
--
-- The 'Account' type has a symmetrical range so this function will always return a correct result.
--
-- Note that this returns an 'Amount' and not an 'Account' because the result is always positive.
abs :: Account -> Amount
abs = \case
Negative a -> a
Positive a -> a
-- | Multiply an account by an integer scalar
--
-- This operation will fail when overflow over either bound occurs.
multiply :: Int32 -> Account -> Maybe Account
multiply factor account =
let af = (fromIntegral :: Int32 -> Word32) ((Prelude.abs :: Int32 -> Int32) factor)
f = case (compare factor 0, compare account zero) of
(EQ, _) -> const zero
(_, EQ) -> const zero
(GT, GT) -> Positive
(GT, LT) -> Negative
(LT, GT) -> Negative
(LT, LT) -> Positive
in f <$> Amount.multiply af (abs account)
-- | Distribute an amount of money into chunks that are as evenly distributed as possible.
distribute :: Account -> Word16 -> AccountDistribution
distribute a f =
let aa = abs a
af = (fromIntegral :: Word16 -> Word32) (Prelude.abs f)
func =
if a >= zero
then Positive
else Negative
in case Amount.distribute aa af of
Amount.DistributedIntoZeroChunks -> DistributedIntoZeroChunks
Amount.DistributedZeroAmount -> DistributedZeroAccount
Amount.DistributedIntoEqualChunks numberOfChunks chunk ->
DistributedIntoEqualChunks
numberOfChunks
(func chunk)
Amount.DistributedIntoUnequalChunks numberOfLargerChunks largerChunk numberOfSmallerChunks smallerChunk ->
DistributedIntoUnequalChunks numberOfLargerChunks (func largerChunk) numberOfSmallerChunks (func smallerChunk)
-- | The result of 'distribute'
data AccountDistribution
| The second argument was zero .
DistributedIntoZeroChunks
| The first argument was a zero amount .
DistributedZeroAccount
| -- | Distributed into this many equal chunks of this amount
DistributedIntoEqualChunks !Word32 !Account
| Distributed into unequal chunks , this many of the first ( larger , in absolute value ) amount , and this many of the second ( slightly smaller ) amount .
DistributedIntoUnequalChunks !Word32 !Account !Word32 !Account
deriving (Show, Read, Eq, Generic)
instance Validity AccountDistribution where
validate ad =
mconcat
[ genericValidate ad,
case ad of
DistributedIntoUnequalChunks _ a1 _ a2 ->
declare "The larger chunks are larger in absolute value" $
abs a1 > abs a2
_ -> valid
]
instance NFData AccountDistribution
-- | Fractional multiplication
fraction ::
Account ->
Rational ->
(Account, Rational)
fraction account f =
let af = (realToFrac :: Rational -> Ratio Natural) ((Prelude.abs :: Rational -> Rational) f)
aa = abs account
(amount, actualFraction) = Amount.fraction aa af
func :: Amount -> Rational -> (Account, Rational)
func a r = case (compare account zero, compare f 0) of
(EQ, _) -> (zero, r)
(_, EQ) -> (zero, 0)
(GT, GT) -> (Positive a, r)
(GT, LT) -> (Negative a, -r)
(LT, GT) -> (Negative a, r)
(LT, LT) -> (Positive a, -r)
in func amount ((realToFrac :: Ratio Natural -> Rational) actualFraction)
| null | https://raw.githubusercontent.com/NorfairKing/really-safe-money/6a4ecbdd47d094e51c8e3874cda57d900a03ff31/really-safe-money/src/Money/Account.hs | haskell | === Importing this module
This module is designed to be imported as follows:
@
import Money.Account (Account)
import qualified Money.Account as Account
@
@
import qualified Money.Account as Money (Account)
import qualified Money.Account as Account
@
| An account of money. Like 'Amount' but can also be negative.
| Turn a number of minimal quantisations into an account.
| Turn an amount into a number of minimal quantisations.
=== API Note
| Turn an amount of money into a 'Double'.
| Turn a 'Double' into an amount of money.
This function will fail if the 'Double':
* is infinite
* does not represent an integral amount of minimal quantisations
WARNING: This function _does not_ roundtrip with toDouble because 'Account' contains more precision than 'Double' does.
| Turn an amount of money into a 'Rational'.
| Turn a 'Rational' into an amount of money.
This function will fail if the 'Rational':
* Is NaN (0 :% 0)
* Is infinite (1 :% 0) or (-1 :% 0)
* Does represent an integer number of minimal quantisations.
| No money in the account
This operation may fail when overflow over either bound occurs.
| Add a number of accounts of money together.
See 'add'
Note that this function will fail in the same ways that iteratively 'add' will fail.
This operation may fail when overflow over either bound occurs.
| The absolute value of the account
The 'Account' type has a symmetrical range so this function will always return a correct result.
Note that this returns an 'Amount' and not an 'Account' because the result is always positive.
| Multiply an account by an integer scalar
This operation will fail when overflow over either bound occurs.
| Distribute an amount of money into chunks that are as evenly distributed as possible.
| The result of 'distribute'
| Distributed into this many equal chunks of this amount
| Fractional multiplication | # LANGUAGE DeriveGeneric #
# LANGUAGE LambdaCase #
# LANGUAGE ScopedTypeVariables #
Or , if you have an @Account@ type already , maybe in a
module Money.Account
( Account (..),
fromMinimalQuantisations,
toMinimalQuantisations,
fromDouble,
toDouble,
fromRational,
toRational,
zero,
add,
sum,
subtract,
abs,
multiply,
distribute,
AccountDistribution (..),
fraction,
)
where
import Control.DeepSeq
import Control.Monad
import Data.Foldable hiding (sum)
import Data.Function
import Data.Int
import Data.Monoid
import Data.Ratio
import Data.Validity
import Data.Word
import GHC.Generics (Generic)
import Money.Amount (Amount (..))
import qualified Money.Amount as Amount
import Numeric.Natural
import Prelude hiding (abs, fromRational, subtract, sum, toRational)
import qualified Prelude
data Account
= Positive !Amount
| Negative !Amount
deriving (Show, Read, Generic)
instance Validity Account
instance NFData Account
instance Eq Account where
(==) = (==) `on` toMinimalQuantisations
instance Ord Account where
compare = compare `on` toMinimalQuantisations
This will fail if the integer is not in the range @[- 2 ^ 64 .. 2 ^ 64]@
fromMinimalQuantisations :: Integer -> Maybe Account
fromMinimalQuantisations i =
let maxBoundI :: Integer
maxBoundI = (toInteger :: Word64 -> Integer) (maxBound :: Word64)
a :: Integer
a = (Prelude.abs :: Integer -> Integer) i
in if a > maxBoundI
then Nothing
else
let w :: Word64
w = (fromIntegral :: Integer -> Word64) a
amount :: Amount
amount = Amount.fromMinimalQuantisations w
in Just $
if i >= 0
then Positive amount
else Negative amount
We return ' Integer ' because the result does not fit into a ' Word64 '
toMinimalQuantisations :: Account -> Integer
toMinimalQuantisations account =
let f = case account of
Positive _ -> id
Negative _ -> negate
in f $ (fromIntegral :: Word64 -> Integer) $ Amount.toMinimalQuantisations (abs account)
WARNING : the result will be infinite or NaN if the quantisation factor is @0@
toDouble :: Word32 -> Account -> Double
toDouble quantisationFactor account =
let f = case account of
Positive _ -> id
Negative _ -> negate
in f $ Amount.toDouble quantisationFactor (abs account)
* is @NaN@
fromDouble :: Word32 -> Double -> Maybe Account
fromDouble quantisationFactor d =
let d' = Prelude.abs d
f = if d >= 0 then Positive else Negative
in f <$> Amount.fromDouble quantisationFactor d'
WARNING : that the result will be @Account : % 0@ if the quantisation factor is @0@.
toRational :: Word32 -> Account -> Rational
toRational quantisationFactor account =
let f = case account of
Positive _ -> id
Negative _ -> negate
in f $ Amount.toRational quantisationFactor (abs account)
* Is non - normalised ( 5 : % 5 )
fromRational :: Word32 -> Rational -> Maybe Account
fromRational quantisationFactor r =
let r' = Prelude.abs r
f = if r >= 0 then Positive else Negative
in f <$> Amount.fromRational quantisationFactor r'
zero :: Account
zero = Positive Amount.zero
| Add two accounts of money .
WARNING : This function can be used to accidentally add up two accounts of different currencies .
add :: Account -> Account -> Maybe Account
add (Positive a1) (Positive a2) = Positive <$> Amount.add a1 a2
add (Negative a1) (Negative a2) = Negative <$> Amount.add a1 a2
add a1 a2 =
let i1 :: Integer
i1 = toMinimalQuantisations a1
i2 :: Integer
i2 = toMinimalQuantisations a2
r :: Integer
r = i1 + i2
in fromMinimalQuantisations r
sum :: forall f. Foldable f => f Account -> Maybe Account
sum = foldM add zero
| Add two accounts of money .
WARNING : This function can be used to accidentally subtract two accounts of different currencies .
subtract :: Account -> Account -> Maybe Account
subtract (Positive a1) (Negative a2) = Positive <$> Amount.add a1 a2
subtract (Negative a1) (Positive a2) = Negative <$> Amount.add a1 a2
subtract a1 a2 =
let i1 :: Integer
i1 = toMinimalQuantisations a1
i2 :: Integer
i2 = toMinimalQuantisations a2
r :: Integer
r = i1 - i2
in fromMinimalQuantisations r
abs :: Account -> Amount
abs = \case
Negative a -> a
Positive a -> a
multiply :: Int32 -> Account -> Maybe Account
multiply factor account =
let af = (fromIntegral :: Int32 -> Word32) ((Prelude.abs :: Int32 -> Int32) factor)
f = case (compare factor 0, compare account zero) of
(EQ, _) -> const zero
(_, EQ) -> const zero
(GT, GT) -> Positive
(GT, LT) -> Negative
(LT, GT) -> Negative
(LT, LT) -> Positive
in f <$> Amount.multiply af (abs account)
-- | Distribute an account into a number of chunks.
-- Mirrors 'Amount.distribute' on the magnitude and re-applies the
-- sign of the original account to every resulting chunk.
distribute :: Account -> Word16 -> AccountDistribution
distribute a f =
  let aa = abs a
      af = (fromIntegral :: Word16 -> Word32) (Prelude.abs f)
      -- The constructor that restores the sign of the original account.
      func =
        if a >= zero
          then Positive
          else Negative
   in case Amount.distribute aa af of
        Amount.DistributedIntoZeroChunks -> DistributedIntoZeroChunks
        Amount.DistributedZeroAmount -> DistributedZeroAccount
        Amount.DistributedIntoEqualChunks numberOfChunks chunk ->
          DistributedIntoEqualChunks
            numberOfChunks
            (func chunk)
        Amount.DistributedIntoUnequalChunks numberOfLargerChunks largerChunk numberOfSmallerChunks smallerChunk ->
          DistributedIntoUnequalChunks numberOfLargerChunks (func largerChunk) numberOfSmallerChunks (func smallerChunk)
-- | The result of distributing an account into chunks.
-- NOTE(review): the '='/'|' constructor separators were lost in this copy
-- of the file and have been restored here.
data AccountDistribution
  = -- | The second argument was zero.
    DistributedIntoZeroChunks
  | -- | The first argument was a zero amount.
    DistributedZeroAccount
  | -- | Distributed into this many equal chunks of this account.
    DistributedIntoEqualChunks !Word32 !Account
  | -- | Distributed into unequal chunks, this many of the first (larger, in absolute value) amount, and this many of the second (slightly smaller) amount.
    DistributedIntoUnequalChunks !Word32 !Account !Word32 !Account
  deriving (Show, Read, Eq, Generic)
-- | On top of the generic field validity, an unequal distribution's
-- "larger" chunks must really be larger in absolute value.
instance Validity AccountDistribution where
  validate ad =
    mconcat
      [ genericValidate ad,
        case ad of
          DistributedIntoUnequalChunks _ a1 _ a2 ->
            declare "The larger chunks are larger in absolute value" $
              abs a1 > abs a2
          _ -> valid
      ]
-- | Full evaluation via the 'Generic' representation.
instance NFData AccountDistribution
-- | Take a fraction of an account.
-- Returns the resulting account together with the fraction that was
-- actually taken (which may differ from the requested one because of
-- quantisation — see 'Amount.fraction'); sign bookkeeping mirrors
-- 'multiply'.
fraction ::
  Account ->
  Rational ->
  (Account, Rational)
fraction account f =
  let af = (realToFrac :: Rational -> Ratio Natural) ((Prelude.abs :: Rational -> Rational) f)
      aa = abs account
      (amount, actualFraction) = Amount.fraction aa af
      -- Reattach the sign to both the magnitude and the actual fraction.
      func :: Amount -> Rational -> (Account, Rational)
      func a r = case (compare account zero, compare f 0) of
        (EQ, _) -> (zero, r)
        (_, EQ) -> (zero, 0)
        (GT, GT) -> (Positive a, r)
        (GT, LT) -> (Negative a, -r)
        (LT, GT) -> (Negative a, r)
        (LT, LT) -> (Positive a, -r)
   in func amount ((realToFrac :: Ratio Natural -> Rational) actualFraction)
|
9385db368b7a5450ac4cfaca15aee3aeaa96ad94127bb30357f7346e678af2f4 | bjpop/blip | Compile.hs | # LANGUAGE TypeFamilies , TypeSynonymInstances , FlexibleInstances ,
PatternGuards , RecordWildCards #
PatternGuards, RecordWildCards #-}
-----------------------------------------------------------------------------
-- |
-- Module : Blip.Compiler.Compile
-- Copyright   : (c) 2012, 2013, 2014
-- License : BSD-style
-- Maintainer :
-- Stability : experimental
-- Portability : ghc
--
Compilation of Python 3 source code into bytecode .
--
-- Basic algorithm:
--
--    1) Parse the source code into an AST.
--    2) Compute the scope of all variables in the module
--       (one pass over the AST).
--    3) Compile the AST for the whole module into a (possibly nested)
--       code object (one pass over the AST).
--    4) Write the code object to a .pyc file.
--
-- The following Python constructs are compiled into code objects:
-- - The top-level of the module.
-- - Function definitions (def and lambda).
-- - Class definitions.
--    - Comprehensions.
--
-- The statements and expressions in each of the above constructs are
-- recursively compiled into bytecode instructions. Initially, the actual
-- addresses of jump instruction targets are not known. Instead the jump
-- targets are just labels. At the end of the compilation of each
-- construct the labelled instructions are converted into jumps to
-- actual addresses (one pass over the bytecode stream).
-- Also the maximum stack size of each code object is computed (one pass
-- over the bytecode stream).
--
-- We currently make no attempt to optimise the generated code.
--
-- Bytecode is generated directly from the AST, there is no intermediate
-- language, and no explict control-flow graph.
--
-----------------------------------------------------------------------------
module Blip.Compiler.Compile
(compileFile, compileReplInput, writePycFile)
where
import Prelude hiding (mapM)
import Blip.Compiler.Desugar (desugarComprehension, desugarWith, resultName)
import Blip.Compiler.Utils
( isPureExpr, isPyObjectExpr, mkAssignVar, mkList
, mkVar, mkMethodCall, mkStmtExpr, mkSet, mkDict, mkAssign
, mkSubscript, mkReturn, mkYield, spanToScopeIdentifier )
import Blip.Compiler.StackDepth (maxStackDepth)
import Blip.Compiler.State
( setBlockState, getBlockState, initBlockState, initState
, emitCodeNoArg, emitCodeArg, compileConstantEmit
, compileConstant, getFileName, newLabel, labelNextInstruction
, getObjectName, setObjectName
, getNestedScope, ifDump, getLocalScope
, indexedVarSetKeys, emitReadVar, emitWriteVar, emitDeleteVar
, lookupNameVar, lookupClosureVar, setFlag
, peekFrameBlock, withFrameBlock, setFastLocals, setArgCount
, setLineNumber, setFirstLineNumber )
import Blip.Compiler.Assemble (assemble)
import Blip.Compiler.Monad (Compile (..), runCompileMonad)
import Blip.Compiler.Types
( Identifier, CompileConfig (..)
, CompileState (..), BlockState (..)
, AnnotatedCode (..), Dumpable (..), IndexedVarSet, VarInfo (..)
, FrameBlockInfo (..), Context (..), ParameterTypes (..), LocalScope (..) )
import Blip.Compiler.Scope (topScope, renderScope)
import Blip.Marshal as Blip
( writePyc, PycFile (..), PyObject (..), co_generator )
import Blip.Bytecode (Opcode (..), encode)
import Language.Python.Version3.Parser (parseModule, parseStmt)
import Language.Python.Common.AST as AST
( Annotated (..), ModuleSpan, Module (..), StatementSpan, Statement (..)
, ExprSpan, Expr (..), Ident (..), ArgumentSpan, Argument (..)
, OpSpan, Op (..), Handler (..), HandlerSpan, ExceptClause (..)
, ExceptClauseSpan, ImportItem (..), ImportItemSpan, ImportRelative (..)
, ImportRelativeSpan, FromItems (..), FromItemsSpan, FromItem (..)
, FromItemSpan, DecoratorSpan, Decorator (..), ComprehensionSpan
, Comprehension (..), SliceSpan, Slice (..), AssignOpSpan, AssignOp (..)
, ComprehensionExpr (..), ComprehensionExprSpan
, ParameterSpan, Parameter (..), RaiseExpr (..), RaiseExprSpan
, DictKeyDatumList(DictMappingPair), YieldArg (..), YieldArgSpan )
import Language.Python.Common (prettyText)
import Language.Python.Common.StringEscape (unescapeString)
import Language.Python.Common.SrcLocation (SrcSpan (..))
import System.FilePath ((<.>), takeBaseName)
-- XXX Commented out to avoid bug in unix package when building on OS X,
-- The unix package is depended on by the directory package.
-- import System.Directory (getModificationTime, canonicalizePath)
-- import System.Time (ClockTime (..))
import System.IO (openFile, IOMode(..), hClose, hFileSize, hGetContents)
import Data.Word (Word32, Word16)
import Data.Int (Int32)
import Data.Traversable as Traversable (mapM)
import qualified Data.ByteString.Lazy as B (pack)
import Data.String (fromString)
import Data.List (intersperse)
import Control.Monad (unless, forM_, when, replicateM_, foldM)
import Control.Monad.Trans (liftIO)
import Data.Bits ((.|.), shiftL)
-- Compile the input from the REPL command line to an object.
-- | Parse one REPL input, wrap expression statements so their value is
-- printed, scope-check the result as if it were a module, and compile
-- it to a code object.
compileReplInput :: CompileConfig -> String -> IO PyObject
compileReplInput config replString = do
   stmts <- parseStmtAndCheckErrors replString
   let printWrapped = wrapWithPrint stmts
   -- pretend that the statements are a module on their own to calculate the variable scope
   (moduleLocals, nestedScope) <- topScope $ Module printWrapped
   let state = initState ModuleContext moduleLocals
                  nestedScope config ""
   compileReplStmts state printWrapped
-- Support for REPL printing of expressions.
-- If the statement entered at the REPL is an expression, then
-- we try to print it out.
-- We transform an expression E into:
-- _ = E
-- print(_)
--
-- XXX if the result of E is None then we should not print it out,
-- to be consistent with CPython.
-- Want something like this:
-- try:
-- _ = E
-- catch Exception as e:
-- stackTrace e
-- elif _ is not None:
-- print(e)
-- | If the REPL input is a single expression statement E, rewrite it to
--
-- >  _ = E
-- >  print(_)
--
-- so the value of the expression is displayed.  Any other statement
-- list is returned unchanged.
wrapWithPrint :: [StatementSpan] -> [StatementSpan]
wrapWithPrint [StmtExpr {..}] = [bindResult, printResult]
   where
   mkIdent str = Ident { ident_string = str, ident_annot = SpanEmpty }
   mkVarExpr str = Var { var_ident = mkIdent str, expr_annot = SpanEmpty }
   resultVar = mkVarExpr "_"
   bindResult =
      Assign { assign_to = [resultVar]
             , assign_expr = stmt_expr
             , stmt_annot = SpanEmpty }
   callPrint =
      Call { call_fun = mkVarExpr "print"
           , call_args = [ArgExpr { arg_expr = resultVar, arg_annot = SpanEmpty }]
           , expr_annot = SpanEmpty }
   printResult = StmtExpr { stmt_expr = callPrint, stmt_annot = SpanEmpty }
wrapWithPrint stmts = stmts
-- Compile Python source code to bytecode, returning a representation
-- of a .pyc file contents.
compileFile :: CompileConfig -- Configuration options
   -> FilePath -- The file path of the input Python source
   -> IO PycFile
compileFile config path = do
   pyHandle <- openFile path ReadMode
   sizeInBytes <- hFileSize pyHandle
   fileContents <- hGetContents pyHandle
   -- Modification time and path canonicalisation are stubbed out while
   -- System.Directory is unusable (see the commented-out imports above).
   -- modifiedTime <- getModificationTime path
   -- let modSeconds = case modifiedTime of TOD secs _picoSecs -> secs
   let modSeconds = (0 :: Integer)
   pyModule <- parseFileAndCheckErrors fileContents path
   (moduleLocals, nestedScope) <- topScope pyModule
   -- canonicalPath <- canonicalizePath path
   canonicalPath <- return path
   let state = initState ModuleContext moduleLocals
                  nestedScope config canonicalPath
   pyc <- compileModule state (fromIntegral modSeconds)
             (fromIntegral sizeInBytes) pyModule
   return pyc
-- | Serialise a 'PycFile' next to the source, named after its base name
-- with a ".pyc" extension.
writePycFile :: PycFile -> FilePath -> IO ()
writePycFile pyc path = do
   handle <- openFile (takeBaseName path <.> ".pyc") WriteMode
   writePyc handle pyc
   hClose handle
-- Parse the Python source from a statement into an AST, check for any syntax errors.
-- | Parse one REPL statement; abort with 'error' on a syntax error.
parseStmtAndCheckErrors :: String -> IO [StatementSpan]
parseStmtAndCheckErrors stmtString =
   case parseStmt stmtString "<stdin>" of
      Right (stmts, _comments) -> return stmts
      Left err -> error $ "parse error: " ++ prettyText err
-- Parse the Python source from a File into an AST, check for any syntax errors.
-- | Parse a whole Python module; abort with 'error' on a syntax error.
parseFileAndCheckErrors :: String -> FilePath -> IO ModuleSpan
parseFileAndCheckErrors fileContents sourceName =
   case parseModule fileContents sourceName of
      Right (pyModule, _comments) -> return pyModule
      Left err -> error $ "parse error: " ++ prettyText err
-- | Compile a module AST and package the result as a 'PycFile'.
-- NOTE(review): the "-> ModuleSpan" line of this signature was lost in
-- this copy of the file (only its comment survived) and has been restored.
compileModule :: CompileState -- initial compiler state
   -> Word32     -- modification time
   -> Word32     -- size in bytes
   -> ModuleSpan -- AST of the module
   -> IO PycFile
compileModule state pyFileModifiedTime pyFileSizeBytes mod = do
   obj <- compiler mod state
   return $ PycFile
      { magic = compileConfig_magic $ state_config state
      , modified_time = pyFileModifiedTime
      , size = pyFileSizeBytes
      , object = obj }
-- | Compile (already print-wrapped) REPL statements to a code object.
compileReplStmts :: CompileState -> [StatementSpan] -> IO PyObject
compileReplStmts state replStatements =
   compiler (Body replStatements) state
-- | Run the compile monad over any compilable thing.
compiler :: Compilable a => a -> CompileState -> IO (CompileResult a)
compiler = runCompileMonad . compile
-- | Things that can be compiled; each instance chooses its own result type.
class Compilable a where
   type CompileResult a :: *
   compile :: a -> Compile (CompileResult a)
-- | A list is compiled element-wise, collecting the results.
instance Compilable a => Compilable [a] where
   type CompileResult [a] = [CompileResult a]
   compile = mapM compile
-- | A whole module compiles to one code object named "<module>".
instance Compilable ModuleSpan where
   type CompileResult ModuleSpan = PyObject
   compile ast@(Module stmts) = do
      maybeDumpScope
      maybeDumpAST ast
      setObjectName "<module>"
      -- The module doc string (if any) must become the first constant.
      compileClassModuleDocString stmts
      compile $ Body stmts
-- body of module, function and class
-- | The statements forming the body of a module, function or class.
newtype Body = Body [StatementSpan]
-- | A body compiles to a complete code object: its statements, an
-- implicit "return None", then assembly of the accumulated bytecode.
instance Compilable Body where
   type CompileResult Body = PyObject
   compile (Body stmts) = do
      mapM_ compile stmts
      -- XXX we could avoid this 'return None' if all branches in the code
      -- ended with a return statement. Can fix this in an optimisation step
      -- with control flow analysis.
      returnNone
      assemble
      makeObject
-- Build an object from all the state computed during compilation, such
-- as the bytecode sequence, variable information and so on.
argcount is the number of arguments , not counting * or * * kwargs .
makeObject :: Compile PyObject
makeObject = do
   annotatedCode <- getBlockState state_instructions
   let stackDepth = maxStackDepth annotatedCode
   names <- getBlockState state_names
   constants <- getBlockState state_constants
   freeVars <- getBlockState state_freeVars
   cellVars <- getBlockState state_cellVars
   argcount <- getBlockState state_argcount
   flags <- getBlockState state_flags
   fastLocals <- getBlockState state_fastLocals
   firstLineNumber <- getBlockState state_firstLineNumber
   lineNumberTable <- compileLineNumberTable firstLineNumber
   let code = map annotatedCode_bytecode annotatedCode
       localVarNames = map Unicode $ indexedVarSetKeys fastLocals
       -- NOTE: this binding shadows the imported 'maxStackDepth' function;
       -- here it is the largest stack size representable in the code object.
       maxStackDepth = maxBound
   if stackDepth > maxStackDepth
      -- XXX make a better error message
      then error $ "Maximum stack depth " ++ show maxStackDepth ++
                   " exceeded: " ++ show stackDepth
      else do
         pyFileName <- getFileName
         objectName <- getObjectName
         let obj = Code
                   { argcount = argcount
                   , kwonlyargcount = 0
                   , nlocals = fromIntegral $ length localVarNames
                   , stacksize = stackDepth
                   , flags = flags
                   , code = String $ encode code
                   , consts = makeConstants constants
                   , names = makeNames names
                   , varnames = Blip.Tuple localVarNames
                   , freevars = makeVarSetTuple freeVars
                   , cellvars = makeVarSetTuple cellVars
                   , filename = Unicode pyFileName
                   , name = Unicode objectName
                   , firstlineno = firstLineNumber
                   , lnotab = lineNumberTable
                   }
         return obj
   where
   makeVarSetTuple :: IndexedVarSet -> PyObject
   makeVarSetTuple varSet =
      Blip.Tuple $ map Unicode $ indexedVarSetKeys varSet
   -- Constants and names are accumulated in reverse order during
   -- compilation, so restore source order here.
   makeConstants :: [PyObject] -> PyObject
   makeConstants = Blip.Tuple . reverse
   makeNames :: [Identifier] -> PyObject
   makeNames = Blip.Tuple . map Unicode . reverse
instance Compilable StatementSpan where
   type CompileResult StatementSpan = ()
   -- Record the statement's source line number before emitting its code,
   -- so the line-number table stays in sync with the bytecode stream.
   compile stmt =
      setLineNumber (annot stmt) >>
      compileStmt stmt
compileStmt :: StatementSpan -> Compile ()
compileStmt (Assign {..}) = do
compile assign_expr
compileAssignments assign_to
compileStmt (AugmentedAssign {..}) =
case aug_assign_to of
Var {..} -> do
let varIdent = ident_string var_ident
emitReadVar varIdent
compile aug_assign_expr
compile aug_assign_op
emitWriteVar varIdent
Subscript {..} -> do
compile subscriptee
compile subscript_expr
emitCodeNoArg DUP_TOP_TWO -- avoids re-doing the above two later when we store
emitCodeNoArg BINARY_SUBSCR
compile aug_assign_expr
compile aug_assign_op
emitCodeNoArg ROT_THREE
emitCodeNoArg STORE_SUBSCR
SlicedExpr {..} -> do
compile slicee
compileSlices slices
emitCodeNoArg DUP_TOP_TWO -- avoids re-doing the above two later when we store
emitCodeNoArg BINARY_SUBSCR
compile aug_assign_expr
compile aug_assign_op
emitCodeNoArg ROT_THREE
emitCodeNoArg STORE_SUBSCR
expr@(Dot {..}) -> do
compile dot_expr
emitCodeNoArg DUP_TOP
index <- lookupNameVar $ ident_string $ dot_attribute
emitCodeArg LOAD_ATTR index
compile aug_assign_expr
compile aug_assign_op
emitCodeNoArg ROT_TWO
emitCodeArg STORE_ATTR index
      {-
      expr@(BinaryOp { operator = Dot {}, right_op_arg = Var {..}}) -> do
         compile $ left_op_arg expr
         emitCodeNoArg DUP_TOP
         index <- lookupNameVar $ ident_string $ var_ident
         emitCodeArg LOAD_ATTR index
         compile aug_assign_expr
         compile aug_assign_op
         emitCodeNoArg ROT_TWO
         emitCodeArg STORE_ATTR index
      -}
other -> error $ "unexpected expression in augmented assignment: " ++ prettyText other
compileStmt (Return { return_expr = Nothing }) = returnNone
compileStmt (Return { return_expr = Just expr }) =
compile expr >> emitCodeNoArg RETURN_VALUE
compileStmt (Pass {}) = return ()
compileStmt (StmtExpr {..}) =
unless (isPureExpr stmt_expr) $
compile stmt_expr >> emitCodeNoArg POP_TOP
compileStmt (Conditional {..}) = do
restLabel <- newLabel
mapM_ (compileGuard restLabel) cond_guards
mapM_ compile cond_else
labelNextInstruction restLabel
compileStmt (While {..}) = do
startLoop <- newLabel
endLoop <- newLabel
anchor <- newLabel
emitCodeArg SETUP_LOOP endLoop
withFrameBlock (FrameBlockLoop startLoop) $ do
labelNextInstruction startLoop
compile while_cond
emitCodeArg POP_JUMP_IF_FALSE anchor
mapM_ compile while_body
emitCodeArg JUMP_ABSOLUTE startLoop
labelNextInstruction anchor
emitCodeNoArg POP_BLOCK
mapM_ compile while_else
labelNextInstruction endLoop
compileStmt (For {..}) = do
startLoop <- newLabel
endLoop <- newLabel
withFrameBlock (FrameBlockLoop startLoop) $ do
anchor <- newLabel
emitCodeArg SETUP_LOOP endLoop
compile for_generator
emitCodeNoArg GET_ITER
labelNextInstruction startLoop
emitCodeArg FOR_ITER anchor
let num_targets = length for_targets
when (num_targets > 1) $ do
emitCodeArg UNPACK_SEQUENCE $ fromIntegral num_targets
mapM_ compileAssignTo for_targets
mapM_ compile for_body
emitCodeArg JUMP_ABSOLUTE startLoop
labelNextInstruction anchor
emitCodeNoArg POP_BLOCK
mapM_ compile for_else
labelNextInstruction endLoop
compileStmt stmt@(Fun {..}) = compileFun stmt []
compileStmt stmt@(Class {..}) = compileClass stmt []
-- XXX assertions appear to be turned off if the code is compiled
-- for optimisation
If the assertion expression is a tuple of non - zero length , then
-- it is always True: CPython warns about this
compileStmt (Assert {..}) = do
case assert_exprs of
test_expr:restAssertExprs -> do
compile test_expr
end <- newLabel
emitCodeArg POP_JUMP_IF_TRUE end
assertionErrorVar <- lookupNameVar "AssertionError"
emitCodeArg LOAD_GLOBAL assertionErrorVar
case restAssertExprs of
assertMsgExpr:_ -> do
compile assertMsgExpr
emitCodeArg CALL_FUNCTION 1
_other -> return ()
emitCodeArg RAISE_VARARGS 1
labelNextInstruction end
_other -> error "assert with no test"
compileStmt stmt@(Try {..}) = compileTry stmt
compileStmt (Import {..}) = mapM_ compile import_items
-- XXX need to handle from __future__
compileStmt (FromImport {..}) = do
let level = 0 -- XXX this should be the level of nesting
compileConstantEmit $ Blip.Int level
let names = fromItemsIdentifiers from_items
namesTuple = Blip.Tuple $ map Unicode names
compileConstantEmit namesTuple
compileFromModule from_module
case from_items of
ImportEverything {} -> do
emitCodeNoArg IMPORT_STAR
FromItems {..} -> do
forM_ from_items_items $ \FromItem {..} -> do
index <- lookupNameVar $ ident_string from_item_name
emitCodeArg IMPORT_FROM index
let storeName = case from_as_name of
Nothing -> from_item_name
Just asName -> asName
emitWriteVar $ ident_string storeName
emitCodeNoArg POP_TOP
-- XXX should check that we are inside a loop
compileStmt (Break {}) = emitCodeNoArg BREAK_LOOP
compileStmt (Continue {}) = do
maybeFrameBlockInfo <- peekFrameBlock
case maybeFrameBlockInfo of
Nothing -> error loopError
Just (FrameBlockLoop label) -> emitCodeArg JUMP_ABSOLUTE label
Just FrameBlockFinallyEnd ->
error finallyError
Just _other -> checkFrameBlocks
where
-- keep blocking the frame block stack until we either find
-- a loop entry, otherwise generate an error
checkFrameBlocks :: Compile ()
checkFrameBlocks = do
maybeFrameBlockInfo <- peekFrameBlock
case maybeFrameBlockInfo of
Nothing -> error loopError
Just FrameBlockFinallyEnd -> error finallyError
Just (FrameBlockLoop label) ->
emitCodeArg CONTINUE_LOOP label
Just _other -> checkFrameBlocks
loopError = "'continue' not properly in loop"
finallyError = "'continue' not supported inside 'finally' clause"
compileStmt (NonLocal {}) = return ()
compileStmt (Global {}) = return ()
compileStmt (Decorated {..}) =
case decorated_def of
Fun {} -> compileFun decorated_def decorated_decorators
Class {} -> compileClass decorated_def decorated_decorators
other -> error $ "Decorated statement is not a function or a class: " ++ prettyText other
compileStmt (Delete {..}) = mapM_ compileDelete del_exprs
compileStmt stmt@(With {..})
-- desugar with statements containing multiple contexts into nested
-- with statements containing single contexts
| length with_context > 1 = compileWith $ desugarWith stmt
| otherwise = compileWith stmt
compileStmt (Raise {..}) = compile raise_expr
compileStmt other = error $ "Unsupported statement:\n" ++ prettyText other
instance Compilable ExprSpan where
type CompileResult ExprSpan = ()
compile expr =
setLineNumber (annot expr) >>
compileExpr expr
compileExpr :: ExprSpan -> Compile ()
compileExpr (Var { var_ident = ident }) = do
emitReadVar $ ident_string ident
compileExpr expr@(AST.Strings {}) =
compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.ByteStrings {}) =
compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.Int {}) =
compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.Float {}) =
compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.Imaginary {}) =
compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.Bool {}) =
compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.None {}) =
compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.Ellipsis {}) =
compileConstantEmit $ constantToPyObject expr
compileExpr (AST.Paren {..}) = compile paren_expr
compileExpr (AST.CondExpr {..}) = do
compile ce_condition
falseLabel <- newLabel
emitCodeArg POP_JUMP_IF_FALSE falseLabel
compile ce_true_branch
restLabel <- newLabel
emitCodeArg JUMP_FORWARD restLabel
labelNextInstruction falseLabel
compile ce_false_branch
labelNextInstruction restLabel
compileExpr expr@(AST.Tuple {..})
| isPyObjectExpr expr =
compileConstantEmit $ constantToPyObject expr
| otherwise = do
mapM_ compile tuple_exprs
emitCodeArg BUILD_TUPLE $ fromIntegral $ length tuple_exprs
compileExpr (AST.List {..}) = do
mapM_ compile list_exprs
emitCodeArg BUILD_LIST $ fromIntegral $ length list_exprs
compileExpr (AST.Set {..}) = do
mapM_ compile set_exprs
emitCodeArg BUILD_SET $ fromIntegral $ length set_exprs
compileExpr (Dictionary {..}) = do
emitCodeArg BUILD_MAP $ fromIntegral $ length dict_mappings
forM_ dict_mappings $ \(DictMappingPair key value) -> do
compile value
compile key
emitCodeNoArg STORE_MAP
compileExpr (ListComp {..}) = do
let initStmt = [mkAssignVar resultName (mkList [])]
updater = \(ComprehensionExpr expr) -> mkStmtExpr $ mkMethodCall (mkVar $ resultName) "append" expr
returnStmt = [mkReturn $ mkVar $ resultName]
compileComprehension "<listcomp>" initStmt updater returnStmt list_comprehension
compileExpr (SetComp {..}) = do
let initStmt = [mkAssignVar resultName (mkSet [])]
updater = \(ComprehensionExpr expr) -> mkStmtExpr $ mkMethodCall (mkVar $ resultName) "add" expr
returnStmt = [mkReturn $ mkVar $ resultName]
compileComprehension "<setcomp>" initStmt updater returnStmt set_comprehension
compileExpr (DictComp {..}) = do
let initStmt = [mkAssignVar resultName (mkDict [])]
updater = \(ComprehensionDict (DictMappingPair key val)) ->
mkAssign (mkSubscript (mkVar $ resultName) key) val
returnStmt = [mkReturn $ mkVar $ resultName]
compileComprehension "<dictcomp>" initStmt updater returnStmt dict_comprehension
compileExpr (Generator {..}) = do
let updater = \(ComprehensionExpr expr) -> mkStmtExpr $ mkYield expr
compileComprehension "<gencomp>" [] updater [] gen_comprehension
compileExpr (Yield { yield_arg = Nothing }) =
compileConstantEmit Blip.None >> emitCodeNoArg YIELD_VALUE >> setFlag co_generator
compileExpr (Yield { yield_arg = Just (YieldExpr expr) }) =
compile expr >> emitCodeNoArg YIELD_VALUE >> setFlag co_generator
compileExpr e@(Yield { yield_arg = Just (YieldFrom expr _) })
= error $ "yield from not supported: " ++ show e
compileExpr (Call {..}) = do
compile call_fun
compileCall 0 call_args
compileExpr (Subscript {..}) = do
compile subscriptee
compile subscript_expr
emitCodeNoArg BINARY_SUBSCR
compileExpr (SlicedExpr {..}) = do
compile slicee
compileSlices slices
emitCodeNoArg BINARY_SUBSCR
compileExpr (Dot {..}) = do
compile dot_expr
varInfo <- lookupNameVar $ ident_string dot_attribute
emitCodeArg LOAD_ATTR varInfo
compileExpr exp@(BinaryOp {..})
| isBoolean operator = compileBoolOpExpr exp
| isComparison operator = compileCompareOpExpr exp
| otherwise = do
compile left_op_arg
compile right_op_arg
compileOp operator
compileExpr (UnaryOp {..}) = do
compile op_arg
compileUnaryOp operator
compileExpr (Lambda {..}) = do
funBodyObj <- nestedBlock FunctionContext expr_annot $ do
      -- make the first constant None, to indicate no doc string
-- for the lambda
_ <- compileConstant Blip.None
compile lambda_body
emitCodeNoArg RETURN_VALUE
assemble
makeObject
numDefaults <- compileDefaultParams lambda_args
compileClosure "<lambda>" funBodyObj numDefaults
compileExpr other = error $ "Unsupported expr:\n" ++ prettyText other
-- | An augmented-assignment operator compiles to its single in-place opcode.
instance Compilable AssignOpSpan where
   type CompileResult AssignOpSpan = ()
   compile = emitCodeNoArg . assignOpCode
instance Compilable DecoratorSpan where
   type CompileResult DecoratorSpan = ()
   -- Push the decorator object onto the stack: load its (possibly dotted)
   -- name, then call it if the decorator was written with arguments.
   compile dec@(Decorator {..}) = do
      compileDottedName decorator_name
      let numDecorators = length decorator_args
      when (numDecorators > 0) $
         compileCall 0 decorator_args
      where
      -- Read the head of the dotted name, then LOAD_ATTR each remaining
      -- component.
      compileDottedName (name:rest) = do
         emitReadVar $ ident_string name
         forM_ rest $ \var -> do
            index <- lookupNameVar $ ident_string var
            emitCodeArg LOAD_ATTR index
      compileDottedName [] =
         error $ "decorator with no name: " ++ prettyText dec
-- | A positional argument compiles to its expression; other argument
-- forms are not yet supported.
instance Compilable ArgumentSpan where
   type CompileResult ArgumentSpan = ()
   compile (ArgExpr {..}) = compile arg_expr
   compile other = error $ "Unsupported argument:\n" ++ prettyText other
instance Compilable ImportItemSpan where
   type CompileResult ImportItemSpan = ()
   -- Compile "import a.b.c [as name]": push the from-list, IMPORT_NAME
   -- the dotted path, then either store the head name, or walk the
   -- attribute chain and store the as-name.
   compile (ImportItem {..}) = do
      -- this always seems to be zero
      compileConstantEmit Blip.None
      let dottedNames = map ident_string import_item_name
      -- assert (length dottedNames > 0)
      let dottedNameStr =
             concat $ intersperse "." dottedNames
      index <- lookupNameVar dottedNameStr
      emitCodeArg IMPORT_NAME index
      storeName <-
         case import_as_name of
            Nothing -> return $ head import_item_name
            Just asName -> do
               forM_ (tail dottedNames) $ \attribute -> do
                  index <- lookupNameVar attribute
                  emitCodeArg LOAD_ATTR index
               return asName
      emitWriteVar $ ident_string storeName
instance Compilable RaiseExprSpan where
   type CompileResult RaiseExprSpan = ()
   -- RAISE_VARARGS takes the number of operands pushed:
   -- 0 (bare re-raise), 1 (exception), or 2 (exception plus 'from' cause).
   compile (RaiseV3 maybeRaiseArg) = do
      n <- case maybeRaiseArg of
              Nothing -> return 0
              Just (raiseExpr, maybeFrom) -> do
                 compile raiseExpr
                 case maybeFrom of
                    Nothing -> return 1
                    Just fromExpr -> do
                       compile fromExpr
                       return 2
      emitCodeArg RAISE_VARARGS n
   compile stmt@(RaiseV2 _) =
      error $ "Python version 2 raise statement encountered: " ++ prettyText stmt
{-
From CPython compile.c
Code generated for "try: S except E1 as V1: S1 except E2 as V2: S2 ...":
(The contents of the value stack is shown in [], with the top
at the right; 'tb' is trace-back info, 'val' the exception's
associated value, and 'exc' the exception.)
Value stack Label Instruction Argument
[] SETUP_EXCEPT L1
[] <code for S>
[] POP_BLOCK
[] JUMP_FORWARD L0
[tb, val, exc] L1: DUP )
[tb, val, exc, exc] <evaluate E1> )
[tb, val, exc, exc, E1] COMPARE_OP EXC_MATCH ) only if E1
[tb, val, exc, 1-or-0] POP_JUMP_IF_FALSE L2 )
[tb, val, exc] POP
[tb, val] <assign to V1> (or POP if no V1)
[tb] POP
[] <code for S1>
POP_EXCEPT
JUMP_FORWARD L0
[tb, val, exc] L2: DUP
.............................etc.......................
[tb, val, exc] Ln+1: END_FINALLY # re-raise exception
[] L0: <next statement>
Of course, parts are not generated if Vi or Ei is not present.
-}
-- | Dispatch a try statement: with no finally clause it is a plain
-- try-except, otherwise the try-except nests inside a try-finally.
compileTry :: StatementSpan -> Compile ()
compileTry stmt@(Try {..})
   | length try_finally == 0 = compileTryExcept stmt
   | otherwise = compileTryFinally stmt
compileTry other =
   error $ "Unexpected statement when compiling a try-except: " ++ prettyText other
-- | Compile a try statement that has a finally clause.
compileTryFinally :: StatementSpan -> Compile ()
compileTryFinally stmt@(Try {..}) = do
   end <- newLabel
   emitCodeArg SETUP_FINALLY end
   body <- newLabel
   labelNextInstruction body
   withFrameBlock FrameBlockFinallyTry $ do
      -- If there are except clauses, nest the try-except inside this
      -- try-finally.
      if length try_excepts > 0
         then compileTryExcept stmt
         else mapM_ compile try_body
      emitCodeNoArg POP_BLOCK
   -- None on the stack tells END_FINALLY there is no pending exception.
   _ <- compileConstantEmit Blip.None
   labelNextInstruction end
   withFrameBlock FrameBlockFinallyEnd $ do
      mapM_ compile try_finally
      emitCodeNoArg END_FINALLY
compileTryFinally other =
   error $ "Unexpected statement when compiling a try-except: " ++ prettyText other
-- | Compile a try statement with except clauses (no finally); see the
-- CPython stack diagram in the comment above.
compileTryExcept :: StatementSpan -> Compile ()
compileTryExcept (Try {..}) = do
   firstHandler <- newLabel -- L1
   emitCodeArg SETUP_EXCEPT firstHandler -- pushes handler onto block stack
   withFrameBlock FrameBlockExcept $ do
      mapM_ compile try_body -- <code for S>
      emitCodeNoArg POP_BLOCK -- pops handler off block stack
   orElse <- newLabel
   emitCodeArg JUMP_FORWARD orElse
   end <- newLabel -- L0
   compileHandlers end firstHandler try_excepts
   labelNextInstruction orElse
   mapM_ compile try_else
   labelNextInstruction end -- L0: <next statement>
compileTryExcept other =
   error $ "Unexpected statement when compiling a try-except: " ++ prettyText other
-- Compile a sequence of exception handlers
compileHandlers :: Word16 -> Word16 -> [HandlerSpan] -> Compile ()
-- No handler matched: END_FINALLY re-raises the pending exception.
compileHandlers _end handlerLabel [] = do
   labelNextInstruction handlerLabel -- Ln+1, # re-raise exception
   emitCodeNoArg END_FINALLY
compileHandlers end handlerLabel (Handler {..} : rest) = do
   labelNextInstruction handlerLabel
   nextLabel <- newLabel
   compileHandlerClause nextLabel handler_clause
   emitCodeNoArg POP_TOP -- pop the traceback (tb) off the stack
   withFrameBlock FrameBlockFinallyTry $ do
      mapM_ compile handler_suite -- <code for S1, S2 ..>
      emitCodeNoArg POP_EXCEPT -- pop handler off the block stack
      emitCodeArg JUMP_FORWARD end
   compileHandlers end nextLabel rest
-- enter here with stack == (s ++ [tb, val, exc]), leave with stack == s
-- | Compile the matching part of one exception handler clause.
-- NOTE(review): two code lines of this function were lost in this copy
-- of the file (only their comments survived): the COMPARE_OP emission
-- and the "Just asName" alternative.  They are reconstructed here from
-- the surviving comments and the CPython stack diagram above — confirm
-- against upstream Blip.
compileHandlerClause :: Word16 -> ExceptClauseSpan -> Compile ()
compileHandlerClause nextHandler (ExceptClause {..}) = do
   case except_clause of
      Nothing -> do
         emitCodeNoArg POP_TOP -- pop exc off the stack
         emitCodeNoArg POP_TOP -- pop val off the stack
      Just (target, asExpr) -> do
         emitCodeNoArg DUP_TOP -- duplicate exc on stack
         compile target -- <evaluate E1>
         emitCodeArg COMPARE_OP exactMatchOp -- compare E1 to exc
         emitCodeArg POP_JUMP_IF_FALSE nextHandler -- pop True/False and if no match try next handler
         emitCodeNoArg POP_TOP -- pop exc off the stack
         case asExpr of
            Nothing -> emitCodeNoArg POP_TOP -- pop val off the stack
            -- XXX we should del this name at the end.
            -- assign the exception to the as name, will remove it from the stack
            Just asName -> compileAssignTo asName
   where
   -- The code for an exact match operator.
   exactMatchOp :: Word16
   exactMatchOp = 10
-- | Run a compile action with decorator application around it: the
-- decorator objects are pushed first, the action leaves the decorated
-- object on top, then one CALL_FUNCTION per decorator applies them.
withDecorators :: [DecoratorSpan] -> Compile () -> Compile ()
withDecorators decorators action = do
   -- push each of the decorators on the stack
   mapM_ compile decorators
   -- run the enclosed computation
   action
   -- call each of the decorators
   forM_ decorators $ \_decorator ->
      emitCodeArg CALL_FUNCTION 1
-- | Run a compile action in a fresh nested block (function, class or
-- comprehension body), restoring the enclosing block state afterwards.
nestedBlock :: Context -> SrcSpan -> Compile a -> Compile a
nestedBlock context span comp = do
   -- save the current block state
   oldBlockState <- getBlockState id
   -- set the new block state to initial values, and the
   -- scope of the current definition
   (name, localScope) <- getLocalScope $ spanToScopeIdentifier span
   setBlockState $ initBlockState context localScope
   -- set the new object name
   setObjectName name
   -- set the first line number of the block
   setFirstLineNumber span
   -- run the nested computation
   result <- comp
   -- restore the original block state
   setBlockState oldBlockState
   return result
-- Compile a function definition, possibly with decorators.
compileFun :: StatementSpan -> [DecoratorSpan] -> Compile ()
compileFun (Fun {..}) decorators = do
   let funName = ident_string $ fun_name
   withDecorators decorators $ do
      -- Compile the body in its own nested block, yielding a code object.
      funBodyObj <- nestedBlock FunctionContext stmt_annot $ do
         compileFunDocString fun_body
         compile $ Body fun_body
      numDefaults <- compileDefaultParams fun_args
      compileClosure funName funBodyObj numDefaults
   -- The (possibly decorated) function object is bound to its name.
   emitWriteVar funName
compileFun other _decorators = error $ "compileFun applied to a non function: " ++ prettyText other
-- Compile a class definition, possibly with decorators.
-- The class body is compiled into a code object that is passed,
-- together with the class name, to the builtin __build_class__
-- (LOAD_BUILD_CLASS) which constructs the class object.
compileClass :: StatementSpan -> [DecoratorSpan] -> Compile ()
compileClass (Class {..}) decorators = do
   let className = ident_string $ class_name
   withDecorators decorators $ do
      classBodyObj <- nestedBlock ClassContext stmt_annot $ do
         -- classes have a special argument called __locals__
         -- it is the only argument they have in the byte code, but it
         -- does not come from the source code, so we have to add it.
         setFastLocals ["__locals__"]
         setArgCount 1
         emitCodeArg LOAD_FAST 0
         emitCodeNoArg STORE_LOCALS
         emitReadVar "__name__"
         emitWriteVar "__module__"
         compileConstantEmit $ Unicode className
         emitWriteVar "__qualname__"
         compileClassModuleDocString class_body
         compile $ Body class_body
      emitCodeNoArg LOAD_BUILD_CLASS
      compileClosure className classBodyObj 0
      compileConstantEmit $ Unicode className
      -- 2 extra args: the class-body function and the class name
      compileCall 2 class_args
   emitWriteVar className
compileClass other _decorators = error $ "compileClass applied to a non class: " ++ prettyText other
-- XXX CPython uses a "qualified" name for the code object. For instance
-- nested functions look like "f.<locals>.g", whereas we currently use
-- just "g".

-- Emit code that builds a function object from a compiled code object.
-- The free variables in a code object will either be cell variables
-- or free variables in the enclosing object. If there are no free
-- variables then we can avoid building the closure, and just make the
-- function (MAKE_FUNCTION vs MAKE_CLOSURE).
compileClosure :: String -> PyObject -> Word16 -> Compile ()
compileClosure name obj numDefaults = do
   -- get the list of free variables from the code object
   let Blip.Tuple freeVarStringObjs = freevars obj
       freeVarIdentifiers = map unicode freeVarStringObjs
       numFreeVars = length freeVarIdentifiers
   if numFreeVars == 0
      then do
         compileConstantEmit obj
         compileConstantEmit $ Unicode name
         emitCodeArg MAKE_FUNCTION numDefaults
      else do
         -- push the cell for each free variable onto the stack
         forM_ freeVarIdentifiers $ \var -> do
            maybeVarInfo <- lookupClosureVar var
            -- we don't use emitReadVar because it would generate
            -- LOAD_DEREF instructions, but we want LOAD_CLOSURE
            -- instead.
            case maybeVarInfo of
               Just (CellVar index) -> emitCodeArg LOAD_CLOSURE index
               Just (FreeVar index) -> emitCodeArg LOAD_CLOSURE index
               _other -> error $ name ++ " closure free variable not cell or free var in outer context: " ++ var
         emitCodeArg BUILD_TUPLE $ fromIntegral numFreeVars
         compileConstantEmit obj
         compileConstantEmit $ Unicode name
         emitCodeArg MAKE_CLOSURE numDefaults
-- Push the default value of each parameter that has one, and return
-- how many defaults were pushed.
compileDefaultParams :: [ParameterSpan] -> Compile Word16
compileDefaultParams params = foldM step 0 params
   where
   step :: Word16 -> ParameterSpan -> Compile Word16
   step count (Param {..})
      -- only plain parameters with an explicit default contribute
      | Just defaultExpr <- param_default =
           compile defaultExpr >> return (count + 1)
      | otherwise = return count
   step count _other = return count
-- Compile a 'from module import'.
-- XXX what to do about the initial dots?
compileFromModule :: ImportRelativeSpan -> Compile ()
compileFromModule (ImportRelative {..}) = do
   -- join the dotted components into "a.b.c"; empty when the import is
   -- purely relative (e.g. "from . import x")
   let moduleName =
          case import_relative_module of
             Nothing -> ""
             Just dottedNames ->
                concat $ intersperse "." $ map ident_string dottedNames
   index <- lookupNameVar moduleName
   emitCodeArg IMPORT_NAME index
-- The names imported by a "from ... import" clause; "*" stands for
-- import-everything.
fromItemsIdentifiers :: FromItemsSpan -> [Identifier]
fromItemsIdentifiers (ImportEverything {}) = ["*"]
fromItemsIdentifiers (FromItems {..}) = map itemName from_items_items
   where
   itemName :: FromItemSpan -> Identifier
   itemName (FromItem {..}) = ident_string from_item_name
-- Compile a chain of assignment targets, as in: x = y = z = rhs.
-- The rhs value is already on the stack; it is duplicated once per
-- extra target so every target receives it.
compileAssignments :: [ExprSpan] -> Compile ()
compileAssignments targets =
   case targets of
      [] -> return ()
      [lhs] -> compileAssignTo lhs
      lhs : rest@(_:_) -> do
         emitCodeNoArg DUP_TOP
         compileAssignTo lhs
         compileAssignments rest
-- Compile the lhs of an assignment statement.
-- We can assume that the parser has only accepted the appropriate
-- subset of expression types in assignment position.
compileAssignTo :: ExprSpan -> Compile ()
-- plain variable: store via the scope-appropriate STORE instruction
compileAssignTo (Var {..}) =
   emitWriteVar $ ident_string var_ident
-- subscript target: x[i] = v
compileAssignTo (Subscript {..}) =
   compile subscriptee >>
   compile subscript_expr >>
   emitCodeNoArg STORE_SUBSCR
-- XXX this can be optimised in places where the rhs is a
-- manifest list or tuple, avoiding the building list/tuple
-- only to deconstruct again
compileAssignTo (AST.Tuple {..}) = do
   emitCodeArg UNPACK_SEQUENCE $ fromIntegral $ length tuple_exprs
   mapM_ compileAssignTo tuple_exprs
compileAssignTo (AST.List {..}) = do
   emitCodeArg UNPACK_SEQUENCE $ fromIntegral $ length list_exprs
   mapM_ compileAssignTo list_exprs
compileAssignTo (AST.Paren {..}) = compileAssignTo paren_expr
-- attribute target: x.attr = v
compileAssignTo expr@(Dot {..}) = do
   compile dot_expr
   index <- lookupNameVar $ ident_string dot_attribute
   emitCodeArg STORE_ATTR index
{- old version, from when attribute access was a binary operator:
compileAssignTo expr@(BinaryOp { operator = Dot {}, right_op_arg = Var {..}}) = do
   compile $ left_op_arg expr
   index <- lookupNameVar $ ident_string $ var_ident
   emitCodeArg STORE_ATTR index
-}
-- slice target: x[a:b:c] = v
compileAssignTo (SlicedExpr {..}) = do
   compile slicee
   compileSlices slices
   emitCodeNoArg STORE_SUBSCR
compileAssignTo other = error $ "assignment to unexpected expression:\n" ++ prettyText other
-- Compile the target of a "del" statement; mirrors compileAssignTo
-- but emits DELETE_* instructions.
compileDelete :: ExprSpan -> Compile ()
compileDelete (Var {..}) = do
   emitDeleteVar $ ident_string var_ident
compileDelete (Subscript {..}) =
   compile subscriptee >>
   compile subscript_expr >>
   emitCodeNoArg DELETE_SUBSCR
compileDelete (AST.Paren {..}) = compileDelete paren_expr
-- attribute deletion: del x.attr
compileDelete (Dot {..}) = do
   compile dot_expr
   index <- lookupNameVar $ ident_string dot_attribute
   emitCodeArg DELETE_ATTR index
{- old version, from when attribute access was a binary operator:
compileDelete (expr@(BinaryOp { operator = Dot {}, right_op_arg = Var {..}})) = do
   compile $ left_op_arg expr
   index <- lookupNameVar $ ident_string $ var_ident
   emitCodeArg DELETE_ATTR index
-}
-- slice deletion: del x[a:b:c]
compileDelete (SlicedExpr {..}) = do
   compile slicee
   compileSlices slices
   emitCodeNoArg DELETE_SUBSCR
compileDelete other = error $ "delete of unexpected expression:\n" ++ prettyText other
-- Compile a (desugared, single-context) "with" statement.
-- SETUP_WITH calls the context manager's __enter__ and pushes a
-- finally block; WITH_CLEANUP/END_FINALLY invoke __exit__ on the way
-- out. Multi-context with statements are desugared into nested
-- single-context ones before reaching here.
compileWith :: StatementSpan -> Compile ()
compileWith stmt@(With {..}) =
   case with_context of
      [(context, maybeAs)] -> do
         blockLabel <- newLabel
         finallyLabel <- newLabel
         compile context
         emitCodeArg SETUP_WITH finallyLabel
         labelNextInstruction blockLabel
         withFrameBlock FrameBlockFinallyTry $ do
             case maybeAs of
                -- Discard result from context.__enter__()
                Nothing -> emitCodeNoArg POP_TOP
                Just expr -> compileAssignTo expr
             mapM_ compile with_body
             emitCodeNoArg POP_BLOCK
         _ <- compileConstantEmit Blip.None
         labelNextInstruction finallyLabel
         withFrameBlock FrameBlockFinallyEnd $ do
            emitCodeNoArg WITH_CLEANUP
            emitCodeNoArg END_FINALLY
      _other -> error $ "compileWith applied to non desugared with statement: " ++ prettyText stmt
compileWith other = error $ "compileWith applied to non with statement: " ++ prettyText other
-- Check for a docstring in the first statement of a function body.
-- The first constant in the corresponding code object is inspected
-- by the interpreter for the docstring. If there is no docstring
-- then the first constant must be None.
compileFunDocString :: [StatementSpan] -> Compile ()
compileFunDocString (firstStmt:_stmts)
   | StmtExpr {..} <- firstStmt,
     Strings {} <- stmt_expr
        = compileConstant (constantToPyObject stmt_expr) >> return ()
   | otherwise = compileConstant Blip.None >> return ()
compileFunDocString [] = compileConstant Blip.None >> return ()
-- Check for a docstring in the first statement of a class or module
-- body; if one is present, emit it and bind it to __doc__.
compileClassModuleDocString :: [StatementSpan] -> Compile ()
compileClassModuleDocString (firstStmt:_stmts)
   | StmtExpr {..} <- firstStmt,
     Strings {} <- stmt_expr
     -- XXX what if another __doc__ is in scope?
        = do compileConstantEmit $ constantToPyObject stmt_expr
             emitWriteVar "__doc__"
   | otherwise = return ()
compileClassModuleDocString [] = return ()
-- Compile one guarded branch of a conditional: evaluate the condition,
-- skip the branch body when it is false, otherwise run the body and
-- jump over the remaining branches to 'restLabel'.
compileGuard :: Word16 -> (ExprSpan, [StatementSpan]) -> Compile ()
compileGuard restLabel (condition, body) = do
   compile condition
   skipLabel <- newLabel
   emitCodeArg POP_JUMP_IF_FALSE skipLabel
   mapM_ compile body
   emitCodeArg JUMP_FORWARD restLabel
   labelNextInstruction skipLabel
-- Desugar the comprehension into a function (body) containing
-- a (possibly nested) for loop, then call the function.
compileComprehension
   :: Identifier
   -> [StatementSpan]
   -> (ComprehensionExprSpan -> StatementSpan)
   -> [StatementSpan]
   -> ComprehensionSpan
   -> Compile ()
compileComprehension name initStmt updater returnStmt comprehension = do
   let desugaredComp = desugarComprehension initStmt updater returnStmt comprehension
       comprehensionSpan = comprehension_annot comprehension
   funObj <- nestedBlock
                FunctionContext
                comprehensionSpan
                (compile $ Body desugaredComp)
   compileClosure name funObj 0
   -- the comprehension function's positional parameters (recorded in
   -- its local scope) are read and passed as call arguments
   (_name, localScope) <- getLocalScope $ spanToScopeIdentifier comprehensionSpan
   let parameterNames = parameterTypes_pos $ localScope_params localScope
   mapM_ emitReadVar parameterNames
   emitCodeArg CALL_FUNCTION $ fromIntegral $ length parameterNames
-- Convert a constant expression into the equivalent object. This
-- only works for expressions which have a counterpart in the object
-- representation used in .pyc files.
constantToPyObject :: ExprSpan -> PyObject
-- ints fitting in 32 signed bits marshal as Int, otherwise Long
constantToPyObject (AST.Int {..})
   | int_value > (fromIntegral max32BitSignedInt) ||
     int_value < (fromIntegral min32BitSignedInt)
        = Blip.Long int_value
   | otherwise = Blip.Int $ fromIntegral int_value
   where
   max32BitSignedInt :: Int32
   max32BitSignedInt = maxBound
   min32BitSignedInt :: Int32
   min32BitSignedInt = minBound
constantToPyObject (AST.Float {..}) = Blip.Float $ float_value
-- XXX we could optimise the case where we have 'float + imaginary j',
-- to generate a Complex number directly, rather than by doing
-- the addition operation.
constantToPyObject (AST.Imaginary {..}) =
   Blip.Complex { real = 0.0, imaginary = imaginary_value }
constantToPyObject (AST.Bool { bool_value = True }) = Blip.TrueObj
constantToPyObject (AST.Bool { bool_value = False }) = Blip.FalseObj
constantToPyObject (AST.None {}) = Blip.None
constantToPyObject (AST.Ellipsis {}) = Blip.Ellipsis
-- assumes all the tuple elements are constant
constantToPyObject (AST.Tuple {..}) =
   Blip.Tuple { elements = map constantToPyObject tuple_exprs }
constantToPyObject (AST.Strings {..}) =
   Blip.Unicode { unicode = concat $ map normaliseString strings_strings }
constantToPyObject (AST.ByteStrings {..}) =
   -- error $ show $ map normaliseString byte_string_strings
   Blip.String { string = fromString $ concat $ map normaliseString byte_string_strings }
constantToPyObject other =
   error $ "constantToPyObject applied to an unexpected expression: " ++ prettyText other
-- The strings in the AST retain their original quote marks and any
-- b/r prefixes, which need to be removed. Escaped characters (such as
-- "\n") are parsed as multiple characters and, for non-raw strings,
-- need to be converted back into single characters. We assume the
-- parser has correctly matched the quotes.
normaliseString :: String -> String
normaliseString str =
   case str of
      'r':'b':rest -> removeQuotes rest                    -- raw bytes: no unescaping
      'b':'r':rest -> removeQuotes rest                    -- raw bytes: no unescaping
      'b':rest     -> unescapeString (removeQuotes rest)   -- bytes literal
      'r':rest     -> removeQuotes rest                    -- raw string: no unescaping
      other        -> unescapeString (removeQuotes other)  -- plain string
-- Strip the matching quote marks from a string literal body:
-- triple quotes ('''...''' or """...""") or single quotes.
-- The parser guarantees quotes are balanced; anything else is a bug.
removeQuotes :: String -> String
removeQuotes str =
   case str of
      '\'':'\'':'\'':rest -> dropClosing 3 rest
      '"':'"':'"':rest    -> dropClosing 3 rest
      '\'':rest           -> init rest
      '"':rest            -> init rest
      _                   -> error $ "bad literal string: " ++ str
   where
   -- drop the n closing quote characters from the end
   dropClosing :: Int -> String -> String
   dropClosing n s = take (length s - n) s
-- Accumulated information about the arguments at one call site.
data CallArgs =
   CallArgs
   { callArgs_pos :: !Word16        -- number of positional arguments
   , callArgs_keyword :: !Word16    -- number of keyword arguments
   , callArgs_varPos :: !Bool       -- was a *args argument present?
   , callArgs_varKeyword :: !Bool   -- was a **kwargs argument present?
   }

-- The state before any arguments have been seen.
initCallArgs :: CallArgs
initCallArgs =
   CallArgs
   { callArgs_pos = 0
   , callArgs_keyword = 0
   , callArgs_varPos = False
   , callArgs_varKeyword = False
   }
-- Compile the arguments to a call and
-- decide which particular CALL_FUNCTION bytecode to emit.
-- numExtraArgs counts any additional arguments the function
-- might have been applied to, which is necessary for classes
-- which get extra arguments beyond the ones mentioned in the
-- program source.
compileCall :: Word16 -> [ArgumentSpan] -> Compile ()
compileCall numExtraArgs args = do
   CallArgs {..} <- compileCallArgs args
   -- opArg encoding: low byte = positional count, second byte = keyword count
   let opArg = (callArgs_pos + numExtraArgs) .|. callArgs_keyword `shiftL` 8
   -- pick the opcode variant according to *args / **kwargs presence
   case (callArgs_varPos, callArgs_varKeyword) of
      (False, False) -> emitCodeArg CALL_FUNCTION opArg
      (True, False) -> emitCodeArg CALL_FUNCTION_VAR opArg
      (False, True) -> emitCodeArg CALL_FUNCTION_KW opArg
      (True, True) -> emitCodeArg CALL_FUNCTION_VAR_KW opArg
-- Compile the arguments to a function call and return the number
-- of positional arguments, the number of keyword arguments, and
-- whether *args / **kwargs were present.
compileCallArgs :: [ArgumentSpan] -> Compile CallArgs
compileCallArgs = foldM compileArg initCallArgs
   where
   compileArg :: CallArgs -> ArgumentSpan -> Compile CallArgs
   -- plain positional argument
   compileArg callArgs@(CallArgs {..}) (ArgExpr {..}) = do
      compile arg_expr
      return $ callArgs { callArgs_pos = callArgs_pos + 1 }
   -- keyword argument: push the name, then the value
   compileArg callArgs@(CallArgs {..}) (ArgKeyword {..}) = do
      compileConstantEmit $ Unicode $ ident_string arg_keyword
      compile arg_expr
      return $ callArgs { callArgs_keyword = callArgs_keyword + 1 }
   -- *args argument
   compileArg callArgs@(CallArgs {..}) (ArgVarArgsPos {..}) = do
      compile arg_expr
      return $ callArgs { callArgs_varPos = True }
   -- **kwargs argument
   compileArg callArgs@(CallArgs {..}) (ArgVarArgsKeyword {..}) = do
      compile arg_expr
      return $ callArgs { callArgs_varKeyword = True }
-- Compile the components of a subscription slice and build a slice
-- object (BUILD_SLICE). Missing lower/upper bounds default to None.
-- XXX need to handle extended slices, slice expressions and ellipsis
compileSlices :: [SliceSpan] -> Compile ()
compileSlices [SliceProper {..}] = do
   case slice_lower of
      Nothing -> compileConstantEmit Blip.None
      Just expr -> compile expr
   case slice_upper of
      Nothing -> compileConstantEmit Blip.None
      Just expr -> compile expr
   case slice_stride of
      Nothing -> emitCodeArg BUILD_SLICE 2
      -- Not sure about this, maybe it is None
      Just Nothing -> emitCodeArg BUILD_SLICE 2
      Just (Just expr) -> do
         compile expr
         emitCodeArg BUILD_SLICE 3
compileSlices other = error $ "unsupported slice: " ++ show other
-- Map an augmented-assignment operator (+=, -=, ...) to the
-- corresponding in-place bytecode instruction.
assignOpCode :: AssignOpSpan -> Opcode
assignOpCode (PlusAssign {}) = INPLACE_ADD
assignOpCode (MinusAssign {}) = INPLACE_SUBTRACT
assignOpCode (MultAssign {}) = INPLACE_MULTIPLY
assignOpCode (DivAssign {}) = INPLACE_TRUE_DIVIDE
assignOpCode (ModAssign {}) = INPLACE_MODULO
assignOpCode (PowAssign {}) = INPLACE_POWER
assignOpCode (BinAndAssign {}) = INPLACE_AND
assignOpCode (BinOrAssign {}) = INPLACE_OR
assignOpCode (BinXorAssign {}) = INPLACE_XOR
assignOpCode (LeftShiftAssign {}) = INPLACE_LSHIFT
assignOpCode (RightShiftAssign {}) = INPLACE_RSHIFT
assignOpCode (FloorDivAssign {}) = INPLACE_FLOOR_DIVIDE
isDot : : OpSpan - > Bool
isDot ( Dot { } ) = True
isDot _ other = False
isDot :: OpSpan -> Bool
isDot (Dot {}) = True
isDot _other = False
-}
-- Is this operator a short-circuiting boolean operator (and/or)?
isBoolean :: OpSpan -> Bool
isBoolean op =
   case op of
      And {} -> True
      Or {}  -> True
      _      -> False
-- Is this operator one of Python's chained comparison operators?
isComparison :: OpSpan -> Bool
isComparison op =
   case op of
      LessThan {}          -> True
      GreaterThan {}       -> True
      Equality {}          -> True
      GreaterThanEquals {} -> True
      LessThanEquals {}    -> True
      NotEquals {}         -> True
      In {}                -> True
      NotIn {}             -> True
      IsNot {}             -> True
      Is {}                -> True
      _                    -> False
compileDot : : ExprSpan - > Compile ( )
compileDot ( BinaryOp { .. } ) = do
compile left_op_arg
case right_op_arg of
{ .. } - > do
-- the right argument should be treated like name variable
varInfo < - lookupNameVar $ ident_string var_ident
emitCodeArg LOAD_ATTR varInfo
other - > error $ " right argument of dot operator not a variable:\n " + + prettyText other
compileDot other =
error $ " compileDot applied to an unexpected expression : " + + prettyText other
compileDot :: ExprSpan -> Compile ()
compileDot (BinaryOp {..}) = do
compile left_op_arg
case right_op_arg of
Var {..} -> do
-- the right argument should be treated like name variable
varInfo <- lookupNameVar $ ident_string var_ident
emitCodeArg LOAD_ATTR varInfo
other -> error $ "right argument of dot operator not a variable:\n" ++ prettyText other
compileDot other =
error $ "compileDot applied to an unexpected expression: " ++ prettyText other
-}
-- Compile a short-circuiting boolean expression (and/or).
-- The left operand is evaluated first; the conditional jump either
-- keeps its value on the stack (short circuit) or pops it and
-- evaluates the right operand.
compileBoolOpExpr :: ExprSpan -> Compile ()
compileBoolOpExpr (BinaryOp {..}) = do
   endLabel <- newLabel
   compile left_op_arg
   case operator of
      And {..} -> emitCodeArg JUMP_IF_FALSE_OR_POP endLabel
      Or {..} -> emitCodeArg JUMP_IF_TRUE_OR_POP endLabel
      other -> error $ "Unexpected boolean operator:\n" ++ prettyText other
   compile right_op_arg
   labelNextInstruction endLabel
compileBoolOpExpr other =
   error $ "compileBoolOpExpr applied to an unexpected expression: " ++ prettyText other
-- Emit the bytecode for a binary operator (boolean and comparison
-- operators are handled elsewhere; operands are already on the stack).
compileOp :: OpSpan -> Compile ()
compileOp operator =
   emitCodeNoArg $ case operator of
      BinaryOr {} -> BINARY_OR
      Xor {} -> BINARY_XOR
      BinaryAnd {} -> BINARY_AND
      ShiftLeft {} -> BINARY_LSHIFT
      ShiftRight {} -> BINARY_RSHIFT
      Exponent {} -> BINARY_POWER
      Multiply {} -> BINARY_MULTIPLY
      Plus {} -> BINARY_ADD
      Minus {} -> BINARY_SUBTRACT
      Divide {} -> BINARY_TRUE_DIVIDE
      FloorDivide {} -> BINARY_FLOOR_DIVIDE
      Modulo {} -> BINARY_MODULO
      _other -> error $ "Unexpected operator:\n" ++ prettyText operator
-- Emit the bytecode for a unary operator; the operand is already
-- on the stack.
compileUnaryOp :: OpSpan -> Compile ()
compileUnaryOp operator = emitCodeNoArg opcode
   where
   opcode = case operator of
      Minus {}  -> UNARY_NEGATIVE
      Plus {}   -> UNARY_POSITIVE
      Not {}    -> UNARY_NOT
      Invert {} -> UNARY_INVERT
      other -> error $ "Unexpected unary operator: " ++ prettyText other
from object.h
# define Py_LT 0
# define Py_LE 1
# define Py_EQ 2
# define Py_NE 3
# define Py_GT 4
# define Py_GE 5
and from opcode.h
enum cmp_op { PyCmp_LT = Py_LT , PyCmp_LE = Py_LE , PyCmp_EQ = , PyCmp_NE = Py_NE , PyCmp_GT = Py_GT , = Py_GE ,
PyCmp_IN , PyCmp_NOT_IN , PyCmp_IS , PyCmp_IS_NOT , PyCmp_EXC_MATCH , PyCmp_BAD } ;
from object.h
#define Py_LT 0
#define Py_LE 1
#define Py_EQ 2
#define Py_NE 3
#define Py_GT 4
#define Py_GE 5
and from opcode.h
enum cmp_op {PyCmp_LT=Py_LT, PyCmp_LE=Py_LE, PyCmp_EQ=Py_EQ, PyCmp_NE=Py_NE, PyCmp_GT=Py_GT, PyCmp_GE=Py_GE,
PyCmp_IN, PyCmp_NOT_IN, PyCmp_IS, PyCmp_IS_NOT, PyCmp_EXC_MATCH, PyCmp_BAD};
-}
Operator chaining :
The parser treats comparison operators as left associative .
So : w < x < y < z is parsed as
( ( ( w < x ) < y ) < z )
We want to compile this to :
[ w ]
[ x ]
DUP_TOP # make a copy of the result of x
ROT_THREE # put the copy of [ x ] to the bottom
<
JUMP_IF_FALSE_OR_POP cleanup
[ y ]
DUP_TOP # make a copy of [ y ]
ROT_THREE # put the copy of [ y ] to the bottom
<
JUMP_IF_FALSE_OR_POP cleanup
[ z ]
<
JUMP_FORWARD end
cleanup :
ROT_TWO # put the result of the last comparison on the bottom
# and put the duplicated [ y ] on the top
POP_TOP # remove the duplicated [ y ] from the top
end :
# whatever code follows
The parser treats comparison operators as left associative.
So: w < x < y < z is parsed as
(((w < x) < y) < z)
We want to compile this to:
[w]
[x]
DUP_TOP # make a copy of the result of x
ROT_THREE # put the copy of [x] to the bottom
<
JUMP_IF_FALSE_OR_POP cleanup
[y]
DUP_TOP # make a copy of [y]
ROT_THREE # put the copy of [y] to the bottom
<
JUMP_IF_FALSE_OR_POP cleanup
[z]
<
JUMP_FORWARD end
cleanup:
ROT_TWO # put the result of the last comparison on the bottom
# and put the duplicated [y] on the top
POP_TOP # remove the duplicated [y] from the top
end:
# whatever code follows
-}
-- Compile a comparison expression, including chained comparisons
-- (w < x < y < z); see the commented explanation above for the
-- bytecode layout of chains.
compileCompareOpExpr :: ExprSpan -> Compile ()
compileCompareOpExpr expr@(BinaryOp {}) =
   compileChain numOps chain
   where
   -- the comparison tree flattened into operand/operator alternation
   chain :: [ChainItem]
   chain = flattenComparisonChain [] expr
   -- number of comparison operators in the chain
   numOps :: Int
   numOps = length chain `div` 2
   compileChain :: Int -> [ChainItem] -> Compile ()
   compileChain numOps (Comparator e1 : internal@(Operator op : Comparator e2 : _rest)) = do
      compile e1
      if numOps == 1
         then do
            -- simple (non-chained) comparison
            compile e2
            emitCodeArg COMPARE_OP $ comparisonOpCode op
         else do
            cleanup <- newLabel
            (lastOp, lastArg) <- compileChainInternal cleanup internal
            compile lastArg
            emitCodeArg COMPARE_OP $ comparisonOpCode lastOp
            end <- newLabel
            emitCodeArg JUMP_FORWARD end
            -- cleanup: drop the duplicated intermediate operand,
            -- keeping the False result on the stack
            labelNextInstruction cleanup
            emitCodeNoArg ROT_TWO
            emitCodeNoArg POP_TOP
            labelNextInstruction end
   compileChain _numOps _items = error $ "bad operator chain: " ++ prettyText expr
   -- compile all but the last comparison of the chain; the final
   -- operator and operand are returned for the caller to compile
   compileChainInternal :: Word16 -> [ChainItem] -> Compile (OpSpan, ExprSpan)
   compileChainInternal _cleanup [Operator op, Comparator exp] = return (op, exp)
   compileChainInternal cleanup (Operator op : Comparator e : rest) = do
      compile e
      emitCodeNoArg DUP_TOP
      emitCodeNoArg ROT_THREE
      emitCodeArg COMPARE_OP $ comparisonOpCode op
      emitCodeArg JUMP_IF_FALSE_OR_POP cleanup
      compileChainInternal cleanup rest
   compileChainInternal _cleanup _other = error $ "bad comparison chain: " ++ prettyText expr
   -- CPython's cmp_op codes (see the opcode.h excerpt above)
   comparisonOpCode :: OpSpan -> Word16
   comparisonOpCode (LessThan {}) = 0
   comparisonOpCode (LessThanEquals {}) = 1
   comparisonOpCode (Equality {}) = 2
   comparisonOpCode (NotEquals {}) = 3
   comparisonOpCode (GreaterThan {}) = 4
   comparisonOpCode (GreaterThanEquals {}) = 5
   comparisonOpCode (In {}) = 6
   comparisonOpCode (NotIn {}) = 7
   comparisonOpCode (Is {}) = 8
   comparisonOpCode (IsNot {}) = 9
   -- XXX we don't appear to have an exact match operator in the AST
   comparisonOpCode operator = error $ "Unexpected comparison operator:\n" ++ prettyText operator
compileCompareOpExpr other = error $ "Unexpected comparison operator:\n" ++ prettyText other
-- An element of a flattened comparison chain: an operand or an operator.
data ChainItem = Comparator ExprSpan | Operator OpSpan

-- Flatten a left-associated comparison tree, e.g. (((w < x) < y) < z),
-- into the alternating list [w, <, x, <, y, <, z].
flattenComparisonChain :: [ChainItem] -> ExprSpan -> [ChainItem]
flattenComparisonChain acc opExpr@(BinaryOp {..})
   | isComparison operator
        = flattenComparisonChain newAcc left_op_arg
   | otherwise = [Comparator opExpr] ++ acc
   where
   newAcc = [Operator operator, Comparator right_op_arg] ++ acc
flattenComparisonChain acc other = [Comparator other] ++ acc
-- Emit instructions that return the None constant from the
-- current code object.
returnNone :: Compile ()
returnNone = do
   _ <- compileConstantEmit Blip.None
   emitCodeNoArg RETURN_VALUE
-- Print out the variable scope of the module if requested on the
-- command line.
maybeDumpScope :: Compile ()
maybeDumpScope =
   ifDump DumpScope $
      getNestedScope >>= liftIO . putStrLn . renderScope
-- Print out the AST of the module if requested on the command line.
maybeDumpAST :: ModuleSpan -> Compile ()
maybeDumpAST ast =
   ifDump DumpAST $ do
      liftIO $ putStrLn "Abstract Syntax Tree:"
      liftIO $ print ast
From Cpython : Objects / lnotab_notes.txt
Code objects store a field named co_lnotab . This is an array of unsigned bytes
disguised as a Python string . It is used to map bytecode offsets to source code
line # s for tracebacks and to identify line number boundaries for line tracing .
The array is conceptually a compressed list of
( bytecode offset increment , line number increment )
pairs . The details are important and delicate , best illustrated by example :
byte code offset source code line number
0 1
6 2
50 7
350 307
361 308
Instead of storing these numbers literally , we compress the list by storing only
the increments from one row to the next . Conceptually , the stored list might
look like :
0 , 1 , 6 , 1 , 44 , 5 , 300 , 300 , 11 , 1
The above does n't really work , but it 's a start . Note that an unsigned byte
ca n't hold negative values , or values larger than 255 , and the above example
contains two such values . So we make two tweaks :
( a ) there 's a deep assumption that byte code offsets and their corresponding
line # s both increase monotonically , and
( b ) if at least one column jumps by more than 255 from one row to the next ,
more than one pair is written to the table . In case # b , there 's no way to know
from looking at the table later how many were written . That 's the delicate
part . A user of co_lnotab desiring to find the source line number
corresponding to a bytecode address A should do something like this
lineno = addr = 0
for addr_incr , line_incr in co_lnotab :
addr + = addr_incr
if addr > A :
return lineno
lineno + = line_incr
( In C , this is implemented by ( ) . ) In order for this to work ,
when the addr field increments by more than 255 , the line # increment in each
pair generated must be 0 until the remaining addr increment is < 256 . So , in
the example above , assemble_lnotab in compile.c should not ( as was actually done
until 2.2 ) expand 300 , 300 to
255 , 255 , 45 , 45 ,
but to
255 , 0 , 45 , 255 , 0 , 45 .
From Cpython: Objects/lnotab_notes.txt
Code objects store a field named co_lnotab. This is an array of unsigned bytes
disguised as a Python string. It is used to map bytecode offsets to source code
line #s for tracebacks and to identify line number boundaries for line tracing.
The array is conceptually a compressed list of
(bytecode offset increment, line number increment)
pairs. The details are important and delicate, best illustrated by example:
byte code offset source code line number
0 1
6 2
50 7
350 307
361 308
Instead of storing these numbers literally, we compress the list by storing only
the increments from one row to the next. Conceptually, the stored list might
look like:
0, 1, 6, 1, 44, 5, 300, 300, 11, 1
The above doesn't really work, but it's a start. Note that an unsigned byte
can't hold negative values, or values larger than 255, and the above example
contains two such values. So we make two tweaks:
(a) there's a deep assumption that byte code offsets and their corresponding
line #s both increase monotonically, and
(b) if at least one column jumps by more than 255 from one row to the next,
more than one pair is written to the table. In case #b, there's no way to know
from looking at the table later how many were written. That's the delicate
part. A user of co_lnotab desiring to find the source line number
corresponding to a bytecode address A should do something like this
lineno = addr = 0
for addr_incr, line_incr in co_lnotab:
addr += addr_incr
if addr > A:
return lineno
lineno += line_incr
(In C, this is implemented by PyCode_Addr2Line().) In order for this to work,
when the addr field increments by more than 255, the line # increment in each
pair generated must be 0 until the remaining addr increment is < 256. So, in
the example above, assemble_lnotab in compile.c should not (as was actually done
until 2.2) expand 300, 300 to
255, 255, 45, 45,
but to
255, 0, 45, 255, 0, 45.
-}
-- Returns the bytestring representation of the compressed line number
-- table (co_lnotab); see the Objects/lnotab_notes.txt excerpt above
-- for the encoding rules.
compileLineNumberTable :: Word32 -> Compile PyObject
compileLineNumberTable firstLineNumber = do
   -- the table is accumulated in reverse order of emission
   offsetToLine <- reverse `fmap` getBlockState state_lineNumberTable
   let compressedTable = compress (0, firstLineNumber) offsetToLine
       bs = B.pack $ concat
               [ [fromIntegral offset, fromIntegral line] |
                 (offset, line) <- compressedTable ]
   return Blip.String { string = bs }
   where
   -- turn absolute (offset, line) pairs into per-row increments
   compress :: (Word16, Word32) -> [(Word16, Word32)] -> [(Word16, Word32)]
   compress _prev [] = []
   compress (prevOffset, prevLine) (next@(nextOffset, nextLine):rest)
      -- make sure all increments are non-negative
      -- skipping any entries which are less than the predecessor
      | nextLine < prevLine || nextOffset < prevOffset =
           compress (prevOffset, prevLine) rest
      | otherwise = chunkDeltas (offsetDelta, lineDelta) ++ compress next rest
      where
      offsetDelta = nextOffset - prevOffset
      lineDelta = nextLine - prevLine
   -- both offsetDelta and lineDelta must be non-negative;
   -- split any delta >= 256 across multiple byte pairs
   chunkDeltas :: (Word16, Word32) -> [(Word16, Word32)]
   chunkDeltas (offsetDelta, lineDelta)
      | offsetDelta < 256 =
           if lineDelta < 256
              then [(offsetDelta, lineDelta)]
              else (offsetDelta, 255) : chunkDeltas (0, lineDelta - 255)
      -- we must wait until offsetDelta is less than 256 before reducing lineDelta
      | otherwise = (255, 0) : chunkDeltas (offsetDelta - 255, lineDelta)
| null | https://raw.githubusercontent.com/bjpop/blip/3d9105a44d1afb7bd007da3742fb19dc69372e10/blipcompiler/src/Blip/Compiler/Compile.hs | haskell | ---------------------------------------------------------------------------
|
Module : Blip.Compiler.Compile
License : BSD-style
Maintainer :
Stability : experimental
Basic algorithm:
The following Python constructs are compiled into code objects:
- The top-level of the module.
- Function definitions (def and lambda).
- Class definitions.
The statements and expressions in each of the above constructs are
recursively compiled into bytecode instructions. Initially, the actual
addresses of jump instruction targets are not known. Instead the jump
targets are just labels. At the end of the compilation of each
construct the labelled instructions are converted into jumps to
over the bytecode stream).
We currently make no attempt to optimise the generated code.
language, and no explict control-flow graph.
---------------------------------------------------------------------------
The unix package is depended on by the directory package.
Compile the input from the REPL command line to an object.
pretend that the statements are a module on their own to calculate the variable scope
If the statement entered at the REPL is an expression, then
we try to print it out.
We transform an expression E into:
_ = E
print(_)
XXX if the result of E is None then we should not print it out,
to be consistent with CPython.
Want something like this:
try:
_ = E
catch Exception as e:
stackTrace e
elif _ is not None:
print(e)
Compile Python source code to bytecode, returing a representation
of a .pyc file contents.
Configuration options
The file path of the input Python source
modifiedTime <- getModificationTime path
canonicalPath <- canonicalizePath path
Parse the Python source from a File into an AST, check for any syntax errors.
initial compiler state
modification time
size in bytes
body of module, function and class
XXX we could avoid this 'return None' if all branches in the code
ended with a return statement. Can fix this in an optimisation step
with control flow analysis.
Build an object from all the state computed during compilation, such
as the bytecode sequence, variable information and so on.
XXX make a better error message
avoids re-doing the above two later when we store
avoids re-doing the above two later when we store
XXX assertions appear to be turned off if the code is compiled
for optimisation
it is always True: CPython warns about this
XXX need to handle from __future__
XXX this should be the level of nesting
XXX should check that we are inside a loop
keep blocking the frame block stack until we either find
a loop entry, otherwise generate an error
desugar with statements containing multiple contexts into nested
with statements containing single contexts
for the lambda
assert (length dottedNames > 0)
L1
pushes handler onto block stack
<code for S>
pops handler off block stack
L0
L0: <next statement>
Compile a sequence of exception handlers
Ln+1, # re-raise exception
pop the traceback (tb) off the stack
<code for S1, S2 ..>
pop handler off the block stack
pop exc off the stack
pop val off the stack
duplicate exc on stack
<evaluate E1>
pop True/False and if no match try next handler
pop exc off the stack
pop val off the stack
XXX we should del this name at the end.
The code for an exact match operator.
push each of the decorators on the stack
run the enclosed computation
call each of the decorators
save the current block state
set the new block state to initial values, and the
scope of the current definition
set the new object name
run the nested computation
restore the original block state
Compile a function definition, possibly with decorators.
Compile a class definition, possibly with decorators.
classes have a special argument called __locals__
it is the only argument they have in the byte code, but it
does not come from the source code, so we have to add it.
XXX CPython uses a "qualified" name for the code object. For instance
nested functions look like "f.<locals>.g", whereas we currently use
just "g".
The free variables in a code object will either be cell variables
or free variables in the enclosing object. If there are no free
variables then we can avoid building the closure, and just make the function.
get the list of free variables from the code object
we don't use emitReadVar because it would generate
LOAD_DEREF instructions, but we want LOAD_CLOSURE
instead.
Compile default parameters and return how many there are
Compile a 'from module import'.
XXX what to do about the initial dots?
compile multiple possible assignments:
x = y = z = rhs
we can assume that the parser has only accepted the appropriate
subset of expression types
manifest list or tuple, avoiding the building list/tuple
only to deconstruct again
Discard result from context.__enter__()
by the interpreter for the docstring. If there is no docstring
XXX what if another __doc__ is in scope?
Compile a conditional guard
a (possibly nested) for loop, then call the function.
Convert a constant expression into the equivalent object. This
only works for expressions which have a counterpart in the object
representation used in .pyc files.
XXX we could optimise the case where we have 'float + imaginary j',
the addition operation.
assumes all the tuple elements are constant
error $ show $ map normaliseString byte_string_strings
need to be removed, we have to remove single or triple quotes.
We assume the parser has correctly matched the quotes.
and need to be converted back into single characters.
Compile the arguments to a call and
numExtraArgs counts any additional arguments the function
might have been applied to, which is necessary for classes
which get extra arguments beyond the ones mentioned in the
program source.
Compile the arguments to a function call and return the number
of positional arguments, and the number of keyword arguments.
XXX need to handle extended slices, slice expressions and ellipsis
Not sure about this, maybe it is None
Return the opcode for a given assignment operator.
the right argument should be treated like name variable
the right argument should be treated like name variable
XXX we don't appear to have an exact match operator in the AST
Emit an instruction that returns the None contant.
Print out the variable scope of the module if requested on the command line.
Print out the AST of the module if requested on the command line.
Returns the bytestring representation of the compressed line number table
make sure all increments are non-negative
skipping any entries which are less than the predecessor | # LANGUAGE TypeFamilies , TypeSynonymInstances , FlexibleInstances ,
PatternGuards , RecordWildCards #
PatternGuards, RecordWildCards #-}
Copyright : ( c ) 2012 , 2013 , 2014
Portability : ghc
Compilation of Python 3 source code into bytecode .
1 ) Parse the source code into an AST .
2 ) Compute the scope of all variables in the module
( one pass over the AST ) .
3 ) Compile the AST for the whole module into a ( possibly nested )
code object ( one pass over the AST ) .
4 ) Write the code object to a .pyc file .
- .
actual addresses ( one pass over the bytecode stream ) .
Also the maximum stack size of each code object is computed ( one pass
Bytecode is generated directly from the AST , there is no intermediate
module Blip.Compiler.Compile
(compileFile, compileReplInput, writePycFile)
where
import Prelude hiding (mapM)
import Blip.Compiler.Desugar (desugarComprehension, desugarWith, resultName)
import Blip.Compiler.Utils
( isPureExpr, isPyObjectExpr, mkAssignVar, mkList
, mkVar, mkMethodCall, mkStmtExpr, mkSet, mkDict, mkAssign
, mkSubscript, mkReturn, mkYield, spanToScopeIdentifier )
import Blip.Compiler.StackDepth (maxStackDepth)
import Blip.Compiler.State
( setBlockState, getBlockState, initBlockState, initState
, emitCodeNoArg, emitCodeArg, compileConstantEmit
, compileConstant, getFileName, newLabel, labelNextInstruction
, getObjectName, setObjectName
, getNestedScope, ifDump, getLocalScope
, indexedVarSetKeys, emitReadVar, emitWriteVar, emitDeleteVar
, lookupNameVar, lookupClosureVar, setFlag
, peekFrameBlock, withFrameBlock, setFastLocals, setArgCount
, setLineNumber, setFirstLineNumber )
import Blip.Compiler.Assemble (assemble)
import Blip.Compiler.Monad (Compile (..), runCompileMonad)
import Blip.Compiler.Types
( Identifier, CompileConfig (..)
, CompileState (..), BlockState (..)
, AnnotatedCode (..), Dumpable (..), IndexedVarSet, VarInfo (..)
, FrameBlockInfo (..), Context (..), ParameterTypes (..), LocalScope (..) )
import Blip.Compiler.Scope (topScope, renderScope)
import Blip.Marshal as Blip
( writePyc, PycFile (..), PyObject (..), co_generator )
import Blip.Bytecode (Opcode (..), encode)
import Language.Python.Version3.Parser (parseModule, parseStmt)
import Language.Python.Common.AST as AST
( Annotated (..), ModuleSpan, Module (..), StatementSpan, Statement (..)
, ExprSpan, Expr (..), Ident (..), ArgumentSpan, Argument (..)
, OpSpan, Op (..), Handler (..), HandlerSpan, ExceptClause (..)
, ExceptClauseSpan, ImportItem (..), ImportItemSpan, ImportRelative (..)
, ImportRelativeSpan, FromItems (..), FromItemsSpan, FromItem (..)
, FromItemSpan, DecoratorSpan, Decorator (..), ComprehensionSpan
, Comprehension (..), SliceSpan, Slice (..), AssignOpSpan, AssignOp (..)
, ComprehensionExpr (..), ComprehensionExprSpan
, ParameterSpan, Parameter (..), RaiseExpr (..), RaiseExprSpan
, DictKeyDatumList(DictMappingPair), YieldArg (..), YieldArgSpan )
import Language.Python.Common (prettyText)
import Language.Python.Common.StringEscape (unescapeString)
import Language.Python.Common.SrcLocation (SrcSpan (..))
import System.FilePath ((<.>), takeBaseName)
XXX Commented out to avoid bug in unix package when building on OS X ,
import System . Directory ( getModificationTime , canonicalizePath )
import System . Time ( ClockTime ( .. ) )
import System.IO
   ( openFile, withFile, IOMode (..), hClose, hFileSize, hGetContents )
import Data.Word (Word32, Word16)
import Data.Int (Int32)
import Data.Traversable as Traversable (mapM)
import qualified Data.ByteString.Lazy as B (pack)
import Data.String (fromString)
import Data.List (intersperse)
import Control.Monad (unless, forM_, when, replicateM_, foldM)
import Control.Monad.Trans (liftIO)
import Data.Bits ((.|.), shiftL)
-- | Compile one chunk of REPL input into a code object.  Bare
-- expression statements are wrapped so the REPL echoes their value.
compileReplInput :: CompileConfig -> String -> IO PyObject
compileReplInput config replString = do
   stmts <- parseStmtAndCheckErrors replString
   let printWrapped = wrapWithPrint stmts
   -- scope analysis over the wrapped statements, as for a module
   (moduleLocals, nestedScope) <- topScope $ Module printWrapped
   let state = initState ModuleContext moduleLocals
                  nestedScope config ""
   compileReplStmts state printWrapped
-- Support for REPL printing of expressions.
-- | Rewrite a single bare expression statement @e@ into
-- @_ = e; print(_)@ so the REPL echoes its value; any other
-- statement list is returned unchanged.
wrapWithPrint :: [StatementSpan] -> [StatementSpan]
wrapWithPrint [StmtExpr {..}] = [bindUnderscore, echoUnderscore]
   where
   noSpan = SpanEmpty
   mkIdent str = Ident { ident_string = str, ident_annot = noSpan }
   underscore = Var { var_ident = mkIdent "_", expr_annot = noSpan }
   bindUnderscore =
      Assign { assign_to = [underscore]
             , assign_expr = stmt_expr
             , stmt_annot = noSpan }
   printFun = Var { var_ident = mkIdent "print", expr_annot = noSpan }
   callPrint =
      Call { call_fun = printFun
           , call_args = [ArgExpr { arg_expr = underscore
                                  , arg_annot = noSpan }]
           , expr_annot = noSpan }
   echoUnderscore = StmtExpr { stmt_expr = callPrint, stmt_annot = noSpan }
wrapWithPrint other = other
-- | Compile a Python source file to an in-memory .pyc structure.
-- NOTE(review): the first line of this type signature was missing in
-- this copy of the source; reconstructed from the use sites.
compileFile :: CompileConfig -> FilePath -> IO PycFile
compileFile config path = do
   pyHandle <- openFile path ReadMode
   sizeInBytes <- hFileSize pyHandle
   -- hGetContents reads lazily and closes the handle when the
   -- contents are fully consumed, so we must not hClose here.
   fileContents <- hGetContents pyHandle
   -- XXX modification time is stubbed out (commented out upstream to
   -- avoid a bug in the unix package when building on OS X); a real
   -- implementation would use getModificationTime here.
   let modSeconds = (0 :: Integer)
   pyModule <- parseFileAndCheckErrors fileContents path
   (moduleLocals, nestedScope) <- topScope pyModule
   -- canonicalizePath is stubbed out for the same reason
   canonicalPath <- return path
   let state = initState ModuleContext moduleLocals
                  nestedScope config canonicalPath
   pyc <- compileModule state (fromIntegral modSeconds)
             (fromIntegral sizeInBytes) pyModule
   return pyc
-- | Serialise a 'PycFile' to disk as @<base>.pyc@ (in the current
-- directory).  'withFile' guarantees the handle is closed even when
-- 'writePyc' throws, fixing a handle leak in the original.
writePycFile :: PycFile -> FilePath -> IO ()
writePycFile pyc path = do
   let pycFilePath = takeBaseName path <.> ".pyc"
   withFile pycFilePath WriteMode $ \pycHandle ->
      writePyc pycHandle pyc
-- Parse the Python source from a statement into an AST, check for any
-- syntax errors.
parseStmtAndCheckErrors :: String -> IO [StatementSpan]
parseStmtAndCheckErrors stmtString =
   either bail (return . fst) (parseStmt stmtString "<stdin>")
   where
   bail err = error ("parse error: " ++ prettyText err)
-- | Parse a whole Python source file into an AST, aborting with the
-- pretty-printed error on a syntax error.
parseFileAndCheckErrors :: String -> FilePath -> IO ModuleSpan
parseFileAndCheckErrors contents sourceName =
   either bail (return . fst) (parseModule contents sourceName)
   where
   bail err = error ("parse error: " ++ prettyText err)
-- | Compile a parsed module AST into a 'PycFile' wrapping the
-- top-level code object.
-- NOTE(review): the first line of this type signature was missing in
-- this copy of the source; the Word32 argument types are reconstructed
-- from the 'PycFile' fields -- confirm against upstream.
compileModule :: CompileState -> Word32 -> Word32 -> ModuleSpan -> IO PycFile
compileModule state pyFileModifiedTime pyFileSizeBytes mod = do
   obj <- compiler mod state
   return $ PycFile
      { magic = compileConfig_magic $ state_config state
      , modified_time = pyFileModifiedTime
      , size = pyFileSizeBytes
      , object = obj }
-- | Compile a list of REPL statements with the given initial state.
compileReplStmts :: CompileState -> [StatementSpan] -> IO PyObject
compileReplStmts state = flip compiler state . Body
-- | Run the compile monad over anything 'Compilable'.
compiler :: Compilable a => a -> CompileState -> IO (CompileResult a)
compiler = runCompileMonad . compile
-- | Things which can be compiled; each instance chooses its own
-- result type via the associated type family.
class Compilable a where
   type CompileResult a :: *
   compile :: a -> Compile (CompileResult a)
-- | A list is compiled element-wise, in order.
instance Compilable a => Compilable [a] where
   type CompileResult [a] = [CompileResult a]
   compile = mapM compile
instance Compilable ModuleSpan where
   type CompileResult ModuleSpan = PyObject
   compile ast@(Module stmts) = do
      -- optionally dump the variable scope and the AST
      -- (controlled by command-line flags)
      maybeDumpScope
      maybeDumpAST ast
      setObjectName "<module>"
      -- a module-level docstring is bound to __doc__
      compileClassModuleDocString stmts
      compile $ Body stmts
-- | A suite of statements compiled as the body of one code object.
newtype Body = Body [StatementSpan]

instance Compilable Body where
   type CompileResult Body = PyObject
   compile (Body stmts) = do
      mapM_ compile stmts
      -- every code object ends by returning None
      returnNone
      assemble
      makeObject
-- | Build the final code object from the accumulated block state.
-- argcount is the number of arguments, not counting *varargs or
-- **kwargs.
makeObject :: Compile PyObject
makeObject = do
   annotatedCode <- getBlockState state_instructions
   let stackDepth = maxStackDepth annotatedCode
   names <- getBlockState state_names
   constants <- getBlockState state_constants
   freeVars <- getBlockState state_freeVars
   cellVars <- getBlockState state_cellVars
   argcount <- getBlockState state_argcount
   flags <- getBlockState state_flags
   fastLocals <- getBlockState state_fastLocals
   firstLineNumber <- getBlockState state_firstLineNumber
   lineNumberTable <- compileLineNumberTable firstLineNumber
   let code = map annotatedCode_bytecode annotatedCode
       localVarNames = map Unicode $ indexedVarSetKeys fastLocals
       -- the stack size field is bounded; exceeding it is fatal
       -- (this local shadows the imported maxStackDepth function)
       maxStackDepth = maxBound
   if stackDepth > maxStackDepth
      then error $ "Maximum stack depth " ++ show maxStackDepth ++
                   " exceeded: " ++ show stackDepth
      else do
         pyFileName <- getFileName
         objectName <- getObjectName
         let obj = Code
                   { argcount = argcount
                   , kwonlyargcount = 0
                   , nlocals = fromIntegral $ length localVarNames
                   , stacksize = stackDepth
                   , flags = flags
                   , code = String $ encode code
                   , consts = makeConstants constants
                   , names = makeNames names
                   , varnames = Blip.Tuple localVarNames
                   , freevars = makeVarSetTuple freeVars
                   , cellvars = makeVarSetTuple cellVars
                   , filename = Unicode pyFileName
                   , name = Unicode objectName
                   , firstlineno = firstLineNumber
                   , lnotab = lineNumberTable
                   }
         return obj
   where
   makeVarSetTuple :: IndexedVarSet -> PyObject
   makeVarSetTuple varSet =
      Blip.Tuple $ map Unicode $ indexedVarSetKeys varSet
   -- constants and names are accumulated in reverse order
   makeConstants :: [PyObject] -> PyObject
   makeConstants = Blip.Tuple . reverse
   makeNames :: [Identifier] -> PyObject
   makeNames = Blip.Tuple . map Unicode . reverse
instance Compilable StatementSpan where
   type CompileResult StatementSpan = ()
   compile stmt =
      -- record the source line for the trace-back table
      setLineNumber (annot stmt) >>
      compileStmt stmt
-- | Compile a single statement to bytecode.
compileStmt :: StatementSpan -> Compile ()
compileStmt (Assign {..}) = do
   -- evaluate the rhs once, then store into each target
   compile assign_expr
   compileAssignments assign_to
compileStmt (AugmentedAssign {..}) =
   case aug_assign_to of
      Var {..} -> do
         let varIdent = ident_string var_ident
         emitReadVar varIdent
         compile aug_assign_expr
         compile aug_assign_op
         emitWriteVar varIdent
      Subscript {..} -> do
         -- NOTE(review): a DUP of the object/index pair before
         -- BINARY_SUBSCR may have been lost from this copy of the
         -- source (STORE_SUBSCR below needs them again) -- confirm
         -- against upstream.
         compile subscriptee
         compile subscript_expr
         emitCodeNoArg BINARY_SUBSCR
         compile aug_assign_expr
         compile aug_assign_op
         emitCodeNoArg ROT_THREE
         emitCodeNoArg STORE_SUBSCR
      SlicedExpr {..} -> do
         compile slicee
         compileSlices slices
         emitCodeNoArg BINARY_SUBSCR
         compile aug_assign_expr
         compile aug_assign_op
         emitCodeNoArg ROT_THREE
         emitCodeNoArg STORE_SUBSCR
      expr@(Dot {..}) -> do
         compile dot_expr
         emitCodeNoArg DUP_TOP
         index <- lookupNameVar $ ident_string $ dot_attribute
         emitCodeArg LOAD_ATTR index
         compile aug_assign_expr
         compile aug_assign_op
         emitCodeNoArg ROT_TWO
         emitCodeArg STORE_ATTR index
      {- superseded BinaryOp/Dot version, kept for reference:
      expr@(BinaryOp { operator = Dot {}, right_op_arg = Var {..}}) -> do
         compile $ left_op_arg expr
         emitCodeNoArg DUP_TOP
         index <- lookupNameVar $ ident_string $ var_ident
         emitCodeArg LOAD_ATTR index
         compile aug_assign_expr
         compile aug_assign_op
         emitCodeNoArg ROT_TWO
         emitCodeArg STORE_ATTR index
      -}
      other -> error $ "unexpected expression in augmented assignment: " ++ prettyText other
compileStmt (Return { return_expr = Nothing }) = returnNone
compileStmt (Return { return_expr = Just expr }) =
   compile expr >> emitCodeNoArg RETURN_VALUE
compileStmt (Pass {}) = return ()
compileStmt (StmtExpr {..}) =
   -- a pure expression statement has no effect; skip it entirely
   unless (isPureExpr stmt_expr) $
      compile stmt_expr >> emitCodeNoArg POP_TOP
compileStmt (Conditional {..}) = do
   restLabel <- newLabel
   mapM_ (compileGuard restLabel) cond_guards
   mapM_ compile cond_else
   labelNextInstruction restLabel
compileStmt (While {..}) = do
   startLoop <- newLabel
   endLoop <- newLabel
   anchor <- newLabel
   emitCodeArg SETUP_LOOP endLoop
   withFrameBlock (FrameBlockLoop startLoop) $ do
      labelNextInstruction startLoop
      compile while_cond
      emitCodeArg POP_JUMP_IF_FALSE anchor
      mapM_ compile while_body
      emitCodeArg JUMP_ABSOLUTE startLoop
      labelNextInstruction anchor
      emitCodeNoArg POP_BLOCK
   -- the else suite runs when the loop exits without break
   mapM_ compile while_else
   labelNextInstruction endLoop
compileStmt (For {..}) = do
   startLoop <- newLabel
   endLoop <- newLabel
   withFrameBlock (FrameBlockLoop startLoop) $ do
      anchor <- newLabel
      emitCodeArg SETUP_LOOP endLoop
      compile for_generator
      emitCodeNoArg GET_ITER
      labelNextInstruction startLoop
      emitCodeArg FOR_ITER anchor
      let num_targets = length for_targets
      -- multiple targets require unpacking the yielded value
      when (num_targets > 1) $ do
         emitCodeArg UNPACK_SEQUENCE $ fromIntegral num_targets
      mapM_ compileAssignTo for_targets
      mapM_ compile for_body
      emitCodeArg JUMP_ABSOLUTE startLoop
      labelNextInstruction anchor
      emitCodeNoArg POP_BLOCK
   mapM_ compile for_else
   labelNextInstruction endLoop
compileStmt stmt@(Fun {..}) = compileFun stmt []
compileStmt stmt@(Class {..}) = compileClass stmt []
-- If the assertion expression is a tuple of non-zero length, then
-- ... (comment truncated in this copy of the source)
compileStmt (Assert {..}) = do
   case assert_exprs of
      test_expr:restAssertExprs -> do
         compile test_expr
         end <- newLabel
         emitCodeArg POP_JUMP_IF_TRUE end
         assertionErrorVar <- lookupNameVar "AssertionError"
         emitCodeArg LOAD_GLOBAL assertionErrorVar
         -- an optional second expression is the assertion message
         case restAssertExprs of
            assertMsgExpr:_ -> do
               compile assertMsgExpr
               emitCodeArg CALL_FUNCTION 1
            _other -> return ()
         emitCodeArg RAISE_VARARGS 1
         labelNextInstruction end
      _other -> error "assert with no test"
compileStmt stmt@(Try {..}) = compileTry stmt
compileStmt (Import {..}) = mapM_ compile import_items
compileStmt (FromImport {..}) = do
   -- push the relative-import level and the tuple of names, then
   -- IMPORT_NAME on the module.
   -- NOTE(review): 'level' is not bound in the visible text; its
   -- definition appears to have been lost from this copy -- restore
   -- from upstream before use.
   compileConstantEmit $ Blip.Int level
   let names = fromItemsIdentifiers from_items
       namesTuple = Blip.Tuple $ map Unicode names
   compileConstantEmit namesTuple
   compileFromModule from_module
   case from_items of
      ImportEverything {} -> do
         emitCodeNoArg IMPORT_STAR
      FromItems {..} -> do
         forM_ from_items_items $ \FromItem {..} -> do
            index <- lookupNameVar $ ident_string from_item_name
            emitCodeArg IMPORT_FROM index
            let storeName = case from_as_name of
                   Nothing -> from_item_name
                   Just asName -> asName
            emitWriteVar $ ident_string storeName
         -- pop the module object pushed by IMPORT_NAME
         emitCodeNoArg POP_TOP
compileStmt (Break {}) = emitCodeNoArg BREAK_LOOP
compileStmt (Continue {}) = do
   maybeFrameBlockInfo <- peekFrameBlock
   case maybeFrameBlockInfo of
      Nothing -> error loopError
      Just (FrameBlockLoop label) -> emitCodeArg JUMP_ABSOLUTE label
      Just FrameBlockFinallyEnd ->
         error finallyError
      Just _other -> checkFrameBlocks
   where
   -- search enclosing frame blocks for the loop to continue
   -- NOTE(review): this peeks repeatedly without popping; a popping
   -- step may have been lost from this copy -- confirm upstream.
   checkFrameBlocks :: Compile ()
   checkFrameBlocks = do
      maybeFrameBlockInfo <- peekFrameBlock
      case maybeFrameBlockInfo of
         Nothing -> error loopError
         Just FrameBlockFinallyEnd -> error finallyError
         Just (FrameBlockLoop label) ->
            emitCodeArg CONTINUE_LOOP label
         Just _other -> checkFrameBlocks
   loopError = "'continue' not properly in loop"
   finallyError = "'continue' not supported inside 'finally' clause"
-- nonlocal and global affect only scope analysis, not code generation
compileStmt (NonLocal {}) = return ()
compileStmt (Global {}) = return ()
compileStmt (Decorated {..}) =
   case decorated_def of
      Fun {} -> compileFun decorated_def decorated_decorators
      Class {} -> compileClass decorated_def decorated_decorators
      other -> error $ "Decorated statement is not a function or a class: " ++ prettyText other
compileStmt (Delete {..}) = mapM_ compileDelete del_exprs
compileStmt stmt@(With {..})
   -- multi-context with is desugared into nested single-context withs
   | length with_context > 1 = compileWith $ desugarWith stmt
   | otherwise = compileWith stmt
compileStmt (Raise {..}) = compile raise_expr
compileStmt other = error $ "Unsupported statement:\n" ++ prettyText other
instance Compilable ExprSpan where
   type CompileResult ExprSpan = ()
   compile expr =
      -- record the source line for the trace-back table
      setLineNumber (annot expr) >>
      compileExpr expr
-- | Compile a single expression, leaving its value on the stack.
compileExpr :: ExprSpan -> Compile ()
compileExpr (Var { var_ident = ident }) = do
   emitReadVar $ ident_string ident
-- literal constants are emitted via the constants table
compileExpr expr@(AST.Strings {}) =
   compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.ByteStrings {}) =
   compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.Int {}) =
   compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.Float {}) =
   compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.Imaginary {}) =
   compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.Bool {}) =
   compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.None {}) =
   compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.Ellipsis {}) =
   compileConstantEmit $ constantToPyObject expr
compileExpr (AST.Paren {..}) = compile paren_expr
compileExpr (AST.CondExpr {..}) = do
   compile ce_condition
   falseLabel <- newLabel
   emitCodeArg POP_JUMP_IF_FALSE falseLabel
   compile ce_true_branch
   restLabel <- newLabel
   emitCodeArg JUMP_FORWARD restLabel
   labelNextInstruction falseLabel
   compile ce_false_branch
   labelNextInstruction restLabel
compileExpr expr@(AST.Tuple {..})
   -- a tuple of constants becomes a single constant
   | isPyObjectExpr expr =
        compileConstantEmit $ constantToPyObject expr
   | otherwise = do
        mapM_ compile tuple_exprs
        emitCodeArg BUILD_TUPLE $ fromIntegral $ length tuple_exprs
compileExpr (AST.List {..}) = do
   mapM_ compile list_exprs
   emitCodeArg BUILD_LIST $ fromIntegral $ length list_exprs
compileExpr (AST.Set {..}) = do
   mapM_ compile set_exprs
   emitCodeArg BUILD_SET $ fromIntegral $ length set_exprs
compileExpr (Dictionary {..}) = do
   emitCodeArg BUILD_MAP $ fromIntegral $ length dict_mappings
   forM_ dict_mappings $ \(DictMappingPair key value) -> do
      compile value
      compile key
      emitCodeNoArg STORE_MAP
-- comprehensions are desugared into a nested function over a loop
compileExpr (ListComp {..}) = do
   -- result = []; for ...: result.append(e); return result
   let initStmt = [mkAssignVar resultName (mkList [])]
       updater = \(ComprehensionExpr expr) -> mkStmtExpr $ mkMethodCall (mkVar $ resultName) "append" expr
       returnStmt = [mkReturn $ mkVar $ resultName]
   compileComprehension "<listcomp>" initStmt updater returnStmt list_comprehension
compileExpr (SetComp {..}) = do
   let initStmt = [mkAssignVar resultName (mkSet [])]
       updater = \(ComprehensionExpr expr) -> mkStmtExpr $ mkMethodCall (mkVar $ resultName) "add" expr
       returnStmt = [mkReturn $ mkVar $ resultName]
   compileComprehension "<setcomp>" initStmt updater returnStmt set_comprehension
compileExpr (DictComp {..}) = do
   let initStmt = [mkAssignVar resultName (mkDict [])]
       updater = \(ComprehensionDict (DictMappingPair key val)) ->
          mkAssign (mkSubscript (mkVar $ resultName) key) val
       returnStmt = [mkReturn $ mkVar $ resultName]
   compileComprehension "<dictcomp>" initStmt updater returnStmt dict_comprehension
compileExpr (Generator {..}) = do
   let updater = \(ComprehensionExpr expr) -> mkStmtExpr $ mkYield expr
   compileComprehension "<gencomp>" [] updater [] gen_comprehension
-- any yield marks the enclosing code object as a generator
compileExpr (Yield { yield_arg = Nothing }) =
   compileConstantEmit Blip.None >> emitCodeNoArg YIELD_VALUE >> setFlag co_generator
compileExpr (Yield { yield_arg = Just (YieldExpr expr) }) =
   compile expr >> emitCodeNoArg YIELD_VALUE >> setFlag co_generator
compileExpr e@(Yield { yield_arg = Just (YieldFrom expr _) })
   = error $ "yield from not supported: " ++ show e
compileExpr (Call {..}) = do
   compile call_fun
   compileCall 0 call_args
compileExpr (Subscript {..}) = do
   compile subscriptee
   compile subscript_expr
   emitCodeNoArg BINARY_SUBSCR
compileExpr (SlicedExpr {..}) = do
   compile slicee
   compileSlices slices
   emitCodeNoArg BINARY_SUBSCR
compileExpr (Dot {..}) = do
   compile dot_expr
   varInfo <- lookupNameVar $ ident_string dot_attribute
   emitCodeArg LOAD_ATTR varInfo
compileExpr exp@(BinaryOp {..})
   -- boolean and comparison operators need special control flow
   | isBoolean operator = compileBoolOpExpr exp
   | isComparison operator = compileCompareOpExpr exp
   | otherwise = do
        compile left_op_arg
        compile right_op_arg
        compileOp operator
compileExpr (UnaryOp {..}) = do
   compile op_arg
   compileUnaryOp operator
compileExpr (Lambda {..}) = do
   funBodyObj <- nestedBlock FunctionContext expr_annot $ do
      -- make the first constant None, to indicate no doc string
      _ <- compileConstant Blip.None
      compile lambda_body
      emitCodeNoArg RETURN_VALUE
      assemble
      makeObject
   numDefaults <- compileDefaultParams lambda_args
   compileClosure "<lambda>" funBodyObj numDefaults
compileExpr other = error $ "Unsupported expr:\n" ++ prettyText other
instance Compilable AssignOpSpan where
   type CompileResult AssignOpSpan = ()
   -- an augmented-assignment operator is a single in-place opcode
   compile = emitCodeNoArg . assignOpCode
instance Compilable DecoratorSpan where
   type CompileResult DecoratorSpan = ()
   compile dec@(Decorator {..}) = do
      compileDottedName decorator_name
      -- NOTE(review): despite the name, this counts the decorator's
      -- *arguments*; a decorator written with an argument list is
      -- itself compiled as a call.
      let numDecorators = length decorator_args
      when (numDecorators > 0) $
         compileCall 0 decorator_args
      where
      -- load a dotted decorator name: first component is a variable
      -- read, the rest are attribute loads
      compileDottedName (name:rest) = do
         emitReadVar $ ident_string name
         forM_ rest $ \var -> do
            index <- lookupNameVar $ ident_string var
            emitCodeArg LOAD_ATTR index
      compileDottedName [] =
         error $ "decorator with no name: " ++ prettyText dec
instance Compilable ArgumentSpan where
   type CompileResult ArgumentSpan = ()
   -- only plain positional arguments are supported here
   compile (ArgExpr {..}) = compile arg_expr
   compile other = error $ "Unsupported argument:\n" ++ prettyText other
instance Compilable ImportItemSpan where
   type CompileResult ImportItemSpan = ()
   compile (ImportItem {..}) = do
      -- IMPORT_NAME pops the relative-import level and the fromlist;
      -- the level always seems to be zero for plain imports.
      -- NOTE(review): the level-pushing line was missing from this
      -- copy of the source (only its comment survived); reconstructed
      -- from the IMPORT_NAME stack protocol -- confirm upstream.
      compileConstantEmit $ Blip.Int 0
      compileConstantEmit Blip.None
      let dottedNames = map ident_string import_item_name
      let dottedNameStr =
             concat $ intersperse "." dottedNames
      index <- lookupNameVar dottedNameStr
      emitCodeArg IMPORT_NAME index
      storeName <-
         case import_as_name of
            Nothing -> return $ head import_item_name
            Just asName -> do
               -- "import a.b as c" walks the attribute chain before
               -- binding the final module to the as-name
               forM_ (tail dottedNames) $ \attribute -> do
                  index <- lookupNameVar attribute
                  emitCodeArg LOAD_ATTR index
               return asName
      emitWriteVar $ ident_string storeName
instance Compilable RaiseExprSpan where
   type CompileResult RaiseExprSpan = ()
   compile (RaiseV3 maybeRaiseArg) = do
      -- RAISE_VARARGS takes the count of operands pushed (0, 1 or 2:
      -- bare raise, raise E, raise E from F)
      n <- case maybeRaiseArg of
         Nothing -> return 0
         Just (raiseExpr, maybeFrom) -> do
            compile raiseExpr
            case maybeFrom of
               Nothing -> return 1
               Just fromExpr -> do
                  compile fromExpr
                  return 2
      emitCodeArg RAISE_VARARGS n
   compile stmt@(RaiseV2 _) =
      error $ "Python version 2 raise statement encountered: " ++ prettyText stmt
{- From CPython compile.c
Code generated for "try: S except E1 as V1: S1 except E2 as V2: S2 ...":
(The contents of the value stack is shown in [], with the top
at the right; 'tb' is trace-back info, 'val' the exception's
associated value, and 'exc' the exception.)

Value stack                Label  Instruction       Argument
[]                                SETUP_EXCEPT      L1
[]                                <code for S>
[]                                POP_BLOCK
[]                                JUMP_FORWARD      L0

[tb, val, exc]             L1:    DUP                          )
[tb, val, exc, exc]               <evaluate E1>                )
[tb, val, exc, exc, E1]           COMPARE_OP EXC_MATCH         ) only if E1
[tb, val, exc, 1-or-0]            POP_JUMP_IF_FALSE L2         )
[tb, val, exc]                    POP
[tb, val]                         <assign to V1>    (or POP if no V1)
[tb]                              POP
[]                                <code for S1>
                                  POP_EXCEPT
                                  JUMP_FORWARD      L0

[tb, val, exc]             L2:    DUP
.............................etc.......................

[tb, val, exc]             Ln+1:  END_FINALLY       # re-raise exception

[]                         L0:    <next statement>

Of course, parts are not generated if Vi or Ei is not present.
-}
-- | Compile try/except/finally: delegate to the finally wrapper when
-- a finally clause is present, otherwise compile try/except directly.
-- (Uses 'null' instead of the non-idiomatic @length ... == 0@.)
compileTry :: StatementSpan -> Compile ()
compileTry stmt@(Try {..})
   | null try_finally = compileTryExcept stmt
   | otherwise = compileTryFinally stmt
compileTry other =
   error $ "Unexpected statement when compiling a try-except: " ++ prettyText other
-- | Compile @try ... finally@: the protected body (which may itself
-- be a try/except) runs under SETUP_FINALLY; the finally suite ends
-- with END_FINALLY which re-raises any pending exception.
-- (Uses @not . null@ instead of the non-idiomatic @length ... > 0@.)
compileTryFinally :: StatementSpan -> Compile ()
compileTryFinally stmt@(Try {..}) = do
   end <- newLabel
   emitCodeArg SETUP_FINALLY end
   body <- newLabel
   labelNextInstruction body
   withFrameBlock FrameBlockFinallyTry $ do
      if not (null try_excepts)
         then compileTryExcept stmt
         else mapM_ compile try_body
      emitCodeNoArg POP_BLOCK
      -- None on the stack tells END_FINALLY there is no pending
      -- exception
      _ <- compileConstantEmit Blip.None
      return ()
   labelNextInstruction end
   withFrameBlock FrameBlockFinallyEnd $ do
      mapM_ compile try_finally
      emitCodeNoArg END_FINALLY
compileTryFinally other =
   error $ "Unexpected statement when compiling a try-except: " ++ prettyText other
-- | Compile the try/except (no finally) form.
-- NOTE(review): this definition references 'end' and 'firstHandler'
-- which are not bound in the visible text; the SETUP_EXCEPT prologue
-- (label allocation and emitCodeArg SETUP_EXCEPT, plus the body
-- compilation and POP_BLOCK) appears to have been lost from this copy
-- of the source -- restore from upstream before use.
compileTryExcept :: StatementSpan -> Compile ()
compileTryExcept (Try {..}) = do
   withFrameBlock FrameBlockExcept $ do
      orElse <- newLabel
      emitCodeArg JUMP_FORWARD orElse
      compileHandlers end firstHandler try_excepts
      labelNextInstruction orElse
      -- the else suite runs only when no exception was raised
      mapM_ compile try_else
compileTryExcept other =
   error $ "Unexpected statement when compiling a try-except: " ++ prettyText other
-- | Compile a chain of exception handlers: each handler falls through
-- to the next on a failed match; the empty chain re-raises.
compileHandlers :: Word16 -> Word16 -> [HandlerSpan] -> Compile ()
compileHandlers _end handlerLabel [] = do
   -- no handler matched: re-raise the current exception.
   -- NOTE(review): upstream likely labels handlerLabel here first;
   -- a labelNextInstruction line may have been lost from this copy.
   emitCodeNoArg END_FINALLY
compileHandlers end handlerLabel (Handler {..} : rest) = do
   labelNextInstruction handlerLabel
   nextLabel <- newLabel
   compileHandlerClause nextLabel handler_clause
   -- NOTE(review): compilation of the handler body (handler_suite)
   -- and POP_EXCEPT appear to be missing from this copy of the
   -- source -- restore from upstream before use.
   withFrameBlock FrameBlockFinallyTry $ do
      emitCodeArg JUMP_FORWARD end
   compileHandlers end nextLabel rest
-- enter here with stack == (s ++ [tb, val, exc]), leave with stack == s
-- NOTE(review): the branch bodies of this function were lost from this
-- copy of the source (the Nothing/Just cases are empty and will not
-- compile) -- restore from upstream before use.
compileHandlerClause :: Word16 -> ExceptClauseSpan -> Compile ()
compileHandlerClause nextHandler (ExceptClause {..}) = do
   case except_clause of
      -- a bare "except:" matches any exception
      Nothing -> do
      -- compare E1 to exc
      Just (target, asExpr) -> do
         case asExpr of
         -- assign the exception to the as name, will remove from stack
   where
   -- opcode argument for COMPARE_OP exception-match
   -- XXX we don't appear to have an exact match operator in the AST
   exactMatchOp :: Word16
   exactMatchOp = 10
-- | Run an action that leaves a callable on the stack, with the
-- decorators pushed beforehand and applied afterwards (one
-- CALL_FUNCTION per decorator, applied inside-out).
withDecorators :: [DecoratorSpan] -> Compile () -> Compile ()
withDecorators decorators buildCallable = do
   mapM_ compile decorators
   buildCallable
   replicateM_ (length decorators) (emitCodeArg CALL_FUNCTION 1)
-- | Compile an action inside a fresh nested block (function, class or
-- comprehension scope), restoring the enclosing block state afterwards.
nestedBlock :: Context -> SrcSpan -> Compile a -> Compile a
nestedBlock context span comp = do
   oldBlockState <- getBlockState id
   -- the nested scope was computed earlier and is keyed by source span
   (name, localScope) <- getLocalScope $ spanToScopeIdentifier span
   setBlockState $ initBlockState context localScope
   setObjectName name
   -- set the first line number of the block
   setFirstLineNumber span
   result <- comp
   setBlockState oldBlockState
   return result
-- | Compile a function definition: compile the body into a nested
-- code object, wrap it in a closure, apply any decorators, and bind
-- the result to the function name.
compileFun :: StatementSpan -> [DecoratorSpan] -> Compile ()
compileFun (Fun {..}) decorators = do
   let funName = ident_string $ fun_name
   withDecorators decorators $ do
      funBodyObj <- nestedBlock FunctionContext stmt_annot $ do
         compileFunDocString fun_body
         compile $ Body fun_body
      numDefaults <- compileDefaultParams fun_args
      compileClosure funName funBodyObj numDefaults
   emitWriteVar funName
compileFun other _decorators = error $ "compileFun applied to a non function: " ++ prettyText other
-- | Compile a class definition via LOAD_BUILD_CLASS: the class body
-- becomes a one-argument function taking __locals__.
compileClass :: StatementSpan -> [DecoratorSpan] -> Compile ()
compileClass (Class {..}) decorators = do
   let className = ident_string $ class_name
   withDecorators decorators $ do
      classBodyObj <- nestedBlock ClassContext stmt_annot $ do
         -- the class body function receives the class namespace dict
         setFastLocals ["__locals__"]
         setArgCount 1
         emitCodeArg LOAD_FAST 0
         emitCodeNoArg STORE_LOCALS
         emitReadVar "__name__"
         emitWriteVar "__module__"
         compileConstantEmit $ Unicode className
         emitWriteVar "__qualname__"
         compileClassModuleDocString class_body
         compile $ Body class_body
      emitCodeNoArg LOAD_BUILD_CLASS
      compileClosure className classBodyObj 0
      compileConstantEmit $ Unicode className
      -- call build_class(body_function, name, *bases)
      compileCall 2 class_args
   emitWriteVar className
compileClass other _decorators = error $ "compileClass applied to a non class: " ++ prettyText other
-- | Emit MAKE_FUNCTION for a code object with no free variables, or
-- build the closure cell tuple and emit MAKE_CLOSURE otherwise.
compileClosure :: String -> PyObject -> Word16 -> Compile ()
compileClosure name obj numDefaults = do
   let Blip.Tuple freeVarStringObjs = freevars obj
       freeVarIdentifiers = map unicode freeVarStringObjs
       numFreeVars = length freeVarIdentifiers
   if numFreeVars == 0
      then do
         compileConstantEmit obj
         compileConstantEmit $ Unicode name
         emitCodeArg MAKE_FUNCTION numDefaults
      else do
         -- each free variable must be a cell or free var in the
         -- enclosing scope; load its closure cell
         forM_ freeVarIdentifiers $ \var -> do
            maybeVarInfo <- lookupClosureVar var
            case maybeVarInfo of
               Just (CellVar index) -> emitCodeArg LOAD_CLOSURE index
               Just (FreeVar index) -> emitCodeArg LOAD_CLOSURE index
               _other -> error $ name ++ " closure free variable not cell or free var in outer context: " ++ var
         emitCodeArg BUILD_TUPLE $ fromIntegral numFreeVars
         compileConstantEmit obj
         compileConstantEmit $ Unicode name
         emitCodeArg MAKE_CLOSURE numDefaults
-- | Push the default value of each parameter that has one onto the
-- stack (in order) and return how many were pushed; the count becomes
-- the MAKE_FUNCTION/MAKE_CLOSURE argument.
compileDefaultParams :: [ParameterSpan] -> Compile Word16
compileDefaultParams = foldM pushDefault 0
   where
   pushDefault :: Word16 -> ParameterSpan -> Compile Word16
   pushDefault seen (Param {..})
      | Just dflt <- param_default = compile dflt >> return (seen + 1)
      | otherwise = return seen
   -- non-Param parameters (e.g. *args, **kwargs) carry no default
   pushDefault seen _other = return seen
-- | Emit IMPORT_NAME for the module part of a from-import; a purely
-- relative import (e.g. @from . import x@) uses the empty module name.
compileFromModule :: ImportRelativeSpan -> Compile ()
compileFromModule (ImportRelative {..}) = do
   let moduleName =
          case import_relative_module of
             Nothing -> ""
             Just dottedNames ->
                concat $ intersperse "." $ map ident_string dottedNames
   index <- lookupNameVar moduleName
   emitCodeArg IMPORT_NAME index
-- | The names brought in by a from-import; a star import is
-- represented as the single name "*".
fromItemsIdentifiers :: FromItemsSpan -> [Identifier]
fromItemsIdentifiers (ImportEverything {}) = ["*"]
fromItemsIdentifiers (FromItems {..}) =
   [ ident_string from_item_name | FromItem {..} <- from_items_items ]
-- | Store the value on top of the stack into each assignment target,
-- duplicating it for every target but the last.
compileAssignments :: [ExprSpan] -> Compile ()
compileAssignments [] = return ()
compileAssignments [e] = compileAssignTo e
compileAssignments (e1:e2:rest) = do
   emitCodeNoArg DUP_TOP
   compileAssignTo e1
   compileAssignments (e2:rest)
-- Compile the lhs of an assignment statement.
compileAssignTo :: ExprSpan -> Compile ()
compileAssignTo (Var {..}) =
   emitWriteVar $ ident_string var_ident
compileAssignTo (Subscript {..}) =
   compile subscriptee >>
   compile subscript_expr >>
   emitCodeNoArg STORE_SUBSCR
-- XXX this can be optimised in places where the rhs is a
-- ... (comment truncated in this copy of the source)
compileAssignTo (AST.Tuple {..}) = do
   emitCodeArg UNPACK_SEQUENCE $ fromIntegral $ length tuple_exprs
   mapM_ compileAssignTo tuple_exprs
compileAssignTo (AST.List {..}) = do
   emitCodeArg UNPACK_SEQUENCE $ fromIntegral $ length list_exprs
   mapM_ compileAssignTo list_exprs
compileAssignTo (AST.Paren {..}) = compileAssignTo paren_expr
compileAssignTo expr@(Dot {..} ) = do
   compile dot_expr
   index <- lookupNameVar $ ident_string dot_attribute
   emitCodeArg STORE_ATTR index
{- superseded BinaryOp/Dot version, kept for reference:
compileAssignTo expr@(BinaryOp { operator = Dot {}, right_op_arg = Var {..}}) = do
   compile $ left_op_arg expr
   index <- lookupNameVar $ ident_string $ var_ident
   emitCodeArg STORE_ATTR index
-}
compileAssignTo (SlicedExpr {..}) = do
   compile slicee
   compileSlices slices
   emitCodeNoArg STORE_SUBSCR
compileAssignTo other = error $ "assignment to unexpected expression:\n" ++ prettyText other
-- | Compile one target of a del statement.
compileDelete :: ExprSpan -> Compile ()
compileDelete (Var {..}) = do
   emitDeleteVar $ ident_string var_ident
compileDelete (Subscript {..}) =
   compile subscriptee >>
   compile subscript_expr >>
   emitCodeNoArg DELETE_SUBSCR
compileDelete (AST.Paren {..}) = compileDelete paren_expr
compileDelete (Dot {..}) = do
   compile dot_expr
   index <- lookupNameVar $ ident_string dot_attribute
   emitCodeArg DELETE_ATTR index
{- superseded BinaryOp/Dot version, kept for reference:
compileDelete (expr@(BinaryOp { operator = Dot {}, right_op_arg = Var {..}})) = do
   compile $ left_op_arg expr
   index <- lookupNameVar $ ident_string $ var_ident
   emitCodeArg DELETE_ATTR index
-}
compileDelete (SlicedExpr {..}) = do
   compile slicee
   compileSlices slices
   emitCodeNoArg DELETE_SUBSCR
compileDelete other = error $ "delete of unexpected expression:\n" ++ prettyText other
-- | Compile a single-context with statement using SETUP_WITH;
-- multi-context withs are desugared into nested single-context ones
-- before reaching this function.
compileWith :: StatementSpan -> Compile ()
compileWith stmt@(With {..}) =
   case with_context of
      [(context, maybeAs)] -> do
         blockLabel <- newLabel
         finallyLabel <- newLabel
         compile context
         emitCodeArg SETUP_WITH finallyLabel
         labelNextInstruction blockLabel
         withFrameBlock FrameBlockFinallyTry $ do
            -- bind or discard the value yielded by __enter__
            case maybeAs of
               Nothing -> emitCodeNoArg POP_TOP
               Just expr -> compileAssignTo expr
            mapM_ compile with_body
            emitCodeNoArg POP_BLOCK
         -- None on the stack marks "no pending exception" for cleanup
         _ <- compileConstantEmit Blip.None
         labelNextInstruction finallyLabel
         withFrameBlock FrameBlockFinallyEnd $ do
            emitCodeNoArg WITH_CLEANUP
            emitCodeNoArg END_FINALLY
      _other -> error $ "compileWith applied to non desugared with statement: " ++ prettyText stmt
compileWith other = error $ "compileWith applied to non with statement: " ++ prettyText other
-- Check for a docstring in the first statement of a function body.
-- The first constant in the corresponding code object is inspected
-- (for the docstring); if there is no docstring
-- then the first constant must be None.
compileFunDocString :: [StatementSpan] -> Compile ()
compileFunDocString (firstStmt:_stmts)
   | StmtExpr {..} <- firstStmt,
     Strings {} <- stmt_expr
   = compileConstant (constantToPyObject stmt_expr) >> return ()
   | otherwise = compileConstant Blip.None >> return ()
compileFunDocString [] = compileConstant Blip.None >> return ()
-- | For classes and modules the docstring is stored in __doc__
-- (unlike functions, where it is the first constant).
compileClassModuleDocString :: [StatementSpan] -> Compile ()
compileClassModuleDocString (firstStmt:_stmts)
   | StmtExpr {..} <- firstStmt,
     Strings {} <- stmt_expr
   = do compileConstantEmit $ constantToPyObject stmt_expr
        emitWriteVar "__doc__"
   | otherwise = return ()
compileClassModuleDocString [] = return ()
-- | Compile one (condition, body) arm of an if/elif chain: jump to
-- 'restLabel' after the body, or fall through to the next arm when
-- the condition is false.
compileGuard :: Word16 -> (ExprSpan, [StatementSpan]) -> Compile ()
compileGuard restLabel (expr, stmts) = do
   compile expr
   falseLabel <- newLabel
   emitCodeArg POP_JUMP_IF_FALSE falseLabel
   mapM_ compile stmts
   emitCodeArg JUMP_FORWARD restLabel
   labelNextInstruction falseLabel
-- Desugar the comprehension into a function (body) containing
-- a (possibly nested) for loop, then call the function.
compileComprehension
   :: Identifier
   -> [StatementSpan]
   -> (ComprehensionExprSpan -> StatementSpan)
   -> [StatementSpan]
   -> ComprehensionSpan
   -> Compile ()
compileComprehension name initStmt updater returnStmt comprehension = do
   let desugaredComp = desugarComprehension initStmt updater returnStmt comprehension
       comprehensionSpan = comprehension_annot comprehension
   funObj <- nestedBlock
                FunctionContext
                comprehensionSpan
                (compile $ Body desugaredComp)
   compileClosure name funObj 0
   -- the comprehension function's parameters are read in the
   -- enclosing scope and passed in the call
   (_name, localScope) <- getLocalScope $ spanToScopeIdentifier comprehensionSpan
   let parameterNames = parameterTypes_pos $ localScope_params localScope
   mapM_ emitReadVar parameterNames
   emitCodeArg CALL_FUNCTION $ fromIntegral $ length parameterNames
constantToPyObject :: ExprSpan -> PyObject
constantToPyObject (AST.Int {..})
| int_value > (fromIntegral max32BitSignedInt) ||
int_value < (fromIntegral min32BitSignedInt)
= Blip.Long int_value
| otherwise = Blip.Int $ fromIntegral int_value
where
max32BitSignedInt :: Int32
max32BitSignedInt = maxBound
min32BitSignedInt :: Int32
min32BitSignedInt = minBound
constantToPyObject (AST.Float {..}) = Blip.Float $ float_value
to generate a Complex number directly , rather than by doing
constantToPyObject (AST.Imaginary {..}) =
Blip.Complex { real = 0.0, imaginary = imaginary_value }
constantToPyObject (AST.Bool { bool_value = True }) = Blip.TrueObj
constantToPyObject (AST.Bool { bool_value = False }) = Blip.FalseObj
constantToPyObject (AST.None {}) = Blip.None
constantToPyObject (AST.Ellipsis {}) = Blip.Ellipsis
constantToPyObject (AST.Tuple {..}) =
Blip.Tuple { elements = map constantToPyObject tuple_exprs }
constantToPyObject (AST.Strings {..}) =
Blip.Unicode { unicode = concat $ map normaliseString strings_strings }
constantToPyObject (AST.ByteStrings {..}) =
Blip.String { string = fromString $ concat $ map normaliseString byte_string_strings }
constantToPyObject other =
error $ "constantToPyObject applied to an unexpected expression: " ++ prettyText other
The strings in the AST retain their original quote marks which
Escaped characters such as are parsed as multiple characters
normaliseString :: String -> String
normaliseString ('r':'b':rest) = removeQuotes rest
normaliseString ('b':'r':rest) = removeQuotes rest
normaliseString ('b':rest) = unescapeString $ removeQuotes rest
normaliseString ('r':rest) = removeQuotes rest
normaliseString other = unescapeString $ removeQuotes other
removeQuotes :: String -> String
removeQuotes ('\'':'\'':'\'':rest) = take (length rest - 3) rest
removeQuotes ('"':'"':'"':rest) = take (length rest - 3) rest
removeQuotes ('\'':rest) = init rest
removeQuotes ('"':rest) = init rest
removeQuotes other = error $ "bad literal string: " ++ other
data CallArgs =
CallArgs
{ callArgs_pos :: !Word16
, callArgs_keyword :: !Word16
, callArgs_varPos :: !Bool
, callArgs_varKeyword :: !Bool
}
initCallArgs :: CallArgs
initCallArgs =
CallArgs
{ callArgs_pos = 0
, callArgs_keyword = 0
, callArgs_varPos = False
, callArgs_varKeyword = False
}
decide which particular CALL_FUNCTION bytecode to emit .
compileCall :: Word16 -> [ArgumentSpan] -> Compile ()
compileCall numExtraArgs args = do
CallArgs {..} <- compileCallArgs args
let opArg = (callArgs_pos + numExtraArgs) .|. callArgs_keyword `shiftL` 8
case (callArgs_varPos, callArgs_varKeyword) of
(False, False) -> emitCodeArg CALL_FUNCTION opArg
(True, False) -> emitCodeArg CALL_FUNCTION_VAR opArg
(False, True) -> emitCodeArg CALL_FUNCTION_KW opArg
(True, True) -> emitCodeArg CALL_FUNCTION_VAR_KW opArg
compileCallArgs :: [ArgumentSpan] -> Compile CallArgs
compileCallArgs = foldM compileArg initCallArgs
where
compileArg :: CallArgs -> ArgumentSpan -> Compile CallArgs
compileArg callArgs@(CallArgs {..}) (ArgExpr {..}) = do
compile arg_expr
return $ callArgs { callArgs_pos = callArgs_pos + 1 }
compileArg callArgs@(CallArgs {..}) (ArgKeyword {..}) = do
compileConstantEmit $ Unicode $ ident_string arg_keyword
compile arg_expr
return $ callArgs { callArgs_keyword = callArgs_keyword + 1 }
compileArg callArgs@(CallArgs {..}) (ArgVarArgsPos {..}) = do
compile arg_expr
return $ callArgs { callArgs_varPos = True }
compileArg callArgs@(CallArgs {..}) (ArgVarArgsKeyword {..}) = do
compile arg_expr
return $ callArgs { callArgs_varKeyword = True }
compileSlices :: [SliceSpan] -> Compile ()
compileSlices [SliceProper {..}] = do
case slice_lower of
Nothing -> compileConstantEmit Blip.None
Just expr -> compile expr
case slice_upper of
Nothing -> compileConstantEmit Blip.None
Just expr -> compile expr
case slice_stride of
Nothing -> emitCodeArg BUILD_SLICE 2
Just Nothing -> emitCodeArg BUILD_SLICE 2
Just (Just expr) -> do
compile expr
emitCodeArg BUILD_SLICE 3
compileSlices other = error $ "unsupported slice: " ++ show other
assignOpCode :: AssignOpSpan -> Opcode
assignOpCode assign =
case assign of
PlusAssign {} -> INPLACE_ADD
MinusAssign {} -> INPLACE_SUBTRACT
MultAssign {} -> INPLACE_MULTIPLY
DivAssign {} -> INPLACE_TRUE_DIVIDE
ModAssign {} -> INPLACE_MODULO
PowAssign {} -> INPLACE_POWER
BinAndAssign {} -> INPLACE_AND
BinOrAssign {} -> INPLACE_OR
BinXorAssign {} -> INPLACE_XOR
LeftShiftAssign {} -> INPLACE_LSHIFT
RightShiftAssign {} -> INPLACE_RSHIFT
FloorDivAssign {} -> INPLACE_FLOOR_DIVIDE
isDot : : OpSpan - > Bool
isDot ( Dot { } ) = True
isDot _ other = False
isDot :: OpSpan -> Bool
isDot (Dot {}) = True
isDot _other = False
-}
isBoolean :: OpSpan -> Bool
isBoolean (And {}) = True
isBoolean (Or {}) = True
isBoolean _other = False
isComparison :: OpSpan -> Bool
isComparison (LessThan {}) = True
isComparison (GreaterThan {}) = True
isComparison (Equality {}) = True
isComparison (GreaterThanEquals {}) = True
isComparison (LessThanEquals {}) = True
isComparison (NotEquals {}) = True
isComparison (In {}) = True
isComparison (NotIn {}) = True
isComparison (IsNot {}) = True
isComparison (Is {}) = True
isComparison _other = False
compileDot : : ExprSpan - > Compile ( )
compileDot ( BinaryOp { .. } ) = do
compile left_op_arg
case right_op_arg of
{ .. } - > do
varInfo < - lookupNameVar $ ident_string var_ident
emitCodeArg LOAD_ATTR varInfo
other - > error $ " right argument of dot operator not a variable:\n " + + prettyText other
compileDot other =
error $ " compileDot applied to an unexpected expression : " + + prettyText other
compileDot :: ExprSpan -> Compile ()
compileDot (BinaryOp {..}) = do
compile left_op_arg
case right_op_arg of
Var {..} -> do
varInfo <- lookupNameVar $ ident_string var_ident
emitCodeArg LOAD_ATTR varInfo
other -> error $ "right argument of dot operator not a variable:\n" ++ prettyText other
compileDot other =
error $ "compileDot applied to an unexpected expression: " ++ prettyText other
-}
compileBoolOpExpr :: ExprSpan -> Compile ()
compileBoolOpExpr (BinaryOp {..}) = do
endLabel <- newLabel
compile left_op_arg
case operator of
And {..} -> emitCodeArg JUMP_IF_FALSE_OR_POP endLabel
Or {..} -> emitCodeArg JUMP_IF_TRUE_OR_POP endLabel
other -> error $ "Unexpected boolean operator:\n" ++ prettyText other
compile right_op_arg
labelNextInstruction endLabel
compileBoolOpExpr other =
error $ "compileBoolOpExpr applied to an unexpected expression: " ++ prettyText other
compileOp :: OpSpan -> Compile ()
compileOp operator =
emitCodeNoArg $ case operator of
BinaryOr {} -> BINARY_OR
Xor {} -> BINARY_XOR
BinaryAnd {} -> BINARY_AND
ShiftLeft {} -> BINARY_LSHIFT
ShiftRight {} -> BINARY_RSHIFT
Exponent {} -> BINARY_POWER
Multiply {} -> BINARY_MULTIPLY
Plus {} -> BINARY_ADD
Minus {} -> BINARY_SUBTRACT
Divide {} -> BINARY_TRUE_DIVIDE
FloorDivide {} -> BINARY_FLOOR_DIVIDE
Modulo {} -> BINARY_MODULO
_other -> error $ "Unexpected operator:\n" ++ prettyText operator
compileUnaryOp :: OpSpan -> Compile ()
compileUnaryOp operator =
emitCodeNoArg $ case operator of
Minus {} -> UNARY_NEGATIVE
Plus {} -> UNARY_POSITIVE
Not {} -> UNARY_NOT
Invert {} -> UNARY_INVERT
other -> error $ "Unexpected unary operator: " ++ prettyText other
from object.h
# define Py_LT 0
# define Py_LE 1
# define Py_EQ 2
# define Py_NE 3
# define Py_GT 4
# define Py_GE 5
and from opcode.h
enum cmp_op { PyCmp_LT = Py_LT , PyCmp_LE = Py_LE , PyCmp_EQ = , PyCmp_NE = Py_NE , PyCmp_GT = Py_GT , = Py_GE ,
PyCmp_IN , PyCmp_NOT_IN , PyCmp_IS , PyCmp_IS_NOT , PyCmp_EXC_MATCH , PyCmp_BAD } ;
from object.h
#define Py_LT 0
#define Py_LE 1
#define Py_EQ 2
#define Py_NE 3
#define Py_GT 4
#define Py_GE 5
and from opcode.h
enum cmp_op {PyCmp_LT=Py_LT, PyCmp_LE=Py_LE, PyCmp_EQ=Py_EQ, PyCmp_NE=Py_NE, PyCmp_GT=Py_GT, PyCmp_GE=Py_GE,
PyCmp_IN, PyCmp_NOT_IN, PyCmp_IS, PyCmp_IS_NOT, PyCmp_EXC_MATCH, PyCmp_BAD};
-}
Operator chaining :
The parser treats comparison operators as left associative .
So : w < x < y < z is parsed as
( ( ( w < x ) < y ) < z )
We want to compile this to :
[ w ]
[ x ]
DUP_TOP # make a copy of the result of x
ROT_THREE # put the copy of [ x ] to the bottom
<
JUMP_IF_FALSE_OR_POP cleanup
[ y ]
DUP_TOP # make a copy of [ y ]
ROT_THREE # put the copy of [ y ] to the bottom
<
JUMP_IF_FALSE_OR_POP cleanup
[ z ]
<
JUMP_FORWARD end
cleanup :
ROT_TWO # put the result of the last comparison on the bottom
# and put the duplicated [ y ] on the top
POP_TOP # remove the duplicated [ y ] from the top
end :
# whatever code follows
The parser treats comparison operators as left associative.
So: w < x < y < z is parsed as
(((w < x) < y) < z)
We want to compile this to:
[w]
[x]
DUP_TOP # make a copy of the result of x
ROT_THREE # put the copy of [x] to the bottom
<
JUMP_IF_FALSE_OR_POP cleanup
[y]
DUP_TOP # make a copy of [y]
ROT_THREE # put the copy of [y] to the bottom
<
JUMP_IF_FALSE_OR_POP cleanup
[z]
<
JUMP_FORWARD end
cleanup:
ROT_TWO # put the result of the last comparison on the bottom
# and put the duplicated [y] on the top
POP_TOP # remove the duplicated [y] from the top
end:
# whatever code follows
-}
compileCompareOpExpr :: ExprSpan -> Compile ()
compileCompareOpExpr expr@(BinaryOp {}) =
compileChain numOps chain
where
chain :: [ChainItem]
chain = flattenComparisonChain [] expr
numOps :: Int
numOps = length chain `div` 2
compileChain :: Int -> [ChainItem] -> Compile ()
compileChain numOps (Comparator e1 : internal@(Operator op : Comparator e2 : _rest)) = do
compile e1
if numOps == 1
then do
compile e2
emitCodeArg COMPARE_OP $ comparisonOpCode op
else do
cleanup <- newLabel
(lastOp, lastArg) <- compileChainInternal cleanup internal
compile lastArg
emitCodeArg COMPARE_OP $ comparisonOpCode lastOp
end <- newLabel
emitCodeArg JUMP_FORWARD end
labelNextInstruction cleanup
emitCodeNoArg ROT_TWO
emitCodeNoArg POP_TOP
labelNextInstruction end
compileChain _numOps _items = error $ "bad operator chain: " ++ prettyText expr
compileChainInternal :: Word16 -> [ChainItem] -> Compile (OpSpan, ExprSpan)
compileChainInternal _cleanup [Operator op, Comparator exp] = return (op, exp)
compileChainInternal cleanup (Operator op : Comparator e : rest) = do
compile e
emitCodeNoArg DUP_TOP
emitCodeNoArg ROT_THREE
emitCodeArg COMPARE_OP $ comparisonOpCode op
emitCodeArg JUMP_IF_FALSE_OR_POP cleanup
compileChainInternal cleanup rest
compileChainInternal _cleanup _other = error $ "bad comparison chain: " ++ prettyText expr
comparisonOpCode :: OpSpan -> Word16
comparisonOpCode (LessThan {}) = 0
comparisonOpCode (LessThanEquals {}) = 1
comparisonOpCode (Equality {}) = 2
comparisonOpCode (NotEquals {}) = 3
comparisonOpCode (GreaterThan {}) = 4
comparisonOpCode (GreaterThanEquals {}) = 5
comparisonOpCode (In {}) = 6
comparisonOpCode (NotIn {}) = 7
comparisonOpCode (Is {}) = 8
comparisonOpCode (IsNot {}) = 9
comparisonOpCode operator = error $ "Unexpected comparison operator:\n" ++ prettyText operator
compileCompareOpExpr other = error $ "Unexpected comparison operator:\n" ++ prettyText other
data ChainItem = Comparator ExprSpan | Operator OpSpan
flattenComparisonChain :: [ChainItem] -> ExprSpan -> [ChainItem]
flattenComparisonChain acc opExpr@(BinaryOp {..})
| isComparison operator
= flattenComparisonChain newAcc left_op_arg
| otherwise = [Comparator opExpr] ++ acc
where
newAcc = [Operator operator, Comparator right_op_arg] ++ acc
flattenComparisonChain acc other = [Comparator other] ++ acc
returnNone :: Compile ()
returnNone = compileConstantEmit Blip.None >> emitCodeNoArg RETURN_VALUE
maybeDumpScope :: Compile ()
maybeDumpScope =
ifDump DumpScope $ do
nestedScope <- getNestedScope
liftIO $ putStrLn $ renderScope nestedScope
maybeDumpAST :: ModuleSpan -> Compile ()
maybeDumpAST ast = do
ifDump DumpAST $ do
liftIO $ putStrLn "Abstract Syntax Tree:"
liftIO $ putStrLn $ show ast
From Cpython : Objects / lnotab_notes.txt
Code objects store a field named co_lnotab . This is an array of unsigned bytes
disguised as a Python string . It is used to map bytecode offsets to source code
line # s for tracebacks and to identify line number boundaries for line tracing .
The array is conceptually a compressed list of
( bytecode offset increment , line number increment )
pairs . The details are important and delicate , best illustrated by example :
byte code offset source code line number
0 1
6 2
50 7
350 307
361 308
Instead of storing these numbers literally , we compress the list by storing only
the increments from one row to the next . Conceptually , the stored list might
look like :
0 , 1 , 6 , 1 , 44 , 5 , 300 , 300 , 11 , 1
The above does n't really work , but it 's a start . Note that an unsigned byte
ca n't hold negative values , or values larger than 255 , and the above example
contains two such values . So we make two tweaks :
( a ) there 's a deep assumption that byte code offsets and their corresponding
line # s both increase monotonically , and
( b ) if at least one column jumps by more than 255 from one row to the next ,
more than one pair is written to the table . In case # b , there 's no way to know
from looking at the table later how many were written . That 's the delicate
part . A user of co_lnotab desiring to find the source line number
corresponding to a bytecode address A should do something like this
lineno = addr = 0
for addr_incr , line_incr in co_lnotab :
addr + = addr_incr
if addr > A :
return lineno
lineno + = line_incr
( In C , this is implemented by ( ) . ) In order for this to work ,
when the addr field increments by more than 255 , the line # increment in each
pair generated must be 0 until the remaining addr increment is < 256 . So , in
the example above , assemble_lnotab in compile.c should not ( as was actually done
until 2.2 ) expand 300 , 300 to
255 , 255 , 45 , 45 ,
but to
255 , 0 , 45 , 255 , 0 , 45 .
From Cpython: Objects/lnotab_notes.txt
Code objects store a field named co_lnotab. This is an array of unsigned bytes
disguised as a Python string. It is used to map bytecode offsets to source code
line #s for tracebacks and to identify line number boundaries for line tracing.
The array is conceptually a compressed list of
(bytecode offset increment, line number increment)
pairs. The details are important and delicate, best illustrated by example:
byte code offset source code line number
0 1
6 2
50 7
350 307
361 308
Instead of storing these numbers literally, we compress the list by storing only
the increments from one row to the next. Conceptually, the stored list might
look like:
0, 1, 6, 1, 44, 5, 300, 300, 11, 1
The above doesn't really work, but it's a start. Note that an unsigned byte
can't hold negative values, or values larger than 255, and the above example
contains two such values. So we make two tweaks:
(a) there's a deep assumption that byte code offsets and their corresponding
line #s both increase monotonically, and
(b) if at least one column jumps by more than 255 from one row to the next,
more than one pair is written to the table. In case #b, there's no way to know
from looking at the table later how many were written. That's the delicate
part. A user of co_lnotab desiring to find the source line number
corresponding to a bytecode address A should do something like this
lineno = addr = 0
for addr_incr, line_incr in co_lnotab:
addr += addr_incr
if addr > A:
return lineno
lineno += line_incr
(In C, this is implemented by PyCode_Addr2Line().) In order for this to work,
when the addr field increments by more than 255, the line # increment in each
pair generated must be 0 until the remaining addr increment is < 256. So, in
the example above, assemble_lnotab in compile.c should not (as was actually done
until 2.2) expand 300, 300 to
255, 255, 45, 45,
but to
255, 0, 45, 255, 0, 45.
-}
compileLineNumberTable :: Word32 -> Compile PyObject
compileLineNumberTable firstLineNumber = do
offsetToLine <- reverse `fmap` getBlockState state_lineNumberTable
let compressedTable = compress (0, firstLineNumber) offsetToLine
bs = B.pack $ concat
[ [fromIntegral offset, fromIntegral line] |
(offset, line) <- compressedTable ]
return Blip.String { string = bs }
where
compress :: (Word16, Word32) -> [(Word16, Word32)] -> [(Word16, Word32)]
compress _prev [] = []
compress (prevOffset, prevLine) (next@(nextOffset, nextLine):rest)
| nextLine < prevLine || nextOffset < prevOffset =
compress (prevOffset, prevLine) rest
| otherwise = chunkDeltas (offsetDelta, lineDelta) ++ compress next rest
where
offsetDelta = nextOffset - prevOffset
lineDelta = nextLine - prevLine
both and lineDelta must be non - negative
chunkDeltas :: (Word16, Word32) -> [(Word16, Word32)]
chunkDeltas (offsetDelta, lineDelta)
| offsetDelta < 256 =
if lineDelta < 256
then [(offsetDelta, lineDelta)]
else (offsetDelta, 255) : chunkDeltas (0, lineDelta - 255)
we must wait until is less than 256 before reducing lineDelta
| otherwise = (255, 0) : chunkDeltas (offsetDelta - 255, lineDelta)
|
d6e3bd0b3a1a4ed884021391fa261a912c90013c351a0bdefbb0afe2ed8732b1 | heechul/crest-z3 | ciltools.ml | open Cil
Contributed by
let isOne e =
isInteger e = Some Int64.one
written by
let is_volatile_tp tp =
List.exists (function (Attr("volatile",_)) -> true
| _ -> false) (typeAttrs tp)
written by
let is_volatile_vi vi =
let vi_vol =
List.exists (function (Attr("volatile",_)) -> true
| _ -> false) vi.vattr in
let typ_vol = is_volatile_tp vi.vtype in
vi_vol || typ_vol
(*****************************************************************************
* A collection of useful functions that were not already in CIL as far as I
* could tell. However, I have been surprised before . . .
****************************************************************************)
type sign = Signed | Unsigned
exception Not_an_integer
(*****************************************************************************
* A bunch of functions for accessing integers. Originally written for
* somebody who didn't know CIL and just wanted to mess with it at the
* OCaml level.
****************************************************************************)
let unbox_int_type (ye : typ) : (int * sign) =
let tp = unrollType ye in
let s =
match tp with
TInt (i, _) ->
if (isSigned i) then
Signed
else
Unsigned
| _ -> raise Not_an_integer
in
(bitsSizeOf tp), s
(* depricated. Use isInteger directly instead *)
let unbox_int_exp (e : exp) : int64 =
match isInteger e with
None -> raise Not_an_integer
| Some (x) -> x
let box_int_to_exp (n : int64) (ye : typ) : exp =
let tp = unrollType ye in
match tp with
TInt (i, _) ->
kinteger64 i n
| _ -> raise Not_an_integer
let cil_to_ocaml_int (e : exp) : (int64 * int * sign) =
let v, s = unbox_int_type (typeOf e) in
unbox_int_exp (e), v, s
exception Weird_bitwidth
( int64 * int * sign ) : exp
let ocaml_int_to_cil v n s =
let char_size = bitsSizeOf charType in
let int_size = bitsSizeOf intType in
let short_size = bitsSizeOf (TInt(IShort,[]))in
let long_size = bitsSizeOf longType in
let longlong_size = bitsSizeOf (TInt(ILongLong,[])) in
let i =
match s with
Signed ->
if (n = char_size) then
ISChar
else if (n = int_size) then
IInt
else if (n = short_size) then
IShort
else if (n = long_size) then
ILong
else if (n = longlong_size) then
ILongLong
else
raise Weird_bitwidth
| Unsigned ->
if (n = char_size) then
IUChar
else if (n = int_size) then
IUInt
else if (n = short_size) then
IUShort
else if (n = long_size) then
IULong
else if (n = longlong_size) then
IULongLong
else
raise Weird_bitwidth
in
kinteger64 i v
(*****************************************************************************
* a couple of type functions that I thought would be useful:
****************************************************************************)
let rec isCompositeType tp =
match tp with
TComp _ -> true
| TPtr(x, _) -> isCompositeType x
| TArray(x,_,_) -> isCompositeType x
| TFun(x,_,_,_) -> isCompositeType x
| TNamed (x,_) -> isCompositeType x.ttype
| _ -> false
(** START OF deepHasAttribute ************************************************)
let visited = ref []
class attribute_checker target rflag = object (self)
inherit nopCilVisitor
method vtype t =
match t with
TComp(cinfo, a) ->
if(not (List.exists (fun x -> cinfo.cname = x) !visited )) then begin
visited := cinfo.cname :: !visited;
List.iter
(fun f ->
if (hasAttribute target f.fattr) then
rflag := true
else
ignore(visitCilType (new attribute_checker target rflag)
f.ftype)) cinfo.cfields;
end;
DoChildren
| TNamed(t1, a) ->
if(not (List.exists (fun x -> t1.tname = x) !visited )) then begin
visited := t1.tname :: !visited;
ignore(visitCilType (new attribute_checker target rflag) t1.ttype);
end;
DoChildren
| _ ->
DoChildren
method vattr (Attr(name,params)) =
if (name = target) then rflag := true;
DoChildren
end
let deepHasAttribute s t =
let found = ref false in
visited := [];
ignore(visitCilType (new attribute_checker s found) t);
!found
(** END OF deepHasAttribute **************************************************)
(** Stuff from ptranal, slightly modified ************************************)
(*****************************************************************************
* A transformation to make every instruction be in its own statement.
****************************************************************************)
class callBBVisitor = object
inherit nopCilVisitor
method vstmt s =
match s.skind with
Instr(il) -> begin
if (List.length il > 1) then
let list_of_stmts = List.map (fun one_inst ->
mkStmtOneInstr one_inst) il in
let block = mkBlock list_of_stmts in
s.skind <- Block block;
ChangeTo(s)
else
SkipChildren
end
| _ -> DoChildren
method vvdec _ = SkipChildren
method vexpr _ = SkipChildren
method vlval _ = SkipChildren
method vtype _ = SkipChildren
end
let one_instruction_per_statement f =
let thisVisitor = new callBBVisitor in
visitCilFileSameGlobals thisVisitor f
(*****************************************************************************
* A transformation that gives each variable a unique identifier.
****************************************************************************)
class vidVisitor = object
inherit nopCilVisitor
val count = ref 0
method vvdec vi =
vi.vid <- !count ;
incr count ; SkipChildren
end
let globally_unique_vids f =
let thisVisitor = new vidVisitor in
visitCilFileSameGlobals thisVisitor f
(** End of stuff from ptranal ************************************************)
class sidVisitor = object
inherit nopCilVisitor
val count = ref 0
method vstmt s =
s.sid <- !count ;
incr count ;
DoChildren
end
let globally_unique_sids f =
let thisVisitor = new sidVisitor in
visitCilFileSameGlobals thisVisitor f
(** Comparing expressions without a Out_of_memory error **********************)
let compare_exp x y =
compare x y
| null | https://raw.githubusercontent.com/heechul/crest-z3/cfcebadddb5e9d69e9956644fc37b46f6c2a21a0/cil/src/ext/ciltools.ml | ocaml | ****************************************************************************
* A collection of useful functions that were not already in CIL as far as I
* could tell. However, I have been surprised before . . .
***************************************************************************
****************************************************************************
* A bunch of functions for accessing integers. Originally written for
* somebody who didn't know CIL and just wanted to mess with it at the
* OCaml level.
***************************************************************************
depricated. Use isInteger directly instead
****************************************************************************
* a couple of type functions that I thought would be useful:
***************************************************************************
* START OF deepHasAttribute ***********************************************
* END OF deepHasAttribute *************************************************
* Stuff from ptranal, slightly modified ***********************************
****************************************************************************
* A transformation to make every instruction be in its own statement.
***************************************************************************
****************************************************************************
* A transformation that gives each variable a unique identifier.
***************************************************************************
* End of stuff from ptranal ***********************************************
* Comparing expressions without a Out_of_memory error ********************* | open Cil
Contributed by
let isOne e =
isInteger e = Some Int64.one
written by
let is_volatile_tp tp =
List.exists (function (Attr("volatile",_)) -> true
| _ -> false) (typeAttrs tp)
written by
let is_volatile_vi vi =
let vi_vol =
List.exists (function (Attr("volatile",_)) -> true
| _ -> false) vi.vattr in
let typ_vol = is_volatile_tp vi.vtype in
vi_vol || typ_vol
type sign = Signed | Unsigned
exception Not_an_integer
let unbox_int_type (ye : typ) : (int * sign) =
let tp = unrollType ye in
let s =
match tp with
TInt (i, _) ->
if (isSigned i) then
Signed
else
Unsigned
| _ -> raise Not_an_integer
in
(bitsSizeOf tp), s
let unbox_int_exp (e : exp) : int64 =
match isInteger e with
None -> raise Not_an_integer
| Some (x) -> x
let box_int_to_exp (n : int64) (ye : typ) : exp =
let tp = unrollType ye in
match tp with
TInt (i, _) ->
kinteger64 i n
| _ -> raise Not_an_integer
let cil_to_ocaml_int (e : exp) : (int64 * int * sign) =
let v, s = unbox_int_type (typeOf e) in
unbox_int_exp (e), v, s
exception Weird_bitwidth
( int64 * int * sign ) : exp
let ocaml_int_to_cil v n s =
let char_size = bitsSizeOf charType in
let int_size = bitsSizeOf intType in
let short_size = bitsSizeOf (TInt(IShort,[]))in
let long_size = bitsSizeOf longType in
let longlong_size = bitsSizeOf (TInt(ILongLong,[])) in
let i =
match s with
Signed ->
if (n = char_size) then
ISChar
else if (n = int_size) then
IInt
else if (n = short_size) then
IShort
else if (n = long_size) then
ILong
else if (n = longlong_size) then
ILongLong
else
raise Weird_bitwidth
| Unsigned ->
if (n = char_size) then
IUChar
else if (n = int_size) then
IUInt
else if (n = short_size) then
IUShort
else if (n = long_size) then
IULong
else if (n = longlong_size) then
IULongLong
else
raise Weird_bitwidth
in
kinteger64 i v
let rec isCompositeType tp =
match tp with
TComp _ -> true
| TPtr(x, _) -> isCompositeType x
| TArray(x,_,_) -> isCompositeType x
| TFun(x,_,_,_) -> isCompositeType x
| TNamed (x,_) -> isCompositeType x.ttype
| _ -> false
let visited = ref []
class attribute_checker target rflag = object (self)
inherit nopCilVisitor
method vtype t =
match t with
TComp(cinfo, a) ->
if(not (List.exists (fun x -> cinfo.cname = x) !visited )) then begin
visited := cinfo.cname :: !visited;
List.iter
(fun f ->
if (hasAttribute target f.fattr) then
rflag := true
else
ignore(visitCilType (new attribute_checker target rflag)
f.ftype)) cinfo.cfields;
end;
DoChildren
| TNamed(t1, a) ->
if(not (List.exists (fun x -> t1.tname = x) !visited )) then begin
visited := t1.tname :: !visited;
ignore(visitCilType (new attribute_checker target rflag) t1.ttype);
end;
DoChildren
| _ ->
DoChildren
method vattr (Attr(name,params)) =
if (name = target) then rflag := true;
DoChildren
end
let deepHasAttribute s t =
let found = ref false in
visited := [];
ignore(visitCilType (new attribute_checker s found) t);
!found
class callBBVisitor = object
inherit nopCilVisitor
method vstmt s =
match s.skind with
Instr(il) -> begin
if (List.length il > 1) then
let list_of_stmts = List.map (fun one_inst ->
mkStmtOneInstr one_inst) il in
let block = mkBlock list_of_stmts in
s.skind <- Block block;
ChangeTo(s)
else
SkipChildren
end
| _ -> DoChildren
method vvdec _ = SkipChildren
method vexpr _ = SkipChildren
method vlval _ = SkipChildren
method vtype _ = SkipChildren
end
let one_instruction_per_statement f =
let thisVisitor = new callBBVisitor in
visitCilFileSameGlobals thisVisitor f
class vidVisitor = object
inherit nopCilVisitor
val count = ref 0
method vvdec vi =
vi.vid <- !count ;
incr count ; SkipChildren
end
let globally_unique_vids f =
let thisVisitor = new vidVisitor in
visitCilFileSameGlobals thisVisitor f
class sidVisitor = object
inherit nopCilVisitor
val count = ref 0
method vstmt s =
s.sid <- !count ;
incr count ;
DoChildren
end
let globally_unique_sids f =
let thisVisitor = new sidVisitor in
visitCilFileSameGlobals thisVisitor f
let compare_exp x y =
compare x y
|
07b5f31927b33ad381f18bbbdcbae5ad212a2245693349f93b44a4646c01fc56 | CIFASIS/QuickFuzz | String.hs | # LANGUAGE FlexibleInstances , IncoherentInstances #
module Test.QuickFuzz.Gen.Base.String where
import Test.QuickCheck
import qualified Data.Text as TS
import qualified Data.Text.Lazy as TL
import Test.QuickFuzz.Gen.Base.Value
-- String
instance Arbitrary String where
arbitrary = genStrValue "String"
-- Text
instance Arbitrary TS.Text where
arbitrary = TS.pack <$> genStrValue "Text"
shrink xs = TS.pack <$> shrink (TS.unpack xs)
instance Arbitrary TL.Text where
arbitrary = TL.pack <$> genStrValue "Text"
shrink xs = TL.pack <$> shrink (TL.unpack xs)
instance CoArbitrary TS.Text where
coarbitrary = coarbitrary . TS.unpack
instance CoArbitrary TL.Text where
coarbitrary = coarbitrary . TL.unpack
| null | https://raw.githubusercontent.com/CIFASIS/QuickFuzz/a1c69f028b0960c002cb83e8145f039ecc0e0a23/src/Test/QuickFuzz/Gen/Base/String.hs | haskell | String
Text | # LANGUAGE FlexibleInstances , IncoherentInstances #
module Test.QuickFuzz.Gen.Base.String where
import Test.QuickCheck
import qualified Data.Text as TS
import qualified Data.Text.Lazy as TL
import Test.QuickFuzz.Gen.Base.Value
instance Arbitrary String where
arbitrary = genStrValue "String"
instance Arbitrary TS.Text where
arbitrary = TS.pack <$> genStrValue "Text"
shrink xs = TS.pack <$> shrink (TS.unpack xs)
instance Arbitrary TL.Text where
arbitrary = TL.pack <$> genStrValue "Text"
shrink xs = TL.pack <$> shrink (TL.unpack xs)
instance CoArbitrary TS.Text where
coarbitrary = coarbitrary . TS.unpack
instance CoArbitrary TL.Text where
coarbitrary = coarbitrary . TL.unpack
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.