_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
82b9c192b45b4e4cd80645b5e5407bede28e1eb833cde57437b5790731420500 | LeventErkok/sbvPlugin | T10.hs | {-# OPTIONS_GHC -fplugin=Data.SBV.Plugin #-}
module T10 where
import Data.SBV.Plugin
g :: Integer -> Integer
g x = x * 2 + 12
{-# ANN f theorem #-}
f :: Integer -> Bool
f x = g x < g (x+1)
| null | https://raw.githubusercontent.com/LeventErkok/sbvPlugin/b6a6e94cd237a4f64f985783931bd7656e7a6a69/tests/T10.hs | haskell | # OPTIONS_GHC -fplugin=Data.SBV.Plugin #
# ANN f theorem # |
module T10 where
import Data.SBV.Plugin
g :: Integer -> Integer
g x = x * 2 + 12
f :: Integer -> Bool
f x = g x < g (x+1)
|
9fa8a1ab619fb6ade09f6fd5e7b8fd639ec048428da27cd1ca29bb50c5b42c72 | flexsurfer/re-frisk | template.cljs | (ns ^{:mranderson/inlined true} re-frisk.inlined-deps.reagent.v1v0v0.reagent.impl.template
(:require [react :as react]
[clojure.string :as string]
[re-frisk.inlined-deps.reagent.v1v0v0.reagent.impl.util :as util :refer [named?]]
[re-frisk.inlined-deps.reagent.v1v0v0.reagent.impl.component :as comp]
[re-frisk.inlined-deps.reagent.v1v0v0.reagent.impl.batching :as batch]
[re-frisk.inlined-deps.reagent.v1v0v0.reagent.impl.input :as input]
[re-frisk.inlined-deps.reagent.v1v0v0.reagent.impl.protocols :as p]
[re-frisk.inlined-deps.reagent.v1v0v0.reagent.ratom :as ratom]
[re-frisk.inlined-deps.reagent.v1v0v0.reagent.debug :refer-macros [dev? warn]]
[goog.object :as gobj]))
From 's , via pump :
(def ^{:doc "Regular expression that parses a CSS-style id and class
from a tag name."}
re-tag #"([^\s\.#]+)(?:#([^\s\.#]+))?(?:\.([^\s#]+))?")
(deftype NativeWrapper [tag id className])
(defn adapt-react-class
[c]
(->NativeWrapper c nil nil))
;;; Common utilities
(defn ^boolean hiccup-tag? [x]
(or (named? x)
(string? x)))
(defn ^boolean valid-tag? [x]
(or (hiccup-tag? x)
(ifn? x)
(instance? NativeWrapper x)))
;;; Props conversion
;; TODO: Move prop-name caches to the compiler object, if this
;; conversion can be configured.
(def prop-name-cache #js{:class "className"
:for "htmlFor"
:charset "charSet"})
(defn cache-get [o k]
(when ^boolean (.hasOwnProperty o k)
(gobj/get o k)))
(defn cached-prop-name [k]
(if (named? k)
(if-some [k' (cache-get prop-name-cache (name k))]
k'
(let [v (util/dash-to-prop-name k)]
(gobj/set prop-name-cache (name k) v)
v))
k))
(declare convert-prop-value)
(defn kv-conv [o k v]
(doto o
(gobj/set (cached-prop-name k) (convert-prop-value v))))
(defn convert-prop-value [x]
(cond (util/js-val? x) x
(named? x) (name x)
(map? x) (reduce-kv kv-conv #js{} x)
(coll? x) (clj->js x)
(ifn? x) (fn [& args]
(apply x args))
:else (clj->js x)))
;; Previous few functions copied for custom elements,
;; without mapping from class to className etc.
(def custom-prop-name-cache #js{})
(defn cached-custom-prop-name [k]
(if (named? k)
(if-some [k' (cache-get custom-prop-name-cache (name k))]
k'
(let [v (util/dash-to-prop-name k)]
(gobj/set custom-prop-name-cache (name k) v)
v))
k))
(defn custom-kv-conv [o k v]
(doto o
(gobj/set (cached-custom-prop-name k) (convert-prop-value v))))
(defn convert-custom-prop-value [x]
(cond (util/js-val? x) x
(named? x) (name x)
(map? x) (reduce-kv custom-kv-conv #js{} x)
(coll? x) (clj->js x)
(ifn? x) (fn [& args]
(apply x args))
:else (clj->js x)))
(defn set-id-class
"Takes the id and class from tag keyword, and adds them to the
other props. Parsed tag is JS object with :id and :class properties."
[props id-class]
(let [id (.-id id-class)
class (.-className id-class)]
(cond-> props
;; Only use ID from tag keyword if no :id in props already
(and (some? id)
(nil? (:id props)))
(assoc :id id)
;; Merge classes
class
;; Note: someone might use React-style :className property,
;; this is the only place where that needs special case. Using
;; :class and :className together is not supported.
(assoc :class (util/class-names class (or (:class props) (:className props)))))))
(defn convert-props [props ^clj id-class]
(let [class (:class props)
props (-> props
(cond-> class (assoc :class (util/class-names class)))
(set-id-class id-class))]
(if (.-custom id-class)
(convert-custom-prop-value props)
(convert-prop-value props))))
Conversion from forms
(defn make-element [this argv component jsprops first-child]
(case (- (count argv) first-child)
Optimize cases of zero or one child
0 (react/createElement component jsprops)
1 (react/createElement component jsprops
(p/as-element this (nth argv first-child nil)))
(.apply react/createElement nil
(reduce-kv (fn [a k v]
(when (>= k first-child)
(.push a (p/as-element this v)))
a)
#js [component jsprops] argv))))
(deftype HiccupTag [tag id className custom])
(defn parse-tag [hiccup-tag]
(let [[tag id className] (->> hiccup-tag name (re-matches re-tag) next)
className (when-not (nil? className)
(string/replace className #"\." " "))]
(assert tag (str "Invalid tag: '" hiccup-tag "'" (comp/comp-name)))
(->HiccupTag tag
id
className
;; Custom element names must contain hyphen
;; -elements/#custom-elements-core-concepts
(not= -1 (.indexOf tag "-")))))
(defn reag-element [tag v compiler]
(let [c (comp/as-class tag compiler)
jsprops #js {}]
(set! (.-argv jsprops) v)
(when-some [key (util/react-key-from-vec v)]
(set! (.-key jsprops) key))
(react/createElement c jsprops)))
(defn function-element [tag v first-arg compiler]
(let [jsprops #js {}]
(set! (.-reagentRender jsprops) tag)
(set! (.-argv jsprops) (subvec v first-arg))
( set ! ( .-opts ) opts )
(when-some [key (util/react-key-from-vec v)]
(set! (.-key jsprops) key))
(react/createElement (comp/functional-render-fn compiler tag) jsprops)))
(defn maybe-function-element
"If given tag is a Class, use it as a class,
else wrap in Reagent function wrapper."
[tag v compiler]
(if (comp/react-class? tag)
(reag-element tag v compiler)
(function-element tag v 1 compiler)))
(defn fragment-element [argv compiler]
(let [props (nth argv 1 nil)
hasprops (or (nil? props) (map? props))
jsprops (or (convert-prop-value (if hasprops props))
#js {})
first-child (+ 1 (if hasprops 1 0))]
(when-some [key (util/react-key-from-vec argv)]
(set! (.-key jsprops) key))
(p/make-element compiler argv react/Fragment jsprops first-child)))
(def tag-name-cache #js {})
(defn cached-parse [x]
(if-some [s (cache-get tag-name-cache x)]
s
(let [v (parse-tag x)]
(gobj/set tag-name-cache x v)
v)))
(defn native-element [parsed argv first ^p/Compiler compiler]
(let [component (.-tag parsed)
props (nth argv first nil)
hasprops (or (nil? props) (map? props))
jsprops (or (convert-props (if hasprops props) parsed)
#js {})
first-child (+ first (if hasprops 1 0))]
(if (input/input-component? component)
(let [input-class (or (.-reagentInput compiler)
(let [x (comp/create-class input/input-spec compiler)]
(set! (.-reagentInput compiler) x)
x))]
(-> [input-class argv component jsprops first-child compiler]
(with-meta (meta argv))
(->> (p/as-element compiler))))
(do
(when-some [key (-> (meta argv) util/get-react-key)]
(set! (.-key jsprops) key))
(p/make-element compiler argv component jsprops first-child)))))
(defn raw-element [comp argv compiler]
(let [props (nth argv 2 nil)
jsprops (or props #js {})]
(when-some [key (-> (meta argv) util/get-react-key)]
(set! (.-key jsprops) key))
(p/make-element compiler argv comp jsprops 3)))
(defn expand-seq [s compiler]
(into-array (map #(p/as-element compiler %) s)))
(defn expand-seq-dev [s ^clj o compiler]
(into-array (map (fn [val]
(when (and (vector? val)
(nil? (util/react-key-from-vec val)))
(set! (.-no-key o) true))
(p/as-element compiler val))
s)))
(defn expand-seq-check [x compiler]
(let [ctx #js {}
[res derefed] (ratom/check-derefs #(expand-seq-dev x ctx compiler))]
(when derefed
(warn (util/hiccup-err x (comp/comp-name) "Reactive deref not supported in lazy seq, "
"it should be wrapped in doall")))
(when (.-no-key ctx)
(warn (util/hiccup-err x (comp/comp-name) "Every element in a seq should have a unique :key")))
res))
(defn hiccup-element [v compiler]
(let [tag (nth v 0 nil)
n (name tag)
pos (.indexOf n ">")]
(case pos
-1 (native-element (cached-parse n) v 1 compiler)
0 (assert (= ">" n) (util/hiccup-err v (comp/comp-name) "Invalid Hiccup tag"))
;; Support extended hiccup syntax, i.e :div.bar>a.foo
;; Apply metadata (e.g. :key) to the outermost element.
Metadata is probably used only with sequeneces , and in that case
;; only the key of the outermost element matters.
(recur (with-meta [(subs n 0 pos)
(assoc (with-meta v nil) 0 (subs n (inc pos)))]
(meta v))
compiler))))
(defn vec-to-elem [v compiler fn-to-element]
(when (nil? compiler)
(js/console.error "vec-to-elem" (pr-str v)))
(assert (pos? (count v)) (util/hiccup-err v (comp/comp-name) "Hiccup form should not be empty"))
(let [tag (nth v 0 nil)]
(assert (valid-tag? tag) (util/hiccup-err v (comp/comp-name) "Invalid Hiccup form"))
(case tag
:> (native-element (->HiccupTag (nth v 1 nil) nil nil nil) v 2 compiler)
:r> (raw-element (nth v 1 nil) v compiler)
:f> (function-element (nth v 1 nil) v 2 compiler)
:<> (fragment-element v compiler)
(cond
(hiccup-tag? tag)
(hiccup-element v compiler)
(instance? NativeWrapper tag)
(native-element tag v 1 compiler)
:else (fn-to-element tag v compiler)))))
(defn as-element [this x fn-to-element]
(cond (util/js-val? x) x
(vector? x) (vec-to-elem x this fn-to-element)
(seq? x) (if (dev?)
(expand-seq-check x this)
(expand-seq x this))
(named? x) (name x)
(satisfies? IPrintWithWriter x) (pr-str x)
:else x))
(defn create-compiler [opts]
(let [id (gensym)
fn-to-element (if (:function-components opts)
maybe-function-element
reag-element)]
(reify p/Compiler
This is used to as cache key to cache component fns per compiler
(get-id [this] id)
(as-element [this x]
(as-element this x fn-to-element))
(make-element [this argv component jsprops first-child]
(make-element this argv component jsprops first-child)))))
(def default-compiler* (create-compiler {}))
(def ^:dynamic default-compiler default-compiler*)
(defn set-default-compiler! [compiler]
(set! default-compiler compiler))
| null | https://raw.githubusercontent.com/flexsurfer/re-frisk/638d820c84e23be79b8c52f8f136a611d942443f/re-frisk/src/re_frisk/inlined_deps/reagent/v1v0v0/reagent/impl/template.cljs | clojure | Common utilities
Props conversion
TODO: Move prop-name caches to the compiler object, if this
conversion can be configured.
Previous few functions copied for custom elements,
without mapping from class to className etc.
Only use ID from tag keyword if no :id in props already
Merge classes
Note: someone might use React-style :className property,
this is the only place where that needs special case. Using
:class and :className together is not supported.
Custom element names must contain hyphen
-elements/#custom-elements-core-concepts
Support extended hiccup syntax, i.e :div.bar>a.foo
Apply metadata (e.g. :key) to the outermost element.
only the key of the outermost element matters. | (ns ^{:mranderson/inlined true} re-frisk.inlined-deps.reagent.v1v0v0.reagent.impl.template
(:require [react :as react]
[clojure.string :as string]
[re-frisk.inlined-deps.reagent.v1v0v0.reagent.impl.util :as util :refer [named?]]
[re-frisk.inlined-deps.reagent.v1v0v0.reagent.impl.component :as comp]
[re-frisk.inlined-deps.reagent.v1v0v0.reagent.impl.batching :as batch]
[re-frisk.inlined-deps.reagent.v1v0v0.reagent.impl.input :as input]
[re-frisk.inlined-deps.reagent.v1v0v0.reagent.impl.protocols :as p]
[re-frisk.inlined-deps.reagent.v1v0v0.reagent.ratom :as ratom]
[re-frisk.inlined-deps.reagent.v1v0v0.reagent.debug :refer-macros [dev? warn]]
[goog.object :as gobj]))
From 's , via pump :
(def ^{:doc "Regular expression that parses a CSS-style id and class
from a tag name."}
re-tag #"([^\s\.#]+)(?:#([^\s\.#]+))?(?:\.([^\s#]+))?")
(deftype NativeWrapper [tag id className])
(defn adapt-react-class
[c]
(->NativeWrapper c nil nil))
(defn ^boolean hiccup-tag? [x]
(or (named? x)
(string? x)))
(defn ^boolean valid-tag? [x]
(or (hiccup-tag? x)
(ifn? x)
(instance? NativeWrapper x)))
(def prop-name-cache #js{:class "className"
:for "htmlFor"
:charset "charSet"})
(defn cache-get [o k]
(when ^boolean (.hasOwnProperty o k)
(gobj/get o k)))
(defn cached-prop-name [k]
(if (named? k)
(if-some [k' (cache-get prop-name-cache (name k))]
k'
(let [v (util/dash-to-prop-name k)]
(gobj/set prop-name-cache (name k) v)
v))
k))
(declare convert-prop-value)
(defn kv-conv [o k v]
(doto o
(gobj/set (cached-prop-name k) (convert-prop-value v))))
(defn convert-prop-value [x]
(cond (util/js-val? x) x
(named? x) (name x)
(map? x) (reduce-kv kv-conv #js{} x)
(coll? x) (clj->js x)
(ifn? x) (fn [& args]
(apply x args))
:else (clj->js x)))
(def custom-prop-name-cache #js{})
(defn cached-custom-prop-name [k]
(if (named? k)
(if-some [k' (cache-get custom-prop-name-cache (name k))]
k'
(let [v (util/dash-to-prop-name k)]
(gobj/set custom-prop-name-cache (name k) v)
v))
k))
(defn custom-kv-conv [o k v]
(doto o
(gobj/set (cached-custom-prop-name k) (convert-prop-value v))))
(defn convert-custom-prop-value [x]
(cond (util/js-val? x) x
(named? x) (name x)
(map? x) (reduce-kv custom-kv-conv #js{} x)
(coll? x) (clj->js x)
(ifn? x) (fn [& args]
(apply x args))
:else (clj->js x)))
(defn set-id-class
"Takes the id and class from tag keyword, and adds them to the
other props. Parsed tag is JS object with :id and :class properties."
[props id-class]
(let [id (.-id id-class)
class (.-className id-class)]
(cond-> props
(and (some? id)
(nil? (:id props)))
(assoc :id id)
class
(assoc :class (util/class-names class (or (:class props) (:className props)))))))
(defn convert-props [props ^clj id-class]
(let [class (:class props)
props (-> props
(cond-> class (assoc :class (util/class-names class)))
(set-id-class id-class))]
(if (.-custom id-class)
(convert-custom-prop-value props)
(convert-prop-value props))))
Conversion from forms
(defn make-element [this argv component jsprops first-child]
(case (- (count argv) first-child)
Optimize cases of zero or one child
0 (react/createElement component jsprops)
1 (react/createElement component jsprops
(p/as-element this (nth argv first-child nil)))
(.apply react/createElement nil
(reduce-kv (fn [a k v]
(when (>= k first-child)
(.push a (p/as-element this v)))
a)
#js [component jsprops] argv))))
(deftype HiccupTag [tag id className custom])
(defn parse-tag [hiccup-tag]
(let [[tag id className] (->> hiccup-tag name (re-matches re-tag) next)
className (when-not (nil? className)
(string/replace className #"\." " "))]
(assert tag (str "Invalid tag: '" hiccup-tag "'" (comp/comp-name)))
(->HiccupTag tag
id
className
(not= -1 (.indexOf tag "-")))))
(defn reag-element [tag v compiler]
(let [c (comp/as-class tag compiler)
jsprops #js {}]
(set! (.-argv jsprops) v)
(when-some [key (util/react-key-from-vec v)]
(set! (.-key jsprops) key))
(react/createElement c jsprops)))
(defn function-element [tag v first-arg compiler]
(let [jsprops #js {}]
(set! (.-reagentRender jsprops) tag)
(set! (.-argv jsprops) (subvec v first-arg))
( set ! ( .-opts ) opts )
(when-some [key (util/react-key-from-vec v)]
(set! (.-key jsprops) key))
(react/createElement (comp/functional-render-fn compiler tag) jsprops)))
(defn maybe-function-element
"If given tag is a Class, use it as a class,
else wrap in Reagent function wrapper."
[tag v compiler]
(if (comp/react-class? tag)
(reag-element tag v compiler)
(function-element tag v 1 compiler)))
(defn fragment-element [argv compiler]
(let [props (nth argv 1 nil)
hasprops (or (nil? props) (map? props))
jsprops (or (convert-prop-value (if hasprops props))
#js {})
first-child (+ 1 (if hasprops 1 0))]
(when-some [key (util/react-key-from-vec argv)]
(set! (.-key jsprops) key))
(p/make-element compiler argv react/Fragment jsprops first-child)))
(def tag-name-cache #js {})
(defn cached-parse [x]
(if-some [s (cache-get tag-name-cache x)]
s
(let [v (parse-tag x)]
(gobj/set tag-name-cache x v)
v)))
(defn native-element [parsed argv first ^p/Compiler compiler]
(let [component (.-tag parsed)
props (nth argv first nil)
hasprops (or (nil? props) (map? props))
jsprops (or (convert-props (if hasprops props) parsed)
#js {})
first-child (+ first (if hasprops 1 0))]
(if (input/input-component? component)
(let [input-class (or (.-reagentInput compiler)
(let [x (comp/create-class input/input-spec compiler)]
(set! (.-reagentInput compiler) x)
x))]
(-> [input-class argv component jsprops first-child compiler]
(with-meta (meta argv))
(->> (p/as-element compiler))))
(do
(when-some [key (-> (meta argv) util/get-react-key)]
(set! (.-key jsprops) key))
(p/make-element compiler argv component jsprops first-child)))))
(defn raw-element [comp argv compiler]
(let [props (nth argv 2 nil)
jsprops (or props #js {})]
(when-some [key (-> (meta argv) util/get-react-key)]
(set! (.-key jsprops) key))
(p/make-element compiler argv comp jsprops 3)))
(defn expand-seq [s compiler]
(into-array (map #(p/as-element compiler %) s)))
(defn expand-seq-dev [s ^clj o compiler]
(into-array (map (fn [val]
(when (and (vector? val)
(nil? (util/react-key-from-vec val)))
(set! (.-no-key o) true))
(p/as-element compiler val))
s)))
(defn expand-seq-check [x compiler]
(let [ctx #js {}
[res derefed] (ratom/check-derefs #(expand-seq-dev x ctx compiler))]
(when derefed
(warn (util/hiccup-err x (comp/comp-name) "Reactive deref not supported in lazy seq, "
"it should be wrapped in doall")))
(when (.-no-key ctx)
(warn (util/hiccup-err x (comp/comp-name) "Every element in a seq should have a unique :key")))
res))
(defn hiccup-element [v compiler]
(let [tag (nth v 0 nil)
n (name tag)
pos (.indexOf n ">")]
(case pos
-1 (native-element (cached-parse n) v 1 compiler)
0 (assert (= ">" n) (util/hiccup-err v (comp/comp-name) "Invalid Hiccup tag"))
Metadata is probably used only with sequeneces , and in that case
(recur (with-meta [(subs n 0 pos)
(assoc (with-meta v nil) 0 (subs n (inc pos)))]
(meta v))
compiler))))
(defn vec-to-elem [v compiler fn-to-element]
(when (nil? compiler)
(js/console.error "vec-to-elem" (pr-str v)))
(assert (pos? (count v)) (util/hiccup-err v (comp/comp-name) "Hiccup form should not be empty"))
(let [tag (nth v 0 nil)]
(assert (valid-tag? tag) (util/hiccup-err v (comp/comp-name) "Invalid Hiccup form"))
(case tag
:> (native-element (->HiccupTag (nth v 1 nil) nil nil nil) v 2 compiler)
:r> (raw-element (nth v 1 nil) v compiler)
:f> (function-element (nth v 1 nil) v 2 compiler)
:<> (fragment-element v compiler)
(cond
(hiccup-tag? tag)
(hiccup-element v compiler)
(instance? NativeWrapper tag)
(native-element tag v 1 compiler)
:else (fn-to-element tag v compiler)))))
(defn as-element [this x fn-to-element]
(cond (util/js-val? x) x
(vector? x) (vec-to-elem x this fn-to-element)
(seq? x) (if (dev?)
(expand-seq-check x this)
(expand-seq x this))
(named? x) (name x)
(satisfies? IPrintWithWriter x) (pr-str x)
:else x))
(defn create-compiler [opts]
(let [id (gensym)
fn-to-element (if (:function-components opts)
maybe-function-element
reag-element)]
(reify p/Compiler
This is used to as cache key to cache component fns per compiler
(get-id [this] id)
(as-element [this x]
(as-element this x fn-to-element))
(make-element [this argv component jsprops first-child]
(make-element this argv component jsprops first-child)))))
(def default-compiler* (create-compiler {}))
(def ^:dynamic default-compiler default-compiler*)
(defn set-default-compiler! [compiler]
(set! default-compiler compiler))
|
f04accfbdeceaa8123a271b5afaacf182d4236fafaeccdd701bd3d2c1a0a38eb | PataphysicalSociety/soupault | rose_tree.ml | Multi - way tree used by the ToC module
(* List helper functions *)
module List_utils = struct
let rec remove p xs =
match xs with
| [] -> []
| y :: ys ->
if (p y) then ys
else y :: (remove p ys)
let rec replace p x xs =
match xs with
| [] -> raise Not_found
| y :: ys ->
if (p y) then x :: ys
else y :: (replace p x ys)
let rec insert_before p x xs =
match xs with
| [] -> raise Not_found
| y :: ys ->
if (p y) then x :: y :: ys
else y :: (insert_before p x ys)
let rec insert_after p x xs =
match xs with
| [] -> raise Not_found
| y :: ys ->
if (p y) then y :: x :: ys
else y :: (insert_after p x ys)
end
(* Actual tree *)
Extracts a top level section from a flat list of headings .
Since there can be any number of < h1 > elements ,
we have two possible options : consider all headings children of a virtual root ,
or treat it as multiple independent trees .
The latter approach allows for simpler types , since adding a virtual root
would require node data to be ' a option _ just _ to accomodate the root ,
while all real headings are guaranteed to have non - empty data .
Since there can be any number of <h1> elements,
we have two possible options: consider all headings children of a virtual root,
or treat it as multiple independent trees.
The latter approach allows for simpler types, since adding a virtual root
would require node data to be 'a option _just_ to accomodate the root,
while all real headings are guaranteed to have non-empty data.
*)
let take_section get_level hs =
let rec aux hs section level =
match hs with
| [] -> section, []
| h :: hs ->
if (get_level h) > level then aux hs (h :: section) level
else section, (h :: hs)
in match hs with
| [] -> failwith "Cannot take any section from an empty list of headings"
| [h] -> (h, []), []
| h :: hs ->
let first_level = get_level h in
let section, remainder = aux hs [] first_level in
(h, List.rev section), remainder
Multi - way tree with artificial node identifiers .
Heading text is not guaranteed to be unique ,
and headings are not guaranteed to have unique i d attributes either .
How do we make sure that we can insert a node in the headings tree at a well - defined position ?
The number of each heading in the document is unique , so we use it as a node identifier .
Having unique identifiers means we can insert at a " path " in the tree ,
where a path is a sequence of node identifiers .
E.g. , " the first h2 after the first h1 " would be [ 1 ; 2 ] .
When the tree is ready , those identifiers are useless , so we 'll remove them later .
Heading text is not guaranteed to be unique,
and headings are not guaranteed to have unique id attributes either.
How do we make sure that we can insert a node in the headings tree at a well-defined position?
The number of each heading in the document is unique, so we use it as a node identifier.
Having unique identifiers means we can insert at a "path" in the tree,
where a path is a sequence of node identifiers.
E.g., "the first h2 after the first h1" would be [1; 2].
When the tree is ready, those identifiers are useless, so we'll remove them later.
*)
module Path_tree = struct
type ('a, 'b) path_tree = {
(* Unique identifier for unambiguous references to nodes. *)
id: 'a;
(* Actual data of the node. *)
data: 'b;
children: ('a, 'b) path_tree list
}
exception Empty_path
exception Duplicate_child
exception Insert_error of string
let make id data = { id = id; data = data; children = [] }
let make_full id data children = { id = id; data = data; children = children }
let data_of_node n = n.data
let children_of_node n = n.children
Inserts an " immediate child node " , that is , a node exactly one level below .
For that reason , this function needs no recursion and serves as the terminal case .
The reason it takes parameters of a node [ ( i d , data , child list ) ]
rather than a node record is that for inserting deep into the tree ,
we have ids from the path and may need to create nodes as we go .
For that reason, this function needs no recursion and serves as the terminal case.
The reason it takes parameters of a node [(id, data, child list)]
rather than a node record is that for inserting deep into the tree,
we have ids from the path and may need to create nodes as we go.
*)
let insert_immediate node id data children =
let new_node = make_full id data children in
let children' = node.children @ [new_node] in
{node with children = children'}
(* Replaces an immediate child *)
let replace node child =
let children = node.children in
let id = child.id in
let children' = List_utils.replace (fun x -> x.id = id) child children in
{node with children = children'}
let find node id =
List.find_opt (fun x -> x.id = id) node.children
(* Multi-level insertion capable of creating sub-nodes if needed *)
let rec insert ?(children=[]) node path data =
match path with
| [] -> raise Empty_path
| [id] ->
(let last_child = find node id in
match last_child with
| None -> insert_immediate node id data children
| (Some _) -> raise Duplicate_child)
| id :: ids ->
let next_child = find node id in
match next_child with
| Some next_child' ->
let new_node = insert ~children:children next_child' ids data in
replace node new_node
| None ->
raise (Insert_error "Path does not exist")
(* The automaton for building a tree from a flat list *)
let from_list get_level tree hs =
let rec aux tree hs path =
match hs with
| [] -> tree
| _ -> begin
let ((id, h), children), remainder = take_section get_level hs in
let new_path = path @ [id] in
let tree = insert tree new_path h in
let tree =
(match children with
| [] -> tree
| _ -> aux tree children new_path)
in begin
match remainder with
| [] -> tree
| _ -> aux tree remainder path
end
end
in
aux tree hs []
end
type 'a tree = {
value: 'a;
children: ('a tree) list
}
let number_elements xs =
let rec aux xs num acc =
match xs with
| [] -> List.rev acc
| x :: xs ->
aux xs (num + 1) ((num, x) :: acc)
in aux xs 1 []
let break_into_sections get_level hs =
let rec aux hs acc =
match hs with
| [] -> acc
| _ ->
let section, remainder = take_section get_level hs in
aux remainder (section :: acc)
in List.rev @@ aux hs []
let rec from_path_tree t =
let data = Path_tree.data_of_node t in
match (Path_tree.children_of_node t) with
| [] -> {value=data; children=[]}
| cs -> {value=data; children=(List.map from_path_tree cs)}
let from_list get_level hs =
let get_level (_, x) = get_level x in
let sections = number_elements hs |> break_into_sections get_level in
List.map (fun ((id, h), cs) -> Path_tree.from_list get_level (Path_tree.make id h) cs) sections |>
List.map from_path_tree
| null | https://raw.githubusercontent.com/PataphysicalSociety/soupault/7e580f082bdda09f3b280a943162efe0abcdab8b/src/rose_tree.ml | ocaml | List helper functions
Actual tree
Unique identifier for unambiguous references to nodes.
Actual data of the node.
Replaces an immediate child
Multi-level insertion capable of creating sub-nodes if needed
The automaton for building a tree from a flat list | Multi - way tree used by the ToC module
module List_utils = struct
let rec remove p xs =
match xs with
| [] -> []
| y :: ys ->
if (p y) then ys
else y :: (remove p ys)
let rec replace p x xs =
match xs with
| [] -> raise Not_found
| y :: ys ->
if (p y) then x :: ys
else y :: (replace p x ys)
let rec insert_before p x xs =
match xs with
| [] -> raise Not_found
| y :: ys ->
if (p y) then x :: y :: ys
else y :: (insert_before p x ys)
let rec insert_after p x xs =
match xs with
| [] -> raise Not_found
| y :: ys ->
if (p y) then y :: x :: ys
else y :: (insert_after p x ys)
end
Extracts a top level section from a flat list of headings .
Since there can be any number of < h1 > elements ,
we have two possible options : consider all headings children of a virtual root ,
or treat it as multiple independent trees .
The latter approach allows for simpler types , since adding a virtual root
would require node data to be ' a option _ just _ to accomodate the root ,
while all real headings are guaranteed to have non - empty data .
Since there can be any number of <h1> elements,
we have two possible options: consider all headings children of a virtual root,
or treat it as multiple independent trees.
The latter approach allows for simpler types, since adding a virtual root
would require node data to be 'a option _just_ to accomodate the root,
while all real headings are guaranteed to have non-empty data.
*)
let take_section get_level hs =
let rec aux hs section level =
match hs with
| [] -> section, []
| h :: hs ->
if (get_level h) > level then aux hs (h :: section) level
else section, (h :: hs)
in match hs with
| [] -> failwith "Cannot take any section from an empty list of headings"
| [h] -> (h, []), []
| h :: hs ->
let first_level = get_level h in
let section, remainder = aux hs [] first_level in
(h, List.rev section), remainder
Multi - way tree with artificial node identifiers .
Heading text is not guaranteed to be unique ,
and headings are not guaranteed to have unique i d attributes either .
How do we make sure that we can insert a node in the headings tree at a well - defined position ?
The number of each heading in the document is unique , so we use it as a node identifier .
Having unique identifiers means we can insert at a " path " in the tree ,
where a path is a sequence of node identifiers .
E.g. , " the first h2 after the first h1 " would be [ 1 ; 2 ] .
When the tree is ready , those identifiers are useless , so we 'll remove them later .
Heading text is not guaranteed to be unique,
and headings are not guaranteed to have unique id attributes either.
How do we make sure that we can insert a node in the headings tree at a well-defined position?
The number of each heading in the document is unique, so we use it as a node identifier.
Having unique identifiers means we can insert at a "path" in the tree,
where a path is a sequence of node identifiers.
E.g., "the first h2 after the first h1" would be [1; 2].
When the tree is ready, those identifiers are useless, so we'll remove them later.
*)
module Path_tree = struct
type ('a, 'b) path_tree = {
id: 'a;
data: 'b;
children: ('a, 'b) path_tree list
}
exception Empty_path
exception Duplicate_child
exception Insert_error of string
let make id data = { id = id; data = data; children = [] }
let make_full id data children = { id = id; data = data; children = children }
let data_of_node n = n.data
let children_of_node n = n.children
Inserts an " immediate child node " , that is , a node exactly one level below .
For that reason , this function needs no recursion and serves as the terminal case .
The reason it takes parameters of a node [ ( i d , data , child list ) ]
rather than a node record is that for inserting deep into the tree ,
we have ids from the path and may need to create nodes as we go .
For that reason, this function needs no recursion and serves as the terminal case.
The reason it takes parameters of a node [(id, data, child list)]
rather than a node record is that for inserting deep into the tree,
we have ids from the path and may need to create nodes as we go.
*)
let insert_immediate node id data children =
let new_node = make_full id data children in
let children' = node.children @ [new_node] in
{node with children = children'}
let replace node child =
let children = node.children in
let id = child.id in
let children' = List_utils.replace (fun x -> x.id = id) child children in
{node with children = children'}
let find node id =
List.find_opt (fun x -> x.id = id) node.children
let rec insert ?(children=[]) node path data =
match path with
| [] -> raise Empty_path
| [id] ->
(let last_child = find node id in
match last_child with
| None -> insert_immediate node id data children
| (Some _) -> raise Duplicate_child)
| id :: ids ->
let next_child = find node id in
match next_child with
| Some next_child' ->
let new_node = insert ~children:children next_child' ids data in
replace node new_node
| None ->
raise (Insert_error "Path does not exist")
let from_list get_level tree hs =
let rec aux tree hs path =
match hs with
| [] -> tree
| _ -> begin
let ((id, h), children), remainder = take_section get_level hs in
let new_path = path @ [id] in
let tree = insert tree new_path h in
let tree =
(match children with
| [] -> tree
| _ -> aux tree children new_path)
in begin
match remainder with
| [] -> tree
| _ -> aux tree remainder path
end
end
in
aux tree hs []
end
(* A rose tree: a payload value plus an arbitrary number of subtrees. *)
type 'a tree = {
  value: 'a;                 (* payload stored at this node *)
  children: ('a tree) list   (* subtrees, in insertion order *)
}
(* [number_elements xs] pairs each element of [xs] with its 1-based position,
   preserving the original order.  Tail recursive. *)
let number_elements xs =
  let rec go idx acc = function
    | [] -> List.rev acc
    | y :: ys -> go (idx + 1) ((idx, y) :: acc) ys
  in
  go 1 [] xs
(* [break_into_sections get_level hs] repeatedly peels leading sections off
   [hs] with [take_section] until the list is exhausted, returning the
   sections in their original order. *)
let break_into_sections get_level hs =
  let rec aux hs acc =
    match hs with
    | [] -> acc
    | _ ->
      let section, remainder = take_section get_level hs in
      aux remainder (section :: acc)
  in List.rev @@ aux hs []
(* [from_path_tree t] converts a [Path_tree] node into a plain ['a tree],
   discarding ids and keeping only the payload and the tree shape. *)
let rec from_path_tree t =
  (* The original special-cased an empty child list, but [List.map] over []
     already yields [], so one unconditional build suffices. *)
  {value = Path_tree.data_of_node t;
   children = List.map from_path_tree (Path_tree.children_of_node t)}
(* [from_list get_level hs] turns the flat, level-annotated list [hs] into a
   forest of plain trees: elements are numbered (so ids are unique), split
   into top-level sections, grown into [Path_tree]s, and finally stripped
   down to ['a tree]s with [from_path_tree]. *)
let from_list get_level hs =
  (* Lift the caller's level function over the (id, element) pairs. *)
  let get_level (_, x) = get_level x in
  let sections = number_elements hs |> break_into_sections get_level in
  List.map (fun ((id, h), cs) -> Path_tree.from_list get_level (Path_tree.make id h) cs) sections |>
  List.map from_path_tree
|
1f208e1fac8e4747ed91a2d7c4a3f53b3d8049a46dd460599a1106bf3dc065bb | avsm/mirage-duniverse | xs_client_lwt.ml |
(*
 * Copyright (C) Citrix Systems Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation; version 2.1 only. with the special
 * exception on linking described in file LICENSE.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *)
(** A multiplexing protocol client over a byte-level transport *)
open Lwt
open Xs_protocol
(** The byte-level transport the client is functorised over. *)
module type IO = sig
  type 'a t = 'a Lwt.t
  val return: 'a -> 'a t
  val ( >>= ): 'a t -> ('a -> 'b t) -> 'b t
  (* Whether the transport is a Xen shared-memory ring or a Unix socket;
     [`xen] forces a single application-wide cached client (see [Client.make]). *)
  type backend = [ `xen | `unix ]
  val backend : backend
  type channel
  val create: unit -> channel t
  val destroy: channel -> unit t
  (* [read ch buf ofs len] / [write ch buf ofs len] operate on the slice of
     [buf] starting at [ofs]; [read] presumably returns the byte count read
     — confirm against the concrete transport implementation. *)
  val read: channel -> bytes -> int -> int -> int t
  val write: channel -> bytes -> int -> int -> unit t
end
(** Client-facing API: connection lifecycle plus the xenstore operations,
    each taking a [handle] that carries transaction/watch context. *)
module type S = sig
  type client
  val make : unit -> client Lwt.t
  val suspend : client -> unit Lwt.t
  val resume : client -> unit Lwt.t
  type handle
  (* [immediate] runs [f] outside any transaction; [transaction] retries [f]
     until its transaction commits; [wait] re-runs [f] on watch events until
     it stops raising [Eagain]. *)
  val immediate : client -> (handle -> 'a Lwt.t) -> 'a Lwt.t
  val transaction : client -> (handle -> 'a Lwt.t) -> 'a Lwt.t
  val wait : client -> (handle -> 'a Lwt.t) -> 'a Lwt.t
  (* Per-path operations; paths are absolute xenstore paths. *)
  val directory : handle -> string -> string list Lwt.t
  val read : handle -> string -> string Lwt.t
  val write : handle -> string -> string -> unit Lwt.t
  val rm : handle -> string -> unit Lwt.t
  val mkdir : handle -> string -> unit Lwt.t
  val setperms : handle -> string -> Xs_protocol.ACL.t -> unit Lwt.t
  val debug : handle -> string list -> string list Lwt.t
  val restrict : handle -> int -> unit Lwt.t
  val getdomainpath : handle -> int -> string Lwt.t
  val watch : handle -> string -> Xs_protocol.Token.t -> unit Lwt.t
  val unwatch : handle -> string -> Xs_protocol.Token.t -> unit Lwt.t
  val introduce : handle -> int -> nativeint -> int -> unit Lwt.t
  val set_target : handle -> int -> int -> unit Lwt.t
end
(* [finally f g] runs [f ()], then always runs the cleanup [g ()], returning
   [f]'s result (or re-raising its exception) once [g] has completed.
   Implemented with [Lwt.finalize] so that [g] runs exactly once: the
   previous hand-rolled [Lwt.catch] version re-ran [g] a second time when
   [f] succeeded but [g] itself failed (the cleanup's failure re-entered the
   exception handler, which invoked [g] again). *)
let finally f g = Lwt.finalize f g
(* Short alias for the set-of-paths type used by the watch machinery. *)
module StringSet = Xs_handle.StringSet
module Watcher = struct
  (** Someone who is watching paths is represented by one of these. *)
  type t = {
    mutable paths: StringSet.t; (* we never care about events or ordering, only paths *)
    mutable cancelling: bool; (* we need to stop watching and clean up *)
    c: unit Lwt_condition.t;  (* signalled whenever [paths] or [cancelling] changes *)
    m: Lwt_mutex.t;           (* protects [paths] and [cancelling] *)
  }
  let make () = {
    paths = StringSet.empty;
    cancelling = false;
    c = Lwt_condition.create ();
    m = Lwt_mutex.create ();
  }
  (** Register that a watched path has been changed *)
  let put (x: t) path =
    Lwt_mutex.with_lock x.m
      (fun () ->
        x.paths <- StringSet.add path x.paths;
        Lwt_condition.signal x.c ();
        return ();
      )
  (** Return a set of modified paths, or an empty set if we're cancelling *)
  let get (x: t) =
    Lwt_mutex.with_lock x.m
      (fun () ->
        (* Block until at least one path has arrived or we are cancelled. *)
        let rec loop () =
          if x.paths = StringSet.empty && not x.cancelling then begin
            Lwt_condition.wait ~mutex:x.m x.c
            >>= fun () ->
            loop ()
          end else Lwt.return () in
        loop ()
        >>= fun () ->
        (* Hand over the accumulated paths and reset for the next batch. *)
        let results = x.paths in
        x.paths <- StringSet.empty;
        return results
      )
  (** Called to shutdown the watcher and trigger an orderly cleanup *)
  let cancel (x: t) =
    (* Fire-and-forget: we only need the flag set and any waiter woken. *)
    let (_: unit Lwt.t) =
      Lwt_mutex.with_lock x.m
        (fun () ->
          x.cancelling <- true;
          Lwt_condition.signal x.c ();
          return ()
        ) in
    ()
end
(* Raised when a WATCH_EVENT payload is not the expected [path; token] pair. *)
exception Malformed_watch_event
(* Raised when a reply's request id matches no outstanding request. *)
exception Unexpected_rid of int32
(* Raised by RPCs attempted after the dispatcher thread has shut down. *)
exception Dispatcher_failed
(* Client implementation, functorised over the byte-level transport. *)
module Client = functor(IO: IO with type 'a t = 'a Lwt.t) -> struct
  module PS = PacketStream(IO)
  (* Represents a single active connection to a server *)
  type client = {
    mutable transport: IO.channel;
    ps: PS.stream;                  (* packet framing over [transport] *)
    rid_to_wakeup: (int32, Xs_protocol.t Lwt.u) Hashtbl.t;  (* in-flight requests keyed by request id *)
    mutable dispatcher_thread: unit Lwt.t;
    mutable dispatcher_shutting_down: bool;
    watchevents: (Token.t, Watcher.t) Hashtbl.t;  (* registered watchers keyed by token *)
    mutable suspended : bool;       (* set while [suspend] drains in-flight requests *)
    suspended_m : Lwt_mutex.t;
    suspended_c : unit Lwt_condition.t;
  }
  (* The following values are only used if IO.backend = `xen. *)
  let client_cache = ref None
  (* The whole application must only use one client, which will
     multiplex all requests onto the same ring. *)
  let client_cache_m = Lwt_mutex.create ()
  (* Multiple threads will call 'make' in parallel. We must ensure only
     one client is created. *)
  (* Read one packet off the stream, turning a stream error into a failed thread. *)
  let recv_one t =
    PS.recv t.ps
    >>= function
    | Ok x -> return x
    | Exception e -> Lwt.fail e
  let send_one t = PS.send t.ps
  (* Fatal-error path: mark the dispatcher as dead and fail every thread
     still waiting for a reply with [e]. *)
  let handle_exn t e =
    Printf.fprintf stderr "Caught: %s\n%!" (Printexc.to_string e);
    begin
      match e with
      | Xs_protocol.Response_parser_failed _ ->
        (* Lwt_io.hexdump Lwt_io.stderr x *)
        return ()
      | _ -> return ()
    end >>= fun () ->
    t.dispatcher_shutting_down <- true; (* no more hashtable entries after this *)
    (* all blocking threads are failed with our exception *)
    Lwt_mutex.with_lock t.suspended_m (fun () ->
      Printf.fprintf stderr "Propagating exception to %d threads\n%!" (Hashtbl.length t.rid_to_wakeup);
      Hashtbl.iter (fun _ u -> Lwt.wakeup_later_exn u e) t.rid_to_wakeup;
      return ())
    >>= fun () ->
    Lwt.fail e
  (* Background thread: demultiplexes incoming packets, routing watch events
     to their [Watcher] and replies to the RPC thread waiting on that rid. *)
  let rec dispatcher t =
    Lwt.catch (fun () -> recv_one t) (handle_exn t)
    >>= fun pkt ->
    match get_ty pkt with
    | Op.Watchevent ->
      begin match Unmarshal.list pkt with
        | Some [path; token] ->
          let token = Token.of_string token in
          (* We may get old watches: silently drop these *)
          if Hashtbl.mem t.watchevents token then begin
            Watcher.put (Hashtbl.find t.watchevents token) path
            >>= fun () -> dispatcher t
          end else dispatcher t
        | _ ->
          handle_exn t Malformed_watch_event
      end >>= fun () ->
      dispatcher t
    | _ ->
      let rid = get_rid pkt in
      Lwt_mutex.with_lock t.suspended_m (fun () ->
        if Hashtbl.mem t.rid_to_wakeup rid
        then return (Some (Hashtbl.find t.rid_to_wakeup rid))
        else return None)
      >>= function
      | None -> handle_exn t (Unexpected_rid rid)
      | Some thread ->
        begin
          Lwt.wakeup_later thread pkt;
          dispatcher t
        end
  (* Build a fresh connection and start its dispatcher.  "Unsafe" because it
     ignores the single-client constraint of the `xen backend; see [make]. *)
  let make_unsafe () =
    IO.create ()
    >>= fun transport ->
    let t = {
      transport = transport;
      ps = PS.make transport;
      rid_to_wakeup = Hashtbl.create 10;
      dispatcher_thread = return ();
      dispatcher_shutting_down = false;
      watchevents = Hashtbl.create 10;
      suspended = false;
      suspended_m = Lwt_mutex.create ();
      suspended_c = Lwt_condition.create ();
    } in
    t.dispatcher_thread <- dispatcher t;
    return t
  (* On `unix each call makes a new connection; on `xen all callers share
     one cached client, created under [client_cache_m]. *)
  let make () = match IO.backend with
    | `unix -> make_unsafe ()
    | `xen ->
      Lwt_mutex.with_lock client_cache_m
        (fun () -> match !client_cache with
          | Some c -> return c
          | None ->
            make_unsafe ()
            >>= fun c ->
            client_cache := Some c;
            return c
        )
  (* Quiesce the client: wait for in-flight RPCs to drain, cancel all
     watchers, then stop the dispatcher. *)
  let suspend t =
    Lwt_mutex.with_lock t.suspended_m
      (fun () ->
        t.suspended <- true;
        let rec loop () =
          if Hashtbl.length t.rid_to_wakeup > 0 then begin
            Lwt_condition.wait ~mutex:t.suspended_m t.suspended_c
            >>= fun () ->
            loop ()
          end else Lwt.return () in
        loop ()
      )
    >>= fun () ->
    Hashtbl.iter (fun _ watcher -> Watcher.cancel watcher) t.watchevents;
    Lwt.cancel t.dispatcher_thread;
    return ()
  (* Restart the dispatcher and release threads blocked on [suspended]. *)
  let resume_unsafe t =
    Lwt_mutex.with_lock t.suspended_m (fun () ->
      t.suspended <- false;
      t.dispatcher_shutting_down <- false;
      Lwt_condition.broadcast t.suspended_c ();
      return ())
    >>= fun () ->
    t.dispatcher_thread <- dispatcher t;
    return ()
  (* On `xen the cached client additionally gets a fresh transport (the old
     ring is gone after e.g. a migration). *)
  let resume t = match IO.backend with
    | `unix -> resume_unsafe t
    | `xen -> (match !client_cache with
        | None -> Lwt.return ()
        | Some c -> IO.create ()
          >>= fun transport -> c.transport <- transport; resume_unsafe t)
  type handle = client Xs_handle.t
  (* Allocate the next request id (wraps on Int32 overflow). *)
  let make_rid =
    let counter = ref 0l in
    fun () ->
      let result = !counter in
      counter := Int32.succ !counter;
      result
  (* Core request/reply primitive: marshal [payload], register the rid,
     send, block for the dispatcher to wake us with the reply, then
     unmarshal.  Blocks while the client is suspended. *)
  let rpc hint h payload unmarshal =
    let open Xs_handle in
    let rid = make_rid () in
    let request = Request.print payload (get_tid h) rid in
    (* NB: [wait] here is Lwt's thread/wakener pair, not the [wait] below. *)
    let t, u = wait () in
    let c = get_client h in
    if c.dispatcher_shutting_down
    then Lwt.fail Dispatcher_failed
    else begin
      Lwt_mutex.with_lock c.suspended_m (fun () ->
        let rec loop () =
          if c.suspended then begin
            Lwt_condition.wait ~mutex:c.suspended_m c.suspended_c
            >>= fun () ->
            loop ()
          end else Lwt.return () in
        loop ()
        >>= fun () ->
        Hashtbl.add c.rid_to_wakeup rid u;
        send_one c request
      ) >>= fun () ->
      t >>= fun res ->
      Lwt_mutex.with_lock c.suspended_m
        (fun () ->
          Hashtbl.remove c.rid_to_wakeup rid;
          (* Let [suspend] re-check its drain condition. *)
          Lwt_condition.broadcast c.suspended_c ();
          return ())
      >>= fun () ->
      return (response hint request res unmarshal)
    end
  (* One thin wrapper per protocol operation; path operations record the
     accessed path on the handle so [wait] can derive its watch set. *)
  let directory h path = rpc "directory" (Xs_handle.accessed_path h path) Request.(PathOp(path, Directory)) Unmarshal.list
  let read h path = rpc "read" (Xs_handle.accessed_path h path) Request.(PathOp(path, Read)) Unmarshal.string
  let write h path data = rpc "write" (Xs_handle.accessed_path h path) Request.(PathOp(path, Write data)) Unmarshal.ok
  let rm h path = rpc "rm" (Xs_handle.accessed_path h path) Request.(PathOp(path, Rm)) Unmarshal.ok
  let mkdir h path = rpc "mkdir" (Xs_handle.accessed_path h path) Request.(PathOp(path, Mkdir)) Unmarshal.ok
  let setperms h path acl = rpc "setperms" (Xs_handle.accessed_path h path) Request.(PathOp(path, Setperms acl)) Unmarshal.ok
  let debug h cmd_args = rpc "debug" h (Request.Debug cmd_args) Unmarshal.list
  let restrict h domid = rpc "restrict" h (Request.Restrict domid) Unmarshal.ok
  let getdomainpath h domid = rpc "getdomainpath" h (Request.Getdomainpath domid) Unmarshal.string
  let watch h path token = rpc "watch" (Xs_handle.watch h path) (Request.Watch(path, Token.to_string token)) Unmarshal.ok
  let unwatch h path token = rpc "unwatch" (Xs_handle.unwatch h path) (Request.Unwatch(path, Token.to_string token)) Unmarshal.ok
  let introduce h domid store_mfn store_port = rpc "introduce" h (Request.Introduce(domid, store_mfn, store_port)) Unmarshal.ok
  let set_target h stubdom_domid domid = rpc "set_target" h (Request.Set_target(stubdom_domid, domid)) Unmarshal.ok
  let immediate client f = f (Xs_handle.no_transaction client)
  (* Used only to build unique watch tokens for [wait]. *)
  let counter = ref 0l
  (* Run [f] until it stops raising [Eagain], re-running it whenever one of
     the paths it accessed changes.  Returns a cancellable task. *)
  let wait client f =
    let open StringSet in
    counter := Int32.succ !counter;
    let token = Token.of_string (Printf.sprintf "%ld:xs_client.wait" !counter) in
    (* When we register the 'watcher', the dispatcher thread will signal us when
       watches arrive. *)
    let watcher = Watcher.make () in
    Hashtbl.add client.watchevents token watcher;
    (* We signal the caller via this cancellable task: *)
    let result, wakener = Lwt.task () in
    on_cancel result
      (fun () ->
        (* Trigger an orderly cleanup in the background: *)
        Watcher.cancel watcher
      );
    let h = Xs_handle.watching client in
    (* Adjust the paths we're watching (if necessary) and block (if possible) *)
    let adjust_paths () =
      let current_paths = Xs_handle.get_watched_paths h in
      (* Paths which weren't read don't need to be watched: *)
      let old_paths = diff current_paths (Xs_handle.get_accessed_paths h) in
      Lwt_list.iter_s (fun p -> unwatch h p token) (elements old_paths)
      >>= fun () ->
      (* Paths which were read do need to be watched: *)
      let new_paths = diff (Xs_handle.get_accessed_paths h) current_paths in
      Lwt_list.iter_s (fun p -> watch h p token) (elements new_paths)
      >>= fun () ->
      (* If we're watching the correct set of paths already then just block *)
      if old_paths = empty && (new_paths = empty)
      then begin
        Watcher.get watcher
        >>= fun results ->
        (* an empty results set means we've been cancelled: trigger cleanup *)
        if results = empty
        then fail (Failure "goodnight")
        else return ()
      end else return () in
    (* Main client loop: *)
    let rec loop () =
      Lwt.catch
        (fun () ->
          f h
          >>= fun result ->
          wakeup wakener result;
          return true
        ) (function
            | Eagain -> return false
            | ex -> wakeup_exn wakener ex; return true)
      >>= function
      | true -> return ()
      | false ->
        adjust_paths ()
        >>= fun () ->
        loop ()
    in
    Lwt.async (fun () ->
      finally loop
        (fun () ->
          (* Always deregister our watches and watcher on the way out. *)
          let current_paths = Xs_handle.get_watched_paths h in
          Lwt_list.iter_s (fun p -> unwatch h p token) (elements current_paths)
          >>= fun () ->
          Hashtbl.remove client.watchevents token;
          return ()
        )
    );
    result
  (* Run [f] inside a transaction, retrying the whole transaction whenever
     the commit fails with [Eagain]. *)
  let rec transaction client f =
    rpc "transaction_start" (Xs_handle.no_transaction client) Request.Transaction_start Unmarshal.int32
    >>= fun tid ->
    let h = Xs_handle.transaction client tid in
    f h
    >>= fun result ->
    Lwt.catch
      (fun () ->
        rpc "transaction_end" h (Request.Transaction_end true) Unmarshal.string
        >>= fun res' ->
        if res' = "OK" then return result else Lwt.fail (Error (Printf.sprintf "Unexpected transaction result: %s" res'))
      ) (function
          | Eagain -> transaction client f
          | e -> Lwt.fail e)
end
| null | https://raw.githubusercontent.com/avsm/mirage-duniverse/983e115ff5a9fb37e3176c373e227e9379f0d777/ocaml_modules/xenstore/client_lwt/xs_client_lwt.ml | ocaml | we never care about events or ordering, only paths
we need to stop watching and clean up
* Register that a watched path has been changed
* Return a set of modified paths, or an empty set if we're cancelling
* Called to shutdown the watcher and trigger an orderly cleanup
Represents a single acive connection to a server
Lwt_io.hexdump Lwt_io.stderr x
no more hashtable entries after this
all blocking threads are failed with our exception
We may get old watches: silently drop these
When we register the 'watcher', the dispatcher thread will signal us when
watches arrive.
We signal the caller via this cancellable task:
Trigger an orderly cleanup in the background:
Adjust the paths we're watching (if necessary) and block (if possible)
Paths which weren't read don't need to be watched:
Paths which were read do need to be watched:
If we're watching the correct set of paths already then just block
an empty results set means we've been cancelled: trigger cleanup
Main client loop: |
* Copyright ( C ) Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
* A multiplexing protocol client over a byte - level transport
open Lwt
open Xs_protocol
module type IO = sig
type 'a t = 'a Lwt.t
val return: 'a -> 'a t
val ( >>= ): 'a t -> ('a -> 'b t) -> 'b t
type backend = [ `xen | `unix ]
val backend : backend
type channel
val create: unit -> channel t
val destroy: channel -> unit t
val read: channel -> bytes -> int -> int -> int t
val write: channel -> bytes -> int -> int -> unit t
end
module type S = sig
type client
val make : unit -> client Lwt.t
val suspend : client -> unit Lwt.t
val resume : client -> unit Lwt.t
type handle
val immediate : client -> (handle -> 'a Lwt.t) -> 'a Lwt.t
val transaction : client -> (handle -> 'a Lwt.t) -> 'a Lwt.t
val wait : client -> (handle -> 'a Lwt.t) -> 'a Lwt.t
val directory : handle -> string -> string list Lwt.t
val read : handle -> string -> string Lwt.t
val write : handle -> string -> string -> unit Lwt.t
val rm : handle -> string -> unit Lwt.t
val mkdir : handle -> string -> unit Lwt.t
val setperms : handle -> string -> Xs_protocol.ACL.t -> unit Lwt.t
val debug : handle -> string list -> string list Lwt.t
val restrict : handle -> int -> unit Lwt.t
val getdomainpath : handle -> int -> string Lwt.t
val watch : handle -> string -> Xs_protocol.Token.t -> unit Lwt.t
val unwatch : handle -> string -> Xs_protocol.Token.t -> unit Lwt.t
val introduce : handle -> int -> nativeint -> int -> unit Lwt.t
val set_target : handle -> int -> int -> unit Lwt.t
end
let finally f g =
Lwt.catch
(fun () ->
f () >>= fun result ->
g () >>= fun () ->
Lwt.return result
) (fun e ->
g () >>= fun () ->
Lwt.fail e)
module StringSet = Xs_handle.StringSet
module Watcher = struct
* Someone who is watching paths is represented by one of these :
type t = {
c: unit Lwt_condition.t;
m: Lwt_mutex.t;
}
let make () = {
paths = StringSet.empty;
cancelling = false;
c = Lwt_condition.create ();
m = Lwt_mutex.create ();
}
let put (x: t) path =
Lwt_mutex.with_lock x.m
(fun () ->
x.paths <- StringSet.add path x.paths;
Lwt_condition.signal x.c ();
return ();
)
let get (x: t) =
Lwt_mutex.with_lock x.m
(fun () ->
let rec loop () =
if x.paths = StringSet.empty && not x.cancelling then begin
Lwt_condition.wait ~mutex:x.m x.c
>>= fun () ->
loop ()
end else Lwt.return () in
loop ()
>>= fun () ->
let results = x.paths in
x.paths <- StringSet.empty;
return results
)
let cancel (x: t) =
let (_: unit Lwt.t) =
Lwt_mutex.with_lock x.m
(fun () ->
x.cancelling <- true;
Lwt_condition.signal x.c ();
return ()
) in
()
end
exception Malformed_watch_event
exception Unexpected_rid of int32
exception Dispatcher_failed
module Client = functor(IO: IO with type 'a t = 'a Lwt.t) -> struct
module PS = PacketStream(IO)
type client = {
mutable transport: IO.channel;
ps: PS.stream;
rid_to_wakeup: (int32, Xs_protocol.t Lwt.u) Hashtbl.t;
mutable dispatcher_thread: unit Lwt.t;
mutable dispatcher_shutting_down: bool;
watchevents: (Token.t, Watcher.t) Hashtbl.t;
mutable suspended : bool;
suspended_m : Lwt_mutex.t;
suspended_c : unit Lwt_condition.t;
}
The following values are only used if IO.backend = ` xen .
let client_cache = ref None
The whole application must only use one client , which will
multiplex all requests onto the same ring .
multiplex all requests onto the same ring. *)
let client_cache_m = Lwt_mutex.create ()
Multiple threads will call ' make ' in parallel . We must ensure only
one client is created .
one client is created. *)
let recv_one t =
PS.recv t.ps
>>= function
| Ok x -> return x
| Exception e -> Lwt.fail e
let send_one t = PS.send t.ps
let handle_exn t e =
Printf.fprintf stderr "Caught: %s\n%!" (Printexc.to_string e);
begin
match e with
| Xs_protocol.Response_parser_failed _ ->
return ()
| _ -> return ()
end >>= fun () ->
Lwt_mutex.with_lock t.suspended_m (fun () ->
Printf.fprintf stderr "Propagating exception to %d threads\n%!" (Hashtbl.length t.rid_to_wakeup);
Hashtbl.iter (fun _ u -> Lwt.wakeup_later_exn u e) t.rid_to_wakeup;
return ())
>>= fun () ->
Lwt.fail e
let rec dispatcher t =
Lwt.catch (fun () -> recv_one t) (handle_exn t)
>>= fun pkt ->
match get_ty pkt with
| Op.Watchevent ->
begin match Unmarshal.list pkt with
| Some [path; token] ->
let token = Token.of_string token in
if Hashtbl.mem t.watchevents token then begin
Watcher.put (Hashtbl.find t.watchevents token) path
>>= fun () -> dispatcher t
end else dispatcher t
| _ ->
handle_exn t Malformed_watch_event
end >>= fun () ->
dispatcher t
| _ ->
let rid = get_rid pkt in
Lwt_mutex.with_lock t.suspended_m (fun () ->
if Hashtbl.mem t.rid_to_wakeup rid
then return (Some (Hashtbl.find t.rid_to_wakeup rid))
else return None)
>>= function
| None -> handle_exn t (Unexpected_rid rid)
| Some thread ->
begin
Lwt.wakeup_later thread pkt;
dispatcher t
end
let make_unsafe () =
IO.create ()
>>= fun transport ->
let t = {
transport = transport;
ps = PS.make transport;
rid_to_wakeup = Hashtbl.create 10;
dispatcher_thread = return ();
dispatcher_shutting_down = false;
watchevents = Hashtbl.create 10;
suspended = false;
suspended_m = Lwt_mutex.create ();
suspended_c = Lwt_condition.create ();
} in
t.dispatcher_thread <- dispatcher t;
return t
let make () = match IO.backend with
| `unix -> make_unsafe ()
| `xen ->
Lwt_mutex.with_lock client_cache_m
(fun () -> match !client_cache with
| Some c -> return c
| None ->
make_unsafe ()
>>= fun c ->
client_cache := Some c;
return c
)
let suspend t =
Lwt_mutex.with_lock t.suspended_m
(fun () ->
t.suspended <- true;
let rec loop () =
if Hashtbl.length t.rid_to_wakeup > 0 then begin
Lwt_condition.wait ~mutex:t.suspended_m t.suspended_c
>>= fun () ->
loop ()
end else Lwt.return () in
loop ()
)
>>= fun () ->
Hashtbl.iter (fun _ watcher -> Watcher.cancel watcher) t.watchevents;
Lwt.cancel t.dispatcher_thread;
return ()
let resume_unsafe t =
Lwt_mutex.with_lock t.suspended_m (fun () ->
t.suspended <- false;
t.dispatcher_shutting_down <- false;
Lwt_condition.broadcast t.suspended_c ();
return ())
>>= fun () ->
t.dispatcher_thread <- dispatcher t;
return ()
let resume t = match IO.backend with
| `unix -> resume_unsafe t
| `xen -> (match !client_cache with
| None -> Lwt.return ()
| Some c -> IO.create ()
>>= fun transport -> c.transport <- transport; resume_unsafe t)
type handle = client Xs_handle.t
let make_rid =
let counter = ref 0l in
fun () ->
let result = !counter in
counter := Int32.succ !counter;
result
let rpc hint h payload unmarshal =
let open Xs_handle in
let rid = make_rid () in
let request = Request.print payload (get_tid h) rid in
let t, u = wait () in
let c = get_client h in
if c.dispatcher_shutting_down
then Lwt.fail Dispatcher_failed
else begin
Lwt_mutex.with_lock c.suspended_m (fun () ->
let rec loop () =
if c.suspended then begin
Lwt_condition.wait ~mutex:c.suspended_m c.suspended_c
>>= fun () ->
loop ()
end else Lwt.return () in
loop ()
>>= fun () ->
Hashtbl.add c.rid_to_wakeup rid u;
send_one c request
) >>= fun () ->
t >>= fun res ->
Lwt_mutex.with_lock c.suspended_m
(fun () ->
Hashtbl.remove c.rid_to_wakeup rid;
Lwt_condition.broadcast c.suspended_c ();
return ())
>>= fun () ->
return (response hint request res unmarshal)
end
let directory h path = rpc "directory" (Xs_handle.accessed_path h path) Request.(PathOp(path, Directory)) Unmarshal.list
let read h path = rpc "read" (Xs_handle.accessed_path h path) Request.(PathOp(path, Read)) Unmarshal.string
let write h path data = rpc "write" (Xs_handle.accessed_path h path) Request.(PathOp(path, Write data)) Unmarshal.ok
let rm h path = rpc "rm" (Xs_handle.accessed_path h path) Request.(PathOp(path, Rm)) Unmarshal.ok
let mkdir h path = rpc "mkdir" (Xs_handle.accessed_path h path) Request.(PathOp(path, Mkdir)) Unmarshal.ok
let setperms h path acl = rpc "setperms" (Xs_handle.accessed_path h path) Request.(PathOp(path, Setperms acl)) Unmarshal.ok
let debug h cmd_args = rpc "debug" h (Request.Debug cmd_args) Unmarshal.list
let restrict h domid = rpc "restrict" h (Request.Restrict domid) Unmarshal.ok
let getdomainpath h domid = rpc "getdomainpath" h (Request.Getdomainpath domid) Unmarshal.string
let watch h path token = rpc "watch" (Xs_handle.watch h path) (Request.Watch(path, Token.to_string token)) Unmarshal.ok
let unwatch h path token = rpc "unwatch" (Xs_handle.unwatch h path) (Request.Unwatch(path, Token.to_string token)) Unmarshal.ok
let introduce h domid store_mfn store_port = rpc "introduce" h (Request.Introduce(domid, store_mfn, store_port)) Unmarshal.ok
let set_target h stubdom_domid domid = rpc "set_target" h (Request.Set_target(stubdom_domid, domid)) Unmarshal.ok
let immediate client f = f (Xs_handle.no_transaction client)
let counter = ref 0l
let wait client f =
let open StringSet in
counter := Int32.succ !counter;
let token = Token.of_string (Printf.sprintf "%ld:xs_client.wait" !counter) in
let watcher = Watcher.make () in
Hashtbl.add client.watchevents token watcher;
let result, wakener = Lwt.task () in
on_cancel result
(fun () ->
Watcher.cancel watcher
);
let h = Xs_handle.watching client in
let adjust_paths () =
let current_paths = Xs_handle.get_watched_paths h in
let old_paths = diff current_paths (Xs_handle.get_accessed_paths h) in
Lwt_list.iter_s (fun p -> unwatch h p token) (elements old_paths)
>>= fun () ->
let new_paths = diff (Xs_handle.get_accessed_paths h) current_paths in
Lwt_list.iter_s (fun p -> watch h p token) (elements new_paths)
>>= fun () ->
if old_paths = empty && (new_paths = empty)
then begin
Watcher.get watcher
>>= fun results ->
if results = empty
then fail (Failure "goodnight")
else return ()
end else return () in
let rec loop () =
Lwt.catch
(fun () ->
f h
>>= fun result ->
wakeup wakener result;
return true
) (function
| Eagain -> return false
| ex -> wakeup_exn wakener ex; return true)
>>= function
| true -> return ()
| false ->
adjust_paths ()
>>= fun () ->
loop ()
in
Lwt.async (fun () ->
finally loop
(fun () ->
let current_paths = Xs_handle.get_watched_paths h in
Lwt_list.iter_s (fun p -> unwatch h p token) (elements current_paths)
>>= fun () ->
Hashtbl.remove client.watchevents token;
return ()
)
);
result
let rec transaction client f =
rpc "transaction_start" (Xs_handle.no_transaction client) Request.Transaction_start Unmarshal.int32
>>= fun tid ->
let h = Xs_handle.transaction client tid in
f h
>>= fun result ->
Lwt.catch
(fun () ->
rpc "transaction_end" h (Request.Transaction_end true) Unmarshal.string
>>= fun res' ->
if res' = "OK" then return result else Lwt.fail (Error (Printf.sprintf "Unexpected transaction result: %s" res'))
) (function
| Eagain -> transaction client f
| e -> Lwt.fail e)
end
|
e945f98dd4ee73bfb13121288d5fc4af5162638d2221d485c771ff36abdeb7aa | MalloZup/fullrocketmetal | scheduler.clj | (ns fullrocketmetal.scheduler
(:require
[clojurewerkz.quartzite.triggers :as t]
[clojurewerkz.quartzite.jobs :as j]
[clojurewerkz.quartzite.conversion :as qc]
[clojurewerkz.quartzite.jobs :refer [defjob]]
[clojurewerkz.quartzite.schedule.cron :as qcron]
[missile.chat :as chat]
[missile.channels :as channels])
(:gen-class))
(defn create-rocket-msg-cron-trigger [cron-schedule-time]
(t/build
(t/start-now)
(t/with-schedule (qcron/schedule (qcron/cron-schedule cron-schedule-time)))))
(defjob rocket-message-job [ctx]
(let [data (qc/from-job-data ctx)
ch-name (data "ch-name")
text-message (data "text-message")]
(chat/sendMessage (channels/get-channel-id ch-name) text-message)))
(defn create-rocket-msg-job [list-jobs]
(j/build
(j/of-type rocket-message-job)
(j/using-job-data {"ch-name" (:channel-name list-jobs) "text-message" (:message list-jobs)})))
| null | https://raw.githubusercontent.com/MalloZup/fullrocketmetal/6029a2d2bf6c59910611650e7adaaf7b4c3697ae/src/fullrocketmetal/scheduler.clj | clojure | (ns fullrocketmetal.scheduler
(:require
[clojurewerkz.quartzite.triggers :as t]
[clojurewerkz.quartzite.jobs :as j]
[clojurewerkz.quartzite.conversion :as qc]
[clojurewerkz.quartzite.jobs :refer [defjob]]
[clojurewerkz.quartzite.schedule.cron :as qcron]
[missile.chat :as chat]
[missile.channels :as channels])
(:gen-class))
(defn create-rocket-msg-cron-trigger [cron-schedule-time]
(t/build
(t/start-now)
(t/with-schedule (qcron/schedule (qcron/cron-schedule cron-schedule-time)))))
(defjob rocket-message-job [ctx]
(let [data (qc/from-job-data ctx)
ch-name (data "ch-name")
text-message (data "text-message")]
(chat/sendMessage (channels/get-channel-id ch-name) text-message)))
(defn create-rocket-msg-job [list-jobs]
(j/build
(j/of-type rocket-message-job)
(j/using-job-data {"ch-name" (:channel-name list-jobs) "text-message" (:message list-jobs)})))
| |
0aaf01875662f350fe33909c3b6d1d812453df1eeed9e471c2cee44d761aefd6 | artyom-poptsov/guile-dsv | table.scm | ;;; table.scm -- Procedures to print fancy tables in a console.
Copyright ( C ) 2021 - 2022 Artyom V. Poptsov < >
;;
;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;; (at your option) any later version.
;;
;; The program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;;
You should have received a copy of the GNU General Public License
;; along with the program. If not, see </>.
;;; Commentary:
;; This module contains procedures that allow to print configurable fancy
;; tables in a console.
;;; Code:
(define-module (dsv table)
#:use-module (scheme documentation)
#:use-module (ice-9 format)
#:export (%table-parameters
print-table-parameters
shorthand->table-parameter
get-width
format-table
table-map
table-filter-row
table-filter-column))
(define-with-docs %table-parameters
"Associative list of all known table parameters."
'((bt . border-top)
(btl . border-top-left)
(btr . border-top-right)
(btj . border-top-joint)
(bl . border-left)
(blj . border-left-joint)
(br . border-right)
(brj . border-right-joint)
(bb . border-bottom)
(bbl . border-bottom-left)
(bbr . border-bottom-right)
(bbj . border-bottom-joint)
;; Inner table lines.
(rs . row-separator)
(rj . row-joint)
(cs . column-separator)
;; Headers.
(ht . header-top)
(htl . header-top-left)
(htr . header-top-right)
(htj . header-top-joint)
(hl . header-left)
(hr . header-right)
(hcs . header-column-separator)
(hb . header-bottom)
(hbl . header-bottom-left)
(hbr . header-bottom-right)
(hbj . header-bottom-joint)))
(define (print-table-parameters port)
"Print all known table parameters to a PORT."
(for-each (lambda (param)
(format port
" ~4a ~a~%"
(car param)
(cdr param)))
%table-parameters))
(define (shorthand->table-parameter sh)
"Convert a shorthand SH to a table parameter."
(let ((param (assoc-ref %table-parameters sh)))
(unless param
(format (current-error-port)
"ERROR: Unknown table parameter: ~a~%" sh)
(format (current-error-port)
"Known table parameters are:~%")
(print-table-parameters (current-error-port))
(exit 1))
param))
(define (get-width table)
"Get maximum field width for each row of TABLE."
(let loop ((rows table)
(res '()))
(if (not (null? rows))
(let ((w (map string-length (car rows))))
(cond
((null? res)
(loop (cdr rows) w))
(else
(loop (cdr rows)
(map max res w)))))
res)))
(define (table-map proc table)
"Apply a procedure PROC to each TABLE cell, return the new table."
(define (process-row row row-num)
(let loop ((r row)
(col-num 0)
(result '()))
(if (null? r)
(reverse result)
(loop (cdr r)
(+ col-num 1)
(cons (proc (car r)
row-num
col-num)
result)))))
(let row-loop ((tbl table)
(row-num 0)
(result '()))
(if (null? tbl)
(reverse result)
(row-loop (cdr tbl)
(+ row-num 1)
(cons (process-row (car tbl)
row-num)
result)))))
(define (table-filter-row proc table)
(let loop ((tbl table)
(row-num 0)
(result '()))
(if (null? tbl)
(reverse result)
(let ((row (car tbl)))
(if (proc row row-num)
(loop (cdr tbl)
(+ row-num 1)
(cons row result))
(loop (cdr tbl)
(+ row-num 1)
result))))))
(define (table-filter-column proc table)
"Remove all the columns from a TABLE for which a procedure PROC returns #f."
(let ((first-row-length (length (car table))))
(let loop ((col 0)
(result (make-list (length table) '())))
(if (= col first-row-length)
(map reverse result)
(let ((value (map (lambda (r) (list-ref r col)) table)))
(if (proc value col)
(loop (+ col 1)
(map cons value result))
(loop (+ col 1)
result)))))))
(define* (format-table table
borders
#:key
(with-header? #f)
(port (current-output-port)))
"Format file and print it to a PORT."
(let* ((padding 5)
(column-separator (or (assoc-ref borders 'column-separator) ""))
(row-separator (assoc-ref borders 'row-separator))
(row-joint (assoc-ref borders 'row-joint))
(border-top (assoc-ref borders 'border-top))
(border-top-left (assoc-ref borders 'border-top-left))
(border-top-right (assoc-ref borders 'border-top-right))
(border-top-joint (assoc-ref borders 'border-top-joint))
(border-left (assoc-ref borders 'border-left))
(border-left-joint (assoc-ref borders 'border-left-joint))
(border-right (assoc-ref borders 'border-right))
(border-right-joint (assoc-ref borders 'border-right-joint))
(border-bottom (assoc-ref borders 'border-bottom))
(border-bottom-left (assoc-ref borders 'border-bottom-left))
(border-bottom-right (assoc-ref borders 'border-bottom-right))
(border-bottom-joint (assoc-ref borders 'border-bottom-joint))
(header-top (assoc-ref borders 'header-top))
(header-top-left (assoc-ref borders 'header-top-left))
(header-top-right (assoc-ref borders 'header-top-right))
(header-top-joint (assoc-ref borders 'header-top-joint))
(header-left (assoc-ref borders 'header-left))
(header-right (assoc-ref borders 'header-right))
(header-column-separator (assoc-ref borders
'header-column-separator))
(header-bottom (assoc-ref borders 'header-bottom))
(header-bottom-left (assoc-ref borders 'header-bottom-left))
(header-bottom-right (assoc-ref borders 'header-bottom-right))
(header-bottom-joint (assoc-ref borders 'header-bottom-joint))
(width (get-width table))
(format-field (lambda (field width)
"Print a FIELD in a column with given WIDTH."
(format port
(format #f " ~~~da " (+ width padding))
field)))
(format-row (lambda (row width border-left border-right separator)
(if border-left
(display border-left port)
(display " " port))
(let field-loop ((fields row)
(field-widths width))
(unless (null? fields)
(let ((f (car fields))
(w (car field-widths)))
(format-field f w)
(if (null? (cdr fields))
(if border-right
(display border-right port)
(display " " port))
(if separator
(display separator port)
(display " " port)))
(field-loop (cdr fields) (cdr field-widths)))))
(newline port)))
(display-line (lambda (widths middle left right joint)
(if left
(display left port)
(display " " port))
(let loop ((w widths))
(unless (null? w)
(let ((row-width (+ (car w) padding 2)))
(display (string-join (make-list row-width
middle)
"")
port)
(unless (null? (cdr w))
(if joint
(display joint port)
(display " " port))))
(loop (cdr w))))
(if right
(display right port)
(display " " port))
(newline port)))
(display-header-border-top
(lambda (widths)
(display-line widths
header-top
header-top-left
header-top-right
header-top-joint)))
(display-header-border-bottom
(lambda (widths)
(display-line widths
header-bottom
header-bottom-left
header-bottom-right
header-bottom-joint)))
(display-top-border (lambda (widths)
"Display a top horisontal table border."
(display-line widths
border-top
border-top-left
border-top-right
border-top-joint)))
(display-bottom-border (lambda (widths)
"Display a top horisontal table border."
(display-line widths
border-bottom
border-bottom-left
border-bottom-right
border-bottom-joint)))
(display-row-separator (lambda (widths)
"Display a top horisontal table border."
(display-line widths
row-separator
border-left-joint
border-right-joint
row-joint)))
(display-table (lambda (table)
(unless with-header?
(when border-top
(display-top-border width)))
(let loop ((t table))
(unless (null? t)
(format-row (car t)
width
border-left
border-right
column-separator)
(when row-separator
(if (null? (cdr t))
(when border-bottom
(display-bottom-border width))
(display-row-separator width)))
(loop (cdr t)))))))
(if with-header?
(begin
(when header-top
(display-header-border-top width))
(format-row (car table)
width
header-left
header-right
header-column-separator)
(when header-bottom
(display-header-border-bottom width))
(display-table (cdr table)))
(display-table table))))
;;; table.scm ends here.
| null | https://raw.githubusercontent.com/artyom-poptsov/guile-dsv/8cb9690708bcb7448b28031f3b9b682680389172/modules/dsv/table.scm | scheme | table.scm -- Procedures to print fancy tables in a console.
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
The program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with the program. If not, see </>.
Commentary:
This module contains procedures that allow to print configurable fancy
tables in a console.
Code:
Inner table lines.
Headers.
table.scm ends here. |
Copyright ( C ) 2021 - 2022 Artyom V. Poptsov < >
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(define-module (dsv table)
#:use-module (scheme documentation)
#:use-module (ice-9 format)
#:export (%table-parameters
print-table-parameters
shorthand->table-parameter
get-width
format-table
table-map
table-filter-row
table-filter-column))
(define-with-docs %table-parameters
"Associative list of all known table parameters."
'((bt . border-top)
(btl . border-top-left)
(btr . border-top-right)
(btj . border-top-joint)
(bl . border-left)
(blj . border-left-joint)
(br . border-right)
(brj . border-right-joint)
(bb . border-bottom)
(bbl . border-bottom-left)
(bbr . border-bottom-right)
(bbj . border-bottom-joint)
(rs . row-separator)
(rj . row-joint)
(cs . column-separator)
(ht . header-top)
(htl . header-top-left)
(htr . header-top-right)
(htj . header-top-joint)
(hl . header-left)
(hr . header-right)
(hcs . header-column-separator)
(hb . header-bottom)
(hbl . header-bottom-left)
(hbr . header-bottom-right)
(hbj . header-bottom-joint)))
(define (print-table-parameters port)
"Print all known table parameters to a PORT."
(for-each (lambda (param)
(format port
" ~4a ~a~%"
(car param)
(cdr param)))
%table-parameters))
(define (shorthand->table-parameter sh)
"Convert a shorthand SH to a table parameter."
(let ((param (assoc-ref %table-parameters sh)))
(unless param
(format (current-error-port)
"ERROR: Unknown table parameter: ~a~%" sh)
(format (current-error-port)
"Known table parameters are:~%")
(print-table-parameters (current-error-port))
(exit 1))
param))
(define (get-width table)
"Get maximum field width for each row of TABLE."
(let loop ((rows table)
(res '()))
(if (not (null? rows))
(let ((w (map string-length (car rows))))
(cond
((null? res)
(loop (cdr rows) w))
(else
(loop (cdr rows)
(map max res w)))))
res)))
(define (table-map proc table)
"Apply a procedure PROC to each TABLE cell, return the new table."
(define (process-row row row-num)
(let loop ((r row)
(col-num 0)
(result '()))
(if (null? r)
(reverse result)
(loop (cdr r)
(+ col-num 1)
(cons (proc (car r)
row-num
col-num)
result)))))
(let row-loop ((tbl table)
(row-num 0)
(result '()))
(if (null? tbl)
(reverse result)
(row-loop (cdr tbl)
(+ row-num 1)
(cons (process-row (car tbl)
row-num)
result)))))
(define (table-filter-row proc table)
(let loop ((tbl table)
(row-num 0)
(result '()))
(if (null? tbl)
(reverse result)
(let ((row (car tbl)))
(if (proc row row-num)
(loop (cdr tbl)
(+ row-num 1)
(cons row result))
(loop (cdr tbl)
(+ row-num 1)
result))))))
(define (table-filter-column proc table)
"Remove all the columns from a TABLE for which a procedure PROC returns #f."
(let ((first-row-length (length (car table))))
(let loop ((col 0)
(result (make-list (length table) '())))
(if (= col first-row-length)
(map reverse result)
(let ((value (map (lambda (r) (list-ref r col)) table)))
(if (proc value col)
(loop (+ col 1)
(map cons value result))
(loop (+ col 1)
result)))))))
(define* (format-table table
borders
#:key
(with-header? #f)
(port (current-output-port)))
"Format file and print it to a PORT."
(let* ((padding 5)
(column-separator (or (assoc-ref borders 'column-separator) ""))
(row-separator (assoc-ref borders 'row-separator))
(row-joint (assoc-ref borders 'row-joint))
(border-top (assoc-ref borders 'border-top))
(border-top-left (assoc-ref borders 'border-top-left))
(border-top-right (assoc-ref borders 'border-top-right))
(border-top-joint (assoc-ref borders 'border-top-joint))
(border-left (assoc-ref borders 'border-left))
(border-left-joint (assoc-ref borders 'border-left-joint))
(border-right (assoc-ref borders 'border-right))
(border-right-joint (assoc-ref borders 'border-right-joint))
(border-bottom (assoc-ref borders 'border-bottom))
(border-bottom-left (assoc-ref borders 'border-bottom-left))
(border-bottom-right (assoc-ref borders 'border-bottom-right))
(border-bottom-joint (assoc-ref borders 'border-bottom-joint))
(header-top (assoc-ref borders 'header-top))
(header-top-left (assoc-ref borders 'header-top-left))
(header-top-right (assoc-ref borders 'header-top-right))
(header-top-joint (assoc-ref borders 'header-top-joint))
(header-left (assoc-ref borders 'header-left))
(header-right (assoc-ref borders 'header-right))
(header-column-separator (assoc-ref borders
'header-column-separator))
(header-bottom (assoc-ref borders 'header-bottom))
(header-bottom-left (assoc-ref borders 'header-bottom-left))
(header-bottom-right (assoc-ref borders 'header-bottom-right))
(header-bottom-joint (assoc-ref borders 'header-bottom-joint))
(width (get-width table))
(format-field (lambda (field width)
"Print a FIELD in a column with given WIDTH."
(format port
(format #f " ~~~da " (+ width padding))
field)))
(format-row (lambda (row width border-left border-right separator)
(if border-left
(display border-left port)
(display " " port))
(let field-loop ((fields row)
(field-widths width))
(unless (null? fields)
(let ((f (car fields))
(w (car field-widths)))
(format-field f w)
(if (null? (cdr fields))
(if border-right
(display border-right port)
(display " " port))
(if separator
(display separator port)
(display " " port)))
(field-loop (cdr fields) (cdr field-widths)))))
(newline port)))
(display-line (lambda (widths middle left right joint)
(if left
(display left port)
(display " " port))
(let loop ((w widths))
(unless (null? w)
(let ((row-width (+ (car w) padding 2)))
(display (string-join (make-list row-width
middle)
"")
port)
(unless (null? (cdr w))
(if joint
(display joint port)
(display " " port))))
(loop (cdr w))))
(if right
(display right port)
(display " " port))
(newline port)))
(display-header-border-top
(lambda (widths)
(display-line widths
header-top
header-top-left
header-top-right
header-top-joint)))
(display-header-border-bottom
(lambda (widths)
(display-line widths
header-bottom
header-bottom-left
header-bottom-right
header-bottom-joint)))
(display-top-border (lambda (widths)
"Display a top horisontal table border."
(display-line widths
border-top
border-top-left
border-top-right
border-top-joint)))
(display-bottom-border (lambda (widths)
"Display a top horisontal table border."
(display-line widths
border-bottom
border-bottom-left
border-bottom-right
border-bottom-joint)))
(display-row-separator (lambda (widths)
"Display a top horisontal table border."
(display-line widths
row-separator
border-left-joint
border-right-joint
row-joint)))
(display-table (lambda (table)
(unless with-header?
(when border-top
(display-top-border width)))
(let loop ((t table))
(unless (null? t)
(format-row (car t)
width
border-left
border-right
column-separator)
(when row-separator
(if (null? (cdr t))
(when border-bottom
(display-bottom-border width))
(display-row-separator width)))
(loop (cdr t)))))))
(if with-header?
(begin
(when header-top
(display-header-border-top width))
(format-row (car table)
width
header-left
header-right
header-column-separator)
(when header-bottom
(display-header-border-bottom width))
(display-table (cdr table)))
(display-table table))))
|
5e97031a8e0ad90379c34b83d1293d18578f0a8f129bd7bd284ee7ec20afa898 | blarney-lang/actora | StackIR.hs | -- Stack intermediate representation
module StackIR where
import Data.Map as M
import Data.Set as S
import Data.List as L
-- Some meaningful type names, for readability
type Arity = Int
type StackOffset = Int
type InstrAddr = Int
type NumAtoms = Int
type PopAmount = Int
type ErrorCode = String
-- An instruction pointer before linking is a label,
-- and after linking is an address
data InstrPtr =
InstrLabel String
| InstrAddr InstrAddr
deriving (Eq, Ord, Show)
-- Atoms are words residing on the stack and heap
data Atom =
FUN InstrPtr
| INT Int
| ATOM String
| PTR PtrKind NumAtoms Int
deriving (Eq, Ord, Show)
Pointers can point to partial applications , tuples , and cons cells
data PtrKind = PtrApp Arity | PtrTuple | PtrCons
deriving (Eq, Ord, Show)
-- Primitive operators
data Prim =
PrimAdd
| PrimSub
| PrimAddImm Int
| PrimSubImm Int
| PrimEq
| PrimNotEq
| PrimLess
| PrimGreaterEq
| PrimInv
| PrimAnd
| PrimOr
| PrimXor
| PrimShiftLeft
| PrimShiftRight
| PrimArithShiftRight
deriving Show
-- Instruction set
data Instr =
LABEL String
| PUSH Atom
| SETU Int
| COPY StackOffset
| JUMP InstrPtr
| IJUMP
| SLIDE PopAmount NumAtoms
| RETURN PopAmount
| SLIDE_JUMP PopAmount NumAtoms InstrPtr
| LOAD Bool
| STORE NumAtoms PtrKind
| MATCH BranchCond
| CJUMPPOP PopAmount InstrPtr
| PRIM Prim
| HALT ErrorCode
deriving Show
-- Branch conditions
type BranchCond = (Polarity, BCond)
data Polarity = Pos | Neg deriving (Eq, Ord, Show)
data BCond =
IsAtom String
| IsInt Int
| IsCons
| IsTuple NumAtoms
| IsApp Arity
deriving (Eq, Ord, Show)
-- Replace labels with addresses
link :: [Instr] -> ([Instr], M.Map String InstrAddr)
link instrs = (L.map replace (dropLabels instrs), toAddr)
where
-- Compute mapping from labels to addresses
compute i [] = []
compute i (LABEL s:rest) = (s, i) : compute i rest
compute i (instr:rest) = compute (i+1) rest
-- Mapping from labels to addresses
toAddr = M.fromList (compute 0 instrs)
-- Determine address for given label
resolve s =
case M.lookup s toAddr of
Nothing -> error ("link: unknown label " ++ s)
Just addr -> InstrAddr addr
-- Drop all labels
dropLabels [] = []
dropLabels (LABEL s:rest) = dropLabels rest
dropLabels (i:is) = i : dropLabels is
-- Replace labels with addresses
replace (PUSH (FUN (InstrLabel s))) = PUSH (FUN (resolve s))
replace (SLIDE_JUMP n m (InstrLabel s)) = SLIDE_JUMP n m (resolve s)
replace (JUMP (InstrLabel s)) = JUMP (resolve s)
replace (CJUMPPOP pop (InstrLabel s)) = CJUMPPOP pop (resolve s)
replace other = other
-- Determine all atoms used
atoms :: [Instr] -> [String]
atoms is =
reserved ++ S.toList (S.unions (L.map get is) S.\\ S.fromList reserved)
where
reserved = ["false", "true", "[]"]
get (PUSH (ATOM a)) = S.singleton a
get (MATCH (_, IsAtom a)) = S.singleton a
get other = S.empty
| null | https://raw.githubusercontent.com/blarney-lang/actora/a5380662a55952c19f82e08c4dce25ad98390bdf/compiler/StackIR.hs | haskell | Stack intermediate representation
Some meaningful type names, for readability
An instruction pointer before linking is a label,
and after linking is an address
Atoms are words residing on the stack and heap
Primitive operators
Instruction set
Branch conditions
Replace labels with addresses
Compute mapping from labels to addresses
Mapping from labels to addresses
Determine address for given label
Drop all labels
Replace labels with addresses
Determine all atoms used |
module StackIR where
import Data.Map as M
import Data.Set as S
import Data.List as L
type Arity = Int
type StackOffset = Int
type InstrAddr = Int
type NumAtoms = Int
type PopAmount = Int
type ErrorCode = String
data InstrPtr =
InstrLabel String
| InstrAddr InstrAddr
deriving (Eq, Ord, Show)
data Atom =
FUN InstrPtr
| INT Int
| ATOM String
| PTR PtrKind NumAtoms Int
deriving (Eq, Ord, Show)
Pointers can point to partial applications , tuples , and cons cells
data PtrKind = PtrApp Arity | PtrTuple | PtrCons
deriving (Eq, Ord, Show)
data Prim =
PrimAdd
| PrimSub
| PrimAddImm Int
| PrimSubImm Int
| PrimEq
| PrimNotEq
| PrimLess
| PrimGreaterEq
| PrimInv
| PrimAnd
| PrimOr
| PrimXor
| PrimShiftLeft
| PrimShiftRight
| PrimArithShiftRight
deriving Show
data Instr =
LABEL String
| PUSH Atom
| SETU Int
| COPY StackOffset
| JUMP InstrPtr
| IJUMP
| SLIDE PopAmount NumAtoms
| RETURN PopAmount
| SLIDE_JUMP PopAmount NumAtoms InstrPtr
| LOAD Bool
| STORE NumAtoms PtrKind
| MATCH BranchCond
| CJUMPPOP PopAmount InstrPtr
| PRIM Prim
| HALT ErrorCode
deriving Show
type BranchCond = (Polarity, BCond)
data Polarity = Pos | Neg deriving (Eq, Ord, Show)
data BCond =
IsAtom String
| IsInt Int
| IsCons
| IsTuple NumAtoms
| IsApp Arity
deriving (Eq, Ord, Show)
link :: [Instr] -> ([Instr], M.Map String InstrAddr)
link instrs = (L.map replace (dropLabels instrs), toAddr)
where
compute i [] = []
compute i (LABEL s:rest) = (s, i) : compute i rest
compute i (instr:rest) = compute (i+1) rest
toAddr = M.fromList (compute 0 instrs)
resolve s =
case M.lookup s toAddr of
Nothing -> error ("link: unknown label " ++ s)
Just addr -> InstrAddr addr
dropLabels [] = []
dropLabels (LABEL s:rest) = dropLabels rest
dropLabels (i:is) = i : dropLabels is
replace (PUSH (FUN (InstrLabel s))) = PUSH (FUN (resolve s))
replace (SLIDE_JUMP n m (InstrLabel s)) = SLIDE_JUMP n m (resolve s)
replace (JUMP (InstrLabel s)) = JUMP (resolve s)
replace (CJUMPPOP pop (InstrLabel s)) = CJUMPPOP pop (resolve s)
replace other = other
atoms :: [Instr] -> [String]
atoms is =
reserved ++ S.toList (S.unions (L.map get is) S.\\ S.fromList reserved)
where
reserved = ["false", "true", "[]"]
get (PUSH (ATOM a)) = S.singleton a
get (MATCH (_, IsAtom a)) = S.singleton a
get other = S.empty
|
5b14b09ba0c6ebeb9e2cd64bd7783a217e0a4234553ac60ee6fd67082e7bacc4 | ichko/fmi-fp-2020-21 | Main.hs | module Main where
import Logic (maybeUpdateGame)
import Rendering (firstTurn, showGame)
import Text.Read (readMaybe)
import Types
( Game (Game, state),
GameState (GameOver),
Player (..),
Position,
)
loop :: Game -> IO ()
loop g@Game {state = GameOver _} = putStrLn $ showGame g
loop gs = do
putStrLn $ showGame gs
move <- getLine
let parsedMove = readMaybe move :: Maybe Position
in case parsedMove of
Nothing -> do
putStrLn "Invalid input, try again"
loop gs
Just position ->
case maybeUpdateGame gs position of
Nothing -> do
putStrLn "Invalid move, try again"
loop gs
Just nextGameState -> loop nextGameState
testState :: [[Maybe Player]]
testState = map (map Just) state
where
state =
[ [First, First, Second],
[Second, First, Second],
[First, Second, Second]
]
main :: IO ()
main = do
loop firstTurn
| null | https://raw.githubusercontent.com/ichko/fmi-fp-2020-21/83dea8db7666e7a8a372d82301d71c79d5b798ff/week-05/TicTacToe/Main.hs | haskell | module Main where
import Logic (maybeUpdateGame)
import Rendering (firstTurn, showGame)
import Text.Read (readMaybe)
import Types
( Game (Game, state),
GameState (GameOver),
Player (..),
Position,
)
loop :: Game -> IO ()
loop g@Game {state = GameOver _} = putStrLn $ showGame g
loop gs = do
putStrLn $ showGame gs
move <- getLine
let parsedMove = readMaybe move :: Maybe Position
in case parsedMove of
Nothing -> do
putStrLn "Invalid input, try again"
loop gs
Just position ->
case maybeUpdateGame gs position of
Nothing -> do
putStrLn "Invalid move, try again"
loop gs
Just nextGameState -> loop nextGameState
testState :: [[Maybe Player]]
testState = map (map Just) state
where
state =
[ [First, First, Second],
[Second, First, Second],
[First, Second, Second]
]
main :: IO ()
main = do
loop firstTurn
| |
d1e22562673b3cb9c5484ef33f25d550b43b840b59d9e558909a3e7e37363189 | atdixon/thurber | thurber.clj | (ns thurber
(:refer-clojure :exclude [filter partial])
(:require [camel-snake-kebab.core :as csk]
[clojure.data.json :as json]
[clojure.string :as str]
[clojure.walk :as walk]
[taoensso.nippy :as nippy]
[clojure.tools.logging :as log])
(:import (org.apache.beam.sdk.transforms PTransform Create ParDo DoFn$ProcessContext DoFn$OnTimerContext Combine$CombineFn SerializableFunction Filter SerializableBiFunction CombineWithContext$Context)
(java.util Map)
(thurber.java TDoFn TCoder TOptions TProxy TCombine TDoFn_Stateful TFnContext TFn)
(org.apache.beam.sdk.values PCollection KV PCollectionView TupleTag TupleTagList PCollectionTuple)
(org.apache.beam.sdk Pipeline PipelineResult)
(org.apache.beam.sdk.options PipelineOptionsFactory PipelineOptions)
(clojure.lang MapEntry Keyword IPersistentMap)
(org.apache.beam.sdk.transforms.windowing BoundedWindow)
(org.apache.beam.sdk.coders KvCoder CustomCoder)
(java.io DataInputStream InputStream DataOutputStream OutputStream)
(org.apache.beam.sdk.state ValueState Timer BagState)
(org.apache.beam.sdk.transforms.splittabledofn RestrictionTracker)
(org.apache.beam.sdk.transforms.join CoGbkResult CoGbkResultSchema)))
;; --
(defn ->beam-args
  "Render a Clojure options map as a seq of Beam-style \"--flag=value\"
  command-line strings. Keys are camelCased; map values become JSON,
  other collections become comma-separated lists, and anything else is
  stringified with embedded double quotes escaped."
  [m]
  (for [[k v] m]
    (let [flag (name (csk/->camelCase k))
          value (cond
                  (map? v) (json/write-str v)
                  (coll? v) (str/join "," v)
                  :else (str/escape (str v) {\" "\\\""}))]
      (format "--%s=%s" flag value))))
(defn ^PipelineOptions create-options
([]
(create-options [] TOptions))
([opts]
(create-options opts TOptions))
([opts as]
(-> (PipelineOptionsFactory/fromArgs
(cond
(map? opts) (into-array String (->beam-args opts))
(coll? opts) (into-array String opts)
:else opts))
(.as as))))
(defn ^Pipeline create-pipeline
([] (Pipeline/create))
([opts] (-> (if (instance? PipelineOptions opts)
opts (create-options opts))
(Pipeline/create))))
(defn get-custom-config
  "Return the custom-config map carried by a Pipeline (or anything
  coercible to TOptions, e.g. PipelineOptions) as a Clojure map with
  keywordized keys."
  [obj]
  (if (instance? Pipeline obj)
    ;; A Pipeline wraps its options; loop again with the options object.
    (recur (.getOptions obj))
    (->> (.getCustomConfig ^TOptions (.as obj TOptions))
         (into {}) walk/keywordize-keys)))
(defn ^PipelineResult run-pipeline!
  "Run a Beam Pipeline and return its PipelineResult. A non-Pipeline
  argument is unwrapped via .getPipeline first (e.g. a PCollection)."
  [p]
  (if (instance? Pipeline p)
    (.run ^Pipeline p)
    (run-pipeline! (.getPipeline p))))
;; --
(defonce
^:private nippy-impl
(proxy [CustomCoder] []
(encode [val ^OutputStream out]
(nippy/freeze-to-out! (DataOutputStream. out) val))
(decode [^InputStream in]
(nippy/thaw-from-in! (DataInputStream. in)))))
(def nippy
(TCoder. #'nippy-impl))
(def nippy-kv (KvCoder/of nippy nippy))
nippy codes MapEntry as vectors by default ; but we want them to stay
MapEntry after thaw :
(nippy/extend-freeze
MapEntry :thurber/map-entry
[val data-output]
(let [[k v] val]
(nippy/freeze-to-out! data-output [k v])))
(nippy/extend-thaw
:thurber/map-entry
[data-input]
(let [[k v] (nippy/thaw-from-in! data-input)]
(MapEntry/create k v)))
;; --
Clojure thread bindings are more expensive than needed for hot code ;
ThreadLocals are faster , so we use them for thread bindings instead .
(defonce ^:private ^ThreadLocal tl-context (ThreadLocal.))
(defonce ^:private ^ThreadLocal tl-proxy-args (ThreadLocal.))
(def ^:private get-custom-config-memo
(let [mem (atom {})]
(fn [^PipelineOptions opts]
(if-let [e (find @mem (.getJobName opts))]
(val e)
(let [ret (get-custom-config opts)]
(swap! mem assoc (.getJobName opts) ret)
ret)))))
(defn ^PipelineOptions *pipeline-options [] (.-pipelineOptions ^TFnContext (.get tl-context)))
(defn ^IPersistentMap *custom-config [] (some-> (*pipeline-options) get-custom-config-memo))
(defn ^DoFn$ProcessContext *process-context [] (.-processContext ^TFnContext (.get tl-context)))
(defn ^BoundedWindow *element-window [] (.-elementWindow ^TFnContext (.get tl-context)))
(defn ^ValueState *value-state [] (.-valueState ^TFnContext (.get tl-context)))
(defn ^BagState *bag-state [] (.-bagState ^TFnContext (.get tl-context)))
(defn ^Timer *event-timer [] (.-eventTimer ^TFnContext (.get tl-context)))
(defn ^DoFn$OnTimerContext *timer-context [] (.-timerContext ^TFnContext (.get tl-context)))
(defn ^CombineWithContext$Context *combine-context [] (.-combineContext ^TFnContext (.get tl-context)))
(defn ^RestrictionTracker *restriction-tracker [] (.-restrictionTracker ^TFnContext (.get tl-context)))
(defn ^"[Ljava.lang.Object;" *proxy-args [] (.get tl-proxy-args))
;; --
(defn *side-input [^PCollectionView v]
(or
(some-> (*process-context) (.sideInput v))
(some-> (*combine-context) (.sideInput v))))
;; --
(defn proxy-with-signature* [proxy-var sig & args]
(TProxy/create proxy-var sig (into-array Object args)))
(defn proxy* [proxy-var & args]
(apply proxy-with-signature* proxy-var nil args))
;; --
(defn- ^TFn ->TFn [f]
(cond
(instance? TFn f) f
(var? f) (TFn. f)))
;; --
(defn- var->name
  "Display name for a var: the :th/name metadata override when present,
  otherwise the var's interned symbol name as a string.
  Bug fix: the original body evaluated the (or ...) expression and then
  discarded it, always returning the raw symbol name, so a :th/name
  override on a var never took effect."
  [v]
  (or (:th/name (meta v))
      ;; Fall back to the var's own symbol name.
      (-> v meta :name name)))
(defn ^PTransform partial
  "Partially apply a serializable fn (a var or TFn) to `args`, yielding
  a TFn usable as a transform. An optional leading string names the
  resulting step; otherwise vars get an auto-generated \"partial:<name>\"."
  [fn-like-or-name & args]
  (let [[explicit-name fn- args-]
        ;; A leading string is a step name; the fn-like then follows it.
        (if (string? fn-like-or-name)
          [fn-like-or-name (first args) (rest args)]
          [nil fn-like-or-name args])
        use-name (cond
                   (some? explicit-name) explicit-name
                   (var? fn-) (format "partial:%s" (var->name fn-like-or-name)))]
    ;; Attach the partial args to the TFn; record the step name as metadata.
    (cond-> (-> fn- ->TFn (.partial_ (into-array Object args-)))
      use-name (vary-meta merge {:th/name use-name}))))
(def ser-fn partial)
(defn- kw-impl
[^Keyword kw elem] (kw elem))
(defn- filter-impl [^TFn pred-fn & args]
(when (.apply_ pred-fn args) (last args)))
(defn filter [fn-like-or-name & args]
(let [[explicit-name fn- args-]
(if (string? fn-like-or-name)
[fn-like-or-name (first args) (rest args)]
[nil fn-like-or-name args])
use-name (cond
(some? explicit-name) explicit-name
(var? fn-) (format "filter:%s" (var->name fn-like-or-name)))
tfn- (-> fn- ->TFn)]
;; Note: we promote all args from provided fn-like to args passed to filter-impl;
these top - level args are used by to infer tags , side - inputs etc so
;; they must be seen at the top level; filter-impl will relay them to the fn-.
(cond-> (-> #'filter-impl ->TFn
(.partial_
(into-array Object
(concat [(.withoutPartialArgs tfn-)]
args- (.-partialArgs tfn-)))))
use-name (vary-meta merge {:th/name use-name}))))
;; --
(defmacro inline [fn-form]
{:pre [(= #'clojure.core/fn (resolve (first fn-form)))
(symbol? (second fn-form))]}
(let [name-sym (second fn-form)
name- (name name-sym)
[arglists bodies] (if (vector? (nth fn-form 2))
[[(nth fn-form 2)] [(drop 3 fn-form)]]
[(into [] (map first (nnext fn-form)))
(into [] (map rest (nnext fn-form)))])
lex-syms (->> bodies
walk/macroexpand-all
(tree-seq coll? seq)
(clojure.core/filter simple-symbol?)
(clojure.core/filter (set (keys &env)))
set vec)
;; note: as we are prepending lexical scope symbols even
if we have one that is masked by an actual arg sym
;; it will precede and therefore still be masked.
arglists' (map #(into lex-syms %) arglists)
fn-form' (list* `fn (map list* arglists' bodies))]
(intern *ns*
(with-meta name-sym
(into {} (map (fn [[k v]] [k (eval v)]))
(meta name-sym)))
(eval fn-form'))
;; we use raw symbol here so as to not rewrite metadata of symbol
;; interned while compiling:
(if (empty? lex-syms)
`(intern ~*ns* (symbol ~name-))
`(thurber/partial (intern ~*ns* (symbol ~name-)) ~@lex-syms))))
(defmacro fn* [& body]
`(inline
(fn ~@body)))
;; --
(defn- ^TCoder ->coder [prev nxf]
(when-let [c (:th/coder nxf)]
(condp = c
:th/inherit-or-nippy (or (.getCoder prev) nippy)
:th/inherit (.getCoder prev)
c)))
(defn- ->pardo
  "Build a Beam ParDo from a TFn. TupleTag partial args select output
  tags (first tag is the main output); PCollectionView partial args are
  wired in as side inputs. A stateful flag or timer fn selects the
  stateful DoFn variant.
  Cleanup: the original bound identical `views` and `side-inputs`
  collections and mixed the 2-arity and transducer forms of filter;
  unified into one binding per role."
  [^TFn xf-fn stateful? ^TFn timer-fn]
  (let [args (.-partialArgs xf-fn)
        tags (into [] (clojure.core/filter #(instance? TupleTag %)) args)
        side-inputs (into [] (clojure.core/filter #(instance? PCollectionView %)) args)]
    (cond-> (ParDo/of (if (or stateful? timer-fn)
                        (TDoFn_Stateful. xf-fn timer-fn)
                        (TDoFn. xf-fn)))
      ;; First TupleTag names the main output; the rest become extra outputs.
      (not-empty tags)
      (.withOutputTags ^TupleTag (first tags)
                       (reduce (fn [^TupleTagList acc ^TupleTag tag]
                                 (.and acc tag))
                               (TupleTagList/empty) (rest tags)))
      ;; Any PCollectionView partial args become side inputs.
      (not-empty side-inputs)
      (.withSideInputs ^Iterable side-inputs))))
(defn- set-coder! [pcoll-or-tuple coder]
(cond
(instance? PCollection pcoll-or-tuple) (.setCoder ^PCollection pcoll-or-tuple coder)
(instance? PCollectionTuple pcoll-or-tuple) (do
(->> ^PCollectionTuple pcoll-or-tuple
(.getAll)
(.values)
(clojure.core/run! #(.setCoder ^PCollection % coder)))
pcoll-or-tuple)))
(defn- normalize-xf
([xf] (normalize-xf xf {}))
([xf override]
(cond
(instance? TFn xf) (let [normal (merge {:th/name (var->name (.-fnVar ^TFn xf)) :th/coder :th/inherit-or-nippy}
(select-keys (meta xf) [:th/name :th/coder :th/timer-fn :th/stateful]) override)]
(assoc normal :th/xform (->pardo xf (:th/stateful normal) (->TFn (:th/timer-fn normal)))))
(instance? PTransform xf) (merge {:th/xform xf} override)
(keyword? xf) (normalize-xf (partial (str xf) #'kw-impl xf) override)
(map? xf) (normalize-xf (:th/xform xf) (merge (dissoc xf :th/xform) override)) ;; ...b/c maps may nest.
(var? xf) (normalize-xf (TFn. xf) override))))
(defn apply!
"Apply transforms to an input (Pipeline, PCollection, PBegin ...);
Answers a PCollection, PCollectionView, or so on ..."
[input xf-or-prefix & xfs]
(let [[prefix input xfs']
(if (string? xf-or-prefix)
[xf-or-prefix input xfs]
["" input (conj xfs xf-or-prefix)])]
(reduce
(fn [acc xf]
(let [nxf (normalize-xf xf)
Take care here . acc ' may commonly be PCollection but can also be
;; PCollectionTuple or PCollectionView, eg.
acc' (if (:th/name nxf)
(.apply acc (str prefix (when (not-empty prefix) ":") (:th/name nxf)) (:th/xform nxf))
(.apply acc (str prefix (when (not-empty prefix) ":") (.getName (:th/xform nxf))) (:th/xform nxf)))
explicit-coder (->coder acc nxf)]
(when explicit-coder
(set-coder! acc' explicit-coder)) acc')) input xfs')))
(defn ^PTransform compose [& [xf-or-name :as xfs]]
(proxy [PTransform] [(when (string? xf-or-name) xf-or-name)]
(expand [pc]
(apply apply! pc (if (string? xf-or-name) (rest xfs) xfs)))))
;; --
(defn ^PTransform create
([coll] (create nil coll))
([name coll]
(cond->>
(if (map? coll)
(-> (Create/of ^Map coll) (.withCoder nippy))
(-> (Create/of ^Iterable (seq coll)) (.withCoder nippy)))
name (hash-map :th/name name :th/xform))))
;; --
(defn ^Combine$CombineFn combiner
([reducef] (combiner reducef reducef))
([combinef reducef] (combiner #'identity combinef reducef))
([extractf combinef reducef]
{:pre [(or (var? extractf) (instance? TFn extractf))
(or (var? combinef) (instance? TFn reducef))
(or (var? combinef) (instance? TFn reducef))]}
(TCombine. (->TFn extractf) (->TFn combinef) (->TFn reducef))))
;; --
(defn with-timer [fn-like timer-fn-like]
{:th/xform fn-like
:th/timer-fn timer-fn-like})
(defn with-name [xf-like name-]
{:th/name name-
:th/xform xf-like})
;; --
(defn ^{:th/coder nippy-kv} ->kv
([seg]
(KV/of seg seg))
([key-fn seg]
(KV/of (key-fn seg) seg))
([key-fn val-fn seg]
(KV/of (key-fn seg) (val-fn seg))))
;; --
(defn ^{:th/coder nippy} kv->clj [^KV kv]
(MapEntry/create (.getKey kv) (.getValue kv)))
(defn ^{:th/coder nippy-kv} clj->kv [[k v]]
(KV/of k v))
;; --
(defn log
  "Log `elem` (at `level`, default :info) and return it unchanged, so
  this can be dropped into a transform chain as a pass-through step."
  ([elem] (log :info elem))
  ([level elem] (log/logp level elem) elem))
(defn log-verbose
([elem] (log-verbose :info elem))
([level elem] (log/logf level "%s @ %s ∈ %s ∈ %s" elem
(.timestamp (*process-context))
(.pane (*process-context))
(*element-window)) elem))
;; --
(defn co-gbk-result->clj [^CoGbkResult r]
(let [^CoGbkResultSchema s (-> r .getSchema)
tags (-> s .getTupleTagList .getAll)]
(into {}
(map (fn [^TupleTag tag]
[(-> tag .getId keyword)
(->> tag (.getAll r) seq)]) tags))))
(defn kv->clj*
  "Convert a Beam KV *result* to Clojure vector: the result is not serializable and
  rather may contain result/s as lazy sequences backed by Beam lazy Iterable results;
  therefore not for use as DoFn."
  [^KV kv]
  (let [key- (.getKey kv) val- (.getValue kv)]
    (cond
      ;; CoGroupByKey result: expand each tagged stream into a map entry.
      (instance? CoGbkResult val-) [key- (co-gbk-result->clj val-)]
      ;; GroupByKey-style result: expose the lazy Iterable as a seq.
      (instance? Iterable val-) [key- (seq val-)]
      ;; Plain value: pass through untouched.
      :else [key- val-])))
| null | https://raw.githubusercontent.com/atdixon/thurber/38b86a683e446f2deed55fad157476ae05940d50/src/thurber.clj | clojure | --
--
but we want them to stay
--
--
--
--
--
Note: we promote all args from provided fn-like to args passed to filter-impl;
they must be seen at the top level; filter-impl will relay them to the fn-.
--
note: as we are prepending lexical scope symbols even
it will precede and therefore still be masked.
we use raw symbol here so as to not rewrite metadata of symbol
interned while compiling:
--
...b/c maps may nest.
PCollectionTuple or PCollectionView, eg.
--
--
--
--
--
--
--
| (ns thurber
(:refer-clojure :exclude [filter partial])
(:require [camel-snake-kebab.core :as csk]
[clojure.data.json :as json]
[clojure.string :as str]
[clojure.walk :as walk]
[taoensso.nippy :as nippy]
[clojure.tools.logging :as log])
(:import (org.apache.beam.sdk.transforms PTransform Create ParDo DoFn$ProcessContext DoFn$OnTimerContext Combine$CombineFn SerializableFunction Filter SerializableBiFunction CombineWithContext$Context)
(java.util Map)
(thurber.java TDoFn TCoder TOptions TProxy TCombine TDoFn_Stateful TFnContext TFn)
(org.apache.beam.sdk.values PCollection KV PCollectionView TupleTag TupleTagList PCollectionTuple)
(org.apache.beam.sdk Pipeline PipelineResult)
(org.apache.beam.sdk.options PipelineOptionsFactory PipelineOptions)
(clojure.lang MapEntry Keyword IPersistentMap)
(org.apache.beam.sdk.transforms.windowing BoundedWindow)
(org.apache.beam.sdk.coders KvCoder CustomCoder)
(java.io DataInputStream InputStream DataOutputStream OutputStream)
(org.apache.beam.sdk.state ValueState Timer BagState)
(org.apache.beam.sdk.transforms.splittabledofn RestrictionTracker)
(org.apache.beam.sdk.transforms.join CoGbkResult CoGbkResultSchema)))
(defn ->beam-args [m]
(map (fn [[k v]]
(format "--%s=%s"
(-> k csk/->camelCase name)
(cond
(map? v) (json/write-str v)
(coll? v) (str/join "," v)
:else (-> v str (str/escape {\" "\\\""}))))) m))
(defn ^PipelineOptions create-options
([]
(create-options [] TOptions))
([opts]
(create-options opts TOptions))
([opts as]
(-> (PipelineOptionsFactory/fromArgs
(cond
(map? opts) (into-array String (->beam-args opts))
(coll? opts) (into-array String opts)
:else opts))
(.as as))))
(defn ^Pipeline create-pipeline
([] (Pipeline/create))
([opts] (-> (if (instance? PipelineOptions opts)
opts (create-options opts))
(Pipeline/create))))
(defn get-custom-config [obj]
(if (instance? Pipeline obj)
(recur (.getOptions obj))
(->> (.getCustomConfig ^TOptions (.as obj TOptions))
(into {}) walk/keywordize-keys)))
(defn ^PipelineResult run-pipeline! [p]
(if (instance? Pipeline p)
(.run ^Pipeline p)
(run-pipeline! (.getPipeline p))))
(defonce
^:private nippy-impl
(proxy [CustomCoder] []
(encode [val ^OutputStream out]
(nippy/freeze-to-out! (DataOutputStream. out) val))
(decode [^InputStream in]
(nippy/thaw-from-in! (DataInputStream. in)))))
(def nippy
(TCoder. #'nippy-impl))
(def nippy-kv (KvCoder/of nippy nippy))
MapEntry after thaw :
(nippy/extend-freeze
MapEntry :thurber/map-entry
[val data-output]
(let [[k v] val]
(nippy/freeze-to-out! data-output [k v])))
(nippy/extend-thaw
:thurber/map-entry
[data-input]
(let [[k v] (nippy/thaw-from-in! data-input)]
(MapEntry/create k v)))
ThreadLocals are faster , so we use them for thread bindings instead .
(defonce ^:private ^ThreadLocal tl-context (ThreadLocal.))
(defonce ^:private ^ThreadLocal tl-proxy-args (ThreadLocal.))
(def ^:private get-custom-config-memo
(let [mem (atom {})]
(fn [^PipelineOptions opts]
(if-let [e (find @mem (.getJobName opts))]
(val e)
(let [ret (get-custom-config opts)]
(swap! mem assoc (.getJobName opts) ret)
ret)))))
(defn ^PipelineOptions *pipeline-options [] (.-pipelineOptions ^TFnContext (.get tl-context)))
(defn ^IPersistentMap *custom-config [] (some-> (*pipeline-options) get-custom-config-memo))
(defn ^DoFn$ProcessContext *process-context [] (.-processContext ^TFnContext (.get tl-context)))
(defn ^BoundedWindow *element-window [] (.-elementWindow ^TFnContext (.get tl-context)))
(defn ^ValueState *value-state [] (.-valueState ^TFnContext (.get tl-context)))
(defn ^BagState *bag-state [] (.-bagState ^TFnContext (.get tl-context)))
(defn ^Timer *event-timer [] (.-eventTimer ^TFnContext (.get tl-context)))
(defn ^DoFn$OnTimerContext *timer-context [] (.-timerContext ^TFnContext (.get tl-context)))
(defn ^CombineWithContext$Context *combine-context [] (.-combineContext ^TFnContext (.get tl-context)))
(defn ^RestrictionTracker *restriction-tracker [] (.-restrictionTracker ^TFnContext (.get tl-context)))
(defn ^"[Ljava.lang.Object;" *proxy-args [] (.get tl-proxy-args))
(defn *side-input [^PCollectionView v]
(or
(some-> (*process-context) (.sideInput v))
(some-> (*combine-context) (.sideInput v))))
(defn proxy-with-signature* [proxy-var sig & args]
(TProxy/create proxy-var sig (into-array Object args)))
(defn proxy* [proxy-var & args]
(apply proxy-with-signature* proxy-var nil args))
(defn- ^TFn ->TFn [f]
(cond
(instance? TFn f) f
(var? f) (TFn. f)))
(defn- var->name [v]
(or (:th/name (meta v)) (:name (meta v)))
(-> v meta :name name))
(defn ^PTransform partial
[fn-like-or-name & args]
(let [[explicit-name fn- args-]
(if (string? fn-like-or-name)
[fn-like-or-name (first args) (rest args)]
[nil fn-like-or-name args])
use-name (cond
(some? explicit-name) explicit-name
(var? fn-) (format "partial:%s" (var->name fn-like-or-name)))]
(cond-> (-> fn- ->TFn (.partial_ (into-array Object args-)))
use-name (vary-meta merge {:th/name use-name}))))
(def ser-fn partial)
(defn- kw-impl
[^Keyword kw elem] (kw elem))
(defn- filter-impl [^TFn pred-fn & args]
(when (.apply_ pred-fn args) (last args)))
(defn filter [fn-like-or-name & args]
(let [[explicit-name fn- args-]
(if (string? fn-like-or-name)
[fn-like-or-name (first args) (rest args)]
[nil fn-like-or-name args])
use-name (cond
(some? explicit-name) explicit-name
(var? fn-) (format "filter:%s" (var->name fn-like-or-name)))
tfn- (-> fn- ->TFn)]
these top - level args are used by to infer tags , side - inputs etc so
(cond-> (-> #'filter-impl ->TFn
(.partial_
(into-array Object
(concat [(.withoutPartialArgs tfn-)]
args- (.-partialArgs tfn-)))))
use-name (vary-meta merge {:th/name use-name}))))
(defmacro inline [fn-form]
{:pre [(= #'clojure.core/fn (resolve (first fn-form)))
(symbol? (second fn-form))]}
(let [name-sym (second fn-form)
name- (name name-sym)
[arglists bodies] (if (vector? (nth fn-form 2))
[[(nth fn-form 2)] [(drop 3 fn-form)]]
[(into [] (map first (nnext fn-form)))
(into [] (map rest (nnext fn-form)))])
lex-syms (->> bodies
walk/macroexpand-all
(tree-seq coll? seq)
(clojure.core/filter simple-symbol?)
(clojure.core/filter (set (keys &env)))
set vec)
if we have one that is masked by an actual arg sym
arglists' (map #(into lex-syms %) arglists)
fn-form' (list* `fn (map list* arglists' bodies))]
(intern *ns*
(with-meta name-sym
(into {} (map (fn [[k v]] [k (eval v)]))
(meta name-sym)))
(eval fn-form'))
(if (empty? lex-syms)
`(intern ~*ns* (symbol ~name-))
`(thurber/partial (intern ~*ns* (symbol ~name-)) ~@lex-syms))))
(defmacro fn* [& body]
`(inline
(fn ~@body)))
(defn- ^TCoder ->coder [prev nxf]
(when-let [c (:th/coder nxf)]
(condp = c
:th/inherit-or-nippy (or (.getCoder prev) nippy)
:th/inherit (.getCoder prev)
c)))
(defn- ->pardo [^TFn xf-fn stateful? ^TFn timer-fn]
(let [tags (into [] (clojure.core/filter #(instance? TupleTag %) (.-partialArgs xf-fn)))
views (into [] (clojure.core/filter #(instance? PCollectionView %)) (.-partialArgs xf-fn))
side-inputs (into [] (clojure.core/filter #(instance? PCollectionView %)) (.-partialArgs xf-fn))]
(cond-> (ParDo/of (if (or stateful? timer-fn)
(TDoFn_Stateful. xf-fn timer-fn)
(TDoFn. xf-fn)))
(not-empty tags)
(.withOutputTags ^TupleTag (first tags)
(reduce (fn [^TupleTagList acc ^TupleTag tag]
(.and acc tag)) (TupleTagList/empty) (rest tags)))
(not-empty views)
(.withSideInputs ^Iterable side-inputs))))
(defn- set-coder! [pcoll-or-tuple coder]
(cond
(instance? PCollection pcoll-or-tuple) (.setCoder ^PCollection pcoll-or-tuple coder)
(instance? PCollectionTuple pcoll-or-tuple) (do
(->> ^PCollectionTuple pcoll-or-tuple
(.getAll)
(.values)
(clojure.core/run! #(.setCoder ^PCollection % coder)))
pcoll-or-tuple)))
(defn- normalize-xf
([xf] (normalize-xf xf {}))
([xf override]
(cond
(instance? TFn xf) (let [normal (merge {:th/name (var->name (.-fnVar ^TFn xf)) :th/coder :th/inherit-or-nippy}
(select-keys (meta xf) [:th/name :th/coder :th/timer-fn :th/stateful]) override)]
(assoc normal :th/xform (->pardo xf (:th/stateful normal) (->TFn (:th/timer-fn normal)))))
(instance? PTransform xf) (merge {:th/xform xf} override)
(keyword? xf) (normalize-xf (partial (str xf) #'kw-impl xf) override)
(var? xf) (normalize-xf (TFn. xf) override))))
(defn apply!
Answers a PCollection, PCollectionView, or so on ..."
[input xf-or-prefix & xfs]
(let [[prefix input xfs']
(if (string? xf-or-prefix)
[xf-or-prefix input xfs]
["" input (conj xfs xf-or-prefix)])]
(reduce
(fn [acc xf]
(let [nxf (normalize-xf xf)
Take care here . acc ' may commonly be PCollection but can also be
acc' (if (:th/name nxf)
(.apply acc (str prefix (when (not-empty prefix) ":") (:th/name nxf)) (:th/xform nxf))
(.apply acc (str prefix (when (not-empty prefix) ":") (.getName (:th/xform nxf))) (:th/xform nxf)))
explicit-coder (->coder acc nxf)]
(when explicit-coder
(set-coder! acc' explicit-coder)) acc')) input xfs')))
(defn ^PTransform compose [& [xf-or-name :as xfs]]
(proxy [PTransform] [(when (string? xf-or-name) xf-or-name)]
(expand [pc]
(apply apply! pc (if (string? xf-or-name) (rest xfs) xfs)))))
(defn ^PTransform create
([coll] (create nil coll))
([name coll]
(cond->>
(if (map? coll)
(-> (Create/of ^Map coll) (.withCoder nippy))
(-> (Create/of ^Iterable (seq coll)) (.withCoder nippy)))
name (hash-map :th/name name :th/xform))))
(defn ^Combine$CombineFn combiner
([reducef] (combiner reducef reducef))
([combinef reducef] (combiner #'identity combinef reducef))
([extractf combinef reducef]
{:pre [(or (var? extractf) (instance? TFn extractf))
(or (var? combinef) (instance? TFn reducef))
(or (var? combinef) (instance? TFn reducef))]}
(TCombine. (->TFn extractf) (->TFn combinef) (->TFn reducef))))
(defn with-timer [fn-like timer-fn-like]
{:th/xform fn-like
:th/timer-fn timer-fn-like})
(defn with-name [xf-like name-]
{:th/name name-
:th/xform xf-like})
(defn ^{:th/coder nippy-kv} ->kv
([seg]
(KV/of seg seg))
([key-fn seg]
(KV/of (key-fn seg) seg))
([key-fn val-fn seg]
(KV/of (key-fn seg) (val-fn seg))))
(defn ^{:th/coder nippy} kv->clj [^KV kv]
(MapEntry/create (.getKey kv) (.getValue kv)))
(defn ^{:th/coder nippy-kv} clj->kv [[k v]]
(KV/of k v))
(defn log
([elem] (log :info elem))
([level elem] (log/logp level elem) elem))
(defn log-verbose
([elem] (log-verbose :info elem))
([level elem] (log/logf level "%s @ %s ∈ %s ∈ %s" elem
(.timestamp (*process-context))
(.pane (*process-context))
(*element-window)) elem))
(defn co-gbk-result->clj [^CoGbkResult r]
(let [^CoGbkResultSchema s (-> r .getSchema)
tags (-> s .getTupleTagList .getAll)]
(into {}
(map (fn [^TupleTag tag]
[(-> tag .getId keyword)
(->> tag (.getAll r) seq)]) tags))))
(defn kv->clj*
"Convert a Beam KV *result* to Clojure vector: the result is not serializable and
therefore not for use as DoFn."
[^KV kv]
(let [key- (.getKey kv) val- (.getValue kv)]
(cond
(instance? CoGbkResult val-) [key- (co-gbk-result->clj val-)]
(instance? Iterable val-) [key- (seq val-)]
:else [key- val-])))
|
0b521d30acbce30da84c20c5a3be0146a7a23dc3973977017412d9d0830adcc4 | diagrams/diagrams-haddock | Test.hs | # LANGUAGE CPP #
module Test (exported) where
#ifdef INSIDE
-- | <<diagrams/blah.svg#diagram=blah>>
--
> blah = pad 1.1 $ square 1
--
#endif
-- | <<diagrams/d'.svg#diagram=d'>>
--
> d ' = pad 1.1 $ circle 1
--
exported = putStrLn "blah"
| null | https://raw.githubusercontent.com/diagrams/diagrams-haddock/29067d968a6097aa9d648b566c234d924a97bd86/test/simpleCabalCPP/src/Test.hs | haskell | | <<diagrams/blah.svg#diagram=blah>>
| <<diagrams/d'.svg#diagram=d'>>
| # LANGUAGE CPP #
module Test (exported) where
#ifdef INSIDE
> blah = pad 1.1 $ square 1
#endif
> d ' = pad 1.1 $ circle 1
exported = putStrLn "blah"
|
cf7faca18191a33ee5edca5b35c0e264e4273f481d3210db4f5412601b1eceba | brownplt/LambdaS5 | ljs_restore_id.ml | open Prelude
open Ljs_syntax
open Ljs_opt
open Exp_util
open Ljs_analyze_env
This phase will try to restore JavaScript identifier , for example ,
restore ` var ` from % context['var ' ] .
This phase needs to know the existance of all properties of context
before the actual transformation .
This phase should only work in strict mode . In non - strict mode ,
code like function(a ) { this.x = a } will create x in global
environment . Creating code silently makes this phase to lose track
of global variables and to do transformation incorrectly . Consider
the code in non - strict mode
var bar = 2
function foo ( ) { this.bar = 1 }
foo ( ) ;
bar ;
this phase will turn the last bar as identifier ` bar ' but leaves
the ` this.bar = 1 ` as it is , which is setting field ` bar ' of ` this '
object ' , something like % set - property(%this , " bar " , 1 )
restore `var` from %context['var'].
This phase needs to know the existance of all properties of context
before the actual transformation.
This phase should only work in strict mode. In non-strict mode,
code like function(a) {this.x = a} will create x in global
environment. Creating code silently makes this phase to lose track
of global variables and to do transformation incorrectly. Consider
the code in non-strict mode
var bar = 2
function foo() { this.bar = 1 }
foo();
bar;
this phase will turn the last bar as identifier `bar' but leaves
the `this.bar = 1` as it is, which is setting field `bar' of `this'
object', something like %set-property(%this, "bar", 1)
*)
let debug_on = false
let dprint = Debug.make_debug_printer ~on:debug_on "restore"
(* set this variable to true if the restoration should be only applied
on code in strict mode *)
let only_strict = false
in function object # code attr , parent context is always shadowed ,
This function will recognize pattern of the new context and try to
get
1 ) function 's parameters
2 ) function 's local variables .
3 ) ? if ` arguments ` keyword are not used , the whole
Let ( _ , % context ... ) shall be removed
Note : parameter exp is the x_v part of Let(_,x , x_v , body ) , not the
whole Let expression .
one good example is
let ( % context = {
let ( % a11 = undefined ) {
let ( % x12 = % args [ " 0 " , null ] )
{ [ # proto : % parent , # class : " Object " , # extensible : true , ]
' arguments ' : { # value ( % args ) , # writable true , # configurable false } ,
' x ' : { # getter func ( this , args ) { % x12 } ,
# setter func ( this , args ) { % x12 : = args [ " 0 " , { [ # proto : % ArrayProto ,
# class : " Array " ,
# extensible : true , ] } ] } } ,
' a ' : { # getter func ( this , args ) { % a11 } ,
# setter func ( this , args )
{ % a11 : = ,
# class:"Array " ,
# extensible : true , ] } ] } } } } } ) { ... }
desugared from
function f1(x ) { var a = x } ;
This function will recognize pattern of the new context and try to
get
1) function's parameters
2) function's local variables.
3)? if `arguments` keyword are not used, the whole
Let(_, %context...) shall be removed
Note: parameter exp is the x_v part of Let(_,x,x_v,body), not the
whole Let expression.
one good example is
let (%context = {
let (%a11 = undefined) {
let (%x12 = %args ["0" , null])
{[#proto: %parent, #class: "Object", #extensible: true,]
'arguments' : {#value (%args) , #writable true , #configurable false},
'x' : {#getter func (this , args) {%x12} ,
#setter func (this , args) {%x12 := args ["0" , {[#proto: %ArrayProto,
#class: "Array",
#extensible: true,]}]}},
'a' : {#getter func (this , args) {%a11} ,
#setter func (this , args)
{%a11 := args["0",{[#proto:%ArrayProto,
#class:"Array",
#extensible: true,]}]}}}}}) {...}
desugared from
function f1(x) {var a = x};
*)
(* Walk a freshly-built %context expression (the x_v of
   Let(_, "%context", x_v, _)) and extend [ctx] with one entry per
   property of the context object: data properties map to their value,
   accessor properties map to the identifier found inside the getter.
   Fails loudly if the expression does not match the desugarer's
   context-object shape. *)
let recognize_new_context exp ctx : names_t =
  (* Skip the chain of lets wrapping the context object itself. *)
  let rec strip_let exp : exp = match exp with
    | Let (_, _, _, body) -> strip_let body
    | _ -> exp
  in
  (* Getters have the shape func(this, args){ %idNN }; dig through the
     lambda/label/break wrappers to the identifier being read. *)
  let rec get_id_in_getter exp = match exp with
    | Id (_, _) -> exp
    | Lambda (_, xs, body) -> get_id_in_getter body
    | Label (_, _, body) -> get_id_in_getter body
    | Break (_, _, body) -> get_id_in_getter body
    | _ -> failwith "[5] pattern assertion failed: getter contains more complicated structure"
  in
  (* Record one (field -> expression) binding per context property. *)
  let rec collect_fields prop ctx : names_t = match prop with
    | fld, Data ({value=value; writable=_},_,_) ->
      IdMap.add fld (value, true) ctx
    | fld, Accessor ({getter=getter; setter=_},_,_) ->
      IdMap.add fld ((get_id_in_getter getter), true) ctx
  in
  let rec recog_field exp ctx : names_t =
    match exp with
    | Object (_, _, props) ->
      List.fold_right collect_fields props ctx
    | _ -> (* assume: obj follows let *)
      Exp_util.print_ljs exp; print_newline();
      failwith "[4]pattern assertion failed:"
  in
  let obj = strip_let exp in
  recog_field obj ctx
local context has the pattern that
let ( % context = {
let ( local1= .. )
let ( .. )
let ( arg1= .. )
let ( arg2= .. )
contextobject } )
body
this function will recognize the pattern and return the option of an exp that takes out
the contextobject and results in
let ( local1= .. )
let ( .. )
let ( arg1= .. )
let ( arg2= .. )
body
body is also returned as the second argument for convenience .
let (%context = {
let (local1=..)
let (local2=..)
let (arg1=..)
let (arg2=..)
contextobject})
body
this function will recognize the pattern and return the option of an exp that takes out
the contextobject and results in
let (local1=..)
let (local2=..)
let (arg1=..)
let (arg2=..)
body
body is also returned as the second argument for convenience.
*)
(* Given Let(_, "%context", x_v, body), rebuild x_v's let-chain with the
   context object removed, replacing it with Undefined as a placeholder
   (replace_let_body later splices a new body at that spot). Returns
   None when the argument is not a %context binding or x_v does not end
   in an object literal. *)
let get_localcontext (let_exp : exp) : exp option =
  let rec get_let let_exp : exp =
    match let_exp with
    | Let (p, x, x_v, letb) ->
      Let (p, x, x_v, get_let letb)
    (* The context object terminates the chain; drop it. *)
    | Object (_,_,_) -> Undefined Pos.dummy
    | _ -> failwith "not context"
  in
  match let_exp with
  | Let (p, "%context", x_v, body) ->
    (try
       Some (get_let x_v)
     with _ -> None
    )
  | _ -> None
(* Splice [new_body] at the innermost position of a let-chain whose end
   is marked by Undefined (the placeholder left where the context object
   used to be). *)
let replace_let_body let_exp new_body =
  let rec rebuild : exp -> exp = function
    | Let (p, x, x_v, body) -> Let (p, x, x_v, rebuild body)
    | Undefined _ -> new_body
    | _ -> failwith "replace_let_body: should not reach here"
  in
  rebuild let_exp
(* These functions check whether the program contains assignments to "this"
   or "window".  Such assignments create new variables silently.  To be more
   specific, they look for
   0. strict mode: can be turned off.
   1. %context["window"]: code may do bad things through window
   2. %EnvCheckAssign(_, _, %this, _): code may do bad things through the
      alias of 'this';
      %EnvCheckAssign(_, _, {object field as this}, _): code may do bad
      things through the alias of 'this'.  The "this" alias should also be
      prohibited in lambda, see 6)
   3. passing %this to a function: code may do something like
      `var a; function z(o) {return o.a}; z(this)`
   4. computed string field on top level.  In functions computation is fine.
   5. with(o): strict mode does not allow "with".  But here is how it works:
      code will make a new context, and we cannot decide whether the
      expression %context["x"] should be translated to identifier x or left
      as %context["x"].
   6. this[delete "x"]: trying to delete a variable through this (no matter
      whether at toplevel or in a lambda).
   7. iterating through all properties of the top-level this
*)
(* True iff no scope in [exp] turns strict mode off (a binding of #strict
   to false).  NB: checking that all *functions* are strict would not be
   enough, because a 'with' expression can appear at top level. *)
let rec all_in_strict exp : bool = match exp with
  | Let (_, "#strict", False _, _) -> false
  | _ -> List.for_all all_in_strict (child_exps exp)
(* True iff [exp] never references the window object itself.
   At top level we prohibit this.window, this['window'] and window; inside a
   function only the window *variable* is prohibited — passing this as an
   argument is rejected elsewhere, so a['window'] is fine there. *)
let rec window_free ?(toplevel=true) exp : bool =
  let free e = window_free ~toplevel e in
  match exp with
  | GetField (_, obj, String (_, "window"), args) ->
    free args
    && (match obj with
        | Id (_, "%context") ->
          dprint "not eligible: reference to window variable\n";
          false
        | App (_, Id (_, "%PropAccessorCheck"), [Id (_, "%this")]) ->
          if toplevel then begin
            dprint "not eligible: reference window through this in top-level\n";
            false
          end else
            true
        | _ -> free obj)
  | Lambda (_, _, body) -> window_free ~toplevel:false body
  | _ -> List.for_all free (child_exps exp)
(*
let by_pass_args func = match func with
  | Id (_, "%PropAccessorCheck") -> true
  | _ -> false

let is_context ~toplevel (obj : exp) : bool =
  match obj with
  | Id (_, "%context") -> true
  | App (_, f, [arg]) when by_pass_args f ->
    begin match arg with
      | Id (_, "%this") when toplevel -> true
      | _ -> false
    end
  | _ -> false

(* top level can use window properties but cannot make assignment to it.
   any refer to window object itself should be prohibited.
   Explicitly prohibit
*)
let rec is_window_obj ~toplevel e =
  (* window object will be desugared to multiple patterns *)
  match e with
  | GetField (_, obj, String(_, "window"), _)
    when is_context ~toplevel obj ->
    dprint_string (sprintf "find window object in %s\n" (ljs_str e));
    true
  | App (_, f, [arg]) when by_pass_args f ->
    is_window_obj ~toplevel arg
  | _ -> false

let rec window_free ?(toplevel=true) exp : bool =
  match exp with
  | GetField (_, obj, String(_, "window"), args) ->
    let is_not_context () =
      if not (is_context ~toplevel obj) then
        true
      else (dprint_string "get window from context\n"; false)
    in
    window_free ~toplevel args && is_not_context()
  | GetField (_, obj, _, _) when is_window_obj ~toplevel obj ->
    (* OK. use property of window. *)
    true
  | Lambda (_, _, body) -> window_free ~toplevel:false body
  | _ -> List.for_all (fun e -> window_free ~toplevel e) (child_exps exp)
*)
(* Decide whether the identifier-restoration phase may be applied to [exp]
   at all.  The checks mirror the numbered list in the comment above: any
   construct that could create or alias variables silently (window
   references, aliases of %this, computed top-level fields, 'with',
   deleting from this, iterating top-level this) makes [exp] ineligible. *)
let rec eligible_for_restoration exp : bool =
  (* A field is "static" when it is a string literal; computed fields on
     top-level this would defeat the context-to-identifier mapping. *)
  let is_static_field fld = match fld with
    | String (_, _) -> true
    | _ ->
      dprint (sprintf "not eligible: find non-static field: %s\n%!" (Exp_util.ljs_str fld));
      false
  in
  (* Does [args_obj] mention %this outside of any lambda body?  [toplevel]
     is flipped to false once we descend into a Lambda. *)
  let rec contain_this_keyword toplevel (args_obj : exp) =
    match args_obj with
    | Id (_, "%this") -> let result = toplevel in
      if result then (dprint "not eligible: make alias on %this\n"; true)
      else false
    | Lambda (_, _, body) -> contain_this_keyword false body
    | _ -> List.exists (fun e -> contain_this_keyword toplevel e) (child_exps args_obj)
  in
  let rec is_eligible_rec ?(toplevel=true) exp : bool =
    let is_eligible exp = is_eligible_rec ~toplevel exp in
    match exp with
    (* Literals and identifiers are always fine. *)
    | Undefined _
    | Null _
    | String (_,_)
    | Num (_,_)
    | True _
    | False _
    | Id _ -> true
    | Object (_, attr, props) ->
      let is_eligible_option ~toplevel (opt : exp option) = match opt with
        | Some (e) -> is_eligible_rec ~toplevel e
        | None -> true
      in
      let handle_prop prop = match prop with
        | (s, Data(data, _, _)) -> is_eligible_rec ~toplevel data.value
        | (s, Accessor(acc, _, _)) -> is_eligible_rec ~toplevel acc.getter && is_eligible_rec ~toplevel acc.setter
      in
      (* #code attributes are function bodies, hence checked with
         ~toplevel:false. *)
      is_eligible_option ~toplevel attr.primval &&
      is_eligible_option ~toplevel:false attr.code &&
      is_eligible_option ~toplevel attr.proto &&
      List.for_all handle_prop props
    | GetField (_, obj, fld, args) ->
      let eligible_field obj fld = match obj with
        (* when obj is `this` and it is on top-level, only static field is
           allowed; computation fields on other objects are fine. *)
        | App (_,Id(_,"%PropAccessorCheck"),[Id(_,"%this")]) -> is_static_field fld
        | _ -> true
      in
      (* static field is not required in function *)
      is_eligible obj && is_eligible fld && is_eligible args &&
      (if toplevel then (eligible_field obj fld) else true)
    | App (_, f, args) -> (match f, args with
        | Id (_, "%EnvCheckAssign"), [_;_; Id(_, "%this");_] when toplevel ->
          dprint "make alias of 'this'. not eligible";
          false
        | Id (_, "%EnvCheckAssign"), [_;_; Object(_,_,_) as obj;_] ->
          not (List.exists (fun x->contain_this_keyword toplevel x) (child_exps obj)) &&
          (List.for_all is_eligible args)
        | Id (_, "%set-property"), [App(_, Id(_,"%ToObject"), [Id(_, "%this")]);
                                    this_fld; arg] ->
          (* this['fld'] = 1 desugars to
             %set-property(%ToObject(%this), 'fld', 1.) *)
          is_eligible arg && (if toplevel then (is_static_field this_fld) else true)
        | Id (_, "%makeWithContext"), _ ->
          dprint "Use 'with'. Not eligible";
          false
        | Id (_, "%propertyNames"), [Id(_, "%this"); _] when toplevel ->
          dprint "get property from top-level this. Not eligible";
          false
        | Id (_, fname), args ->
          (* %mkArgsObj packs the single arguments object; at top level it
             must not smuggle %this into a function. *)
          List.for_all is_eligible args &&
          (if fname = "%mkArgsObj" && toplevel then
             (assert ((List.length args) = 1);
              not (contain_this_keyword toplevel (List.nth args 0)))
           else true)
        | _ ->
          is_eligible f &&
          List.for_all is_eligible args
      )
    | Lambda (_, _, body) ->
      (* NOTE(review): the body is checked with the *current* toplevel flag
         here, not ~toplevel:false — confirm this is intentional (function
         bodies reached through an Object's #code attribute do get false). *)
      is_eligible_rec ~toplevel body
    | DeleteField (_, Id(_, "%this"), v) ->
      (* deleting a variable through this is never allowed *)
      dprint (sprintf "deletefield: %s\n" (Exp_util.ljs_str v));
      false
    | _ -> List.for_all is_eligible (child_exps exp)
  in
  let check_strict = if only_strict then all_in_strict exp else true in
  check_strict && window_free exp && is_eligible_rec exp
(* This phase relies heavily on the desugared patterns, and it must be the
   first phase, run before all other optimizations.
   It recognizes the following patterns:
   - %defineGlobalVar(%context, "x")
     => let (x = undefined) ...
   - %context["x"] if x in %context
     => x, or x's actual binding location
   - %context["x" = ..] if "x" in %context
     => x := ..., or mutation on x's actual binding identifiers
   - in a function object:
       let {%context = {let..let..let {contextobj}} function-body}
     remove the %context
   - %PropAccessorCheck(%this) at top level
     => %context
     therefore this.x, which is desugared to %PropAccessorCheck(%this)["x"..],
     will be translated to %context["x"]
*)
(* [pre_post_transform op id] rewrites the desugared increment/decrement
   helper named [op], applied to the restored identifier [id], into a direct
   mutation of [id].  Returns [None] for any other operator name, letting
   the caller keep the original application.
   NOTE(review): the first three arm names below were reconstructed from the
   surviving "%PostDecrement" arm and the desugaring comments — confirm the
   exact names against the bindings in es5.env. *)
let pre_post_transform (op : string) (id : id) : exp option =
  let p = Pos.dummy in
  let toNumber id : exp = App (p, Id (p, "%ToNumber"), [Id (p, id)]) in
  (* %PrimSub/%PrimAdd on the numeric coercion of [id] and the literal 1. *)
  let make_prim op id = match op with
    | "-" -> App (p, Id (p, "%PrimSub"), [toNumber id; Num (p, 1.)])
    | "+" -> App (p, Id (p, "%PrimAdd"), [toNumber id; Num (p, 1.)])
    | _ -> failwith "make_prim gets unexpected argument"
  in
  match op with
  | "%PrefixDecrement" ->
    (* --i => i := %PrimSub(%ToNumber(i), 1) *)
    Some (SetBang (p, id, make_prim "-" id))
  | "%PrefixIncrement" ->
    (* ++i => i := %PrimAdd(%ToNumber(i), 1) *)
    Some (SetBang (p, id, make_prim "+" id))
  | "%PostIncrement" ->
    (* i++ => let (post = %ToNumber(i)) {i := %PrimAdd(%ToNumber(i),1); post} *)
    Some (Let (p, "post", toNumber id,
               Seq (p, SetBang (p, id, make_prim "+" id), Id (p, "post"))))
  | "%PostDecrement" ->
    (* i-- => let (post = %ToNumber(i)) {i := %PrimSub(%ToNumber(i),1); post} *)
    Some (Let (p, "post", toNumber id,
               Seq (p, SetBang (p, id, make_prim "-" id), Id (p, "post"))))
  | _ -> None
(* Build the %TypeError application signalled when code assigns to a
   non-writable restored binding named by [msg]. *)
let make_writable_error (msg : string) : exp =
  App (Pos.dummy, Id (Pos.dummy, "%TypeError"),
       [String (Pos.dummy, msg ^ " not writable")])
(* [restore_id e] is the entry point of the phase: it rewrites
   %context-based variable accesses in [e] back into plain identifiers.
   [ctx : names_t] maps a variable name to the expression it should be
   restored to, paired with a writability flag.  [in_lambda] tracks whether
   we are inside a function body, where %this must be left alone. *)
let rec restore_id (e : exp) : exp =
  let rec restore_rec ?(in_lambda=false) (e : exp) (ctx : names_t) : exp =
    match e with
    | Seq (p, e1, e2) ->
      begin match e1 with
        | App (p, Id (_, "%defineGlobalVar"), [ctxobj; String (_, id)]) when is_ctx_obj ctxobj ->
          (* if id is in ctx, do nothing and continue to e2; else add
             id -> (id, writable) into ctx and introduce a let binding *)
          dprint (sprintf "find defineGlobalVar %s\n" id);
          if IdMap.mem id ctx then
            restore_rec ~in_lambda e2 ctx
          else
            let ctx = IdMap.add id (Id (Pos.dummy, id), true) ctx in
            let newe2 = restore_rec ~in_lambda e2 ctx in
            Let (p, id, Undefined Pos.dummy, newe2)
        | App (_, Id (_, "%defineGlobalAccessors"), [ctxobj; String (_, id)])
          when is_ctx_obj ctxobj && IdMap.mem id ctx ->
          (* the id is already bound in %global; just drop the definition *)
          dprint (sprintf "find defineGlobalAccessor %s in %%global bindings\n" id);
          restore_rec ~in_lambda e2 ctx
        | _ ->
          let newe1 = restore_rec ~in_lambda e1 ctx in
          let newe2 = restore_rec ~in_lambda e2 ctx in
          Seq (p, newe1, newe2)
      end
    | GetField (pos, obj, fld, args) ->
      let o = restore_rec ~in_lambda obj ctx in
      let f = restore_rec ~in_lambda fld ctx in
      let a = restore_rec ~in_lambda args ctx in
      (match is_ctx_obj o, f with
       | true, String (_, fldstr) ->
         (* reading a static field from the context: replace by the bound
            expression when we know it *)
         (* debug:
            printf "match context['%s']\n%!" fldstr;
            IdMap.iter (fun k v -> printf "%s -> %s\n%!" k (Exp_util.ljs_str v)) ctx; *)
         begin try
             let v, _ = IdMap.find fldstr ctx in
             v
           with Not_found -> GetField (pos, o, f, a)
         end
       | _ -> GetField (pos, o, f, a)
      )
    | SetField (pos, obj, fld, newval, args) ->
      let o = restore_rec ~in_lambda obj ctx in
      let f = restore_rec ~in_lambda fld ctx in
      let v = restore_rec ~in_lambda newval ctx in
      let a = restore_rec ~in_lambda args ctx in
      (match is_ctx_obj o, f with
       | true, String (_, fldstr) ->
         (* writing a static field of the context: turn into SetBang when
            the binding is a writable identifier *)
         (try match IdMap.find fldstr ctx with
            | _, false -> make_writable_error fldstr
            | Id (_, id), true -> SetBang (pos, id, v)
            | err, _ ->
              (* debug:
                 let _ = IdMap.iter (fun k (v,b) -> printf "%s -> %s,%b\n%!"
                           k (Exp_util.ljs_str v) b) ctx in *)
              failwith (sprintf "SetField: transformation failed: %s" (ljs_str err))
          with Not_found -> SetField (pos, o, f, v, a)
         )
       | _ -> SetField (pos, o, f, v, a)
      )
    | App (pos, f, args) ->
      let f = restore_rec ~in_lambda f ctx in
      let args = List.map (fun x -> restore_rec ~in_lambda x ctx) args in
      (match f, args with
       | Id (_, "%EnvCheckAssign"), [o; String (_, fld_name); v; _] when is_ctx_obj o ->
         (try match IdMap.find fld_name ctx with
            | _, false -> make_writable_error fld_name
            | Id (_, id), true -> SetBang (pos, id, v)
            | fld, _ -> SetBang (pos, fld_name, v)
            (* TODO: get normal exp, which means the id is somehow declared
               in context; use that id (see example of es5id: 12.14-1.
               'foo' => #value (%ToJSError(foo)))
               failwith (sprintf "App: transformation failed: %s. Field is actually:%s"
                 (ljs_str e) (ljs_str fld)) *)
          with Not_found -> App (pos, f, args))
       | Id (_, "%PropAccessorCheck"), [Id (_, "%this")] ->
         (* top-level this is the global object; inside a lambda leave it *)
         if in_lambda then
           App (pos, f, args)
         else
           Id (pos, "%global")
       | Id (p1, "%set-property"), [o; String (p3, id); v] when is_ctx_obj o->
         (* reuse the SetField rule above via a synthetic SetField *)
         let newexp = SetField (p1, o, String(p3,id), v, Null Pos.dummy) in
         (match restore_rec ~in_lambda newexp ctx with
          | SetField(_,_,_,_,_) ->
            (* cannot translate, use the original set-property exp *)
            App (pos, f, args)
          | result -> result
         )
       | Id (_, "%ToObject"), [Id(_, "%this")] when not in_lambda ->
         Id (Pos.dummy, "%global")
       | Id (_, "%Typeof"), [o; String(_, id)]
         when is_ctx_obj o && IdMap.mem id ctx ->
         (* typeof on a restored binding: guard with try/catch because the
            identifier may be unbound at runtime *)
         begin try
             let replace_exp, _ = IdMap.find id ctx in
             TryCatch (Pos.dummy,
                       Op1 (Pos.dummy, "typeof", replace_exp),
                       Lambda (Pos.dummy, ["e"], Undefined Pos.dummy))
           with Not_found ->
             App (pos, f, args)
         end
       | Id (pos, op), [o; String(_, id)]
         when is_ctx_obj o && IdMap.mem id ctx ->
         (* increment/decrement helpers applied to a restored binding *)
         let transform var = match pre_post_transform op var with
           | Some (result) -> result
           | None -> App (pos, f, args)
         in
         begin match IdMap.find id ctx with
           | Id (_, actual_id), _ -> transform actual_id
           | Num (_,_), _ -> make_writable_error id
           | _ -> failwith (sprintf "%s: IdMap stores unrecognized exp" op)
         end
       | _ -> App (pos, f, args)
      )
    | Let (p, x, x_v, body) ->
      let x_v = restore_rec ~in_lambda x_v ctx in
      (* first match against the function-context pattern in lambda *)
      begin match get_localcontext e with
        | None -> (* not a new context binding in lambda *)
          (* in the desugared code, there is no place to bind %context to a non-obj *)
          (*assert (x <> "%context");*)
          Let (p, x, x_v, restore_rec ~in_lambda body ctx)
        | Some (new_let) ->
          (* FIXME: 12.14-1 *)
          (* NOTE(review): this arm was reconstructed from the surrounding
             garbled text — confirm against upstream. *)
          dprint (sprintf "new_let is %s\n" (Exp_util.ljs_str new_let));
          (try
             let new_ctx = recognize_new_context x_v ctx in
             replace_let_body new_let (restore_rec ~in_lambda body new_ctx)
           with Failure msg ->
             (printf "oops, pattern error: %s\n%!" msg;
              Let (p, x, x_v, restore_rec ~in_lambda body ctx)
             )
          )
      end
    | Lambda (p, xs, body) ->
      let result = restore_rec ~in_lambda:true body ctx in
      Lambda (p, xs, result)
    | Undefined _
    | Null _
    | String (_, _)
    | Num (_, _)
    | True _
    | False _
    | Id (_, _)
    | Op1 (_,_,_)
    | Op2 (_,_,_,_)
    | If (_,_,_,_)
    | Label (_,_,_)
    | Object (_,_,_)
    | GetObjAttr (_, _, _)
    | SetAttr (_,_,_,_,_)
    | GetAttr (_,_,_,_)
    | SetObjAttr (_,_,_,_)
    | DeleteField (_, _, _)
    | OwnFieldNames (_,_)
    | SetBang (_,_,_)
    | Rec (_,_,_,_)
    | Break (_,_,_)
    | TryCatch (_,_,_)
    | TryFinally (_,_,_)
    | Throw (_,_)
    | Hint (_,_,_)
    | Eval (_,_,_)
      -> optimize (fun e -> restore_rec ~in_lambda e ctx) e
  in
  let names = get_env_names e in
  (* debug:
     let _ = IdMap.iter (fun k (v,b) -> printf "%s -> %s,%b\n%!"
               k (Exp_util.ljs_str v) b) names in *)
  (* skip over the environment prelude and start restoring after the
     delimiter hint *)
  let rec jump_env (e : exp) : exp =
    match e with
    | Seq (p0, S.Hint (p1, hint, e), e2) when is_env_delimiter hint ->
      Seq (p0, S.Hint (p1, hint, e), restore_rec e2 names)
    | _ -> optimize jump_env e
  in
  let rec propagate_this (e : exp) (env : names_t) =
    (* XXX: any good reason to propagate this? *)
    let propagate e = propagate_this e env in
    match e with
    | Id (pos, id) when IdMap.mem id env -> Id (pos, "%this")
    | Let (pos, x, Id (p1, "%this"), body) ->
      (* x is an alias of %this: rewrite its uses back to %this *)
      let body = propagate_this body (IdMap.add x ((Undefined Pos.dummy),true) env) in
      Let (pos, x, Id (p1, "%this"), body)
    | _ -> optimize propagate e
  in
  let exp = propagate_this e IdMap.empty in
  (* if there is an environment, jump over the env; otherwise just start
     from the very beginning of the code. *)
  match get_code_after_delimiter exp with
  | None -> restore_rec exp names
  | Some (_) -> jump_env exp
| null | https://raw.githubusercontent.com/brownplt/LambdaS5/f0bf5c7baf1daa4ead4e398ba7d430bedb7de9cf/src/optimization/ljs_restore_id.ml | ocaml | set this variable to true if the restoration should be only applied
on code in strict mode
assume: obj follows let
checking whether all functions are in strict mode does not work.
because 'with' expression can appear on top level.
distinct top-level window and in function window
on top level, we should prohibit: this.window; this['window']; window
in function: we should only prohibit window variable. Because we also
prohibit passing this as argument, a['window'] works fine
top level can use window properties but cannot make assignment to it.
any refer to window object itself should be prohibited.
Explicitly prohibit
window object will be desugared to multiple patterns
OK. use property of window.
when obj is `this` and it is on top-level, only static field is
allowed; computation fields on other objects are fine.
static field is not required in function
i-- => let (post = ToNumber(i)) {i := %PrimSub(%ToNumber(i),1); post}
if id is in ctx, do nothing, continue to e2; else, add
id->id, true into ctx
if the id is already in %global, get what it is bound
get fld from context
cannot translate, use the original set-property exp
not a new context binding in lambda
in the desugared code, there is no place to bind %context to a non-obj
assert (x <> "%context");
XXX: any good reason to propagate this?
if there are environment, jump over the env. Otherwise just start
from the very begin of the code. | open Prelude
open Ljs_syntax
open Ljs_opt
open Exp_util
open Ljs_analyze_env
This phase will try to restore JavaScript identifier , for example ,
restore ` var ` from % context['var ' ] .
This phase needs to know the existance of all properties of context
before the actual transformation .
This phase should only work in strict mode . In non - strict mode ,
code like function(a ) { this.x = a } will create x in global
environment . Creating code silently makes this phase to lose track
of global variables and to do transformation incorrectly . Consider
the code in non - strict mode
var bar = 2
function foo ( ) { this.bar = 1 }
foo ( ) ;
bar ;
this phase will turn the last bar as identifier ` bar ' but leaves
the ` this.bar = 1 ` as it is , which is setting field ` bar ' of ` this '
object ' , something like % set - property(%this , " bar " , 1 )
restore `var` from %context['var'].
This phase needs to know the existance of all properties of context
before the actual transformation.
This phase should only work in strict mode. In non-strict mode,
code like function(a) {this.x = a} will create x in global
environment. Creating code silently makes this phase to lose track
of global variables and to do transformation incorrectly. Consider
the code in non-strict mode
var bar = 2
function foo() { this.bar = 1 }
foo();
bar;
this phase will turn the last bar as identifier `bar' but leaves
the `this.bar = 1` as it is, which is setting field `bar' of `this'
object', something like %set-property(%this, "bar", 1)
*)
let debug_on = false
let dprint = Debug.make_debug_printer ~on:debug_on "restore"
let only_strict = false
in function object # code attr , parent context is always shadowed ,
This function will recognize pattern of the new context and try to
get
1 ) function 's parameters
2 ) function 's local variables .
3 ) ? if ` arguments ` keyword are not used , the whole
Let ( _ , % context ... ) shall be removed
Note : parameter exp is the x_v part of Let(_,x , x_v , body ) , not the
whole Let expression .
one good example is
let ( % context = {
let ( % a11 = undefined ) {
let ( % x12 = % args [ " 0 " , null ] )
{ [ # proto : % parent , # class : " Object " , # extensible : true , ]
' arguments ' : { # value ( % args ) , # writable true , # configurable false } ,
' x ' : { # getter func ( this , args ) { % x12 } ,
# setter func ( this , args ) { % x12 : = args [ " 0 " , { [ # proto : % ArrayProto ,
# class : " Array " ,
# extensible : true , ] } ] } } ,
' a ' : { # getter func ( this , args ) { % a11 } ,
# setter func ( this , args )
{ % a11 : = ,
# class:"Array " ,
# extensible : true , ] } ] } } } } } ) { ... }
desugared from
function f1(x ) { var a = x } ;
This function will recognize pattern of the new context and try to
get
1) function's parameters
2) function's local variables.
3)? if `arguments` keyword are not used, the whole
Let(_, %context...) shall be removed
Note: parameter exp is the x_v part of Let(_,x,x_v,body), not the
whole Let expression.
one good example is
let (%context = {
let (%a11 = undefined) {
let (%x12 = %args ["0" , null])
{[#proto: %parent, #class: "Object", #extensible: true,]
'arguments' : {#value (%args) , #writable true , #configurable false},
'x' : {#getter func (this , args) {%x12} ,
#setter func (this , args) {%x12 := args ["0" , {[#proto: %ArrayProto,
#class: "Array",
#extensible: true,]}]}},
'a' : {#getter func (this , args) {%a11} ,
#setter func (this , args)
{%a11 := args["0",{[#proto:%ArrayProto,
#class:"Array",
#extensible: true,]}]}}}}}) {...}
desugared from
function f1(x) {var a = x};
*)
(* [recognize_new_context exp ctx] extends [ctx] with the bindings declared
   by a desugared function context.  [exp] is the initializer of a %context
   binding: a chain of Lets ending in an object literal whose data fields
   map names to values and whose accessor fields read a backing local via
   their getter.  Every discovered field is registered as writable. *)
let recognize_new_context exp ctx : names_t =
  (* Skip the let-spine and return the trailing context object. *)
  let rec strip_let exp : exp = match exp with
    | Let (_, _, _, body) -> strip_let body
    | _ -> exp
  in
  (* A getter's body is expected to be (possibly wrapped in Lambda/Label/
     Break) a bare identifier naming the backing local. *)
  let rec get_id_in_getter exp = match exp with
    | Id (_, _) -> exp
    | Lambda (_, xs, body) -> get_id_in_getter body
    | Label (_, _, body) -> get_id_in_getter body
    | Break (_, _, body) -> get_id_in_getter body
    | _ -> failwith "[5] pattern assertion failed: getter contains more complicated structure"
  in
  let rec collect_fields prop ctx : names_t = match prop with
    | fld, Data ({value=value; writable=_},_,_) ->
      IdMap.add fld (value, true) ctx
    | fld, Accessor ({getter=getter; setter=_},_,_) ->
      IdMap.add fld ((get_id_in_getter getter), true) ctx
  in
  let rec recog_field exp ctx : names_t =
    match exp with
    | Object (_, _, props) ->
      List.fold_right collect_fields props ctx
    | _ ->
      (* NOTE(review): this fallthrough arm's pattern was reconstructed
         from the surrounding garbled text — confirm against upstream. *)
      Exp_util.print_ljs exp; print_newline();
      failwith "[4]pattern assertion failed:"
  in
  let obj = strip_let exp in
  recog_field obj ctx
local context has the pattern that
let ( % context = {
let ( local1= .. )
let ( .. )
let ( arg1= .. )
let ( arg2= .. )
contextobject } )
body
this function will recognize the pattern and return the option of an exp that takes out
the contextobject and results in
let ( local1= .. )
let ( .. )
let ( arg1= .. )
let ( arg2= .. )
body
body is also returned as the second argument for convenience .
let (%context = {
let (local1=..)
let (local2=..)
let (arg1=..)
let (arg2=..)
contextobject})
body
this function will recognize the pattern and return the option of an exp that takes out
the contextobject and results in
let (local1=..)
let (local2=..)
let (arg1=..)
let (arg2=..)
body
body is also returned as the second argument for convenience.
*)
let get_localcontext (let_exp : exp) : exp option =
let rec get_let let_exp : exp =
match let_exp with
| Let (p, x, x_v, letb) ->
Let (p, x, x_v, get_let letb)
| Object (_,_,_) -> Undefined Pos.dummy
| _ -> failwith "not context"
in
match let_exp with
| Let (p, "%context", x_v, body) ->
(try
Some (get_let x_v)
with _ -> None
)
| _ -> None
let replace_let_body let_exp new_body =
let rec traverse let_exp : exp = match let_exp with
| Let (p, x, x_v, body) ->
Let (p, x, x_v, traverse body)
| Undefined _ -> new_body
| _ -> failwith "replace_let_body: should not reach here"
in
traverse let_exp
these functions will check if the program contains assignment to " this " or " window " .
Such assignments will create new variables silently . To be more specific , this
function will look for
0 . strict mode : can be turned off .
1 . % context["window " ] : code may do bad things through window
2 . % EnvCheckAssign ( _ , _ , % this , _ ): code may do bad things through the alias of ' this '
% EnvCheckAssign ( _ , _ , { object field as this } , _ ): code may do bad things through the alias of ' this ' .
the " this " alias should also prohibited in lambda , see 6 )
3 . passing % this to a function : code may do something like ` var a ; function z(o ) { return o.a } ; z(this ) `
4 . computation string field on top level . In function computation is fine .
5 . ): strict mode does not allow " with " . But here is how it works : code will make a new context , and
we can not decide if the expression % context["x " ] should be translated to identifier x or leave it as % context["x " ] .
6 . this[delete " x " ] : try to delete variable from this(no matter in toplevel or lambda ) .
7 . iterate through all property of top - level this
Such assignments will create new variables silently. To be more specific, this
function will look for
0. strict mode: can be turned off.
1. %context["window"]: code may do bad things through window
2. %EnvCheckAssign(_, _, %this, _): code may do bad things through the alias of 'this'
%EnvCheckAssign(_, _, {object field as this}, _): code may do bad things through the alias of 'this'.
the "this" alias should also prohibited in lambda, see 6)
3. passing %this to a function: code may do something like `var a; function z(o) {return o.a}; z(this)`
4. computation string field on top level. In function computation is fine.
5. with(o): strict mode does not allow "with". But here is how it works: code will make a new context, and
we cannot decide if the expression %context["x"] should be translated to identifier x or leave it as %context["x"].
6. this[delete "x"]: try to delete variable from this(no matter in toplevel or lambda).
7. iterate through all property of top-level this
*)
let rec all_in_strict exp : bool = match exp with
| Let (_, "#strict", False _, body) -> false
| _ -> List.for_all all_in_strict (child_exps exp)
let rec window_free ?(toplevel=true) exp : bool =
match exp with
| GetField (_, obj, String(_, "window"), args) ->
window_free ~toplevel args &&
(match obj with
| Id (_, "%context") -> dprint "not eligible: reference to window variable\n";
false
| App (_, Id (_, "%PropAccessorCheck"), [Id (_, "%this")]) ->
if toplevel then
(dprint "not eligible: reference window through this in top-level\n";
false)
else true
| _ -> window_free ~toplevel obj)
| Lambda (_, _, body) ->
window_free ~toplevel:false body
| _ -> List.for_all (fun e -> window_free ~toplevel e) (child_exps exp)
let by_pass_args func = match func with
| I d ( _ , " % PropAccessorCheck " ) - > true
| _ - > false
let is_context ~toplevel ( obj : exp ) : bool =
match obj with
| I d ( _ , " % context " ) - > true
| App ( _ , f , [ arg ] ) when by_pass_args f - >
begin match arg with
| I d ( _ , " % this " ) when toplevel - > true
| _ - > false
end
| _ - > false
( * top level can use window properties but can not make assignment to it .
any refer to window object itself should be prohibited .
Explicitly prohibit
let by_pass_args func = match func with
| Id (_, "%PropAccessorCheck") -> true
| _ -> false
let is_context ~toplevel (obj : exp) : bool =
match obj with
| Id (_, "%context") -> true
| App (_, f, [arg]) when by_pass_args f ->
begin match arg with
| Id (_, "%this") when toplevel -> true
| _ -> false
end
| _ -> false
let rec is_window_obj ~toplevel e =
match e with
| GetField (_, obj, String(_, "window"), _)
when is_context ~toplevel obj ->
dprint_string (sprintf "find window object in %s\n" (ljs_str e));
true
| App (_, f, [arg]) when by_pass_args f ->
is_window_obj ~toplevel arg
| _ -> false
let rec window_free ?(toplevel=true) exp : bool =
match exp with
| GetField (_, obj, String(_, "window"), args) ->
let is_not_context () =
if not (is_context ~toplevel obj) then
true
else (dprint_string "get window from context\n"; false)
in
window_free ~toplevel args && is_not_context()
| GetField (_, obj, _, _) when is_window_obj ~toplevel obj ->
true
| Lambda (_, _, body) -> window_free ~toplevel:false body
| _ -> List.for_all (fun e -> window_free ~toplevel e) (child_exps exp)
*)
let rec eligible_for_restoration exp : bool =
let is_static_field fld = match fld with
| String (_, _) -> true
| _ ->
dprint (sprintf "not eligible: find non-static field: %s\n%!" (Exp_util.ljs_str fld));
false
in
let rec contain_this_keyword toplevel (args_obj : exp) =
match args_obj with
| Id (_, "%this") -> let result = toplevel in
if result then (dprint "not eligible: make alias on %this\n"; true)
else false
| Lambda (_, _, body) -> contain_this_keyword false body
| _ -> List.exists (fun e -> contain_this_keyword toplevel e) (child_exps args_obj)
in
let rec is_eligible_rec ?(toplevel=true) exp : bool =
let is_eligible exp = is_eligible_rec ~toplevel exp in
match exp with
| Undefined _
| Null _
| String (_,_)
| Num (_,_)
| True _
| False _
| Id _ -> true
| Object (_, attr, props) ->
let is_eligible_option ~toplevel (opt : exp option) = match opt with
| Some (e) -> is_eligible_rec ~toplevel e
| None -> true
in
let handle_prop prop = match prop with
| (s, Data(data, _, _)) -> is_eligible_rec ~toplevel data.value
| (s, Accessor(acc, _, _)) -> is_eligible_rec ~toplevel acc.getter && is_eligible_rec ~toplevel acc.setter
in
is_eligible_option ~toplevel attr.primval &&
is_eligible_option ~toplevel:false attr.code &&
is_eligible_option ~toplevel attr.proto &&
List.for_all handle_prop props
| GetField (_, obj, fld, args) ->
let eligible_field obj fld = match obj with
| App (_,Id(_,"%PropAccessorCheck"),[Id(_,"%this")]) -> is_static_field fld
| _ -> true
in
is_eligible obj && is_eligible fld && is_eligible args &&
(if toplevel then (eligible_field obj fld) else true)
| App (_, f, args) -> (match f, args with
| Id (_, "%EnvCheckAssign"), [_;_; Id(_, "%this");_] when toplevel ->
dprint "make alias of 'this'. not eligible";
false
| Id (_, "%EnvCheckAssign"), [_;_; Object(_,_,_) as obj;_] ->
not (List.exists (fun x->contain_this_keyword toplevel x) (child_exps obj)) &&
(List.for_all is_eligible args)
| Id (_, "%set-property"), [App(_, Id(_,"%ToObject"), [Id(_, "%this")]);
this_fld; arg] ->
this['fld ' ] = 1= > to % set - property(%ToObject(%this ) , ' fld ' , 1 . )
is_eligible arg && (if toplevel then (is_static_field this_fld) else true)
| Id (_, "%makeWithContext"), _ ->
dprint "Use 'with'. Not eligible";
false
| Id (_, "%propertyNames"), [Id(_, "%this"); _] when toplevel ->
dprint "get property from top-level this. Not eligible";
false
| Id (_, fname), args ->
List.for_all is_eligible args &&
(if fname = "%mkArgsObj" && toplevel then
(assert ((List.length args) = 1);
not (contain_this_keyword toplevel (List.nth args 0)))
else true)
| _ ->
is_eligible f &&
List.for_all is_eligible args
)
| Lambda (_, _, body) ->
is_eligible_rec ~toplevel body
| DeleteField (_, Id(_, "%this"), v) ->
dprint (sprintf "deletefield: %s\n" (Exp_util.ljs_str v));
false
| _ -> List.for_all is_eligible (child_exps exp)
in
let check_strict = if only_strict then all_in_strict exp else true in
check_strict && window_free exp && is_eligible_rec exp
this phase highly relies on the desugared patterns .
this phase must be the first phase before all optimizations .
recognize the following pattern :
- % defineGlobalVar(%context , " x " )
= > let ( x = undefined ) ...
- % context["x " ] if x in % context
= > x or x 's actual binding location
- % context["x " = .. ] if " x " in % context
= > x : = ... or mutation on x 's actual binding identifiers
- in function object : let { % context = { let .. let .. let { contextobj } } function - body }
remove the % context
- % PropAccessorCheck(%this ) in top - level
= > % context
therefore , this.x , which will be desugared to % PropAccessorCheck(%this)["x " .. ] , will be
translated to % context["x " ]
this phase must be the first phase before all optimizations.
recognize the following pattern:
- %defineGlobalVar(%context, "x")
=> let (x = undefined) ...
- %context["x"] if x in %context
=> x or x's actual binding location
- %context["x" = ..] if "x" in %context
=> x := ... or mutation on x's actual binding identifiers
- in function object: let {%context = {let..let..let { contextobj}} function-body}
remove the %context
- %PropAccessorCheck(%this) in top-level
=> %context
therefore, this.x, which will be desugared to %PropAccessorCheck(%this)["x"..], will be
translated to %context["x"]
*)
let pre_post_transform (op : string) (id : id) : exp option =
let p = Pos.dummy in
let toNumber id : exp = App (Pos.dummy, Id (Pos.dummy, "%ToNumber"), [Id (Pos.dummy, id)]) in
let make_prim op id = match op with
| "-" -> App (p, Id(p, "%PrimSub"), [toNumber id; Num(p,1.)])
| "+" -> App (p, Id(p, "%PrimAdd"), [toNumber id; Num(p,1.)])
| _ -> failwith "make_prim gets unexpected argument"
in
match op with
--i = > i : = % PrimSub(%ToNumber(i ) , 1 )
Some (SetBang (p, id, make_prim "-" id))
+ + i = > i : = % PrimAdd(%ToNumber(i ) , 1 )
Some (SetBang (p, id, make_prim "+" id))
i++ = > let ( post = ToNumber(i ) ) { i : = % PrimAdd(%ToNumber(i),1 ) ; post }
Some (Let (p, "post", toNumber id,
Seq (p, SetBang(p, id , make_prim "+" id), Id (p, "post"))))
Some (Let (p, "post", toNumber id,
Seq (p, SetBang(p, id, make_prim "-" id), Id (p, "post"))))
| _ -> None
let make_writable_error (msg : string) : exp =
let msg = msg ^ " not writable" in
App (Pos.dummy, Id(Pos.dummy, "%TypeError"), [String (Pos.dummy, msg)])
let rec restore_id (e : exp) : exp =
let rec restore_rec ?(in_lambda=false) (e : exp) (ctx : names_t) : exp =
match e with
| Seq (p, e1, e2) ->
begin match e1 with
| App (p, Id (_, "%defineGlobalVar"), [ctxobj; String (_, id)]) when is_ctx_obj ctxobj ->
dprint (sprintf "find defineGlobalVar %s\n" id);
if IdMap.mem id ctx then
restore_rec ~in_lambda e2 ctx
else
let ctx = IdMap.add id (Id (Pos.dummy, id), true) ctx in
let newe2 = restore_rec ~in_lambda e2 ctx in
Let (p, id, Undefined Pos.dummy, newe2)
| App (_, Id (_, "%defineGlobalAccessors"), [ctxobj; String (_, id)])
when is_ctx_obj ctxobj && IdMap.mem id ctx ->
dprint (sprintf "find defineGlobalAccessor %s in %%global bindings\n" id);
restore_rec ~in_lambda e2 ctx
| _ ->
let newe1 = restore_rec ~in_lambda e1 ctx in
let newe2 = restore_rec ~in_lambda e2 ctx in
Seq (p, newe1, newe2)
end
| GetField (pos, obj, fld, args) ->
let o = restore_rec ~in_lambda obj ctx in
let f = restore_rec ~in_lambda fld ctx in
let a = restore_rec ~in_lambda args ctx in
(match is_ctx_obj o, f with
| true, String (_, fldstr) ->
printf " match context['%s']\n% ! " ;
IdMap.iter ( fun k v->printf " % s - > % s\n% ! " k ( v ) ) ctx ;
IdMap.iter (fun k v->printf "%s -> %s\n%!" k (Exp_util.ljs_str v)) ctx;*)
begin try
let v, _ = IdMap.find fldstr ctx in
v
with Not_found -> GetField (pos, o, f, a)
end
| _ -> GetField (pos, o, f, a)
)
| SetField (pos, obj, fld, newval, args) ->
let o = restore_rec ~in_lambda obj ctx in
let f = restore_rec ~in_lambda fld ctx in
let v = restore_rec ~in_lambda newval ctx in
let a = restore_rec ~in_lambda args ctx in
(match is_ctx_obj o, f with
| true, String (_, fldstr) ->
(try match IdMap.find fldstr ctx with
| _, false -> make_writable_error fldstr
| Id (_, id), true -> SetBang(pos, id, v)
| err, _ ->
let _ = IdMap.iter ( fun k ( v , b)->printf " % s - > % s,%b\n% ! " k ( v ) b ) ctx in
failwith (sprintf "SetField: transformation failed: %s" (ljs_str err))
with Not_found -> SetField (pos, o, f, v, a)
)
| _ -> SetField (pos, o, f, v, a)
)
| App (pos, f, args) ->
let f = restore_rec ~in_lambda f ctx in
let args = List.map (fun x->restore_rec ~in_lambda x ctx) args in
(match f, args with
| Id (_, "%EnvCheckAssign"), [o; String (_, fld_name); v; _] when is_ctx_obj o ->
(try match IdMap.find fld_name ctx with
| _, false -> make_writable_error fld_name
| Id (_, id), true -> SetBang (pos, id, v)
| fld, _ -> SetBang (pos, fld_name, v)
TODO : get normal exp , which means that i d is somehow declared in contxt ,
use that id(see example of es5id : 12.14 - 1 . ' ( % ToJSError(foo ) )
failwith ( sprintf " App : transformation failed : % s. Field is actually:%s "
( e ) ( fld ) )
use that id(see example of es5id: 12.14-1. 'foo'=>#value (%ToJSError(foo))
failwith (sprintf "App: transformation failed: %s. Field is actually:%s"
(ljs_str e) (ljs_str fld))
*)
with Not_found -> App (pos, f, args))
| Id (_, "%PropAccessorCheck"), [Id (_, "%this")] ->
if in_lambda then
App (pos, f, args)
else
Id (pos, "%global")
| Id (p1, "%set-property"), [o; String (p3, id); v] when is_ctx_obj o->
let newexp = SetField (p1, o, String(p3,id), v, Null Pos.dummy) in
(match restore_rec ~in_lambda newexp ctx with
| SetField(_,_,_,_,_) ->
App (pos, f, args)
| result -> result
)
| Id (_, "%ToObject"), [Id(_, "%this")] when not in_lambda ->
Id (Pos.dummy, "%global")
| Id (_, "%Typeof"), [o; String(_, id)]
when is_ctx_obj o && IdMap.mem id ctx ->
begin try
let replace_exp, _ = IdMap.find id ctx in
TryCatch (Pos.dummy,
Op1(Pos.dummy, "typeof", replace_exp),
Lambda (Pos.dummy, ["e"], Undefined Pos.dummy))
with Not_found ->
App (pos, f, args)
end
| Id (pos, op), [o; String(_, id)]
when is_ctx_obj o && IdMap.mem id ctx ->
let transform var = match pre_post_transform op var with
| Some (result) -> result
| None -> App (pos, f, args)
in
begin match IdMap.find id ctx with
| Id (_, actual_id), _ -> transform actual_id
| Num (_,_), _ -> make_writable_error id
| _ -> failwith (sprintf "%s: IdMap stores unrecognized exp" op)
end
| _ -> App (pos, f, args)
)
| Let (p, x, x_v, body) ->
let x_v = restore_rec ~in_lambda x_v ctx in
first match with context patterns in lambda
begin match get_localcontext e with
Let (p, x, x_v, restore_rec ~in_lambda body ctx)
FIXME : 12.14 - 1
dprint (sprintf "new_let is %s\n" (Exp_util.ljs_str new_let));
(try
let new_ctx = recognize_new_context x_v ctx in
replace_let_body new_let (restore_rec ~in_lambda body new_ctx)
with Failure msg ->
(printf "oops, pattern error: %s\n%!" msg;
Let (p, x, x_v, restore_rec ~in_lambda body ctx)
)
)
end
| Lambda (p, xs, body) ->
let result = restore_rec ~in_lambda:true body ctx in
Lambda (p, xs, result)
| Undefined _
| Null _
| String (_, _)
| Num (_, _)
| True _
| False _
| Id (_, _)
| Op1 (_,_,_)
| Op2 (_,_,_,_)
| If (_,_,_,_)
| Label (_,_,_)
| Object (_,_,_)
| GetObjAttr (_, _, _)
| SetAttr (_,_,_,_,_)
| GetAttr (_,_,_,_)
| SetObjAttr (_,_,_,_)
| DeleteField (_, _, _)
| OwnFieldNames (_,_)
| SetBang (_,_,_)
| Rec (_,_,_,_)
| Break (_,_,_)
| TryCatch (_,_,_)
| TryFinally (_,_,_)
| Throw (_,_)
| Hint (_,_,_)
| Eval (_,_,_)
-> optimize (fun e->restore_rec ~in_lambda e ctx) e
in
let names = get_env_names e in
let _ = IdMap.iter ( fun k ( v , b)->printf " % s - > % s,%b\n% ! " k ( v ) b ) names in
let rec jump_env (e : exp) : exp =
match e with
| Seq (p0, S.Hint (p1, hint, e), e2) when is_env_delimiter hint ->
Seq (p0, S.Hint (p1, hint, e), restore_rec e2 names)
| _ -> optimize jump_env e
in
let rec propagate_this (e : exp) (env : names_t) =
let propagate e = propagate_this e env in
match e with
| Id (pos, id) when IdMap.mem id env -> Id (pos, "%this")
| Let (pos, x, Id (p1, "%this"), body) ->
let body = propagate_this body (IdMap.add x ((Undefined Pos.dummy),true) env) in
Let(pos, x, Id(p1, "%this"), body)
| _ -> optimize propagate e
in
let exp = propagate_this e IdMap.empty in
match get_code_after_delimiter exp with
| None -> restore_rec exp names
| Some (_) -> jump_env exp
|
083f8f364dc1a474f5fe3f12b5bb2f15d05ea3e346edfd840f277d9c3952156e | DanielG/kvm-in-a-box | Udev.hs | module Udev where
import Types
import Resource
import Control.Applicative
import Data.List
import Data.List.Split
import Data.Maybe
lvmOwnerResources :: [Vm] -> SomeResource
lvmOwnerResources vms = SomeResource $ FileResource {
rPath = "/etc/udev/rules.d/60-kib.rules",
rPerms = ((Nothing, Nothing), Just "644"),
rNormalize = unlines . sort . lines,
rParse = map own . parse,
rUnparse = unparse,
rContentFunc = const $ map own $ concatMap rs vms
}
where
rs Vm { vName=vmn, vSysCfg=VmSysCfg {vVg=vg, vAddDisks=addDisks} } =
rule vg vmn Nothing : map (rule vg vmn) (map Just addDisks)
rule vg vmn mdisk =
[ ("ENV{DM_VG_NAME}==", qt vg)
, ("ENV{DM_LV_NAME}==", qt $ vmn ++ fromMaybe "" (("-"++) <$> mdisk))
, ("OWNER=", qt $ "kib-" ++ vmn)
]
splitKV = split (condense $ endsWith "=")
qt str = "\"" ++ str ++ "\""
own :: [(String, String)] -> (ResourceOwner, [(String, String)])
own x
| Just vg <- lookup "ENV{DM_VG_NAME}" x
, Just lv <- lookup "ENV{DM_LV_NAME}" x
, Just owner <- lookup "OWNER" x
, "kib":vmn:_ <- splitOn "-" owner
= (OwnerVm vmn, x)
@(map snd - > [ vg , lv , ' k':'i':'b':'-':vmn ] ) = ( OwnerVm vmn , x )
own x = (OwnerSystem, x)
parse :: String -> [[(String, String)]]
parse = map (map var . words) . lines
unparse :: [[(String, String)]] -> String
unparse = unlines . map (unwords . map unvar)
var str = let [k,v] = splitKV str in (k,v)
unvar (k,v) = k ++ v
| null | https://raw.githubusercontent.com/DanielG/kvm-in-a-box/6bf71bb389a19806fac2f32a2c6d92261fb649e1/src/Udev.hs | haskell | module Udev where
import Types
import Resource
import Control.Applicative
import Data.List
import Data.List.Split
import Data.Maybe
lvmOwnerResources :: [Vm] -> SomeResource
lvmOwnerResources vms = SomeResource $ FileResource {
rPath = "/etc/udev/rules.d/60-kib.rules",
rPerms = ((Nothing, Nothing), Just "644"),
rNormalize = unlines . sort . lines,
rParse = map own . parse,
rUnparse = unparse,
rContentFunc = const $ map own $ concatMap rs vms
}
where
rs Vm { vName=vmn, vSysCfg=VmSysCfg {vVg=vg, vAddDisks=addDisks} } =
rule vg vmn Nothing : map (rule vg vmn) (map Just addDisks)
rule vg vmn mdisk =
[ ("ENV{DM_VG_NAME}==", qt vg)
, ("ENV{DM_LV_NAME}==", qt $ vmn ++ fromMaybe "" (("-"++) <$> mdisk))
, ("OWNER=", qt $ "kib-" ++ vmn)
]
splitKV = split (condense $ endsWith "=")
qt str = "\"" ++ str ++ "\""
own :: [(String, String)] -> (ResourceOwner, [(String, String)])
own x
| Just vg <- lookup "ENV{DM_VG_NAME}" x
, Just lv <- lookup "ENV{DM_LV_NAME}" x
, Just owner <- lookup "OWNER" x
, "kib":vmn:_ <- splitOn "-" owner
= (OwnerVm vmn, x)
@(map snd - > [ vg , lv , ' k':'i':'b':'-':vmn ] ) = ( OwnerVm vmn , x )
own x = (OwnerSystem, x)
parse :: String -> [[(String, String)]]
parse = map (map var . words) . lines
unparse :: [[(String, String)]] -> String
unparse = unlines . map (unwords . map unvar)
var str = let [k,v] = splitKV str in (k,v)
unvar (k,v) = k ++ v
| |
5d7f9931fa77ab42c6bde6f322a733fa082d065efb6e7ff8e603531458db4bc8 | emqx/emqx | emqx_shared_sub.erl | %%--------------------------------------------------------------------
Copyright ( c ) 2018 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_shared_sub).
-behaviour(gen_server).
-include("emqx.hrl").
-include("emqx_mqtt.hrl").
-include("logger.hrl").
-include("types.hrl").
Mnesia bootstrap
-export([mnesia/1]).
-boot_mnesia({mnesia, [boot]}).
%% APIs
-export([start_link/0]).
-export([
subscribe/3,
unsubscribe/3
]).
-export([
dispatch/3,
dispatch/4,
do_dispatch_with_ack/4,
redispatch/1
]).
-export([
maybe_ack/1,
maybe_nack_dropped/1,
nack_no_connection/1,
is_ack_required/1
]).
%% for testing
-ifdef(TEST).
-export([
subscribers/2,
strategy/1
]).
-endif.
%% gen_server callbacks
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3
]).
%% Internal exports (RPC)
-export([
init_monitors/0
]).
-export_type([strategy/0]).
-type strategy() ::
random
| round_robin
| round_robin_per_group
| sticky
| local
| hash_clientid
| hash_topic.
-define(SERVER, ?MODULE).
-define(TAB, emqx_shared_subscription).
-define(SHARED_SUBS_ROUND_ROBIN_COUNTER, emqx_shared_subscriber_round_robin_counter).
-define(SHARED_SUBS, emqx_shared_subscriber).
-define(ALIVE_SUBS, emqx_alive_shared_subscribers).
-define(SHARED_SUB_QOS1_DISPATCH_TIMEOUT_SECONDS, 5).
-define(IS_LOCAL_PID(Pid), (is_pid(Pid) andalso node(Pid) =:= node())).
-define(ACK, shared_sub_ack).
-define(NACK(Reason), {shared_sub_nack, Reason}).
-define(NO_ACK, no_ack).
-define(REDISPATCH_TO(GROUP, TOPIC), {GROUP, TOPIC}).
-define(SUBSCRIBER_DOWN, noproc).
-type redispatch_to() :: ?REDISPATCH_TO(emqx_topic:group(), emqx_topic:topic()).
-record(state, {pmon}).
-record(emqx_shared_subscription, {group, topic, subpid}).
%%--------------------------------------------------------------------
Mnesia bootstrap
%%--------------------------------------------------------------------
mnesia(boot) ->
ok = mria:create_table(?TAB, [
{type, bag},
{rlog_shard, ?SHARED_SUB_SHARD},
{storage, ram_copies},
{record_name, emqx_shared_subscription},
{attributes, record_info(fields, emqx_shared_subscription)}
]).
%%--------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------
-spec start_link() -> startlink_ret().
start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
-spec subscribe(emqx_types:group(), emqx_types:topic(), pid()) -> ok.
subscribe(Group, Topic, SubPid) when is_pid(SubPid) ->
gen_server:call(?SERVER, {subscribe, Group, Topic, SubPid}).
-spec unsubscribe(emqx_types:group(), emqx_types:topic(), pid()) -> ok.
unsubscribe(Group, Topic, SubPid) when is_pid(SubPid) ->
gen_server:call(?SERVER, {unsubscribe, Group, Topic, SubPid}).
record(Group, Topic, SubPid) ->
#emqx_shared_subscription{group = Group, topic = Topic, subpid = SubPid}.
-spec dispatch(emqx_types:group(), emqx_types:topic(), emqx_types:delivery()) ->
emqx_types:deliver_result().
dispatch(Group, Topic, Delivery) ->
dispatch(Group, Topic, Delivery, _FailedSubs = #{}).
dispatch(Group, Topic, Delivery = #delivery{message = Msg}, FailedSubs) ->
#message{from = ClientId, topic = SourceTopic} = Msg,
case pick(strategy(Group), ClientId, SourceTopic, Group, Topic, FailedSubs) of
false ->
{error, no_subscribers};
{Type, SubPid} ->
Msg1 = with_redispatch_to(Msg, Group, Topic),
case do_dispatch(SubPid, Group, Topic, Msg1, Type) of
ok ->
{ok, 1};
{error, Reason} ->
%% Failed to dispatch to this sub, try next.
dispatch(Group, Topic, Delivery, FailedSubs#{SubPid => Reason})
end
end.
-spec strategy(emqx_topic:group()) -> strategy().
strategy(Group) ->
case emqx:get_config([broker, shared_subscription_group, Group, strategy], undefined) of
undefined -> emqx:get_config([broker, shared_subscription_strategy]);
Strategy -> Strategy
end.
-spec ack_enabled() -> boolean().
ack_enabled() ->
emqx:get_config([broker, shared_dispatch_ack_enabled]).
do_dispatch(SubPid, _Group, Topic, Msg, _Type) when SubPid =:= self() ->
%% Deadlock otherwise
SubPid ! {deliver, Topic, Msg},
ok;
%% return either 'ok' (when everything is fine) or 'error'
do_dispatch(SubPid, _Group, Topic, #message{qos = ?QOS_0} = Msg, _Type) ->
%% For QoS 0 message, send it as regular dispatch
send(SubPid, Topic, {deliver, Topic, Msg});
do_dispatch(SubPid, _Group, Topic, Msg, retry) ->
%% Retry implies all subscribers nack:ed, send again without ack
send(SubPid, Topic, {deliver, Topic, Msg});
do_dispatch(SubPid, Group, Topic, Msg, fresh) ->
case ack_enabled() of
true ->
FIXME : replace with ` emqx_shared_sub_proto : dispatch_with_ack ' in 5.2
do_dispatch_with_ack(SubPid, Group, Topic, Msg);
false ->
send(SubPid, Topic, {deliver, Topic, Msg})
end.
-spec do_dispatch_with_ack(pid(), emqx_types:group(), emqx_types:topic(), emqx_types:message()) ->
ok | {error, _}.
do_dispatch_with_ack(SubPid, Group, Topic, Msg) ->
For QoS 1/2 message , expect an ack
Ref = erlang:monitor(process, SubPid),
Sender = self(),
FIXME : replace with regular send in 5.2
send(SubPid, Topic, {deliver, Topic, with_group_ack(Msg, Group, Sender, Ref)}),
Timeout =
case Msg#message.qos of
?QOS_2 -> infinity;
_ -> timer:seconds(?SHARED_SUB_QOS1_DISPATCH_TIMEOUT_SECONDS)
end,
try
receive
{Ref, ?ACK} ->
ok;
{Ref, ?NACK(Reason)} ->
%% the receive session may nack this message when its queue is full
{error, Reason};
{'DOWN', Ref, process, SubPid, Reason} ->
{error, Reason}
after Timeout ->
{error, timeout}
end
after
ok = emqx_pmon:demonitor(Ref)
end.
with_group_ack(Msg, Group, Sender, Ref) ->
emqx_message:set_headers(#{shared_dispatch_ack => {Group, Sender, Ref}}, Msg).
-spec without_group_ack(emqx_types:message()) -> emqx_types:message().
without_group_ack(Msg) ->
emqx_message:set_headers(#{shared_dispatch_ack => ?NO_ACK}, Msg).
get_group_ack(Msg) ->
emqx_message:get_header(shared_dispatch_ack, Msg, ?NO_ACK).
with_redispatch_to(#message{qos = ?QOS_0} = Msg, _Group, _Topic) ->
Msg;
with_redispatch_to(Msg, Group, Topic) ->
emqx_message:set_headers(#{redispatch_to => ?REDISPATCH_TO(Group, Topic)}, Msg).
%% @hidden Redispatch is neede only for the messages with redispatch_to header added.
is_redispatch_needed(#message{} = Msg) ->
case get_redispatch_to(Msg) of
?REDISPATCH_TO(_, _) ->
true;
_ ->
false
end.
@doc Redispatch shared deliveries to other members in the group .
redispatch(Messages0) ->
Messages = lists:filter(fun is_redispatch_needed/1, Messages0),
case length(Messages) of
L when L > 0 ->
?SLOG(info, #{
msg => "redispatching_shared_subscription_message",
count => L
}),
lists:foreach(fun redispatch_shared_message/1, Messages);
_ ->
ok
end.
redispatch_shared_message(#message{} = Msg) ->
%% As long as it's still a #message{} record in inflight,
%% we should try to re-dispatch
?REDISPATCH_TO(Group, Topic) = get_redispatch_to(Msg),
%% Note that dispatch is called with self() in failed subs
%% This is done to avoid dispatching back to caller
Delivery = #delivery{sender = self(), message = Msg},
%% Self is terminating, it makes no sense to loop-back the dispatch
FailedSubs = #{self() => ?SUBSCRIBER_DOWN},
dispatch(Group, Topic, Delivery, FailedSubs).
%% @hidden Return the `redispatch_to` group-topic in the message header.
%% `false` is returned if the message is not a shared dispatch.
%% or when it's a QoS 0 message.
-spec get_redispatch_to(emqx_types:message()) -> redispatch_to() | false.
get_redispatch_to(Msg) ->
emqx_message:get_header(redispatch_to, Msg, false).
-spec is_ack_required(emqx_types:message()) -> boolean().
is_ack_required(Msg) -> ?NO_ACK =/= get_group_ack(Msg).
%% @doc Negative ack dropped message due to inflight window or message queue being full.
-spec maybe_nack_dropped(emqx_types:message()) -> boolean().
maybe_nack_dropped(Msg) ->
case get_group_ack(Msg) of
?NO_ACK -> false;
{_Group, Sender, Ref} -> ok == nack(Sender, Ref, dropped)
end.
%% @doc Negative ack message due to connection down.
%% Assuming this function is always called when ack is required
%% i.e is_ack_required returned true.
-spec nack_no_connection(emqx_types:message()) -> ok.
nack_no_connection(Msg) ->
{_Group, Sender, Ref} = get_group_ack(Msg),
nack(Sender, Ref, no_connection).
-spec nack(pid(), reference(), dropped | no_connection) -> ok.
nack(Sender, Ref, Reason) ->
Sender ! {Ref, ?NACK(Reason)},
ok.
-spec maybe_ack(emqx_types:message()) -> emqx_types:message().
maybe_ack(Msg) ->
case get_group_ack(Msg) of
?NO_ACK ->
Msg;
{_Group, Sender, Ref} ->
Sender ! {Ref, ?ACK},
without_group_ack(Msg)
end.
pick(sticky, ClientId, SourceTopic, Group, Topic, FailedSubs) ->
Sub0 = erlang:get({shared_sub_sticky, Group, Topic}),
All = subscribers(Group, Topic, FailedSubs),
case is_active_sub(Sub0, FailedSubs, All) of
true ->
%% the old subscriber is still alive
%% keep using it for sticky strategy
{fresh, Sub0};
false ->
randomly pick one for the first message
FailedSubs1 = FailedSubs#{Sub0 => ?SUBSCRIBER_DOWN},
Res = do_pick(All, random, ClientId, SourceTopic, Group, Topic, FailedSubs1),
case Res of
{_, Sub} ->
%% stick to whatever pick result
erlang:put({shared_sub_sticky, Group, Topic}, Sub);
_ ->
ok
end,
Res
end;
pick(Strategy, ClientId, SourceTopic, Group, Topic, FailedSubs) ->
All = subscribers(Group, Topic, FailedSubs),
do_pick(All, Strategy, ClientId, SourceTopic, Group, Topic, FailedSubs).
do_pick([], _Strategy, _ClientId, _SourceTopic, _Group, _Topic, _FailedSubs) ->
false;
do_pick(All, Strategy, ClientId, SourceTopic, Group, Topic, FailedSubs) ->
case lists:filter(fun(Sub) -> not maps:is_key(Sub, FailedSubs) end, All) of
[] ->
All offline ? pick one anyway
{retry, pick_subscriber(Group, Topic, Strategy, ClientId, SourceTopic, All)};
Subs ->
More than one available
{fresh, pick_subscriber(Group, Topic, Strategy, ClientId, SourceTopic, Subs)}
end.
pick_subscriber(_Group, _Topic, _Strategy, _ClientId, _SourceTopic, [Sub]) ->
Sub;
pick_subscriber(Group, Topic, local, ClientId, SourceTopic, Subs) ->
case lists:filter(fun(Pid) -> erlang:node(Pid) =:= node() end, Subs) of
[_ | _] = LocalSubs ->
pick_subscriber(Group, Topic, random, ClientId, SourceTopic, LocalSubs);
[] ->
pick_subscriber(Group, Topic, random, ClientId, SourceTopic, Subs)
end;
pick_subscriber(Group, Topic, Strategy, ClientId, SourceTopic, Subs) ->
Nth = do_pick_subscriber(Group, Topic, Strategy, ClientId, SourceTopic, length(Subs)),
lists:nth(Nth, Subs).
do_pick_subscriber(_Group, _Topic, random, _ClientId, _SourceTopic, Count) ->
rand:uniform(Count);
do_pick_subscriber(_Group, _Topic, hash_clientid, ClientId, _SourceTopic, Count) ->
1 + erlang:phash2(ClientId) rem Count;
do_pick_subscriber(_Group, _Topic, hash_topic, _ClientId, SourceTopic, Count) ->
1 + erlang:phash2(SourceTopic) rem Count;
do_pick_subscriber(Group, Topic, round_robin, _ClientId, _SourceTopic, Count) ->
Rem =
case erlang:get({shared_sub_round_robin, Group, Topic}) of
undefined -> rand:uniform(Count) - 1;
N -> (N + 1) rem Count
end,
_ = erlang:put({shared_sub_round_robin, Group, Topic}, Rem),
Rem + 1;
do_pick_subscriber(Group, Topic, round_robin_per_group, _ClientId, _SourceTopic, Count) ->
reset the counter to 1 if counter > subscriber count to avoid the counter to grow larger
%% than the current subscriber count.
if no counter for the given group topic exists - due to a configuration change - create a new one starting at 0
ets:update_counter(?SHARED_SUBS_ROUND_ROBIN_COUNTER, {Group, Topic}, {2, 1, Count, 1}, {
{Group, Topic}, 0
}).
%% Select ETS table to get all subscriber pids which are not down.
subscribers(Group, Topic, FailedSubs) ->
lists:filter(
fun(P) ->
?SUBSCRIBER_DOWN =/= maps:get(P, FailedSubs, false)
end,
subscribers(Group, Topic)
).
%% Select ETS table to get all subscriber pids.
subscribers(Group, Topic) ->
ets:select(?TAB, [{{emqx_shared_subscription, Group, Topic, '$1'}, [], ['$1']}]).
%%--------------------------------------------------------------------
%% gen_server callbacks
%%--------------------------------------------------------------------
init([]) ->
ok = mria:wait_for_tables([?TAB]),
{ok, _} = mnesia:subscribe({table, ?TAB, simple}),
{atomic, PMon} = mria:transaction(?SHARED_SUB_SHARD, fun ?MODULE:init_monitors/0),
ok = emqx_tables:new(?SHARED_SUBS, [protected, bag]),
ok = emqx_tables:new(?ALIVE_SUBS, [protected, set, {read_concurrency, true}]),
ok = emqx_tables:new(?SHARED_SUBS_ROUND_ROBIN_COUNTER, [public, set, {write_concurrency, true}]),
{ok, update_stats(#state{pmon = PMon})}.
init_monitors() ->
mnesia:foldl(
fun(#emqx_shared_subscription{subpid = SubPid}, Mon) ->
emqx_pmon:monitor(SubPid, Mon)
end,
emqx_pmon:new(),
?TAB
).
handle_call({subscribe, Group, Topic, SubPid}, _From, State = #state{pmon = PMon}) ->
mria:dirty_write(?TAB, record(Group, Topic, SubPid)),
case ets:member(?SHARED_SUBS, {Group, Topic}) of
true -> ok;
false -> ok = emqx_router:do_add_route(Topic, {Group, node()})
end,
ok = maybe_insert_alive_tab(SubPid),
ok = maybe_insert_round_robin_count({Group, Topic}),
true = ets:insert(?SHARED_SUBS, {{Group, Topic}, SubPid}),
{reply, ok, update_stats(State#state{pmon = emqx_pmon:monitor(SubPid, PMon)})};
handle_call({unsubscribe, Group, Topic, SubPid}, _From, State) ->
mria:dirty_delete_object(?TAB, record(Group, Topic, SubPid)),
true = ets:delete_object(?SHARED_SUBS, {{Group, Topic}, SubPid}),
delete_route_if_needed({Group, Topic}),
maybe_delete_round_robin_count({Group, Topic}),
{reply, ok, State};
handle_call(Req, _From, State) ->
?SLOG(error, #{msg => "unexpected_call", req => Req}),
{reply, ignored, State}.
handle_cast(Msg, State) ->
?SLOG(error, #{msg => "unexpected_cast", req => Msg}),
{noreply, State}.
handle_info(
{mnesia_table_event, {write, #emqx_shared_subscription{subpid = SubPid}, _}},
State = #state{pmon = PMon}
) ->
ok = maybe_insert_alive_tab(SubPid),
{noreply, update_stats(State#state{pmon = emqx_pmon:monitor(SubPid, PMon)})};
%% The subscriber may have subscribed multiple topics, so we need to keep monitoring the PID until
%% it `unsubscribed` the last topic.
%% The trick is we don't demonitor the subscriber here, and (after a long time) it will eventually
%% be disconnected.
handle_info({mnesia_table_event , { delete_object , OldRecord , _ } } , State = # state{pmon = PMon } ) - >
# emqx_shared_subscription{subpid = SubPid } = OldRecord ,
{ noreply , update_stats(State#state{pmon = emqx_pmon : demonitor(SubPid , PMon ) } ) } ;
handle_info({mnesia_table_event, _Event}, State) ->
{noreply, State};
handle_info({'DOWN', _MRef, process, SubPid, Reason}, State = #state{pmon = PMon}) ->
?SLOG(info, #{msg => "shared_subscriber_down", sub_pid => SubPid, reason => Reason}),
cleanup_down(SubPid),
{noreply, update_stats(State#state{pmon = emqx_pmon:erase(SubPid, PMon)})};
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
mnesia:unsubscribe({table, ?TAB, simple}).
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%--------------------------------------------------------------------
Internal functions
%%--------------------------------------------------------------------
send(Pid, Topic, Msg) ->
Node = node(Pid),
_ =
case Node =:= node() of
true ->
Pid ! Msg;
false ->
emqx_shared_sub_proto_v1:send(Node, Pid, Topic, Msg)
end,
ok.
maybe_insert_round_robin_count({Group, _Topic} = GroupTopic) ->
strategy(Group) =:= round_robin_per_group andalso
ets:insert(?SHARED_SUBS_ROUND_ROBIN_COUNTER, {GroupTopic, 0}),
ok.
maybe_delete_round_robin_count({Group, _Topic} = GroupTopic) ->
strategy(Group) =:= round_robin_per_group andalso
if_no_more_subscribers(GroupTopic, fun() ->
ets:delete(?SHARED_SUBS_ROUND_ROBIN_COUNTER, GroupTopic)
end),
ok.
if_no_more_subscribers(GroupTopic, Fn) ->
case ets:member(?SHARED_SUBS, GroupTopic) of
true -> ok;
false -> Fn()
end,
ok.
%% keep track of alive remote pids
maybe_insert_alive_tab(Pid) when ?IS_LOCAL_PID(Pid) -> ok;
maybe_insert_alive_tab(Pid) when is_pid(Pid) ->
ets:insert(?ALIVE_SUBS, {Pid}),
ok.
cleanup_down(SubPid) ->
?IS_LOCAL_PID(SubPid) orelse ets:delete(?ALIVE_SUBS, SubPid),
lists:foreach(
fun(Record = #emqx_shared_subscription{topic = Topic, group = Group}) ->
ok = mria:dirty_delete_object(?TAB, Record),
true = ets:delete_object(?SHARED_SUBS, {{Group, Topic}, SubPid}),
maybe_delete_round_robin_count({Group, Topic}),
delete_route_if_needed({Group, Topic})
end,
mnesia:dirty_match_object(#emqx_shared_subscription{_ = '_', subpid = SubPid})
).
update_stats(State) ->
emqx_stats:setstat(
'subscriptions.shared.count',
'subscriptions.shared.max',
ets:info(?TAB, size)
),
State.
%% Return 'true' if the subscriber process is alive AND not in the failed list
is_active_sub(Pid, FailedSubs, All) ->
lists:member(Pid, All) andalso
(not maps:is_key(Pid, FailedSubs)) andalso
is_alive_sub(Pid).
erlang : is_process_alive/1 does not work with remote pid .
is_alive_sub(Pid) when ?IS_LOCAL_PID(Pid) ->
erlang:is_process_alive(Pid);
is_alive_sub(Pid) ->
[] =/= ets:lookup(?ALIVE_SUBS, Pid).
delete_route_if_needed({Group, Topic} = GroupTopic) ->
if_no_more_subscribers(GroupTopic, fun() ->
ok = emqx_router:do_delete_route(Topic, {Group, node()})
end).
| null | https://raw.githubusercontent.com/emqx/emqx/dbc10c2eed3df314586c7b9ac6292083204f1f68/apps/emqx/src/emqx_shared_sub.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
APIs
for testing
gen_server callbacks
Internal exports (RPC)
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
API
--------------------------------------------------------------------
Failed to dispatch to this sub, try next.
Deadlock otherwise
return either 'ok' (when everything is fine) or 'error'
For QoS 0 message, send it as regular dispatch
Retry implies all subscribers nack:ed, send again without ack
the receive session may nack this message when its queue is full
@hidden Redispatch is neede only for the messages with redispatch_to header added.
As long as it's still a #message{} record in inflight,
we should try to re-dispatch
Note that dispatch is called with self() in failed subs
This is done to avoid dispatching back to caller
Self is terminating, it makes no sense to loop-back the dispatch
@hidden Return the `redispatch_to` group-topic in the message header.
`false` is returned if the message is not a shared dispatch.
or when it's a QoS 0 message.
@doc Negative ack dropped message due to inflight window or message queue being full.
@doc Negative ack message due to connection down.
Assuming this function is always called when ack is required
i.e is_ack_required returned true.
the old subscriber is still alive
keep using it for sticky strategy
stick to whatever pick result
than the current subscriber count.
Select ETS table to get all subscriber pids which are not down.
Select ETS table to get all subscriber pids.
--------------------------------------------------------------------
gen_server callbacks
--------------------------------------------------------------------
The subscriber may have subscribed multiple topics, so we need to keep monitoring the PID until
it `unsubscribed` the last topic.
The trick is we don't demonitor the subscriber here, and (after a long time) it will eventually
be disconnected.
--------------------------------------------------------------------
--------------------------------------------------------------------
keep track of alive remote pids
Return 'true' if the subscriber process is alive AND not in the failed list | Copyright ( c ) 2018 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(emqx_shared_sub).
-behaviour(gen_server).
-include("emqx.hrl").
-include("emqx_mqtt.hrl").
-include("logger.hrl").
-include("types.hrl").
Mnesia bootstrap
-export([mnesia/1]).
-boot_mnesia({mnesia, [boot]}).
-export([start_link/0]).
-export([
subscribe/3,
unsubscribe/3
]).
-export([
dispatch/3,
dispatch/4,
do_dispatch_with_ack/4,
redispatch/1
]).
-export([
maybe_ack/1,
maybe_nack_dropped/1,
nack_no_connection/1,
is_ack_required/1
]).
-ifdef(TEST).
-export([
subscribers/2,
strategy/1
]).
-endif.
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3
]).
-export([
init_monitors/0
]).
-export_type([strategy/0]).
-type strategy() ::
random
| round_robin
| round_robin_per_group
| sticky
| local
| hash_clientid
| hash_topic.
-define(SERVER, ?MODULE).
-define(TAB, emqx_shared_subscription).
-define(SHARED_SUBS_ROUND_ROBIN_COUNTER, emqx_shared_subscriber_round_robin_counter).
-define(SHARED_SUBS, emqx_shared_subscriber).
-define(ALIVE_SUBS, emqx_alive_shared_subscribers).
-define(SHARED_SUB_QOS1_DISPATCH_TIMEOUT_SECONDS, 5).
-define(IS_LOCAL_PID(Pid), (is_pid(Pid) andalso node(Pid) =:= node())).
-define(ACK, shared_sub_ack).
-define(NACK(Reason), {shared_sub_nack, Reason}).
-define(NO_ACK, no_ack).
-define(REDISPATCH_TO(GROUP, TOPIC), {GROUP, TOPIC}).
-define(SUBSCRIBER_DOWN, noproc).
-type redispatch_to() :: ?REDISPATCH_TO(emqx_topic:group(), emqx_topic:topic()).
-record(state, {pmon}).
-record(emqx_shared_subscription, {group, topic, subpid}).
Mnesia bootstrap
mnesia(boot) ->
ok = mria:create_table(?TAB, [
{type, bag},
{rlog_shard, ?SHARED_SUB_SHARD},
{storage, ram_copies},
{record_name, emqx_shared_subscription},
{attributes, record_info(fields, emqx_shared_subscription)}
]).
-spec start_link() -> startlink_ret().
start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
-spec subscribe(emqx_types:group(), emqx_types:topic(), pid()) -> ok.
subscribe(Group, Topic, SubPid) when is_pid(SubPid) ->
gen_server:call(?SERVER, {subscribe, Group, Topic, SubPid}).
-spec unsubscribe(emqx_types:group(), emqx_types:topic(), pid()) -> ok.
unsubscribe(Group, Topic, SubPid) when is_pid(SubPid) ->
gen_server:call(?SERVER, {unsubscribe, Group, Topic, SubPid}).
%% Build the table record for one shared subscription.
record(Group, Topic, SubPid) ->
    #emqx_shared_subscription{group = Group, topic = Topic, subpid = SubPid}.
%% Dispatch a delivery to one member of the shared group, starting with an
%% empty set of already-failed subscribers.
-spec dispatch(emqx_types:group(), emqx_types:topic(), emqx_types:delivery()) ->
    emqx_types:deliver_result().
dispatch(Group, Topic, Delivery) ->
    dispatch(Group, Topic, Delivery, _FailedSubs = #{}).
%% Pick a subscriber via the group's strategy and try to deliver; on failure,
%% record the pid in FailedSubs and recurse to try the next candidate.
%% Returns {ok, 1} on success or {error, no_subscribers} when exhausted.
dispatch(Group, Topic, Delivery = #delivery{message = Msg}, FailedSubs) ->
    #message{from = ClientId, topic = SourceTopic} = Msg,
    case pick(strategy(Group), ClientId, SourceTopic, Group, Topic, FailedSubs) of
        false ->
            {error, no_subscribers};
        {Type, SubPid} ->
            % Tag the message so a dying session can hand it back (QoS > 0).
            Msg1 = with_redispatch_to(Msg, Group, Topic),
            case do_dispatch(SubPid, Group, Topic, Msg1, Type) of
                ok ->
                    {ok, 1};
                {error, Reason} ->
                    % This subscriber failed; exclude it and retry.
                    dispatch(Group, Topic, Delivery, FailedSubs#{SubPid => Reason})
            end
    end.
%% Resolve the dispatch strategy for a shared group: a per-group override
%% wins; otherwise fall back to the global default.
-spec strategy(emqx_topic:group()) -> strategy().
strategy(Group) ->
    PerGroupKey = [broker, shared_subscription_group, Group, strategy],
    case emqx:get_config(PerGroupKey, undefined) of
        undefined ->
            emqx:get_config([broker, shared_subscription_strategy]);
        Configured ->
            Configured
    end.
%% Whether QoS1/2 shared dispatch should wait for an ack from the subscriber.
-spec ack_enabled() -> boolean().
ack_enabled() ->
    emqx:get_config([broker, shared_dispatch_ack_enabled]).
%% Deliver one message to a chosen subscriber.
%% - self-delivery is a plain send (no ack round-trip possible);
%% - QoS 0 and retries are fire-and-forget;
%% - a fresh QoS1/2 dispatch may wait for an ack when configured.
do_dispatch(SubPid, _Group, Topic, Msg, _Type) when SubPid =:= self() ->
    SubPid ! {deliver, Topic, Msg},
    ok;
do_dispatch(SubPid, _Group, Topic, #message{qos = ?QOS_0} = Msg, _Type) ->
    send(SubPid, Topic, {deliver, Topic, Msg});
do_dispatch(SubPid, _Group, Topic, Msg, retry) ->
    send(SubPid, Topic, {deliver, Topic, Msg});
do_dispatch(SubPid, Group, Topic, Msg, fresh) ->
    case ack_enabled() of
        true ->
            %% FIXME: replace with `emqx_shared_sub_proto:dispatch_with_ack' in 5.2
            do_dispatch_with_ack(SubPid, Group, Topic, Msg);
        false ->
            send(SubPid, Topic, {deliver, Topic, Msg})
    end.
%% Dispatch with an explicit ack round-trip: monitor the subscriber, send
%% the message tagged with {Group, self(), Ref}, then block until the
%% subscriber acks/nacks, dies, or the timeout fires (QoS2 waits forever).
-spec do_dispatch_with_ack(pid(), emqx_types:group(), emqx_types:topic(), emqx_types:message()) ->
    ok | {error, _}.
do_dispatch_with_ack(SubPid, Group, Topic, Msg) ->
    %% For QoS 1/2 message, expect an ack
    Ref = erlang:monitor(process, SubPid),
    Sender = self(),
    %% FIXME: replace with regular send in 5.2
    send(SubPid, Topic, {deliver, Topic, with_group_ack(Msg, Group, Sender, Ref)}),
    Timeout =
        case Msg#message.qos of
            ?QOS_2 -> infinity;
            _ -> timer:seconds(?SHARED_SUB_QOS1_DISPATCH_TIMEOUT_SECONDS)
        end,
    try
        receive
            {Ref, ?ACK} ->
                ok;
            {Ref, ?NACK(Reason)} ->
                {error, Reason};
            {'DOWN', Ref, process, SubPid, Reason} ->
                {error, Reason}
        after Timeout ->
            {error, timeout}
        end
    after
        % Always drop the monitor, even on timeout/exception.
        ok = emqx_pmon:demonitor(Ref)
    end.
%% Tag Msg so the receiving session knows whom to ack ({Group, Sender, Ref}).
with_group_ack(Msg, Group, Sender, Ref) ->
    emqx_message:set_headers(#{shared_dispatch_ack => {Group, Sender, Ref}}, Msg).
%% Clear the ack header once the message has been acknowledged.
-spec without_group_ack(emqx_types:message()) -> emqx_types:message().
without_group_ack(Msg) ->
    emqx_message:set_headers(#{shared_dispatch_ack => ?NO_ACK}, Msg).
%% Read the ack header; ?NO_ACK when the message needs no acknowledgement.
get_group_ack(Msg) ->
    emqx_message:get_header(shared_dispatch_ack, Msg, ?NO_ACK).
%% Tag QoS>0 messages with the {Group, Topic} to redispatch to if the chosen
%% session terminates; QoS 0 messages are never redispatched.
with_redispatch_to(#message{qos = ?QOS_0} = Msg, _Group, _Topic) ->
    Msg;
with_redispatch_to(Msg, Group, Topic) ->
    emqx_message:set_headers(#{redispatch_to => ?REDISPATCH_TO(Group, Topic)}, Msg).
%% True when the message carries a redispatch_to header, i.e. it was a shared
%% dispatch that should be retried with another group member.
is_redispatch_needed(#message{} = Msg) ->
    case get_redispatch_to(Msg) of
        ?REDISPATCH_TO(_, _) ->
            true;
        _ ->
            false
    end.
%% @doc Redispatch shared deliveries to other members in the group.
%% Filter out messages that need no redispatch, log how many remain, and
%% hand each one back to dispatch/4 excluding the current (dying) process.
redispatch(Messages0) ->
    case lists:filter(fun is_redispatch_needed/1, Messages0) of
        [] ->
            ok;
        Messages ->
            ?SLOG(info, #{
                msg => "redispatching_shared_subscription_message",
                count => length(Messages)
            }),
            lists:foreach(fun redispatch_shared_message/1, Messages)
    end.
%% Re-run the shared dispatch for one message, marking self() as a failed
%% subscriber so another group member is chosen.
redispatch_shared_message(#message{} = Msg) ->
    ?REDISPATCH_TO(Group, Topic) = get_redispatch_to(Msg),
    Delivery = #delivery{sender = self(), message = Msg},
    FailedSubs = #{self() => ?SUBSCRIBER_DOWN},
    dispatch(Group, Topic, Delivery, FailedSubs).
%% Read the redispatch_to header; false when absent.
-spec get_redispatch_to(emqx_types:message()) -> redispatch_to() | false.
get_redispatch_to(Msg) ->
    emqx_message:get_header(redispatch_to, Msg, false).
%% True when the message was dispatched expecting an explicit ack.
-spec is_ack_required(emqx_types:message()) -> boolean().
is_ack_required(Msg) -> ?NO_ACK =/= get_group_ack(Msg).
%% If the message expects an ack, nack it with reason `dropped' and return
%% true; returns false when no ack was expected.
-spec maybe_nack_dropped(emqx_types:message()) -> boolean().
maybe_nack_dropped(Msg) ->
    case get_group_ack(Msg) of
        ?NO_ACK -> false;
        {_Group, Sender, Ref} -> ok == nack(Sender, Ref, dropped)
    end.
%% Nack the dispatcher with reason `no_connection'; crashes if the message
%% carries no ack header (callers only invoke this for ack-required messages).
-spec nack_no_connection(emqx_types:message()) -> ok.
nack_no_connection(Msg) ->
    {_Group, Sender, Ref} = get_group_ack(Msg),
    nack(Sender, Ref, no_connection).
%% Send a negative acknowledgement back to the dispatching process.
-spec nack(pid(), reference(), dropped | no_connection) -> ok.
nack(Sender, Ref, Reason) ->
    Sender ! {Ref, ?NACK(Reason)},
    ok.
%% Ack the dispatcher (if an ack is expected) and strip the ack header from
%% the message before it is handed to the session.
-spec maybe_ack(emqx_types:message()) -> emqx_types:message().
maybe_ack(Msg) ->
    case get_group_ack(Msg) of
        ?NO_ACK ->
            Msg;
        {_Group, Sender, Ref} ->
            Sender ! {Ref, ?ACK},
            without_group_ack(Msg)
    end.
%% Pick a subscriber for a shared dispatch.
%% `sticky': keep delivering to the subscriber cached in the process
%% dictionary while it is alive and not failed; otherwise pick a random one
%% and remember it. All other strategies delegate to do_pick/7.
%% Returns {fresh | retry, SubPid} or false when no candidate exists.
pick(sticky, ClientId, SourceTopic, Group, Topic, FailedSubs) ->
    Sub0 = erlang:get({shared_sub_sticky, Group, Topic}),
    All = subscribers(Group, Topic, FailedSubs),
    case is_active_sub(Sub0, FailedSubs, All) of
        true ->
            {fresh, Sub0};
        false ->
            %% randomly pick one for the first message
            FailedSubs1 = FailedSubs#{Sub0 => ?SUBSCRIBER_DOWN},
            Res = do_pick(All, random, ClientId, SourceTopic, Group, Topic, FailedSubs1),
            case Res of
                {_, Sub} ->
                    % Remember the chosen pid for subsequent messages.
                    erlang:put({shared_sub_sticky, Group, Topic}, Sub);
                _ ->
                    ok
            end,
            Res
    end;
pick(Strategy, ClientId, SourceTopic, Group, Topic, FailedSubs) ->
    All = subscribers(Group, Topic, FailedSubs),
    do_pick(All, Strategy, ClientId, SourceTopic, Group, Topic, FailedSubs).
%% Apply Strategy to the candidate list. Prefer subscribers that have not
%% failed yet ({fresh, Pid}); if every candidate already failed, retry one
%% of them anyway ({retry, Pid}); false when there are no candidates at all.
do_pick([], _Strategy, _ClientId, _SourceTopic, _Group, _Topic, _FailedSubs) ->
    false;
do_pick(All, Strategy, ClientId, SourceTopic, Group, Topic, FailedSubs) ->
    case lists:filter(fun(Sub) -> not maps:is_key(Sub, FailedSubs) end, All) of
        [] ->
            %% All offline? pick one anyway
            {retry, pick_subscriber(Group, Topic, Strategy, ClientId, SourceTopic, All)};
        Subs ->
            %% More than one available
            {fresh, pick_subscriber(Group, Topic, Strategy, ClientId, SourceTopic, Subs)}
    end.
%% Select one pid from a non-empty candidate list. A single candidate wins
%% immediately; `local' prefers same-node pids (random among them, falling
%% back to random over all); anything else indexes via do_pick_subscriber/6.
pick_subscriber(_Group, _Topic, _Strategy, _ClientId, _SourceTopic, [Sub]) ->
    Sub;
pick_subscriber(Group, Topic, local, ClientId, SourceTopic, Subs) ->
    case lists:filter(fun(Pid) -> erlang:node(Pid) =:= node() end, Subs) of
        [_ | _] = LocalSubs ->
            pick_subscriber(Group, Topic, random, ClientId, SourceTopic, LocalSubs);
        [] ->
            pick_subscriber(Group, Topic, random, ClientId, SourceTopic, Subs)
    end;
pick_subscriber(Group, Topic, Strategy, ClientId, SourceTopic, Subs) ->
    Nth = do_pick_subscriber(Group, Topic, Strategy, ClientId, SourceTopic, length(Subs)),
    lists:nth(Nth, Subs).
%% Map a strategy to a 1-based index into the candidate list of size Count.
%% round_robin keeps its counter in the caller's process dictionary;
%% round_robin_per_group uses a shared ETS counter so all dispatchers on the
%% node rotate through the same sequence.
do_pick_subscriber(_Group, _Topic, random, _ClientId, _SourceTopic, Count) ->
    rand:uniform(Count);
do_pick_subscriber(_Group, _Topic, hash_clientid, ClientId, _SourceTopic, Count) ->
    1 + erlang:phash2(ClientId) rem Count;
do_pick_subscriber(_Group, _Topic, hash_topic, _ClientId, SourceTopic, Count) ->
    1 + erlang:phash2(SourceTopic) rem Count;
do_pick_subscriber(Group, Topic, round_robin, _ClientId, _SourceTopic, Count) ->
    Rem =
        case erlang:get({shared_sub_round_robin, Group, Topic}) of
            undefined -> rand:uniform(Count) - 1;
            N -> (N + 1) rem Count
        end,
    _ = erlang:put({shared_sub_round_robin, Group, Topic}, Rem),
    Rem + 1;
do_pick_subscriber(Group, Topic, round_robin_per_group, _ClientId, _SourceTopic, Count) ->
    %% reset the counter to 1 if counter > subscriber count to avoid the counter to grow larger
    %% if no counter for the given group topic exists - due to a configuration change - create a new one starting at 0
    ets:update_counter(?SHARED_SUBS_ROUND_ROBIN_COUNTER, {Group, Topic}, {2, 1, Count, 1}, {
        {Group, Topic}, 0
    }).
%% All subscribers of {Group, Topic} except those whose previous failure was
%% ?SUBSCRIBER_DOWN (other failure reasons keep the pid as a retry candidate).
subscribers(Group, Topic, FailedSubs) ->
    lists:filter(
        fun(P) ->
            ?SUBSCRIBER_DOWN =/= maps:get(P, FailedSubs, false)
        end,
        subscribers(Group, Topic)
    ).
%% All subscriber pids registered for {Group, Topic} in the shared table.
subscribers(Group, Topic) ->
    ets:select(?TAB, [{{emqx_shared_subscription, Group, Topic, '$1'}, [], ['$1']}]).
%% gen_server init: wait for the replicated table, subscribe to its mnesia
%% events, monitor every existing subscriber pid, and create the local ETS
%% bookkeeping tables.
init([]) ->
    ok = mria:wait_for_tables([?TAB]),
    {ok, _} = mnesia:subscribe({table, ?TAB, simple}),
    {atomic, PMon} = mria:transaction(?SHARED_SUB_SHARD, fun ?MODULE:init_monitors/0),
    ok = emqx_tables:new(?SHARED_SUBS, [protected, bag]),
    ok = emqx_tables:new(?ALIVE_SUBS, [protected, set, {read_concurrency, true}]),
    ok = emqx_tables:new(?SHARED_SUBS_ROUND_ROBIN_COUNTER, [public, set, {write_concurrency, true}]),
    {ok, update_stats(#state{pmon = PMon})}.
%% Fold over every stored shared subscription and monitor its subscriber pid;
%% run inside a transaction from init/1.
init_monitors() ->
    mnesia:foldl(
        fun(#emqx_shared_subscription{subpid = SubPid}, Mon) ->
            emqx_pmon:monitor(SubPid, Mon)
        end,
        emqx_pmon:new(),
        ?TAB
    ).
%% subscribe: persist the record, add the shared route on the first local
%% member, track remote/alive state and round-robin counters, monitor the pid.
handle_call({subscribe, Group, Topic, SubPid}, _From, State = #state{pmon = PMon}) ->
    mria:dirty_write(?TAB, record(Group, Topic, SubPid)),
    case ets:member(?SHARED_SUBS, {Group, Topic}) of
        true -> ok;
        false -> ok = emqx_router:do_add_route(Topic, {Group, node()})
    end,
    ok = maybe_insert_alive_tab(SubPid),
    ok = maybe_insert_round_robin_count({Group, Topic}),
    true = ets:insert(?SHARED_SUBS, {{Group, Topic}, SubPid}),
    {reply, ok, update_stats(State#state{pmon = emqx_pmon:monitor(SubPid, PMon)})};
%% unsubscribe: undo the above; the route is only removed when the last
%% local member is gone.
handle_call({unsubscribe, Group, Topic, SubPid}, _From, State) ->
    mria:dirty_delete_object(?TAB, record(Group, Topic, SubPid)),
    true = ets:delete_object(?SHARED_SUBS, {{Group, Topic}, SubPid}),
    delete_route_if_needed({Group, Topic}),
    maybe_delete_round_robin_count({Group, Topic}),
    {reply, ok, State};
handle_call(Req, _From, State) ->
    ?SLOG(error, #{msg => "unexpected_call", req => Req}),
    {reply, ignored, State}.
%% No casts are expected; log and ignore.
handle_cast(Msg, State) ->
    ?SLOG(error, #{msg => "unexpected_cast", req => Msg}),
    {noreply, State}.
%% Mnesia write events (possibly from remote nodes) start a monitor on the
%% new subscriber; 'DOWN' messages trigger full cleanup of that subscriber.
handle_info(
    {mnesia_table_event, {write, #emqx_shared_subscription{subpid = SubPid}, _}},
    State = #state{pmon = PMon}
) ->
    ok = maybe_insert_alive_tab(SubPid),
    {noreply, update_stats(State#state{pmon = emqx_pmon:monitor(SubPid, PMon)})};
%% handle_info({mnesia_table_event, {delete_object, OldRecord, _}}, State = #state{pmon = PMon}) ->
%%     #emqx_shared_subscription{subpid = SubPid} = OldRecord,
%%     {noreply, update_stats(State#state{pmon = emqx_pmon:demonitor(SubPid, PMon)})};
handle_info({mnesia_table_event, _Event}, State) ->
    {noreply, State};
handle_info({'DOWN', _MRef, process, SubPid, Reason}, State = #state{pmon = PMon}) ->
    ?SLOG(info, #{msg => "shared_subscriber_down", sub_pid => SubPid, reason => Reason}),
    cleanup_down(SubPid),
    {noreply, update_stats(State#state{pmon = emqx_pmon:erase(SubPid, PMon)})};
handle_info(_Info, State) ->
    {noreply, State}.
%% Stop receiving table events on shutdown.
terminate(_Reason, _State) ->
    mnesia:unsubscribe({table, ?TAB, simple}).
%% No state migration needed across releases.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% Internal functions
%% Deliver Msg to Pid: plain message send locally, RPC via the shared-sub
%% protocol for pids on other nodes.
send(Pid, Topic, Msg) ->
    Node = node(Pid),
    _ =
        case Node =:= node() of
            true ->
                Pid ! Msg;
            false ->
                emqx_shared_sub_proto_v1:send(Node, Pid, Topic, Msg)
        end,
    ok.
%% Seed the shared round-robin counter at 0, but only for groups using the
%% round_robin_per_group strategy.
maybe_insert_round_robin_count({Group, _Topic} = GroupTopic) ->
    strategy(Group) =:= round_robin_per_group andalso
        ets:insert(?SHARED_SUBS_ROUND_ROBIN_COUNTER, {GroupTopic, 0}),
    ok.
%% Drop the shared round-robin counter once the last subscriber of the
%% group/topic is gone (round_robin_per_group strategy only).
maybe_delete_round_robin_count({Group, _Topic} = GroupTopic) ->
    strategy(Group) =:= round_robin_per_group andalso
        if_no_more_subscribers(GroupTopic, fun() ->
            ets:delete(?SHARED_SUBS_ROUND_ROBIN_COUNTER, GroupTopic)
        end),
    ok.
%% Run Fn only when no local subscriber remains for GroupTopic.
if_no_more_subscribers(GroupTopic, Fn) ->
    case ets:member(?SHARED_SUBS, GroupTopic) of
        true -> ok;
        false -> Fn()
    end,
    ok.
%% Track remote subscriber pids in ?ALIVE_SUBS; local pids are checked with
%% is_process_alive/1 instead, so they are not stored.
maybe_insert_alive_tab(Pid) when ?IS_LOCAL_PID(Pid) -> ok;
maybe_insert_alive_tab(Pid) when is_pid(Pid) ->
    ets:insert(?ALIVE_SUBS, {Pid}),
    ok.
%% Remove every trace of a dead subscriber: alive table entry (remote pids
%% only), persisted records, local ETS entries, counters, and routes.
cleanup_down(SubPid) ->
    ?IS_LOCAL_PID(SubPid) orelse ets:delete(?ALIVE_SUBS, SubPid),
    lists:foreach(
        fun(Record = #emqx_shared_subscription{topic = Topic, group = Group}) ->
            ok = mria:dirty_delete_object(?TAB, Record),
            true = ets:delete_object(?SHARED_SUBS, {{Group, Topic}, SubPid}),
            maybe_delete_round_robin_count({Group, Topic}),
            delete_route_if_needed({Group, Topic})
        end,
        mnesia:dirty_match_object(#emqx_shared_subscription{_ = '_', subpid = SubPid})
    ).
%% Publish the current shared-subscription count to emqx_stats; returns the
%% state unchanged for pipelining.
update_stats(State) ->
    emqx_stats:setstat(
        'subscriptions.shared.count',
        'subscriptions.shared.max',
        ets:info(?TAB, size)
    ),
    State.
%% A pid is an active candidate when it is still registered, has not failed
%% during this dispatch, and is believed alive.
is_active_sub(Pid, FailedSubs, All) ->
    lists:member(Pid, All) andalso
        (not maps:is_key(Pid, FailedSubs)) andalso
        is_alive_sub(Pid).
%% erlang:is_process_alive/1 does not work with remote pid.
%% Liveness check: direct for local pids, via the ?ALIVE_SUBS table for
%% remote ones (populated from mnesia write events).
is_alive_sub(Pid) when ?IS_LOCAL_PID(Pid) ->
    erlang:is_process_alive(Pid);
is_alive_sub(Pid) ->
    [] =/= ets:lookup(?ALIVE_SUBS, Pid).
%% Drop the shared route for this node once no local subscriber remains.
delete_route_if_needed({Group, Topic} = GroupTopic) ->
    if_no_more_subscribers(GroupTopic, fun() ->
        ok = emqx_router:do_delete_route(Topic, {Group, node()})
    end).
|
e30deedd756661aa7320cc29758c27203300f8e8ca7c1accb95e3a03b15eb697 | Cyrik/omni-trace | omni_trace.cljc | (ns cyrik.omni-trace
(:require [cljs.test]
[cyrik.omni-trace.graph :as flame]
[cyrik.omni-trace.instrument :as i]
[cyrik.omni-trace.tree :as tree]
[net.cgrand.macrovich :as macros]
#?(:clj [cyrik.omni-trace.deep-trace :as deep])
#?(:clj [cyrik.omni-trace.util :as util])
#?(:clj [cljs.analyzer.api :as ana-api]))
#?(:cljs (:require-macros
[cyrik.omni-trace :refer [instrument-fn uninstrument-fn instrument-ns uninstrument-ns
run]])))
(defmacro instrument-fn
  "Instruments a single function for tracing.
   Call with a fully qualified quoted symbol, optionally with an options map:
   (instrument-fn 'cyrik.omni-trace.testing-ns/press-button)"
  ([sym]
   `(i/instrument-fn ~sym))
  ([sym opts]
   `(i/instrument-fn ~sym ~opts)))
(defmacro uninstrument-fn
  "Removes instrumentation from a single function.
   Call with a fully qualified quoted symbol, optionally with an options map:
   (uninstrument-fn 'cyrik.omni-trace.testing-ns/press-button)"
  ([sym]
   `(i/uninstrument-fn ~sym))
  ([sym opts]
   `(i/uninstrument-fn ~sym ~opts)))
(defmacro instrument-ns
  "Instruments all functions in the namespace.
   Call with a fully qualified quoted namespace, optionally with an options map:
   (instrument-ns 'cyrik.omni-trace.testing-ns)"
  ([sym]
   `(i/instrument-ns ~sym))
  ([sym opts]
   `(i/instrument-ns ~sym ~opts)))
(defmacro uninstrument-ns
  "Removes instrumentation from all functions in the namespace.
   Call with a fully qualified quoted namespace, optionally with an options map:
   (uninstrument-ns 'cyrik.omni-trace.testing-ns)"
  ([sym]
   `(i/uninstrument-ns ~sym))
  ([sym opts]
   `(i/uninstrument-ns ~sym ~opts)))
(defmacro trace
  "Instruments all functions in passed namespaces or symbols.
   syms can be a fully qualified symbol, a string or a var pointing to a namespace
   or a function. A vector of syms can also be passed.
   (trace ['cyrik.omni-trace.testing-ns])"
  ([syms]
   `(i/instrument ~syms))
  ([syms opts]
   `(i/instrument ~syms ~opts)))
(defmacro untrace
  "Removes instrumentation from passed namespaces or symbols.
   syms can be a fully qualified symbol, a string or a var pointing to a namespace
   or a function. A vector of syms can also be passed. With no arguments,
   removes all instrumentation.
   (untrace 'cyrik.omni-trace.testing-ns)"
  ([]
   `(i/uninstrument))
  ([syms]
   `(i/uninstrument ~syms))
  ([syms opts]
   `(i/uninstrument ~syms ~opts)))
(defn reset-workspace!
  "Resets the trace workspace (defaults to the global instrument workspace)."
  ([]
   (i/reset-workspace! i/workspace))
  ([workspace]
   (i/reset-workspace! workspace)))
(defn flamegraph
  "Builds a flamegraph visualization from the recorded trace data in the
   given workspace (defaults to the global instrument workspace)."
  ([]
   (flamegraph i/workspace))
  ([workspace]
   (flame/flamegraph (flame/flamedata @workspace))))
(defn rooted-flamegraph
  "Like flamegraph, but restricted to calls under the given root."
  ([root]
   (rooted-flamegraph root i/workspace))
  ([root workspace]
   (flame/flamegraph (flame/flamedata @workspace root))))
(defn last-call
  "Returns the most recent recorded invocation of `call` from the global
   workspace log."
  ([call]
   (tree/last-call call (:log @i/workspace))))
;; JVM-only functional variant of `run`: s is the symbol of the entry
;; function, args are its arguments; traces into the global workspace.
#?(:clj
   (defn run-traced [s & args]
     (apply #'deep/run-traced {:cyrik.omni-trace/workspace i/workspace} (into [s] args))))
(macros/deftime
  #?(:clj
     (defmacro run
       "Runs the form in traced mode. Does not work if the form starts with a macro."
       [form]
       ;; :clj - delegate entirely to deep/run-traced at runtime.
       ;; :cljs - at macroexpansion time, resolve the called fn, walk its
       ;; transitive dependencies via static analysis, and emit code that
       ;; instruments them, calls the fn, then uninstruments again.
       (macros/case :clj `(deep/run-traced {:cyrik.omni-trace/workspace i/workspace} (~util/->sym ~(first form)) ~@(rest form))
                    :cljs `(do
                             ~(let [fun (ana-api/resolve &env (first form))
                                    args (rest form)
                                    n (:ns fun)
                                    f (-> fun
                                          :name
                                          name
                                          symbol)
                                    ;; all fns reachable from the entry point, per clj-kondo analysis
                                    dep-list (deep/transitive-deps (deep/dependencies
                                                                    (deep/analysis ["dev" "src"]) :cljs)
                                                                   n f)
                                    sym-list (mapv #(symbol (name (first %)) (name (second %))) (filter first dep-list)) ;;fix nil namespaces
                                    instrumenters (mapv (fn [sym] `#(cyrik.omni-trace.instrument.cljs/cljs-instrument-fn '~sym {:cyrik.omni-trace/workspace cyrik.omni-trace.instrument/workspace} cyrik.omni-trace.instrument/instrumented)) sym-list)
                                    deinstrumenters (mapv (fn [sym] `#(cyrik.omni-trace.instrument.cljs/cljs-instrument-fn '~sym {:cyrik.omni-trace/workspace cyrik.omni-trace.instrument/workspace} cyrik.omni-trace.instrument/uninstrumented)) sym-list)]
                                `(let [_# (doseq [f# ~instrumenters]
                                            (f#))
                                       result# (apply ~(symbol (name n) (name f)) (list ~@args))
                                       _# (doseq [g# ~deinstrumenters]
                                            (g#))]
                                   result#)))))))
(comment
(require '[portal.api :as p])
(def portal (p/open))
(add-tap #'p/submit)
(filter #(and (= (:lang %) :cljs) (= (:from %) 'cyrik.omni-trace.testing-ns)) (:var-usages (deep/analysis ["dev" "src"])))
(require '[cyrik.cljs-macroexpand :as macro])
(clojure.walk/macroexpand-all '(run (+ 1 2)))
(macro/cljs-macroexpand-all '(run `(+ 1 2)))
(macroexpand '(run `(+ 1 2)))
(defn thing [a]
(inc a))
(defn thing2 [a b]
(+ a b))
(run `(thing (inc (inc 1))))
(macroexpand '(run `(thing (inc (inc 1)))))
(macro/cljs-macroexpand-all '(run `(thing (inc (inc 1)))))
(cyrik.omni-trace/run (thing2 1 2))
(macroexpand '(cyrik.omni-trace/run (thing2 1 2)))
(macro/cljs-macroexpand-all '(cyrik.omni-trace/run (thing2 1 2)))
) | null | https://raw.githubusercontent.com/Cyrik/omni-trace/977e32b10dd932e3ba0492254e00cfdd1a292e59/src/cyrik/omni_trace.cljc | clojure | fix nil namespaces | (ns cyrik.omni-trace
(:require [cljs.test]
[cyrik.omni-trace.graph :as flame]
[cyrik.omni-trace.instrument :as i]
[cyrik.omni-trace.tree :as tree]
[net.cgrand.macrovich :as macros]
#?(:clj [cyrik.omni-trace.deep-trace :as deep])
#?(:clj [cyrik.omni-trace.util :as util])
#?(:clj [cljs.analyzer.api :as ana-api]))
#?(:cljs (:require-macros
[cyrik.omni-trace :refer [instrument-fn uninstrument-fn instrument-ns uninstrument-ns
run]])))
(defmacro instrument-fn
"Instruments a function.
Call with fully qualified quoted symbol:
(instrument-fn 'cyrik.omni-trace.testing-ns/press-button)"
([sym]
`(i/instrument-fn ~sym))
([sym opts]
`(i/instrument-fn ~sym ~opts)))
(defmacro uninstrument-fn
"Instruments a function.
Call with fully qualified quoted symbol:
(uninstrument-fn 'cyrik.omni-trace.testing-ns/press-button)"
([sym]
`(i/uninstrument-fn ~sym))
([sym opts]
`(i/uninstrument-fn ~sym ~opts)))
(defmacro instrument-ns
"Instruments all functions in the namespace.
Call with fully qualified quoted namespace:
(instrument-ns 'cyrik.omni-trace.testing-ns)"
([sym]
`(i/instrument-ns ~sym))
([sym opts]
`(i/instrument-ns ~sym ~opts)))
(defmacro uninstrument-ns
"Removes instrumentation.
Call with fully qualified quoted namespace:
(uninstrument-ns 'cyrik.omni-trace.testing-ns)"
([sym]
`(i/uninstrument-ns ~sym))
([sym opts]
`(i/uninstrument-ns ~sym ~opts)))
(defmacro trace
"Instruments all functions in passed namespaces or symbols.
syms can be a fully qualified symbol, a string or a var pointing to a namespace
or a function. A vector of syms can also be passed.
(instrument ['cyrik.omni-trace.testing-ns])"
([syms]
`(i/instrument ~syms))
([syms opts]
`(i/instrument ~syms ~opts)))
(defmacro untrace
"Instruments all functions in passed namespaces or symbols.
syms can be a fully qualified symbol, a string or a var pointing to a namespace
or a function. A vector of syms can also be passed.
(uninstrument 'cyrik.omni-trace.testing-ns)"
([]
`(i/uninstrument))
([syms]
`(i/uninstrument ~syms))
([syms opts]
`(i/uninstrument ~syms ~opts)))
(defn reset-workspace!
([]
(i/reset-workspace! i/workspace))
([workspace]
(i/reset-workspace! workspace)))
(defn flamegraph
([]
(flamegraph i/workspace))
([workspace]
(flame/flamegraph (flame/flamedata @workspace))))
(defn rooted-flamegraph
([root]
(rooted-flamegraph root i/workspace))
([root workspace]
(flame/flamegraph (flame/flamedata @workspace root))))
(defn last-call
([call]
(tree/last-call call (:log @i/workspace))))
#?(:clj
(defn run-traced [s & args]
(apply #'deep/run-traced {:cyrik.omni-trace/workspace i/workspace} (into [s] args))))
(macros/deftime
#?(:clj
(defmacro run
"Runs the form in traced mode. Does not work if the form starts with a macro."
[form]
(macros/case :clj `(deep/run-traced {:cyrik.omni-trace/workspace i/workspace} (~util/->sym ~(first form)) ~@(rest form))
:cljs `(do
~(let [fun (ana-api/resolve &env (first form))
args (rest form)
n (:ns fun)
f (-> fun
:name
name
symbol)
dep-list (deep/transitive-deps (deep/dependencies
(deep/analysis ["dev" "src"]) :cljs)
n f)
instrumenters (mapv (fn [sym] `#(cyrik.omni-trace.instrument.cljs/cljs-instrument-fn '~sym {:cyrik.omni-trace/workspace cyrik.omni-trace.instrument/workspace} cyrik.omni-trace.instrument/instrumented)) sym-list)
deinstrumenters (mapv (fn [sym] `#(cyrik.omni-trace.instrument.cljs/cljs-instrument-fn '~sym {:cyrik.omni-trace/workspace cyrik.omni-trace.instrument/workspace} cyrik.omni-trace.instrument/uninstrumented)) sym-list)]
`(let [_# (doseq [f# ~instrumenters]
(f#))
result# (apply ~(symbol (name n) (name f)) (list ~@args))
_# (doseq [g# ~deinstrumenters]
(g#))]
result#)))))))
(comment
(require '[portal.api :as p])
(def portal (p/open))
(add-tap #'p/submit)
(filter #(and (= (:lang %) :cljs) (= (:from %) 'cyrik.omni-trace.testing-ns)) (:var-usages (deep/analysis ["dev" "src"])))
(require '[cyrik.cljs-macroexpand :as macro])
(clojure.walk/macroexpand-all '(run (+ 1 2)))
(macro/cljs-macroexpand-all '(run `(+ 1 2)))
(macroexpand '(run `(+ 1 2)))
(defn thing [a]
(inc a))
(defn thing2 [a b]
(+ a b))
(run `(thing (inc (inc 1))))
(macroexpand '(run `(thing (inc (inc 1)))))
(macro/cljs-macroexpand-all '(run `(thing (inc (inc 1)))))
(cyrik.omni-trace/run (thing2 1 2))
(macroexpand '(cyrik.omni-trace/run (thing2 1 2)))
(macro/cljs-macroexpand-all '(cyrik.omni-trace/run (thing2 1 2)))
) |
662c31ee69413ce4ac3588b3dde9871c2239903527b816f8356c95f424e052f5 | KonnexionsGmbH/oranif | cover_tests.erl | -module(cover_tests).
-include_lib("eunit/include/eunit.hrl").
%-------------------------------------------------------------------------------
% MACROs
%-------------------------------------------------------------------------------
% ODPI-C version the tests target.
-define(DPI_MAJOR_VERSION, 3).
-define(DPI_MINOR_VERSION, 0).
% Prepare, execute and close a statement in one expression; returns the
% execute result (or the caught error).
-define(EXEC_STMT(_Conn, _Sql),
    (fun() ->
        __Stmt = dpiCall(
            TestCtx, conn_prepareStmt, [_Conn, false, _Sql, <<>>]
        ),
        __StmtExecResult = (catch dpiCall(TestCtx, stmt_execute, [__Stmt, []])),
        catch dpiCall(TestCtx, stmt_close, [__Stmt, <<>>]),
        __StmtExecResult
    end)()
).
% Deliberately invalid argument values used to exercise NIF error paths.
-define(BAD_INT, -16#FFFFFFFFFFFFFFFF1).
-define(BAD_FLOAT, notEvenAFloatAtAll).
-define(BAD_REF, make_ref()).
% Wrap a group of tests taking the shared fixture context.
-define(W(_Tests), fun(__Ctx) -> _Tests end).
% Name a test after its function and close over the fixture context.
-define(F(__Fn), {??__Fn, fun() -> __Fn(__Ctx) end}).
% Assert an error exception matching {error, File, Line, _Error}.
-define(ASSERT_EX(_Error, _Expern),
    ?assertException(error, {error, _File, _Line, _Error}, _Expern)
).
%-------------------------------------------------------------------------------
% Context APIs
%-------------------------------------------------------------------------------
% context_create: bad-argument and bad-version error paths, then a
% successful create/destroy round trip.
contextCreate(TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve uint major from arg0",
        dpiCall(TestCtx, context_create, [?BAD_INT, ?DPI_MINOR_VERSION])
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint minor from arg1",
        dpiCall(TestCtx, context_create, [?DPI_MAJOR_VERSION, ?BAD_INT])
    ),
    % fails due to nonsense major version
    ?ASSERT_EX(
        #{message := "DPI-1020: version 1337.0 is not supported by ODPI-C"
            " library version 3.0"},
        dpiCall(TestCtx, context_create, [1337, ?DPI_MINOR_VERSION])
    ),
    Context = dpiCall(
        TestCtx, context_create, [?DPI_MAJOR_VERSION, ?DPI_MINOR_VERSION]
    ),
    ?assert(is_reference(Context)),
    dpiCall(TestCtx, context_destroy, [Context]).
% context_destroy: bad reference, successful destroy, and double-destroy.
contextDestroy(TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource context from arg0",
        dpiCall(TestCtx, context_destroy, [?BAD_REF])
    ),
    Context = dpiCall(
        TestCtx, context_create, [?DPI_MAJOR_VERSION, ?DPI_MINOR_VERSION]
    ),
    % destroy the context
    ?assertEqual(ok, dpiCall(TestCtx, context_destroy, [Context])),
    % try to destroy it again
    ?ASSERT_EX(
        #{message := "DPI-1002: invalid dpiContext handle"},
        dpiCall(TestCtx, context_destroy, [Context])
    ).
% context_getClientVersion: wrong-resource error paths, then verify the
% returned version map fields are integers.
contextGetClientVersion(TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource context from arg0",
        dpiCall(TestCtx, context_getClientVersion, [?BAD_REF])
    ),
    % fails due to a wrong handle being passed
    BindData = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve resource context from arg0",
        dpiCall(TestCtx, context_getClientVersion, [BindData])
    ),
    dpiCall(TestCtx, data_release, [BindData]),
    Context = dpiCall(
        TestCtx, context_create, [?DPI_MAJOR_VERSION, ?DPI_MINOR_VERSION]
    ),
    #{releaseNum := CRNum, versionNum := CVNum, fullVersionNum := CFNum} =
        dpiCall(TestCtx, context_getClientVersion, [Context]),
    ?assert(is_integer(CRNum)),
    ?assert(is_integer(CVNum)),
    ?assert(is_integer(CFNum)),
    dpiCall(TestCtx, context_destroy, [Context]).
%-------------------------------------------------------------------------------
% Connection APIs
%-------------------------------------------------------------------------------
% conn_create: one error path per argument position, a bad-credentials
% failure, then a successful connect and clean close.
connCreate(TestCtx) ->
    #{tns := Tns, user := User, password := Password} = getConfig(),
    CP = #{encoding => "AL32UTF8", nencoding => "AL32UTF8"},
    ?ASSERT_EX(
        "Unable to retrieve resource context from arg0",
        dpiCall(TestCtx, conn_create, [?BAD_REF, User, Password, Tns, CP, #{}])
    ),
    Context = dpiCall(
        TestCtx, context_create, [?DPI_MAJOR_VERSION, ?DPI_MINOR_VERSION]
    ),
    ?ASSERT_EX(
        "Unable to retrieve string/binary userName from arg1",
        dpiCall(TestCtx, conn_create, [Context, badBin, Password, Tns, CP, #{}])
    ),
    ?ASSERT_EX(
        "Unable to retrieve string/binary password from arg2",
        dpiCall(TestCtx, conn_create, [Context, User, badBin, Tns, CP, #{}])
    ),
    ?ASSERT_EX(
        "Unable to retrieve string/binary connectString from arg3",
        dpiCall(
            TestCtx, conn_create, [Context, User, Password, badBin, CP, #{}]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve map commonParams from arg4",
        dpiCall(
            TestCtx, conn_create, [Context, User, Password, Tns, badMap, #{}]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve string",
        dpiCall(
            TestCtx, conn_create,
            [Context, User, Password, Tns, CP#{encoding => badList}, #{}]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve string",
        dpiCall(
            TestCtx, conn_create,
            [Context, User, Password, Tns, CP#{nencoding => badList}, #{}]
        )
    ),
    ?ASSERT_EX(
        #{message := "ORA-01017: invalid username/password; logon denied"},
        dpiCall(TestCtx, conn_create, [Context, <<"C">>, <<"N">>, Tns, CP, #{}])
    ),
    Conn = dpiCall(
        TestCtx, conn_create, [Context, User, Password, Tns, CP, #{}]
    ),
    ?assert(is_reference(Conn)),
    dpiCall(TestCtx, conn_close, [Conn, [], <<>>]),
    dpiCall(TestCtx, context_destroy, [Context]).
% conn_prepareStmt: per-argument error paths, empty-SQL failure, then a
% successful prepare and close.
connPrepareStmt(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource connection from arg0",
        dpiCall(
            TestCtx, conn_prepareStmt, [?BAD_REF, false, <<"miau">>, <<>>]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve bool/atom scrollable from arg1",
        dpiCall(
            TestCtx, conn_prepareStmt, [Conn, "badAtom", <<"miau">>, <<>>]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve binary/string sql from arg2",
        dpiCall(TestCtx, conn_prepareStmt, [Conn, false, badBinary, <<>>])
    ),
    ?ASSERT_EX(
        "Unable to retrieve binary/string tag from arg3",
        dpiCall(
            TestCtx, conn_prepareStmt, [Conn, false, <<"miau">>, badBinary]
        )
    ),
    % fails due to both SQL and Tag being empty
    ?ASSERT_EX(
        #{message := "ORA-24373: invalid length specified for statement"},
        dpiCall(TestCtx, conn_prepareStmt, [Conn, false, <<>>, <<>>])
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt, [Conn, false, <<"miau">>, <<"foo">>]
    ),
    ?assert(is_reference(Stmt)),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
% conn_newVar: one error path per argument position, the zero-array-size
% failure, then a successful var creation; releases all produced resources.
connNewVar(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource connection from arg0",
        dpiCall(
            TestCtx, conn_newVar,
            [
                ?BAD_REF, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE',
                'DPI_NATIVE_TYPE_DOUBLE', 100, 0, false, false, null
            ]
        )
    ),
    ?ASSERT_EX(
        "wrong or unsupported dpiOracleType type",
        dpiCall(
            TestCtx, conn_newVar,
            [
                Conn, 'BAD_DPI_ORACLE_TYPE', 'DPI_NATIVE_TYPE_DOUBLE', 100, 0,
                false, false, null
            ]
        )
    ),
    ?ASSERT_EX(
        "wrong or unsupported dpiNativeType type",
        dpiCall(
            TestCtx, conn_newVar,
            [
                Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'BAD_DPI_NATIVE_TYPE', 100, 0, false, false, null
            ]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint size from arg3",
        dpiCall(
            TestCtx, conn_newVar,
            [
                Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
                ?BAD_INT, 0, false, false, null
            ]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint size from arg4",
        dpiCall(
            TestCtx, conn_newVar,
            [
                Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
                100, ?BAD_INT, false, false, null
            ]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve atom sizeIsBytes from arg5",
        dpiCall(
            TestCtx, conn_newVar,
            [
                Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
                100, 0, "badAtom", false, null
            ]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve atom isArray from arg6",
        dpiCall(
            TestCtx, conn_newVar,
            [
                Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
                100, 0, false, "badAtom", null
            ]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve atom objType from arg7",
        dpiCall(
            TestCtx, conn_newVar,
            [
                Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
                100, 0, false, false, "badAtom"
            ]
        )
    ),
    % fails due to array size being 0
    ?ASSERT_EX(
        #{message := "DPI-1031: array size cannot be zero"},
        dpiCall(
            TestCtx, conn_newVar,
            [
                Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
                0, 0, false, false, null
            ]
        )
    ),
    #{var := Var, data := Data} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
            100, 0, false, false, null
        ]
    ),
    ?assert(is_reference(Var)),
    ?assert(is_list(Data)),
    [FirstData | _] = Data,
    ?assert(is_reference(FirstData)),
    [dpiCall(TestCtx, data_release, [X]) || X <- Data],
    dpiCall(TestCtx, var_release, [Var]).
% conn_commit: bad-reference error paths, then a successful commit.
connCommit(#{context := Context, session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource connection from arg0",
        dpiCall(TestCtx, conn_commit, [?BAD_REF])
    ),
    % fails due to the reference being wrong
    ?ASSERT_EX(
        "Unable to retrieve resource connection from arg0",
        dpiCall(TestCtx, conn_commit, [Context])
    ),
    Result = dpiCall(TestCtx, conn_commit, [Conn]),
    ?assertEqual(ok, Result).
% conn_rollback: bad-reference error paths, then a successful rollback.
connRollback(#{context := Context, session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource connection from arg0",
        dpiCall(TestCtx, conn_rollback, [?BAD_REF])
    ),
    % fails due to the reference being wrong
    ?ASSERT_EX(
        "Unable to retrieve resource connection from arg0",
        dpiCall(TestCtx, conn_rollback, [Context])
    ),
    Result = dpiCall(TestCtx, conn_rollback, [Conn]),
    ?assertEqual(ok, Result).
% conn_ping: bad-reference error paths, then a successful ping.
connPing(#{context := Context, session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource connection from arg0",
        dpiCall(TestCtx, conn_ping, [?BAD_REF])
    ),
    % fails due to the reference being wrong
    ?ASSERT_EX(
        "Unable to retrieve resource connection from arg0",
        dpiCall(TestCtx, conn_ping, [Context])
    ),
    Result = dpiCall(TestCtx, conn_ping, [Conn]),
    ?assertEqual(ok, Result).
% conn_close: per-argument error paths, then close a freshly created
% connection with the default mode.
connClose(#{context := Context, session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource connection from arg0",
        dpiCall(TestCtx, conn_close, [?BAD_REF, [], <<>>])
    ),
    ?ASSERT_EX(
        "Unable to retrieve atom list modes, not a list from arg1",
        dpiCall(TestCtx, conn_close, [Conn, badList, <<>>])
    ),
    ?ASSERT_EX(
        "Unable to retrieve mode list value from arg1",
        dpiCall(TestCtx, conn_close, [Conn, ["badAtom"], <<>>])
    ),
    ?ASSERT_EX(
        "Unable to retrieve DPI_MODE atom from arg1",
        dpiCall(TestCtx, conn_close, [Conn, [wrongAtom], <<>>])
    ),
    ?ASSERT_EX(
        "Unable to retrieve binary/string tag from arg2",
        dpiCall(TestCtx, conn_close, [Conn, [], badBinary])
    ),
    % fails due to the reference being wrong
    ?ASSERT_EX(
        "Unable to retrieve resource connection from arg0",
        dpiCall(TestCtx, conn_close, [Context, [], <<>>])
    ),
    #{tns := Tns, user := User, password := Password} = getConfig(),
    Conn1 = dpiCall(
        TestCtx, conn_create,
        [
            Context, User, Password, Tns,
            #{encoding => "AL32UTF8", nencoding => "AL32UTF8"}, #{}
        ]
    ),
    % the other two close modes don't work without a session pool
    Result = dpiCall(
        TestCtx, conn_close, [Conn1, ['DPI_MODE_CONN_CLOSE_DEFAULT'], <<>>]
    ),
    ?assertEqual(ok, Result).
% conn_getServerVersion: bad-reference error path, then verify the shape
% of the returned version map.
connGetServerVersion(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource connection from arg0",
        dpiCall(TestCtx, conn_getServerVersion, [?BAD_REF])
    ),
    #{
        releaseNum := ReleaseNum, versionNum := VersionNum,
        fullVersionNum := FullVersionNum, portReleaseNum := PortReleaseNum,
        portUpdateNum := PortUpdateNum, releaseString := ReleaseString
    } = dpiCall(TestCtx, conn_getServerVersion, [Conn]),
    ?assert(is_integer(ReleaseNum)),
    ?assert(is_integer(VersionNum)),
    ?assert(is_integer(FullVersionNum)),
    ?assert(is_integer(PortReleaseNum)),
    ?assert(is_integer(PortUpdateNum)),
    ?assert(is_list(ReleaseString)).
% conn_setClientIdentifier: bad connection ref and non-binary identifier are
% rejected; setting a binary identifier on a live connection returns ok.
connSetClientIdentifier(#{session := Conn} = TestCtx) ->
    Ident = <<"myCoolConn">>,
    ?ASSERT_EX(
        "Unable to retrieve resource connection from arg0",
        dpiCall(TestCtx, conn_setClientIdentifier, [?BAD_REF, Ident])
    ),
    ?ASSERT_EX(
        "Unable to retrieve string/binary value from arg1",
        dpiCall(TestCtx, conn_setClientIdentifier, [Conn, badBinary])
    ),
    Result = dpiCall(TestCtx, conn_setClientIdentifier, [Conn, Ident]),
    ?assertEqual(ok, Result).
%-------------------------------------------------------------------------------
% Statement APIs
%-------------------------------------------------------------------------------
% stmt_executeMany / var_getReturnedData: after argument validation, inserts
% 10 rows of random varchar data through bound vars in one executeMany call
% and reads the returned rowid of every row back via var_getReturnedData.
stmtExecuteMany_varGetReturnedData(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_executeMany, [?BAD_REF, [], 0])
    ),
    % recreate the scratch table; drop errors are ignored (table may not exist)
    StmtDrop = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"drop table oranif_test">>, <<>>]
    ),
    catch dpiCall(TestCtx, stmt_execute, [StmtDrop, []]),
    catch dpiCall(TestCtx, stmt_close, [StmtDrop, <<>>]),
    StmtCreate = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"create table oranif_test (col1 varchar2(100))">>, <<>>]
    ),
    0 = dpiCall(TestCtx, stmt_execute, [StmtCreate, []]),
    ok = dpiCall(TestCtx, stmt_close, [StmtCreate, <<>>]),
    % insert statement returning the generated rowid into a bind variable
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [
            Conn, false,
            <<
                "insert into oranif_test values(:col1)"
                " returning rowid into :rid"
            >>,
            <<>>
        ]
    ),
    ?ASSERT_EX(
        "Unable to retrieve list of atoms from arg1",
        dpiCall(TestCtx, stmt_executeMany, [Stmt, badList, 0])
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint32 numIters from arg2",
        dpiCall(TestCtx, stmt_executeMany, [Stmt, [], ?BAD_INT])
    ),
    ?ASSERT_EX(
        "mode must be a list of atoms",
        dpiCall(TestCtx, stmt_executeMany, [Stmt, ["badAtom"], 0])
    ),
    % input var for col1 and output var capturing the returned rowids
    #{var := Var} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_VARCHAR', 'DPI_NATIVE_TYPE_BYTES', 10,
            10, true, false, null
        ]
    ),
    #{var := VarRowId} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_ROWID', 'DPI_NATIVE_TYPE_ROWID',
            10, 0, false, false, null
        ]
    ),
    dpiCall(TestCtx, stmt_bindByName, [Stmt, <<"col1">>, Var]),
    dpiCall(TestCtx, stmt_bindByName, [Stmt, <<"rid">>, VarRowId]),
    % fill each of the 10 array positions with a random 10-char string
    % drawn from the printable range $0..$z; seeded for reproducibility
    Data = lists:seq($0, $z),
    DataLen = length(Data),
    Indices = lists:seq(0, 9),
    rand:seed(exsplus, {0, 0, 0}),
    [dpiCall(
        TestCtx, var_setFromBytes,
        [
            Var, Idx,
            << <<(lists:nth(rand:uniform(DataLen), Data))>>
                || _ <- lists:seq(1, 10) >>
        ]
    ) || Idx <- Indices],
    ?assertEqual(
        ok,
        dpiCall(
            TestCtx, stmt_executeMany,
            [Stmt, ['DPI_MODE_EXEC_COMMIT_ON_SUCCESS'], 10]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource var from arg0",
        dpiCall(TestCtx, var_getReturnedData, [?BAD_REF, 0])
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(TestCtx, var_getReturnedData, [VarRowId, ?BAD_INT])
    ),
    % each insert returned exactly one non-empty rowid
    [begin
        Result = dpiCall(TestCtx, var_getReturnedData, [VarRowId, Idx]),
        ?assertMatch(#{numElements := 1, data := [_]}, Result),
        [D] = maps:get(data, Result),
        ?assert(byte_size(dpiCall(TestCtx, data_get, [D])) > 0)
    end || Idx <- Indices],
    dpiCall(TestCtx, var_release, [Var]),
    dpiCall(TestCtx, var_release, [VarRowId]),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
% stmt_execute: rejects bad statement refs and bad mode arguments; for a
% one-column query it returns 1 on success; invalid SQL raises ORA-00900.
stmtExecute(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_execute, [?BAD_REF, []])
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1 from dual">>, <<>>]
    ),
    ?ASSERT_EX(
        "Unable to retrieve list of atoms from arg1",
        dpiCall(TestCtx, stmt_execute, [Stmt, badList])
    ),
    ?ASSERT_EX(
        "mode must be a list of atoms",
        dpiCall(TestCtx, stmt_execute, [Stmt, ["badAtom"]])
    ),
    % the query selects a single column, so execute returns 1
    ?assertEqual(
        1,
        dpiCall(
            TestCtx, stmt_execute, [Stmt, ['DPI_MODE_EXEC_DEFAULT']]
        )
    ),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]),
    % fails due to the SQL being invalid
    Stmt1 = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"all your base are belong to us">>, <<>>]
    ),
    ?ASSERT_EX(
        #{message := "ORA-00900: invalid SQL statement"},
        dpiCall(TestCtx, stmt_execute, [Stmt1, []])
    ),
    dpiCall(TestCtx, stmt_close, [Stmt1, <<>>]).
% stmt_fetch: rejects a bad ref and a ref of the wrong resource type, then
% fetches one row and checks the shape of the result map.
stmtFetch(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_fetch, [?BAD_REF])
    ),
    % fails due to the reference being of the wrong type
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_fetch, [Conn])
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1337 from dual">>, <<>>]
    ),
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    #{found := Found, bufferRowIndex := BufferRowIndex} =
        dpiCall(TestCtx, stmt_fetch, [Stmt]),
    ?assert(is_atom(Found)),
    ?assert(is_integer(BufferRowIndex)),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
% stmt_fetchRows: rejects bad/wrong-type refs and a bad maxRows value, then
% fetches up to one row and checks the shape of the result map.
stmtFetchRows(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_fetchRows, [?BAD_REF, 1])
    ),
    % fails due to the reference being of the wrong type
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_fetchRows, [Conn, 1])
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1337 from dual">>, <<>>]
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint maxRows from arg1",
        dpiCall(TestCtx, stmt_fetchRows, [Stmt, ?BAD_INT])
    ),
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    #{
        numRowsFetched := NumRowsFetched,
        bufferRowIndex := BufferRowIndex,
        moreRows := MoreRows
    } = dpiCall(TestCtx, stmt_fetchRows, [Stmt, 1]),
    ?assert(is_atom(MoreRows)),
    ?assert(is_integer(BufferRowIndex)),
    ?assert(is_integer(NumRowsFetched)),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
% stmt_setFetchArraySize: rejects bad/wrong-type refs and a bad array size;
% setting a valid size returns an atom (ok).
stmtSetFetchArraySize(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_setFetchArraySize, [?BAD_REF, 1])
    ),
    % fails due to the reference being of the wrong type
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_setFetchArraySize, [Conn, 1])
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1337 from dual">>, <<>>]
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint arraySize from arg1",
        dpiCall(TestCtx, stmt_setFetchArraySize, [Stmt, ?BAD_INT])
    ),
    Ok = dpiCall(TestCtx, stmt_setFetchArraySize, [Stmt, 1]),
    ?assert(is_atom(Ok)),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
% stmt_getQueryValue: rejects bad refs and positions, raises DPI-1029 when
% called before any fetch, and after a fetch returns a type atom plus a data
% resource reference which must be released by the caller.
stmtGetQueryValue(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_getQueryValue, [?BAD_REF, 1])
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1337 from dual">>, <<>>]
    ),
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(TestCtx, stmt_getQueryValue, [Stmt, ?BAD_INT])
    ),
    % fails due to the fetch not being done
    ?ASSERT_EX(
        #{message := "DPI-1029: no row currently fetched"},
        dpiCall(TestCtx, stmt_getQueryValue, [Stmt, 1])
    ),
    dpiCall(TestCtx, stmt_fetch, [Stmt]),
    #{nativeTypeNum := Type, data := Result} = dpiCall(
        TestCtx, stmt_getQueryValue, [Stmt, 1]
    ),
    ?assert(is_atom(Type)),
    ?assert(is_reference(Result)),
    dpiCall(TestCtx, data_release, [Result]),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
% stmt_getQueryInfo: rejects bad refs and positions, then checks every field
% of the column-metadata map for column 1 of an executed query.
stmtGetQueryInfo(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_getQueryInfo, [?BAD_REF, 1])
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1 from dual">>, <<>>]
    ),
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(TestCtx, stmt_getQueryInfo, [Stmt, ?BAD_INT])
    ),
    #{
        name := Name, nullOk := NullOk, typeInfo := #{
            clientSizeInBytes := ClientSizeInBytes,
            dbSizeInBytes := DbSizeInBytes,
            defaultNativeTypeNum := DefaultNativeTypeNum,
            fsPrecision := FsPrecision,
            objectType := ObjectType, ociTypeCode := OciTypeCode,
            oracleTypeNum := OracleTypeNum , precision := Precision,
            scale := Scale, sizeInChars := SizeInChars
        }
    } = dpiCall(TestCtx, stmt_getQueryInfo, [Stmt, 1]),
    ?assert(is_list(Name)),
    ?assert(is_atom(NullOk)),
    ?assert(is_integer(ClientSizeInBytes)),
    ?assert(is_integer(DbSizeInBytes)),
    ?assert(is_atom(DefaultNativeTypeNum)),
    ?assert(is_integer(FsPrecision)),
    ?assert(is_atom(ObjectType)),
    ?assert(is_integer(OciTypeCode)),
    ?assert(is_atom(OracleTypeNum)),
    ?assert(is_integer(Precision)),
    ?assert(is_integer(Scale)),
    ?assert(is_integer(SizeInChars)),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
% stmt_getInfo: rejects bad/wrong-type refs, then for each SQL text in the
% table below prepares a statement and verifies the boolean flags plus the
% expected statementType classification.
stmtGetInfo(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_getInfo, [?BAD_REF])
    ),
    % fails due to the ref being wrong
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_getInfo, [Conn])
    ),
    lists:foreach(
        fun({Match, StmtStr}) ->
            Stmt = dpiCall(
                TestCtx, conn_prepareStmt, [Conn, false, StmtStr, <<>>]
            ),
            #{
                isDDL := IsDDL, isDML := IsDML,
                isPLSQL := IsPLSQL, isQuery := IsQuery,
                isReturning := IsReturning, statementType := StatementType
            } = dpiCall(TestCtx, stmt_getInfo, [Stmt]),
            dpiCall(TestCtx, stmt_close, [Stmt, <<>>]),
            ?assert(is_boolean(IsDDL)),
            ?assert(is_boolean(IsDML)),
            ?assert(is_boolean(IsPLSQL)),
            ?assert(is_boolean(IsQuery)),
            ?assert(is_boolean(IsReturning)),
            ?assertEqual(Match, StatementType)
        end,
        % {expected statementType, SQL text} pairs; the SQL is only prepared,
        % never executed, so the texts need not reference real objects
        [
            {'DPI_STMT_TYPE_UNKNOWN', <<"another one bites the dust">>},
            {'DPI_STMT_TYPE_SELECT', <<"select 2 from dual">>},
            {'DPI_STMT_TYPE_UPDATE', <<"update a set b = 5 where c = 3">>},
            {'DPI_STMT_TYPE_DELETE', <<"delete from a where b = 5">>},
            {'DPI_STMT_TYPE_INSERT', <<"insert into a (b) values (5)">>},
            {'DPI_STMT_TYPE_CREATE', <<"create table a (b int)">>},
            {'DPI_STMT_TYPE_DROP', <<"drop table students">>},
            {'DPI_STMT_TYPE_ALTER', <<"alter table a add b int">>},
            {'DPI_STMT_TYPE_BEGIN', <<"begin null end">>},
            {'DPI_STMT_TYPE_DECLARE', <<"declare mambo number(5)">>},
            {'DPI_STMT_TYPE_CALL', <<"call a.b(c)">>},
            {'DPI_STMT_TYPE_MERGE', <<"MERGE INTO a USING b ON (1 = 1)">>},
            {'DPI_STMT_TYPE_EXPLAIN_PLAN', <<"EXPLAIN">>},
            {'DPI_STMT_TYPE_COMMIT', <<"commit">>},
            {'DPI_STMT_TYPE_ROLLBACK', <<"rollback">>}
        ]
    ).
% stmt_getNumQueryColumns: rejects a bad ref, returns an integer for an open
% statement, and raises DPI-1039 once the statement has been closed.
stmtGetNumQueryColumns(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_getNumQueryColumns, [?BAD_REF])
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1337 from dual">>, <<>>]
    ),
    Count = dpiCall(TestCtx, stmt_getNumQueryColumns, [Stmt]),
    ?assert(is_integer(Count)),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]),
    % fails due to the statement already released
    ?ASSERT_EX(
        #{message := "DPI-1039: statement was already closed"},
        dpiCall(TestCtx, stmt_getNumQueryColumns, [Stmt])
    ).
% stmt_bindValueByPos: validates statement ref, position, native-type atom
% and data ref; binding position 1 with a valid data handle returns ok.
stmtBindValueByPos(#{session := Conn} = TestCtx) ->
    BindData = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(
            TestCtx, stmt_bindValueByPos,
            [?BAD_REF, 1, 'DPI_NATIVE_TYPE_INT64', BindData]
        )
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"insert into dual values (:A)">>, <<>>]
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(
            TestCtx, stmt_bindValueByPos,
            [Stmt, ?BAD_INT, 'DPI_NATIVE_TYPE_INT64', BindData]
        )
    ),
    ?ASSERT_EX(
        "wrong or unsupported dpiNativeType type",
        dpiCall(TestCtx, stmt_bindValueByPos, [Stmt, 1, "badAtom", BindData])
    ),
    ?assertEqual(ok,
        dpiCall(
            TestCtx, stmt_bindValueByPos,
            [Stmt, 1, 'DPI_NATIVE_TYPE_INT64', BindData]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource data from arg3",
        dpiCall(
            TestCtx, stmt_bindValueByPos,
            [Stmt, 1, 'DPI_NATIVE_TYPE_INT64', ?BAD_REF]
        )
    ),
    % fails due to the position being invalid (negative is not a uint)
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(
            TestCtx, stmt_bindValueByPos,
            [Stmt, -1, 'DPI_NATIVE_TYPE_INT64', BindData]
        )
    ),
    dpiCall(TestCtx, data_release, [BindData]),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
% stmt_bindValueByName: validates statement ref, bind-name argument,
% native-type atom and data ref; binding name "A" with valid data returns ok.
stmtBindValueByName(#{session := Conn} = TestCtx) ->
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"insert into dual values (:A)">>, <<>>]
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource data from arg3",
        dpiCall(
            TestCtx, stmt_bindValueByName,
            [Stmt, <<"A">>, 'DPI_NATIVE_TYPE_INT64', ?BAD_REF]
        )
    ),
    BindData = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(
            TestCtx, stmt_bindValueByName,
            [?BAD_REF, <<"A">>, 'DPI_NATIVE_TYPE_INT64', BindData]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve string/list name from arg1",
        dpiCall(
            TestCtx, stmt_bindValueByName,
            [Stmt, ?BAD_INT, 'DPI_NATIVE_TYPE_INT64', BindData]
        )
    ),
    ?ASSERT_EX(
        "wrong or unsupported dpiNativeType type",
        dpiCall(
            TestCtx, stmt_bindValueByName, [Stmt, <<"A">>, "badAtom", BindData]
        )
    ),
    % fails due to bad data handle passing
    ?ASSERT_EX(
        "Unable to retrieve resource data from arg3",
        dpiCall(
            TestCtx, stmt_bindValueByName,
            [Stmt, <<"A">>, 'DPI_NATIVE_TYPE_INT64', ?BAD_REF]
        )
    ),
    ?assertEqual(ok,
        dpiCall(
            TestCtx, stmt_bindValueByName,
            [Stmt, <<"A">>, 'DPI_NATIVE_TYPE_INT64', BindData]
        )
    ),
    dpiCall(TestCtx, data_release, [BindData]),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
% stmt_bindByPos: validates statement ref, position and var ref; binding a
% freshly created int64 var at position 1 returns ok. Data handles of the
% var are released before the var itself.
stmtBindByPos(#{session := Conn} = TestCtx) ->
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"insert into dual values (:A)">>, <<>>]
    ),
    #{var := Var, data := Data} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_NATIVE_INT', 'DPI_NATIVE_TYPE_INT64', 100,
            0, false, false, null
        ]
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_bindByPos, [?BAD_REF, 1, Var])
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(TestCtx, stmt_bindByPos, [Stmt, ?BAD_INT, Var])
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource var from arg3",
        dpiCall(TestCtx, stmt_bindByPos, [Stmt, 1, ?BAD_REF])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, stmt_bindByPos, [Stmt, 1, Var])),
    [dpiCall(TestCtx, data_release, [X]) || X <- Data],
    dpiCall(TestCtx, var_release, [Var]),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
% stmt_bindByName: validates statement ref, name and var ref; a name that is
% not a bind variable of the statement raises ORA-01036; binding "A" works.
stmtBindByName(#{session := Conn} = TestCtx) ->
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"insert into dual values (:A)">>, <<>>]
    ),
    #{var := Var, data := Data} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_NATIVE_INT', 'DPI_NATIVE_TYPE_INT64', 100,
            0, false, false, null
        ]
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_bindByName, [?BAD_REF, <<"A">>, Var])
    ),
    ?ASSERT_EX(
        "Unable to retrieve string/list name from arg1",
        dpiCall(TestCtx, stmt_bindByName, [Stmt, badBinary, Var])
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource var from arg3",
        dpiCall(TestCtx, stmt_bindByName, [Stmt, <<"A">>, ?BAD_REF])
    ),
    % fails due to the bind name not existing in the statement
    ?ASSERT_EX(
        #{message := "ORA-01036: illegal variable name/number"},
        dpiCall(TestCtx, stmt_bindByName, [Stmt, <<"B">>, Var])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, stmt_bindByName, [Stmt, <<"A">>, Var])),
    [dpiCall(TestCtx, data_release, [X]) || X <- Data],
    dpiCall(TestCtx, var_release, [Var]),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
% stmt_define: validates statement ref, position and var ref; after execute,
% an out-of-range position raises DPI-1028 and position 1 defines cleanly.
stmtDefine(#{session := Conn} = TestCtx) ->
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1 from dual">>, <<>>]
    ),
    #{var := Var, data := Data} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
            100, 0, false, false, null
        ]
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_define, [?BAD_REF, 1, Var])
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(TestCtx, stmt_define, [Stmt, ?BAD_INT, Var])
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource var from arg2",
        dpiCall(TestCtx, stmt_define, [Stmt, 1, ?BAD_REF])
    ),
    % define requires an executed statement
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    % fails due to the pos being invalid
    ?ASSERT_EX(
        #{message := "DPI-1028: query position 12345 is invalid"},
        dpiCall(TestCtx, stmt_define, [Stmt, 12345, Var])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, stmt_define, [Stmt, 1, Var])),
    [dpiCall(TestCtx, data_release, [X]) || X <- Data],
    dpiCall(TestCtx, var_release, [Var]),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
% stmt_defineValue: validates every argument (statement ref, position,
% oracle-type atom, native-type atom, size, sizeIsBytes flag); after execute,
% defining column 1 as a native int returns ok.
stmtDefineValue(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(
            TestCtx, stmt_defineValue,
            [
                ?BAD_REF, 1, 'DPI_ORACLE_TYPE_NATIVE_INT',
                'DPI_NATIVE_TYPE_INT64', 0, false, null
            ]
        )
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1 from dual">>, <<>>]
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(
            TestCtx, stmt_defineValue,
            [
                Stmt, ?BAD_INT, 'DPI_ORACLE_TYPE_NATIVE_INT',
                'DPI_NATIVE_TYPE_INT64', 0, false, null
            ]
        )
    ),
    ?ASSERT_EX(
        "wrong or unsupported dpiOracleType type",
        dpiCall(
            TestCtx, stmt_defineValue,
            [Stmt, 1, badAtom, 'DPI_NATIVE_TYPE_INT64', 0, false, null]
        )
    ),
    ?ASSERT_EX(
        "wrong or unsupported dpiNativeType type",
        dpiCall(
            TestCtx, stmt_defineValue,
            [Stmt, 1, 'DPI_ORACLE_TYPE_NATIVE_INT', badAtom, 0, false, null]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint size from arg4",
        dpiCall(
            TestCtx, stmt_defineValue,
            [
                Stmt, 1, 'DPI_ORACLE_TYPE_NATIVE_INT', 'DPI_NATIVE_TYPE_INT64',
                ?BAD_INT, false, null
            ]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve bool/atom sizeIsBytes from arg5",
        dpiCall(
            TestCtx, stmt_defineValue,
            [
                Stmt, 1, 'DPI_ORACLE_TYPE_NATIVE_INT', 'DPI_NATIVE_TYPE_INT64',
                0, "badAtom", null
            ]
        )
    ),
    % defineValue requires an executed statement
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    ?assertEqual(ok,
        dpiCall(
            TestCtx, stmt_defineValue,
            [
                Stmt, 1, 'DPI_ORACLE_TYPE_NATIVE_INT', 'DPI_NATIVE_TYPE_INT64',
                0, false, null
            ]
        )
    ),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
% stmt_close: rejects a bad statement ref and a non-binary tag; closing a
% freshly prepared statement with an empty tag returns ok.
stmtClose(#{session := Conn} = TestCtx) ->
    % fails due to wrong reference
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_close, [?BAD_REF, <<>>])
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1 from dual">>, <<>>]
    ),
    ?ASSERT_EX(
        "Unable to retrieve string tag from arg1",
        dpiCall(TestCtx, stmt_close, [Stmt, badBinary])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, stmt_close, [Stmt, <<>>])).
%-------------------------------------------------------------------------------
% Variable APIs
%-------------------------------------------------------------------------------
% var_setNumElementsInArray: rejects a bad var ref and a bad numElements;
% setting the element count on a 100-slot array var returns ok.
varSetNumElementsInArray(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource var from arg0",
        dpiCall(TestCtx, var_setNumElementsInArray, [?BAD_REF, 100])
    ),
    #{var := Var, data := Data} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_VARCHAR', 'DPI_NATIVE_TYPE_BYTES', 100, 100,
            true, true, null
        ]
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint numElements from arg1",
        dpiCall(TestCtx, var_setNumElementsInArray, [Var, ?BAD_INT])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, var_setNumElementsInArray, [Var, 100])),
    [dpiCall(TestCtx, data_release, [X]) || X <- Data],
    dpiCall(TestCtx, var_release, [Var]).
% var_setFromBytes: rejects a bad var ref, a bad position and a non-binary
% value; a position beyond the var's max array size raises DPI-1009, while
% position 0 accepts the binary and returns ok.
varSetFromBytes(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource var from arg0",
        dpiCall(TestCtx, var_setFromBytes, [?BAD_REF, 0, <<"abc">>])
    ),
    #{var := Var, data := Data} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_VARCHAR', 'DPI_NATIVE_TYPE_BYTES', 100, 100,
            true, true, null
        ]
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(TestCtx, var_setFromBytes, [Var, ?BAD_INT, <<"abc">>])
    ),
    ?ASSERT_EX(
        "Unable to retrieve binary/string value from arg2",
        dpiCall(TestCtx, var_setFromBytes, [Var, 0, badBinary])
    ),
    ?ASSERT_EX(
        #{message :=
            "DPI-1009: zero-based position 1000 is not valid with max array"
            " size of 100"
        },
        dpiCall(TestCtx, var_setFromBytes, [Var, 1000, <<"abc">>])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, var_setFromBytes, [Var, 0, <<"abc">>])),
    [dpiCall(TestCtx, data_release, [X]) || X <- Data],
    dpiCall(TestCtx, var_release, [Var]).
% var_release: a bad ref is rejected; releasing a freshly created var (after
% its data handles have been released first) returns ok.
varRelease(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource var from arg0",
        dpiCall(TestCtx, var_release, [?BAD_REF])
    ),
    #{var := Var, data := DataHandles} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_VARCHAR', 'DPI_NATIVE_TYPE_BYTES', 100, 100,
            true, true, null
        ]
    ),
    % data handles must go before the var that owns them
    lists:foreach(
        fun(Handle) -> dpiCall(TestCtx, data_release, [Handle]) end,
        DataHandles
    ),
    ?assertEqual(ok, dpiCall(TestCtx, var_release, [Var])).
%-------------------------------------------------------------------------------
% Data APIs
%-------------------------------------------------------------------------------
% data_setTimestamp: validates the data ref and each of the nine integer
% components (year..tzMinuteOffset) individually, then sets a timestamp on
% both a standalone data handle and one owned by a TIMESTAMP_TZ var.
dataSetTimestamp(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(
            TestCtx, data_setTimestamp, [?BAD_REF, 1, 2, 3, 4, 5, 6, 7, 8, 9]
        )
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve int year from arg1",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, ?BAD_INT, 2, 3, 4, 5, 6, 7, 8, 9]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve int month from arg2",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, 1, ?BAD_INT, 3, 4, 5, 6, 7, 8, 9]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve int day from arg3",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, 1, 2, ?BAD_INT, 4, 5, 6, 7, 8, 9]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve int hour from arg4",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, 1, 2, 3, ?BAD_INT, 5, 6, 7, 8, 9]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve int minute from arg5",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, 1, 2, 3, 4, ?BAD_INT, 6, 7, 8, 9]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve int second from arg6",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, 1, 2, 3, 4, 5, ?BAD_INT, 7, 8, 9]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve int fsecond from arg7",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, 1, 2, 3, 4, 5, 6, ?BAD_INT, 8, 9]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve int tzHourOffset from arg8",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, 1, 2, 3, 4, 5, 6, 7, ?BAD_INT, 9]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve int tzMinuteOffset from arg9",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, 1, 2, 3, 4, 5, 6, 7, 8, ?BAD_INT]
        )
    ),
    ?assertEqual(
        ok,
        dpiCall(TestCtx, data_setTimestamp, [Data, 1, 2, 3, 4, 5, 6, 7, 8, 9])
    ),
    dpiCall(TestCtx, data_release, [Data]),
    % same call must also work on a data handle owned by a var
    #{var := Var, data := [Data1]} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_TIMESTAMP_TZ', 'DPI_NATIVE_TYPE_TIMESTAMP',
            1, 1, true, true, null
        ]
    ),
    ?assertEqual(ok,
        dpiCall(TestCtx, data_setTimestamp, [Data1, 1, 2, 3, 4, 5, 6, 7, 8, 9])
    ),
    dpiCall(TestCtx, data_release, [Data1]),
    dpiCall(TestCtx, var_release, [Var]).
% data_setIntervalDS: validates the data ref and each of the five integer
% components (days..fseconds), then sets a day-to-second interval on both a
% standalone data handle and one owned by an INTERVAL_DS var.
dataSetIntervalDS(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_setIntervalDS, [?BAD_REF, 1, 2, 3, 4, 5])
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve int days from arg1",
        dpiCall(TestCtx, data_setIntervalDS, [Data, ?BAD_INT, 2, 3, 4, 5])
    ),
    ?ASSERT_EX(
        "Unable to retrieve int hours from arg2",
        dpiCall(TestCtx, data_setIntervalDS, [Data, 1, ?BAD_INT, 3, 4, 5])
    ),
    ?ASSERT_EX(
        "Unable to retrieve int minutes from arg3",
        dpiCall(TestCtx, data_setIntervalDS, [Data, 1, 2, ?BAD_INT, 4, 5])
    ),
    ?ASSERT_EX(
        "Unable to retrieve int seconds from arg4",
        dpiCall(TestCtx, data_setIntervalDS, [Data, 1, 2, 3, ?BAD_INT, 5])
    ),
    ?ASSERT_EX(
        "Unable to retrieve int fseconds from arg5",
        dpiCall(TestCtx, data_setIntervalDS, [Data, 1, 2, 3, 4, ?BAD_INT])
    ),
    ?assertEqual(
        ok,
        dpiCall(TestCtx, data_setIntervalDS, [Data, 1, 2, 3, 4, 5])
    ),
    dpiCall(TestCtx, data_release, [Data]),
    % same call must also work on a data handle owned by a var
    #{var := Var, data := [Data1]} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_INTERVAL_DS', 'DPI_NATIVE_TYPE_INTERVAL_DS',
            1, 1, true, true, null
        ]
    ),
    ?assertEqual(ok,
        dpiCall(TestCtx, data_setIntervalDS, [Data1, 1, 2, 3, 4, 5])
    ),
    dpiCall(TestCtx, data_release, [Data1]),
    dpiCall(TestCtx, var_release, [Var]).
% data_setIntervalYM: validates the data ref and the years/months integers,
% then sets a year-to-month interval on both a standalone data handle and
% one owned by an INTERVAL_YM var.
dataSetIntervalYM(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_setIntervalYM, [?BAD_REF, 1, 2])
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve int years from arg1",
        dpiCall(TestCtx, data_setIntervalYM, [Data, ?BAD_INT, 2])
    ),
    ?ASSERT_EX(
        "Unable to retrieve int months from arg2",
        dpiCall(TestCtx, data_setIntervalYM, [Data, 1, ?BAD_INT])
    ),
    ?assertEqual(ok,
        dpiCall(TestCtx, data_setIntervalYM, [Data, 1, 2])
    ),
    dpiCall(TestCtx, data_release, [Data]),
    % same call must also work on a data handle owned by a var
    #{var := Var, data := [Data1]} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_INTERVAL_YM', 'DPI_NATIVE_TYPE_INTERVAL_YM',
            1, 1, true, true, null
        ]
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_setIntervalYM, [Data1, 1, 2])),
    dpiCall(TestCtx, data_release, [Data1]),
    dpiCall(TestCtx, var_release, [Var]).
% data_setInt64: validates the data ref and the integer amount, then sets an
% int64 on both a standalone data handle and one owned by a var.
dataSetInt64(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_setInt64, [?BAD_REF, 1])
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve int amount from arg1",
        dpiCall(TestCtx, data_setInt64, [Data, ?BAD_INT])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_setInt64, [Data, 1])),
    dpiCall(TestCtx, data_release, [Data]),
    % same call must also work on a data handle owned by a var
    #{var := Var, data := [Data1]} = dpiCall(
        TestCtx, conn_newVar, [
            Conn, 'DPI_ORACLE_TYPE_INTERVAL_YM', 'DPI_NATIVE_TYPE_INTERVAL_YM',
            1, 1, true, true, null
        ]
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_setInt64, [Data1, 1])),
    dpiCall(TestCtx, data_release, [Data1]),
    dpiCall(TestCtx, var_release, [Var]).
% data_setDouble: validates the data ref and the float amount, then sets a
% double on both a standalone data handle and one owned by a var.
dataSetDouble(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_setDouble, [?BAD_REF, 1])
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve double amount from arg1",
        dpiCall(TestCtx, data_setDouble, [Data, ?BAD_FLOAT])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_setDouble, [Data, 1.0])),
    dpiCall(TestCtx, data_release, [Data]),
    % same call must also work on a data handle owned by a var
    #{var := Var, data := [Data1]} = dpiCall(
        TestCtx, conn_newVar, [
            Conn, 'DPI_ORACLE_TYPE_INTERVAL_YM', 'DPI_NATIVE_TYPE_INTERVAL_YM',
            1, 1, true, true, null
        ]
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_setDouble, [Data1, 1.0])),
    dpiCall(TestCtx, data_release, [Data1]),
    dpiCall(TestCtx, var_release, [Var]).
% data_setBytes: a bad data ref and a non-binary payload are rejected;
% setting a binary on a fresh data handle returns ok.
dataSetBytes(TestCtx) ->
    Payload = <<"my string">>,
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_setBytes, [?BAD_REF, Payload])
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve binary data from arg1",
        dpiCall(TestCtx, data_setBytes, [Data, badBinary])
    ),
    Result = dpiCall(TestCtx, data_setBytes, [Data, Payload]),
    ?assertEqual(ok, Result),
    dpiCall(TestCtx, data_release, [Data]).
% data_setIsNull: validates the data ref and the boolean flag; setting both
% true and false works on a standalone data handle, and setting true works
% on a data handle owned by a var.
dataSetIsNull(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_setIsNull, [?BAD_REF, 1])
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve bool/atom isNull from arg1",
        dpiCall(TestCtx, data_setIsNull, [Data, "not an atom"])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_setIsNull, [Data, true])),
    ?assertEqual(ok, dpiCall(TestCtx, data_setIsNull, [Data, false])),
    dpiCall(TestCtx, data_release, [Data]),
    #{var := Var, data := [Data1]} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_INTERVAL_YM', 'DPI_NATIVE_TYPE_INTERVAL_YM',
            1, 1, true, true, null
        ]
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_setIsNull, [Data1, true])),
    dpiCall(TestCtx, data_release, [Data1]),
    dpiCall(TestCtx, var_release, [Var]).
% data_get: rejects a bad data ref, then for each {check-tag, oracle type,
% native type} triple below creates a var, flips its null flag, and checks
% that data_get returns the expected Erlang representation; LOB data is
% expected to raise "Unsupported nativeTypeNum".
dataGet(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data from arg0",
        dpiCall(TestCtx, data_get, [?BAD_REF])
    ),
    % first tuple element selects which branch of the case below applies
    Types = [
        {null, 'DPI_ORACLE_TYPE_INTERVAL_YM', 'DPI_NATIVE_TYPE_INTERVAL_YM'},
        {int, 'DPI_ORACLE_TYPE_NATIVE_INT', 'DPI_NATIVE_TYPE_INT64'},
        {int, 'DPI_ORACLE_TYPE_NATIVE_UINT', 'DPI_NATIVE_TYPE_UINT64'},
        {float, 'DPI_ORACLE_TYPE_NATIVE_FLOAT', 'DPI_NATIVE_TYPE_FLOAT'},
        {float, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE'},
        {double, 'DPI_ORACLE_TYPE_NATIVE_FLOAT', 'DPI_NATIVE_TYPE_FLOAT'},
        {double, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE'},
        {ts, 'DPI_ORACLE_TYPE_TIMESTAMP_TZ', 'DPI_NATIVE_TYPE_TIMESTAMP'},
        {intvlds, 'DPI_ORACLE_TYPE_INTERVAL_DS', 'DPI_NATIVE_TYPE_INTERVAL_DS'},
        {intvlym, 'DPI_ORACLE_TYPE_INTERVAL_YM', 'DPI_NATIVE_TYPE_INTERVAL_YM'},
        {unsupported, 'DPI_ORACLE_TYPE_CLOB', 'DPI_NATIVE_TYPE_LOB'}
    ],
    lists:foreach(
        fun({Test, OraType, NativeType}) ->
            #{var := Var, data := [Data]} = dpiCall(
                TestCtx, conn_newVar,
                [Conn, OraType, NativeType, 1, 0, false, false, null]
            ),
            % the null case asserts on a null value; every other case needs
            % the data marked non-null first
            if Test == null -> dpiCall(TestCtx, data_setIsNull, [Data, true]);
                true -> dpiCall(TestCtx, data_setIsNull, [Data, false])
            end,
            case Test of
                null -> ?assertEqual(null, dpiCall(TestCtx, data_get, [Data]));
                int ->
                    ?assert(
                        is_integer(dpiCall(TestCtx, data_getInt64, [Data]))
                    ),
                    ?assert(is_integer(dpiCall(TestCtx, data_get, [Data])));
                float -> ?assert(is_float(dpiCall(TestCtx, data_get, [Data])));
                double ->
                    ?assert(
                        is_float(dpiCall(TestCtx, data_getDouble, [Data]))
                    );
                ts ->
                    #{
                        year := Year, month := Month, day := Day, hour := Hour,
                        minute := Minute, second := Second, fsecond := Fsecond,
                        tzHourOffset := TzHourOffset,
                        tzMinuteOffset := TzMinuteOffset
                    } = dpiCall(TestCtx, data_get, [Data]),
                    ?assert(is_integer(Year)),
                    ?assert(is_integer(Month)),
                    ?assert(is_integer(Day)),
                    ?assert(is_integer(Hour)),
                    ?assert(is_integer(Minute)),
                    ?assert(is_integer(Second)),
                    ?assert(is_integer(Fsecond)),
                    ?assert(is_integer(TzHourOffset)),
                    ?assert(is_integer(TzMinuteOffset));
                intvlds ->
                    #{
                        days := Days, hours := Hours, minutes := Minutes,
                        seconds := Seconds, fseconds := Fseconds
                    } = dpiCall(TestCtx, data_get, [Data]),
                    ?assert(is_integer(Days)),
                    ?assert(is_integer(Hours)),
                    ?assert(is_integer(Minutes)),
                    ?assert(is_integer(Seconds)),
                    ?assert(is_integer(Fseconds));
                intvlym ->
                    #{
                        years := Years,
                        months := Months
                    } = dpiCall(TestCtx, data_get, [Data]),
                    ?assert(is_integer(Years)),
                    ?assert(is_integer(Months));
                unsupported ->
                    ?ASSERT_EX(
                        "Unsupported nativeTypeNum",
                        dpiCall(TestCtx, data_get, [Data])
                    )
            end,
            dpiCall(TestCtx, data_release, [Data]),
            dpiCall(TestCtx, var_release, [Var])
        end,
        Types
    ).
% Writes a binary into a DPI_NATIVE_TYPE_BYTES variable slot and checks
% that data_get/1 surfaces it as an Erlang binary.
dataGetBinary(#{session := Conn} = TestCtx) ->
    NewVarArgs = [
        Conn, 'DPI_ORACLE_TYPE_NCHAR', 'DPI_NATIVE_TYPE_BYTES', 1, 100,
        true, true, null
    ],
    #{var := BytesVar, data := [BytesData]} =
        dpiCall(TestCtx, conn_newVar, NewVarArgs),
    SetResult = dpiCall(
        TestCtx, var_setFromBytes, [BytesVar, 0, <<"my string">>]
    ),
    ?assertEqual(ok, SetResult),
    dpiCall(TestCtx, data_setIsNull, [BytesData, false]),
    Fetched = dpiCall(TestCtx, data_get, [BytesData]),
    ?assert(is_binary(Fetched)),
    dpiCall(TestCtx, data_release, [BytesData]),
    dpiCall(TestCtx, var_release, [BytesVar]).
% Selects a ROWID from dual and checks that data_get/1 renders the fetched
% query value as a binary.
dataGetRowid(#{session := Conn} = TestCtx) ->
    RowidStmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select rowid from dual">>, <<>>]
    ),
    dpiCall(TestCtx, stmt_execute, [RowidStmt, []]),
    dpiCall(TestCtx, stmt_fetch, [RowidStmt]),
    #{data := RowidData} =
        dpiCall(TestCtx, stmt_getQueryValue, [RowidStmt, 1]),
    RowidValue = dpiCall(TestCtx, data_get, [RowidData]),
    ?assert(is_binary(RowidValue)),
    dpiCall(TestCtx, data_release, [RowidData]),
    dpiCall(TestCtx, stmt_close, [RowidStmt, <<>>]).
% Binds a SYS_REFCURSOR out parameter from a PL/SQL block and checks that
% data_get/1 returns a statement resource reference, both on the first
% fetch and on a subsequent fetch (cached re-get path in the NIF).
% Fix: the "first-time get" comment had lost its leading '%' which broke
% compilation of this module.
dataGetStmt(#{session := Conn} = TestCtx) ->
    SQL = <<"
        DECLARE
            p_cursor SYS_REFCURSOR;
        BEGIN
            IF :choice > 0 THEN
                OPEN p_cursor FOR SELECT 1 FROM dual;
                :cursor := p_cursor;
            ELSE
                OPEN p_cursor FOR SELECT 2 FROM dual;
                :cursor := p_cursor;
            END IF;
        END;
    ">>,
    % INT64 in-bind steering the PL/SQL IF
    #{var := VarChoice, data := [DataChoice]} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_NATIVE_INT', 'DPI_NATIVE_TYPE_INT64', 1, 0,
            false, false, null
        ]
    ),
    % STMT out-bind receiving the ref cursor
    #{var := VarStmt, data := [DataStmt]} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_STMT', 'DPI_NATIVE_TYPE_STMT', 1, 0,
            false, false, null
        ]
    ),
    Stmt = dpiCall(TestCtx, conn_prepareStmt, [Conn, false, SQL, <<>>]),
    ok = dpiCall(TestCtx, stmt_bindByName, [Stmt, <<"choice">>, VarChoice]),
    ok = dpiCall(TestCtx, stmt_bindByName, [Stmt, <<"cursor">>, VarStmt]),
    % first-time get
    ok = dpiCall(TestCtx, data_setInt64, [DataChoice, 0]),
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    ?assert(is_reference(dpiCall(TestCtx, data_get, [DataStmt]))),
    % cached re-get
    ok = dpiCall(TestCtx, data_setInt64, [DataChoice, 1]),
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    ?assert(is_reference(dpiCall(TestCtx, data_get, [DataStmt]))),
    dpiCall(TestCtx, data_release, [DataChoice]),
    dpiCall(TestCtx, var_release, [VarChoice]),
    dpiCall(TestCtx, data_release, [DataStmt]),
    dpiCall(TestCtx, var_release, [VarStmt]),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
% data_getInt64/1 must reject non-data references, report null for a slot
% flagged as null, and otherwise hand back an integer.
dataGetInt64(TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_getInt64, [?BAD_REF])
    ),
    IntData = dpiCall(TestCtx, data_ctor, []),
    dpiCall(TestCtx, data_setIsNull, [IntData, true]),
    NullValue = dpiCall(TestCtx, data_getInt64, [IntData]),
    ?assertEqual(null, NullValue),
    dpiCall(TestCtx, data_setIsNull, [IntData, false]),
    IntValue = dpiCall(TestCtx, data_getInt64, [IntData]),
    ?assert(is_integer(IntValue)),
    dpiCall(TestCtx, data_release, [IntData]).
% data_getDouble/1 must reject non-data references, report null for a slot
% flagged as null, and otherwise hand back a float.
dataGetDouble(TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_getDouble, [?BAD_REF])
    ),
    DblData = dpiCall(TestCtx, data_ctor, []),
    dpiCall(TestCtx, data_setIsNull, [DblData, true]),
    NullValue = dpiCall(TestCtx, data_getDouble, [DblData]),
    ?assertEqual(null, NullValue),
    dpiCall(TestCtx, data_setIsNull, [DblData, false]),
    DblValue = dpiCall(TestCtx, data_getDouble, [DblData]),
    ?assert(is_float(DblValue)),
    dpiCall(TestCtx, data_release, [DblData]).
% no non-pointer test for this one
% data_getBytes/1 must reject non-data references, return a binary for a
% populated VARCHAR slot, and report null for a data slot flagged as null.
dataGetBytes(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_getBytes, [?BAD_REF])
    ),
    #{var := BytesVar, data := [BytesData]} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_VARCHAR', 'DPI_NATIVE_TYPE_BYTES', 1, 1,
            true, true, null
        ]
    ),
    dpiCall(TestCtx, data_setIsNull, [BytesData, false]),
    BytesValue = dpiCall(TestCtx, data_getBytes, [BytesData]),
    ?assert(is_binary(BytesValue)),
    dpiCall(TestCtx, data_release, [BytesData]),
    dpiCall(TestCtx, var_release, [BytesVar]),
    NullData = dpiCall(TestCtx, data_ctor, []),
    dpiCall(TestCtx, data_setIsNull, [NullData, true]),
    ?assertEqual(null, dpiCall(TestCtx, data_getBytes, [NullData])),
    dpiCall(TestCtx, data_release, [NullData]).
% data_release/1 must reject references that are not data resources (a bad
% ref and a connection) and must release both a standalone data slot and
% one owned by a variable.
dataRelease(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data from arg0",
        dpiCall(TestCtx, data_release, [?BAD_REF])
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource data from arg0",
        dpiCall(TestCtx, data_release, [Conn])
    ),
    CtorData = dpiCall(TestCtx, data_ctor, []),
    ?assertEqual(ok, dpiCall(TestCtx, data_release, [CtorData])),
    #{var := IntVar, data := [VarData]} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_NATIVE_INT', 'DPI_NATIVE_TYPE_INT64', 1, 1,
            true, true, null
        ]
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_release, [VarData])),
    dpiCall(TestCtx, var_release, [IntVar]).
% Creates five of each NIF resource kind (context, connection, statement,
% variable, data), checks that every counter reported by resource_count/0
% grew by exactly five, releases everything and checks the counters are
% back at their initial values.
resourceCounting(#{context := Context, session := Conn} = TestCtx) ->
    #{tns := Tns, user := User, password := Password} = getConfig(),
    Indices = lists:seq(1, 5),
    % snapshot of the counters before allocating anything
    #{
        context := ICtxs,
        variable := IVars,
        connection := IConns,
        data := IDatas,
        statement := IStmts,
        datapointer := IDataPtrs
    } = InitialRC = dpiCall(TestCtx, resource_count, []),
    % one tuple of freshly created resources per index
    Resources = [{
        dpiCall(
            TestCtx, context_create, [?DPI_MAJOR_VERSION, ?DPI_MINOR_VERSION]
        ),
        dpiCall(
            TestCtx, conn_create, [
                Context, User, Password, Tns,
                #{encoding => "AL32UTF8", nencoding => "AL32UTF8"}, #{}
            ]
        ),
        dpiCall(
            TestCtx, conn_prepareStmt,
            [Conn, false, <<"select * from dual">>, <<>>]
        ),
        dpiCall(
            TestCtx, conn_newVar,
            [Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
                1, 0, false, false, null]
        ),
        dpiCall(TestCtx, data_ctor, [])
    } || _ <- Indices],
    % every counter must have grown by exactly five
    #{
        context := Ctxs,
        variable := Vars,
        connection := Conns,
        data := Datas,
        statement := Stmts,
        datapointer := DataPtrs
    } = dpiCall(TestCtx, resource_count, []),
    ?assertEqual(5, Ctxs - ICtxs),
    ?assertEqual(5, Vars - IVars),
    ?assertEqual(5, Conns - IConns),
    ?assertEqual(5, Stmts - IStmts),
    ?assertEqual(5, Datas - IDatas),
    ?assertEqual(5, DataPtrs - IDataPtrs),
    % release everything again
    lists:foreach(
        fun({Ctx, LConn, Stmt, #{var := Var}, Data}) ->
            ok = dpiCall(TestCtx, var_release, [Var]),
            ok = dpiCall(TestCtx, stmt_close, [Stmt, <<>>]),
            ok = dpiCall(TestCtx, conn_close, [LConn, [], <<>>]),
            ok = dpiCall(TestCtx, context_destroy, [Ctx]),
            ok = dpiCall(TestCtx, data_release, [Data])
        end,
        Resources
    ),
    % counters must be back at the initial snapshot
    ?assertEqual(InitialRC, dpiCall(TestCtx, resource_count, [])).
%-------------------------------------------------------------------------------
% eunit infrastructure callbacks
%-------------------------------------------------------------------------------
-define(SLAVE, oranif_slave).
% Loads the dpi NIF either on a dedicated slave node (safe => true) or
% directly in this VM (safe => false) and returns the base test context.
setup(#{safe := true}) ->
    Node = dpi:load(?SLAVE),
    pong = net_adm:ping(Node),
    #{safe => true, node => Node};
setup(#{safe := false}) ->
    ok = dpi:load_unsafe(),
    #{safe => false}.
% Builds on setup/1: loads the NIF, reports (without failing) any non-zero
% NIF resource counters left over from earlier runs, and adds a freshly
% created dpiContext under the context key.
setup_context(TestCtx) ->
    SlaveCtx = setup(TestCtx),
    % report leftover resources; do not fail here
    maps:fold(
        fun(K, V, _) ->
            if V > 0 -> ?debugFmt("~p ~p = ~p", [?FUNCTION_NAME, K, V]);
                true -> ok
            end
        end,
        noacc,
        dpiCall(SlaveCtx, resource_count, [])
    ),
    SlaveCtx#{
        context => dpiCall(
            SlaveCtx, context_create, [?DPI_MAJOR_VERSION, ?DPI_MINOR_VERSION]
        )
    }.
% Builds on setup_context/1: asserts that exactly one context resource and
% nothing else exists, then opens a connection using the credentials from
% test/connect.config and stores it under the session key.
% NOTE(review): the function name is misspelled ("connecion") but is kept
% as-is because the test fixtures below reference it.
setup_connecion(TestCtx) ->
    ContextCtx = #{context := Context} = setup_context(TestCtx),
    #{tns := Tns, user := User, password := Password} = getConfig(),
    % exactly one context and no other resource may exist at this point
    maps:fold(
        fun
            (_K, 0, _) -> ok;
            (context, 1, _) -> ok;
            (K, V, _) -> ?assertEqual({K, 0}, {K, V})
        end,
        noacc,
        dpiCall(ContextCtx, resource_count, [])
    ),
    ContextCtx#{
        session => dpiCall(
            ContextCtx, conn_create,
            [
                Context, User, Password, Tns,
                #{encoding => "AL32UTF8", nencoding => "AL32UTF8"}, #{}
            ]
        )
    }.
% Tears down the test context in stages: close the session (if any), then
% destroy the context (if any), then unload the slave node (safe mode).
% After the session stage leaked resources are only reported; after the
% context stage any leftover resource fails the test.
% Fix: renamed the local `Connnnection` to the conventional `Conn`.
cleanup(#{session := Conn} = Ctx) ->
    dpiCall(Ctx, conn_close, [Conn, [], <<>>]),
    maps:fold(
        fun
            (_K, 0, _) -> ok;
            (context, 1, _) -> ok;
            (K, V, _) -> ?debugFmt("~p ~p = ~p", [?FUNCTION_NAME, K, V])
        end,
        noacc,
        dpiCall(Ctx, resource_count, [])
    ),
    cleanup(maps:without([session], Ctx));
cleanup(#{context := Context} = Ctx) ->
    dpiCall(Ctx, context_destroy, [Context]),
    maps:fold(
        fun
            (_K, 0, _) -> ok;
            (K, V, _) -> ?assertEqual({K, 0}, {K, V})
        end,
        noacc,
        dpiCall(Ctx, resource_count, [])
    ),
    cleanup(maps:without([context], Ctx));
cleanup(#{safe := true, node := SlaveNode}) ->
    unloaded = dpi:unload(SlaveNode);
cleanup(_) -> ok.
%-------------------------------------------------------------------------------
% Internal functions
%-------------------------------------------------------------------------------
% Dispatches a dpi API call either directly (unsafe mode) or via the slave
% node (safe mode); remote {error, _, _, _} results are re-raised locally.
dpiCall(#{safe := false}, Func, Args) ->
    apply(dpi, Func, Args);
dpiCall(#{safe := true, node := Node}, Func, Args) ->
    Result = dpi:safe(Node, dpi, Func, Args),
    case Result of
        {error, _, _, _} -> error(Result);
        _ -> Result
    end.
% Reads the Oracle connection parameters (tns/user/password) from
% <cwd>/test/connect.config. The file must contain a single map term;
% anything else aborts the test run with an error.
getConfig() ->
    case file:get_cwd() of
        {ok, Cwd} ->
            % simplified from a reverse/split/re-join dance to a plain join
            ConnectConfigFile = filename:join([Cwd, "test", "connect.config"]),
            case file:consult(ConnectConfigFile) of
                {ok, [Params]} when is_map(Params) -> Params;
                {ok, Params} ->
                    ?debugFmt("bad config (expected map) ~p", [Params]),
                    error(badconfig);
                {error, Reason} ->
                    ?debugFmt("~p", [Reason]),
                    error(Reason)
            end;
        {error, Reason} ->
            ?debugFmt("~p", [Reason]),
            error(Reason)
    end.
%-------------------------------------------------------------------------------
% Unit Tests
%-------------------------------------------------------------------------------
% Test battery that needs no Oracle context or connection; runs against the
% bare NIF.
-define(NO_CONTEXT_TESTS, [
    ?F(contextCreate),
    ?F(contextDestroy),
    ?F(contextGetClientVersion),
    ?F(connCreate)
]).
% Test battery that requires the connection established by
% setup_connecion/1.
-define(AFTER_CONNECTION_TESTS, [
    ?F(connPrepareStmt),
    ?F(connNewVar),
    ?F(connCommit),
    ?F(connRollback),
    ?F(connPing),
    ?F(connClose),
    ?F(connGetServerVersion),
    ?F(connSetClientIdentifier),
    ?F(stmtExecute),
    ?F(stmtExecuteMany_varGetReturnedData),
    ?F(stmtFetch),
    ?F(stmtFetchRows),
    ?F(stmtSetFetchArraySize),
    ?F(stmtGetQueryValue),
    ?F(stmtGetQueryInfo),
    ?F(stmtGetInfo),
    ?F(stmtGetNumQueryColumns),
    ?F(stmtBindValueByPos),
    ?F(stmtBindValueByName),
    ?F(stmtBindByPos),
    ?F(stmtBindByName),
    ?F(stmtDefine),
    ?F(stmtDefineValue),
    ?F(stmtClose),
    ?F(varSetNumElementsInArray),
    ?F(varSetFromBytes),
    ?F(varRelease),
    ?F(dataSetTimestamp),
    ?F(dataSetIntervalDS),
    ?F(dataSetIntervalYM),
    ?F(dataSetInt64),
    ?F(dataSetDouble),
    ?F(dataSetBytes),
    ?F(dataSetIsNull),
    ?F(dataGet),
    ?F(dataGetBinary),
    ?F(dataGetRowid),
    ?F(dataGetStmt),
    ?F(dataGetInt64),
    ?F(dataGetDouble),
    ?F(dataGetBytes),
    ?F(dataRelease),
    ?F(resourceCounting)
]).
% Context-free battery with the NIF loaded directly in this VM.
unsafe_no_context_test_() ->
    {
        setup,
        fun() -> setup(#{safe => false}) end,
        fun cleanup/1,
        ?W(?NO_CONTEXT_TESTS)
    }.
% Connection-based battery with the NIF loaded directly in this VM.
unsafe_session_test_() ->
    {
        setup,
        fun() -> setup_connecion(#{safe => false}) end,
        fun cleanup/1,
        ?W(?AFTER_CONNECTION_TESTS)
    }.
% Context-free battery with the NIF running on the slave node.
no_context_test_() ->
    {
        setup,
        fun() -> setup(#{safe => true}) end,
        fun cleanup/1,
        ?W(?NO_CONTEXT_TESTS)
    }.
% Connection-based battery with the NIF running on the slave node.
session_test_() ->
    {
        setup,
        fun() -> setup_connecion(#{safe => true}) end,
        fun cleanup/1,
        ?W(?AFTER_CONNECTION_TESTS)
    }.
load_test() ->
    % This is a place holder to trigger the upgrade and unload callbacks of
    % the NIF code. This doesn't test anything, only ensures code coverage.
    % Fix: the two header comment lines had lost their leading '%' which
    % broke compilation of this module.
    ?assertEqual(ok, dpi:load_unsafe()),
    c:c(dpi),
    % triggering upgrade callback
    ?assertEqual(ok, dpi:load_unsafe()),
    % at this point, both old and current dpi code might be "bad"
    % delete the old code, triggers unload callback
    code:purge(dpi),
    % make the new code old
    code:delete(dpi),
    % delete that old code, too. Now all the code is gone, triggering unload
    % callback again
    code:purge(dpi).
% Exercises slave node sharing: a single load/unload cycle first, then four
% client processes sharing one slave node, leaving either via dpi:unload/1
% or by plain exit (crash simulation); the node must survive until the
% final explicit unload. Fix: six comment lines had lost their leading '%'
% (compile breaker); also fixed "exists"->"exits" and "carsh"->"crash".
slave_reuse_test() ->
    % single load / unload test
    Node = dpi:load(?SLAVE),
    ?assertEqual([Node], nodes(hidden)),
    ?assertEqual([self()], reg_pids(Node)),
    ?assertEqual(unloaded, dpi:unload(Node)),
    ?assertEqual([], reg_pids(Node)),
    % multiple load / unload test
    RxTO = 1000,
    % - first process which creates the slave node
    Self = self(),
    Pid1 = spawn(fun() -> slave_client_proc(Self) end),
    Pid1 ! load,
    ?assertEqual(ok, receive {Pid1, loaded} -> ok after RxTO -> timeout end),
    ?assertEqual([Node], nodes(hidden)),
    % - create three more processes sharing the same slave node
    Pids0 = [spawn(fun() -> slave_client_proc(Self) end) || _ <- lists:seq(1, 3)],
    ok = lists:foreach(fun(Pid) -> Pid ! load end, Pids0),
    ?assertEqual(done,
        (fun
            WaitLoad([]) -> done;
            WaitLoad(Workers) when length(Workers) > 0 ->
                receive {Pid, loaded} -> WaitLoad(Workers -- [Pid])
                after RxTO -> timeout
                end
        end)(Pids0)
    ),
    Pids = [P1, P2, P3, P4] = lists:usort([Pid1 | Pids0]),
    ?assertEqual(Pids, lists:usort(reg_pids(Node))),
    % slave is still running after first process calls dpi:unload/1
    P1 ! {unload, Node},
    ?assertEqual(ok, receive {P1, unloaded} -> ok after RxTO -> timeout end),
    ?assertEqual(lists:usort(Pids -- [P1]), lists:usort(reg_pids(Node))),
    ?assertEqual([Node], nodes(hidden)),
    % slave is still running after second process exits without
    % calling dpi:unload/1 (crash simulation)
    P2 ! exit,
    ?assertEqual(ok, receive {P2, exited} -> ok after RxTO -> timeout end),
    ?assertEqual(lists:usort(Pids -- [P1, P2]), lists:usort(reg_pids(Node))),
    ?assertEqual([Node], nodes(hidden)),
    % slave is still running after third process calls dpi:unload/1
    P3 ! {unload, Node},
    ?assertEqual(ok, receive {P3, unloaded} -> ok after RxTO -> timeout end),
    ?assertEqual(
        lists:usort(Pids -- [P1, P2, P3]),
        lists:usort(reg_pids(Node))
    ),
    ?assertEqual([Node], nodes(hidden)),
    % slave is still running after last process exits without
    % calling dpi:unload/1 (last process crash simulation)
    P4 ! exit,
    ?assertEqual(ok, receive {P4, exited} -> ok after RxTO -> timeout end),
    ?assertEqual([], reg_pids(Node)), % global register is empty
    lists:foreach( % all processes are also dead
        fun(Pid) -> ?assertEqual(false, is_process_alive(Pid)) end,
        Pids
    ),
    ?assertEqual([Node], nodes(hidden)),
    % console cleanup simulation after last process crash
    ?assertEqual(unloaded, dpi:unload(Node)),
    ?assertEqual([], reg_pids(Node)),
    ?assertEqual([], nodes(hidden)).
% Helper process for slave_reuse_test/0: loads the NIF slave on demand,
% unloads it or exits when told to, and reports each action back to the
% test process as {self(), Action}.
slave_client_proc(Parent) ->
    Me = self(),
    receive
        load ->
            dpi:load(?SLAVE),
            Parent ! {Me, loaded},
            slave_client_proc(Parent);
        {unload, Node} ->
            ok = dpi:unload(Node),
            Parent ! {Me, unloaded};
        exit ->
            Parent ! {Me, exited}
    end.
% Collects the live pids that dpi registered globally for the given slave
% node and the local node.
reg_pids(Node) ->
    Local = node(),
    lists:filtermap(
        fun
            ({dpi, SlaveNode, OwnerNode, _} = Name)
                    when SlaveNode == Node, OwnerNode == Local ->
                case global:whereis_name(Name) of
                    Pid when is_pid(Pid) -> {true, Pid};
                    _ -> false
                end;
            (_) -> false
        end,
        global:registered_names()
    ).
| null | https://raw.githubusercontent.com/KonnexionsGmbH/oranif/b51a979c4b102d1b180e08bba498c9e15ec650cc/test/cover_tests.erl | erlang | -------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
Context APIs
-------------------------------------------------------------------------------
fails due to nonsense major version
destroy the context
try to destroy it again
fails due to a wrong handle being passed
-------------------------------------------------------------------------------
Connection APIs
-------------------------------------------------------------------------------
fails due to both SQL and Tag being empty
fails due to array size being 0
fails due to the reference being wrong
fails due to the reference being wrong
fails due to the reference being wrong
fails due to the reference being wrong
-------------------------------------------------------------------------------
Statement APIs
-------------------------------------------------------------------------------
fails due to the SQL being invalid
fails due to the reference being of the wrong type
fails due to the reference being of the wrong type
fails due to the reference being of the wrong type
fails due to the fetch not being done
fails due to the ref being wrong
fails due to the statement already released
fails due to the position being invalid
fails due to bad data handle passing
fails due to the position being invalid
fails due to the pos being invalid
fails due to wrong reference
-------------------------------------------------------------------------------
Variable APIs
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
Data APIs
-------------------------------------------------------------------------------
cached re-get
-------------------------------------------------------------------------------
eunit infrastructure callbacks
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
Unit Tests
-------------------------------------------------------------------------------
triggering upgrade callback
at this point, both old and current dpi code might be "bad"
delete the old code, triggers unload callback
make the new code old
delete that old code, too. Now all the code is gone, triggering unload
callback again
single load / unload test
multiple load / unload test
calling dpi:unload/1 (crash simulation)
slave is still running after last process exists without
calling dpi:unload/1 (last process crash simulation)
global register is empty
all processes are also dead | -module(cover_tests).
-include_lib("eunit/include/eunit.hrl").
MACROs
-define(DPI_MAJOR_VERSION, 3).
-define(DPI_MINOR_VERSION, 0).
-define(EXEC_STMT(_Conn, _Sql),
(fun() ->
__Stmt = dpiCall(
TestCtx, conn_prepareStmt, [_Conn, false, _Sql, <<>>]
),
__StmtExecResult = (catch dpiCall(TestCtx, stmt_execute, [__Stmt, []])),
catch dpiCall(TestCtx, stmt_close, [__Stmt, <<>>]),
__StmtExecResult
end)()
).
-define(BAD_INT, -16#FFFFFFFFFFFFFFFF1).
-define(BAD_FLOAT, notEvenAFloatAtAll).
-define(BAD_REF, make_ref()).
-define(W(_Tests), fun(__Ctx) -> _Tests end).
-define(F(__Fn), {??__Fn, fun() -> __Fn(__Ctx) end}).
-define(ASSERT_EX(_Error, _Expern),
?assertException(error, {error, _File, _Line, _Error}, _Expern)
).
contextCreate(TestCtx) ->
?ASSERT_EX(
"Unable to retrieve uint major from arg0",
dpiCall(TestCtx, context_create, [?BAD_INT, ?DPI_MINOR_VERSION])
),
?ASSERT_EX(
"Unable to retrieve uint minor from arg1",
dpiCall(TestCtx, context_create, [?DPI_MAJOR_VERSION, ?BAD_INT])
),
?ASSERT_EX(
#{message := "DPI-1020: version 1337.0 is not supported by ODPI-C"
" library version 3.0"},
dpiCall(TestCtx, context_create, [1337, ?DPI_MINOR_VERSION])
),
Context = dpiCall(
TestCtx, context_create, [?DPI_MAJOR_VERSION, ?DPI_MINOR_VERSION]
),
?assert(is_reference(Context)),
dpiCall(TestCtx, context_destroy, [Context]).
contextDestroy(TestCtx) ->
?ASSERT_EX(
"Unable to retrieve resource context from arg0",
dpiCall(TestCtx, context_destroy, [?BAD_REF])
),
Context = dpiCall(
TestCtx, context_create, [?DPI_MAJOR_VERSION, ?DPI_MINOR_VERSION]
),
?assertEqual(ok, dpiCall(TestCtx, context_destroy, [Context])),
?ASSERT_EX(
#{message := "DPI-1002: invalid dpiContext handle"},
dpiCall(TestCtx, context_destroy, [Context])
).
contextGetClientVersion(TestCtx) ->
?ASSERT_EX(
"Unable to retrieve resource context from arg0",
dpiCall(TestCtx, context_getClientVersion, [?BAD_REF])
),
BindData = dpiCall(TestCtx, data_ctor, []),
?ASSERT_EX(
"Unable to retrieve resource context from arg0",
dpiCall(TestCtx, context_getClientVersion, [BindData])
),
dpiCall(TestCtx, data_release, [BindData]),
Context = dpiCall(
TestCtx, context_create, [?DPI_MAJOR_VERSION, ?DPI_MINOR_VERSION]
),
#{releaseNum := CRNum, versionNum := CVNum, fullVersionNum := CFNum} =
dpiCall(TestCtx, context_getClientVersion, [Context]),
?assert(is_integer(CRNum)),
?assert(is_integer(CVNum)),
?assert(is_integer(CFNum)),
dpiCall(TestCtx, context_destroy, [Context]).
connCreate(TestCtx) ->
#{tns := Tns, user := User, password := Password} = getConfig(),
CP = #{encoding => "AL32UTF8", nencoding => "AL32UTF8"},
?ASSERT_EX(
"Unable to retrieve resource context from arg0",
dpiCall(TestCtx, conn_create, [?BAD_REF, User, Password, Tns, CP, #{}])
),
Context = dpiCall(
TestCtx, context_create, [?DPI_MAJOR_VERSION, ?DPI_MINOR_VERSION]
),
?ASSERT_EX(
"Unable to retrieve string/binary userName from arg1",
dpiCall(TestCtx, conn_create, [Context, badBin, Password, Tns, CP, #{}])
),
?ASSERT_EX(
"Unable to retrieve string/binary password from arg2",
dpiCall(TestCtx, conn_create, [Context, User, badBin, Tns, CP, #{}])
),
?ASSERT_EX(
"Unable to retrieve string/binary connectString from arg3",
dpiCall(
TestCtx, conn_create, [Context, User, Password, badBin, CP, #{}]
)
),
?ASSERT_EX(
"Unable to retrieve map commonParams from arg4",
dpiCall(
TestCtx, conn_create, [Context, User, Password, Tns, badMap, #{}]
)
),
?ASSERT_EX(
"Unable to retrieve string",
dpiCall(
TestCtx, conn_create,
[Context, User, Password, Tns, CP#{encoding => badList}, #{}]
)
),
?ASSERT_EX(
"Unable to retrieve string",
dpiCall(
TestCtx, conn_create,
[Context, User, Password, Tns, CP#{nencoding => badList}, #{}]
)
),
?ASSERT_EX(
#{message := "ORA-01017: invalid username/password; logon denied"},
dpiCall(TestCtx, conn_create, [Context, <<"C">>, <<"N">>, Tns, CP, #{}])
),
Conn = dpiCall(
TestCtx, conn_create, [Context, User, Password, Tns, CP, #{}]
),
?assert(is_reference(Conn)),
dpiCall(TestCtx, conn_close, [Conn, [], <<>>]),
dpiCall(TestCtx, context_destroy, [Context]).
connPrepareStmt(#{session := Conn} = TestCtx) ->
?ASSERT_EX(
"Unable to retrieve resource connection from arg0",
dpiCall(
TestCtx, conn_prepareStmt, [?BAD_REF, false, <<"miau">>, <<>>]
)
),
?ASSERT_EX(
"Unable to retrieve bool/atom scrollable from arg1",
dpiCall(
TestCtx, conn_prepareStmt, [Conn, "badAtom", <<"miau">>, <<>>]
)
),
?ASSERT_EX(
"Unable to retrieve binary/string sql from arg2",
dpiCall(TestCtx, conn_prepareStmt, [Conn, false, badBinary, <<>>])
),
?ASSERT_EX(
"Unable to retrieve binary/string tag from arg3",
dpiCall(
TestCtx, conn_prepareStmt, [Conn, false, <<"miau">>, badBinary]
)
),
?ASSERT_EX(
#{message := "ORA-24373: invalid length specified for statement"},
dpiCall(TestCtx, conn_prepareStmt, [Conn, false, <<>>, <<>>])
),
Stmt = dpiCall(
TestCtx, conn_prepareStmt, [Conn, false, <<"miau">>, <<"foo">>]
),
?assert(is_reference(Stmt)),
dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
connNewVar(#{session := Conn} = TestCtx) ->
?ASSERT_EX(
"Unable to retrieve resource connection from arg0",
dpiCall(
TestCtx, conn_newVar,
[
?BAD_REF, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE',
'DPI_NATIVE_TYPE_DOUBLE', 100, 0, false, false, null
]
)
),
?ASSERT_EX(
"wrong or unsupported dpiOracleType type",
dpiCall(
TestCtx, conn_newVar,
[
Conn, 'BAD_DPI_ORACLE_TYPE', 'DPI_NATIVE_TYPE_DOUBLE', 100, 0,
false, false, null
]
)
),
?ASSERT_EX(
"wrong or unsupported dpiNativeType type",
dpiCall(
TestCtx, conn_newVar,
[
Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'BAD_DPI_NATIVE_TYPE', 100, 0, false, false, null
]
)
),
?ASSERT_EX(
"Unable to retrieve uint size from arg3",
dpiCall(
TestCtx, conn_newVar,
[
Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
?BAD_INT, 0, false, false, null
]
)
),
?ASSERT_EX(
"Unable to retrieve uint size from arg4",
dpiCall(
TestCtx, conn_newVar,
[
Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
100, ?BAD_INT, false, false, null
]
)
),
?ASSERT_EX(
"Unable to retrieve atom sizeIsBytes from arg5",
dpiCall(
TestCtx, conn_newVar,
[
Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
100, 0, "badAtom", false, null
]
)
),
?ASSERT_EX(
"Unable to retrieve atom isArray from arg6",
dpiCall(
TestCtx, conn_newVar,
[
Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
100, 0, false, "badAtom", null
]
)
),
?ASSERT_EX(
"Unable to retrieve atom objType from arg7",
dpiCall(
TestCtx, conn_newVar,
[
Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
100, 0, false, false, "badAtom"
]
)
),
?ASSERT_EX(
#{message := "DPI-1031: array size cannot be zero"},
dpiCall(
TestCtx, conn_newVar,
[
Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
0, 0, false, false, null
]
)
),
#{var := Var, data := Data} = dpiCall(
TestCtx, conn_newVar,
[
Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
100, 0, false, false, null
]
),
?assert(is_reference(Var)),
?assert(is_list(Data)),
[FirstData | _] = Data,
?assert(is_reference(FirstData)),
[dpiCall(TestCtx, data_release, [X]) || X <- Data],
dpiCall(TestCtx, var_release, [Var]).
connCommit(#{context := Context, session := Conn} = TestCtx) ->
?ASSERT_EX(
"Unable to retrieve resource connection from arg0",
dpiCall(TestCtx, conn_commit, [?BAD_REF])
),
?ASSERT_EX(
"Unable to retrieve resource connection from arg0",
dpiCall(TestCtx, conn_commit, [Context])
),
Result = dpiCall(TestCtx, conn_commit, [Conn]),
?assertEqual(ok, Result).
connRollback(#{context := Context, session := Conn} = TestCtx) ->
?ASSERT_EX(
"Unable to retrieve resource connection from arg0",
dpiCall(TestCtx, conn_rollback, [?BAD_REF])
),
?ASSERT_EX(
"Unable to retrieve resource connection from arg0",
dpiCall(TestCtx, conn_rollback, [Context])
),
Result = dpiCall(TestCtx, conn_rollback, [Conn]),
?assertEqual(ok, Result).
connPing(#{context := Context, session := Conn} = TestCtx) ->
?ASSERT_EX(
"Unable to retrieve resource connection from arg0",
dpiCall(TestCtx, conn_ping, [?BAD_REF])
),
?ASSERT_EX(
"Unable to retrieve resource connection from arg0",
dpiCall(TestCtx, conn_ping, [Context])
),
Result = dpiCall(TestCtx, conn_ping, [Conn]),
?assertEqual(ok, Result).
connClose(#{context := Context, session := Conn} = TestCtx) ->
?ASSERT_EX(
"Unable to retrieve resource connection from arg0",
dpiCall(TestCtx, conn_close, [?BAD_REF, [], <<>>])
),
?ASSERT_EX(
"Unable to retrieve atom list modes, not a list from arg1",
dpiCall(TestCtx, conn_close, [Conn, badList, <<>>])
),
?ASSERT_EX(
"Unable to retrieve mode list value from arg1",
dpiCall(TestCtx, conn_close, [Conn, ["badAtom"], <<>>])
),
?ASSERT_EX(
"Unable to retrieve DPI_MODE atom from arg1",
dpiCall(TestCtx, conn_close, [Conn, [wrongAtom], <<>>])
),
?ASSERT_EX(
"Unable to retrieve binary/string tag from arg2",
dpiCall(TestCtx, conn_close, [Conn, [], badBinary])
),
?ASSERT_EX(
"Unable to retrieve resource connection from arg0",
dpiCall(TestCtx, conn_close, [Context, [], <<>>])
),
#{tns := Tns, user := User, password := Password} = getConfig(),
Conn1 = dpiCall(
TestCtx, conn_create,
[
Context, User, Password, Tns,
#{encoding => "AL32UTF8", nencoding => "AL32UTF8"}, #{}
]
),
the other two do n't work without a session pool
Result = dpiCall(
TestCtx, conn_close, [Conn1, ['DPI_MODE_CONN_CLOSE_DEFAULT'], <<>>]
),
?assertEqual(ok, Result).
connGetServerVersion(#{session := Conn} = TestCtx) ->
?ASSERT_EX(
"Unable to retrieve resource connection from arg0",
dpiCall(TestCtx, conn_getServerVersion, [?BAD_REF])
),
#{
releaseNum := ReleaseNum, versionNum := VersionNum,
fullVersionNum := FullVersionNum, portReleaseNum := PortReleaseNum,
portUpdateNum := PortUpdateNum, releaseString := ReleaseString
} = dpiCall(TestCtx, conn_getServerVersion, [Conn]),
?assert(is_integer(ReleaseNum)),
?assert(is_integer(VersionNum)),
?assert(is_integer(FullVersionNum)),
?assert(is_integer(PortReleaseNum)),
?assert(is_integer(PortUpdateNum)),
?assert(is_list(ReleaseString)).
connSetClientIdentifier(#{session := Conn} = TestCtx) ->
?ASSERT_EX(
"Unable to retrieve resource connection from arg0",
dpiCall(TestCtx, conn_setClientIdentifier, [?BAD_REF, <<"myCoolConn">>])
),
?ASSERT_EX(
"Unable to retrieve string/binary value from arg1",
dpiCall(TestCtx, conn_setClientIdentifier, [Conn, badBinary])
),
?assertEqual(ok,
dpiCall(TestCtx, conn_setClientIdentifier, [Conn, <<"myCoolConn">>])
).
stmtExecuteMany_varGetReturnedData(#{session := Conn} = TestCtx) ->
?ASSERT_EX(
"Unable to retrieve resource statement from arg0",
dpiCall(TestCtx, stmt_executeMany, [?BAD_REF, [], 0])
),
StmtDrop = dpiCall(
TestCtx, conn_prepareStmt,
[Conn, false, <<"drop table oranif_test">>, <<>>]
),
catch dpiCall(TestCtx, stmt_execute, [StmtDrop, []]),
catch dpiCall(TestCtx, stmt_close, [StmtDrop, <<>>]),
StmtCreate = dpiCall(
TestCtx, conn_prepareStmt,
[Conn, false, <<"create table oranif_test (col1 varchar2(100))">>, <<>>]
),
0 = dpiCall(TestCtx, stmt_execute, [StmtCreate, []]),
ok = dpiCall(TestCtx, stmt_close, [StmtCreate, <<>>]),
Stmt = dpiCall(
TestCtx, conn_prepareStmt,
[
Conn, false,
<<
"insert into oranif_test values(:col1)"
" returning rowid into :rid"
>>,
<<>>
]
),
?ASSERT_EX(
"Unable to retrieve list of atoms from arg1",
dpiCall(TestCtx, stmt_executeMany, [Stmt, badList, 0])
),
?ASSERT_EX(
"Unable to retrieve uint32 numIters from arg2",
dpiCall(TestCtx, stmt_executeMany, [Stmt, [], ?BAD_INT])
),
?ASSERT_EX(
"mode must be a list of atoms",
dpiCall(TestCtx, stmt_executeMany, [Stmt, ["badAtom"], 0])
),
#{var := Var} = dpiCall(
TestCtx, conn_newVar,
[
Conn, 'DPI_ORACLE_TYPE_VARCHAR', 'DPI_NATIVE_TYPE_BYTES', 10,
10, true, false, null
]
),
#{var := VarRowId} = dpiCall(
TestCtx, conn_newVar,
[
Conn, 'DPI_ORACLE_TYPE_ROWID', 'DPI_NATIVE_TYPE_ROWID',
10, 0, false, false, null
]
),
dpiCall(TestCtx, stmt_bindByName, [Stmt, <<"col1">>, Var]),
dpiCall(TestCtx, stmt_bindByName, [Stmt, <<"rid">>, VarRowId]),
Data = lists:seq($0, $z),
DataLen = length(Data),
Indices = lists:seq(0, 9),
rand:seed(exsplus, {0, 0, 0}),
[dpiCall(
TestCtx, var_setFromBytes,
[
Var, Idx,
<< <<(lists:nth(rand:uniform(DataLen), Data))>>
|| _ <- lists:seq(1, 10) >>
]
) || Idx <- Indices],
?assertEqual(
ok,
dpiCall(
TestCtx, stmt_executeMany,
[Stmt, ['DPI_MODE_EXEC_COMMIT_ON_SUCCESS'], 10]
)
),
?ASSERT_EX(
"Unable to retrieve resource var from arg0",
dpiCall(TestCtx, var_getReturnedData, [?BAD_REF, 0])
),
?ASSERT_EX(
"Unable to retrieve uint pos from arg1",
dpiCall(TestCtx, var_getReturnedData, [VarRowId, ?BAD_INT])
),
[begin
Result = dpiCall(TestCtx, var_getReturnedData, [VarRowId, Idx]),
?assertMatch(#{numElements := 1, data := [_]}, Result),
[D] = maps:get(data, Result),
?assert(byte_size(dpiCall(TestCtx, data_get, [D])) > 0)
end || Idx <- Indices],
dpiCall(TestCtx, var_release, [Var]),
dpiCall(TestCtx, var_release, [VarRowId]),
dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
stmtExecute(#{session := Conn} = TestCtx) ->
?ASSERT_EX(
"Unable to retrieve resource statement from arg0",
dpiCall(TestCtx, stmt_execute, [?BAD_REF, []])
),
Stmt = dpiCall(
TestCtx, conn_prepareStmt,
[Conn, false, <<"select 1 from dual">>, <<>>]
),
?ASSERT_EX(
"Unable to retrieve list of atoms from arg1",
dpiCall(TestCtx, stmt_execute, [Stmt, badList])
),
?ASSERT_EX(
"mode must be a list of atoms",
dpiCall(TestCtx, stmt_execute, [Stmt, ["badAtom"]])
),
?assertEqual(
1,
dpiCall(
TestCtx, stmt_execute, [Stmt, ['DPI_MODE_EXEC_DEFAULT']]
)
),
dpiCall(TestCtx, stmt_close, [Stmt, <<>>]),
Stmt1 = dpiCall(
TestCtx, conn_prepareStmt,
[Conn, false, <<"all your base are belong to us">>, <<>>]
),
?ASSERT_EX(
#{message := "ORA-00900: invalid SQL statement"},
dpiCall(TestCtx, stmt_execute, [Stmt1, []])
),
dpiCall(TestCtx, stmt_close, [Stmt1, <<>>]).
%% Test dpi:stmt_fetch/1: bad refs are rejected (a connection resource is
%% not a statement either); a successful fetch yields a map with atom
%% `found` and integer `bufferRowIndex`.
stmtFetch(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_fetch, [?BAD_REF])
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_fetch, [Conn])
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1337 from dual">>, <<>>]
    ),
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    #{found := Found, bufferRowIndex := BufferRowIndex} =
        dpiCall(TestCtx, stmt_fetch, [Stmt]),
    ?assert(is_atom(Found)),
    ?assert(is_integer(BufferRowIndex)),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
%% Test dpi:stmt_fetchRows/2: argument validation plus the shape of the
%% result map (numRowsFetched / bufferRowIndex / moreRows).
stmtFetchRows(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_fetchRows, [?BAD_REF, 1])
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_fetchRows, [Conn, 1])
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1337 from dual">>, <<>>]
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint maxRows from arg1",
        dpiCall(TestCtx, stmt_fetchRows, [Stmt, ?BAD_INT])
    ),
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    #{
        numRowsFetched := NumRowsFetched,
        bufferRowIndex := BufferRowIndex,
        moreRows := MoreRows
    } = dpiCall(TestCtx, stmt_fetchRows, [Stmt, 1]),
    ?assert(is_atom(MoreRows)),
    ?assert(is_integer(BufferRowIndex)),
    ?assert(is_integer(NumRowsFetched)),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
%% Test dpi:stmt_setFetchArraySize/2: bad refs / bad size rejected,
%% success returns an atom (presumably `ok` — asserted loosely here).
stmtSetFetchArraySize(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_setFetchArraySize, [?BAD_REF, 1])
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_setFetchArraySize, [Conn, 1])
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1337 from dual">>, <<>>]
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint arraySize from arg1",
        dpiCall(TestCtx, stmt_setFetchArraySize, [Stmt, ?BAD_INT])
    ),
    Ok = dpiCall(TestCtx, stmt_setFetchArraySize, [Stmt, 1]),
    ?assert(is_atom(Ok)),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
%% Test dpi:stmt_getQueryValue/2: bad statement / position rejected,
%% calling before a fetch raises DPI-1029, and after a fetch the result
%% carries an atom nativeTypeNum plus a data resource reference.
stmtGetQueryValue(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_getQueryValue, [?BAD_REF, 1])
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1337 from dual">>, <<>>]
    ),
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(TestCtx, stmt_getQueryValue, [Stmt, ?BAD_INT])
    ),
    ?ASSERT_EX(
        #{message := "DPI-1029: no row currently fetched"},
        dpiCall(TestCtx, stmt_getQueryValue, [Stmt, 1])
    ),
    dpiCall(TestCtx, stmt_fetch, [Stmt]),
    #{nativeTypeNum := Type, data := Result} = dpiCall(
        TestCtx, stmt_getQueryValue, [Stmt, 1]
    ),
    ?assert(is_atom(Type)),
    ?assert(is_reference(Result)),
    dpiCall(TestCtx, data_release, [Result]),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
%% Test dpi:stmt_getQueryInfo/2: argument validation plus the full shape
%% of the column-info map (name, nullOk and the nested typeInfo fields).
stmtGetQueryInfo(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_getQueryInfo, [?BAD_REF, 1])
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1 from dual">>, <<>>]
    ),
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(TestCtx, stmt_getQueryInfo, [Stmt, ?BAD_INT])
    ),
    #{
        name := Name, nullOk := NullOk, typeInfo := #{
            clientSizeInBytes := ClientSizeInBytes,
            dbSizeInBytes := DbSizeInBytes,
            defaultNativeTypeNum := DefaultNativeTypeNum,
            fsPrecision := FsPrecision,
            objectType := ObjectType, ociTypeCode := OciTypeCode,
            oracleTypeNum := OracleTypeNum , precision := Precision,
            scale := Scale, sizeInChars := SizeInChars
        }
    } = dpiCall(TestCtx, stmt_getQueryInfo, [Stmt, 1]),
    %% only the types of the fields are checked, not concrete values
    ?assert(is_list(Name)),
    ?assert(is_atom(NullOk)),
    ?assert(is_integer(ClientSizeInBytes)),
    ?assert(is_integer(DbSizeInBytes)),
    ?assert(is_atom(DefaultNativeTypeNum)),
    ?assert(is_integer(FsPrecision)),
    ?assert(is_atom(ObjectType)),
    ?assert(is_integer(OciTypeCode)),
    ?assert(is_atom(OracleTypeNum)),
    ?assert(is_integer(Precision)),
    ?assert(is_integer(Scale)),
    ?assert(is_integer(SizeInChars)),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
%% Test dpi:stmt_getInfo/1: bad refs rejected, then for one SQL text per
%% statement class verify the boolean flags and the expected
%% DPI_STMT_TYPE_* classification returned for the prepared statement.
stmtGetInfo(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_getInfo, [?BAD_REF])
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_getInfo, [Conn])
    ),
    lists:foreach(
        fun({Match, StmtStr}) ->
            Stmt = dpiCall(
                TestCtx, conn_prepareStmt, [Conn, false, StmtStr, <<>>]
            ),
            #{
                isDDL := IsDDL, isDML := IsDML,
                isPLSQL := IsPLSQL, isQuery := IsQuery,
                isReturning := IsReturning, statementType := StatementType
            } = dpiCall(TestCtx, stmt_getInfo, [Stmt]),
            dpiCall(TestCtx, stmt_close, [Stmt, <<>>]),
            ?assert(is_boolean(IsDDL)),
            ?assert(is_boolean(IsDML)),
            ?assert(is_boolean(IsPLSQL)),
            ?assert(is_boolean(IsQuery)),
            ?assert(is_boolean(IsReturning)),
            ?assertEqual(Match, StatementType)
        end,
        %% {expected statementType, SQL text} — statements are only
        %% prepared, never executed, so syntactically loose texts suffice
        [
            {'DPI_STMT_TYPE_UNKNOWN', <<"another one bites the dust">>},
            {'DPI_STMT_TYPE_SELECT', <<"select 2 from dual">>},
            {'DPI_STMT_TYPE_UPDATE', <<"update a set b = 5 where c = 3">>},
            {'DPI_STMT_TYPE_DELETE', <<"delete from a where b = 5">>},
            {'DPI_STMT_TYPE_INSERT', <<"insert into a (b) values (5)">>},
            {'DPI_STMT_TYPE_CREATE', <<"create table a (b int)">>},
            {'DPI_STMT_TYPE_DROP', <<"drop table students">>},
            {'DPI_STMT_TYPE_ALTER', <<"alter table a add b int">>},
            {'DPI_STMT_TYPE_BEGIN', <<"begin null end">>},
            {'DPI_STMT_TYPE_DECLARE', <<"declare mambo number(5)">>},
            {'DPI_STMT_TYPE_CALL', <<"call a.b(c)">>},
            {'DPI_STMT_TYPE_MERGE', <<"MERGE INTO a USING b ON (1 = 1)">>},
            {'DPI_STMT_TYPE_EXPLAIN_PLAN', <<"EXPLAIN">>},
            {'DPI_STMT_TYPE_COMMIT', <<"commit">>},
            {'DPI_STMT_TYPE_ROLLBACK', <<"rollback">>}
        ]
    ).
%% Test dpi:stmt_getNumQueryColumns/1: bad ref rejected, integer result
%% on an open statement, and DPI-1039 after the statement was closed.
stmtGetNumQueryColumns(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_getNumQueryColumns, [?BAD_REF])
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1337 from dual">>, <<>>]
    ),
    Count = dpiCall(TestCtx, stmt_getNumQueryColumns, [Stmt]),
    ?assert(is_integer(Count)),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]),
    %% using a closed statement must raise, not crash
    ?ASSERT_EX(
        #{message := "DPI-1039: statement was already closed"},
        dpiCall(TestCtx, stmt_getNumQueryColumns, [Stmt])
    ).
%% Test dpi:stmt_bindValueByPos/4: each argument position is validated
%% (statement ref, uint pos incl. negative, native type atom, data ref)
%% and a well-formed bind returns ok.
stmtBindValueByPos(#{session := Conn} = TestCtx) ->
    BindData = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(
            TestCtx, stmt_bindValueByPos,
            [?BAD_REF, 1, 'DPI_NATIVE_TYPE_INT64', BindData]
        )
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"insert into dual values (:A)">>, <<>>]
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(
            TestCtx, stmt_bindValueByPos,
            [Stmt, ?BAD_INT, 'DPI_NATIVE_TYPE_INT64', BindData]
        )
    ),
    ?ASSERT_EX(
        "wrong or unsupported dpiNativeType type",
        dpiCall(TestCtx, stmt_bindValueByPos, [Stmt, 1, "badAtom", BindData])
    ),
    ?assertEqual(ok,
        dpiCall(
            TestCtx, stmt_bindValueByPos,
            [Stmt, 1, 'DPI_NATIVE_TYPE_INT64', BindData]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource data from arg3",
        dpiCall(
            TestCtx, stmt_bindValueByPos,
            [Stmt, 1, 'DPI_NATIVE_TYPE_INT64', ?BAD_REF]
        )
    ),
    %% negative position must fail the same uint check as ?BAD_INT
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(
            TestCtx, stmt_bindValueByPos,
            [Stmt, -1, 'DPI_NATIVE_TYPE_INT64', BindData]
        )
    ),
    dpiCall(TestCtx, data_release, [BindData]),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
%% Test dpi:stmt_bindValueByName/4: same validation matrix as the
%% by-position variant, keyed on the bind name instead of the position.
stmtBindValueByName(#{session := Conn} = TestCtx) ->
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"insert into dual values (:A)">>, <<>>]
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource data from arg3",
        dpiCall(
            TestCtx, stmt_bindValueByName,
            [Stmt, <<"A">>, 'DPI_NATIVE_TYPE_INT64', ?BAD_REF]
        )
    ),
    BindData = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(
            TestCtx, stmt_bindValueByName,
            [?BAD_REF, <<"A">>, 'DPI_NATIVE_TYPE_INT64', BindData]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve string/list name from arg1",
        dpiCall(
            TestCtx, stmt_bindValueByName,
            [Stmt, ?BAD_INT, 'DPI_NATIVE_TYPE_INT64', BindData]
        )
    ),
    ?ASSERT_EX(
        "wrong or unsupported dpiNativeType type",
        dpiCall(
            TestCtx, stmt_bindValueByName, [Stmt, <<"A">>, "badAtom", BindData]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource data from arg3",
        dpiCall(
            TestCtx, stmt_bindValueByName,
            [Stmt, <<"A">>, 'DPI_NATIVE_TYPE_INT64', ?BAD_REF]
        )
    ),
    ?assertEqual(ok,
        dpiCall(
            TestCtx, stmt_bindValueByName,
            [Stmt, <<"A">>, 'DPI_NATIVE_TYPE_INT64', BindData]
        )
    ),
    dpiCall(TestCtx, data_release, [BindData]),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
%% Test dpi:stmt_bindByPos/3: bind a variable (not a value) to a
%% positional placeholder; validates statement ref, position and var ref.
stmtBindByPos(#{session := Conn} = TestCtx) ->
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"insert into dual values (:A)">>, <<>>]
    ),
    #{var := Var, data := Data} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_NATIVE_INT', 'DPI_NATIVE_TYPE_INT64', 100,
            0, false, false, null
        ]
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_bindByPos, [?BAD_REF, 1, Var])
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(TestCtx, stmt_bindByPos, [Stmt, ?BAD_INT, Var])
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource var from arg3",
        dpiCall(TestCtx, stmt_bindByPos, [Stmt, 1, ?BAD_REF])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, stmt_bindByPos, [Stmt, 1, Var])),
    %% release every data handle created along with the var
    [dpiCall(TestCtx, data_release, [X]) || X <- Data],
    dpiCall(TestCtx, var_release, [Var]),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
%% Test dpi:stmt_bindByName/3: same as by-position but keyed on the bind
%% name; also checks ORA-01036 for a name that is not in the statement.
stmtBindByName(#{session := Conn} = TestCtx) ->
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"insert into dual values (:A)">>, <<>>]
    ),
    #{var := Var, data := Data} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_NATIVE_INT', 'DPI_NATIVE_TYPE_INT64', 100,
            0, false, false, null
        ]
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_bindByName, [?BAD_REF, <<"A">>, Var])
    ),
    ?ASSERT_EX(
        "Unable to retrieve string/list name from arg1",
        dpiCall(TestCtx, stmt_bindByName, [Stmt, badBinary, Var])
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource var from arg3",
        dpiCall(TestCtx, stmt_bindByName, [Stmt, <<"A">>, ?BAD_REF])
    ),
    ?ASSERT_EX(
        #{message := "ORA-01036: illegal variable name/number"},
        dpiCall(TestCtx, stmt_bindByName, [Stmt, <<"B">>, Var])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, stmt_bindByName, [Stmt, <<"A">>, Var])),
    [dpiCall(TestCtx, data_release, [X]) || X <- Data],
    dpiCall(TestCtx, var_release, [Var]),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
%% Test dpi:stmt_define/3: argument validation, DPI-1028 for an invalid
%% query position after execute, and ok for a valid define.
stmtDefine(#{session := Conn} = TestCtx) ->
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1 from dual">>, <<>>]
    ),
    #{var := Var, data := Data} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
            100, 0, false, false, null
        ]
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_define, [?BAD_REF, 1, Var])
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(TestCtx, stmt_define, [Stmt, ?BAD_INT, Var])
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource var from arg2",
        dpiCall(TestCtx, stmt_define, [Stmt, 1, ?BAD_REF])
    ),
    %% defines only become position-checked after execute
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    ?ASSERT_EX(
        #{message := "DPI-1028: query position 12345 is invalid"},
        dpiCall(TestCtx, stmt_define, [Stmt, 12345, Var])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, stmt_define, [Stmt, 1, Var])),
    [dpiCall(TestCtx, data_release, [X]) || X <- Data],
    dpiCall(TestCtx, var_release, [Var]),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
%% Test dpi:stmt_defineValue/7: validates every argument in turn
%% (statement ref, pos, oracle type atom, native type atom, size,
%% sizeIsBytes flag) and succeeds after execute with valid arguments.
stmtDefineValue(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(
            TestCtx, stmt_defineValue,
            [
                ?BAD_REF, 1, 'DPI_ORACLE_TYPE_NATIVE_INT',
                'DPI_NATIVE_TYPE_INT64', 0, false, null
            ]
        )
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1 from dual">>, <<>>]
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(
            TestCtx, stmt_defineValue,
            [
                Stmt, ?BAD_INT, 'DPI_ORACLE_TYPE_NATIVE_INT',
                'DPI_NATIVE_TYPE_INT64', 0, false, null
            ]
        )
    ),
    ?ASSERT_EX(
        "wrong or unsupported dpiOracleType type",
        dpiCall(
            TestCtx, stmt_defineValue,
            [Stmt, 1, badAtom, 'DPI_NATIVE_TYPE_INT64', 0, false, null]
        )
    ),
    ?ASSERT_EX(
        "wrong or unsupported dpiNativeType type",
        dpiCall(
            TestCtx, stmt_defineValue,
            [Stmt, 1, 'DPI_ORACLE_TYPE_NATIVE_INT', badAtom, 0, false, null]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint size from arg4",
        dpiCall(
            TestCtx, stmt_defineValue,
            [
                Stmt, 1, 'DPI_ORACLE_TYPE_NATIVE_INT', 'DPI_NATIVE_TYPE_INT64',
                ?BAD_INT, false, null
            ]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve bool/atom sizeIsBytes from arg5",
        dpiCall(
            TestCtx, stmt_defineValue,
            [
                Stmt, 1, 'DPI_ORACLE_TYPE_NATIVE_INT', 'DPI_NATIVE_TYPE_INT64',
                0, "badAtom", null
            ]
        )
    ),
    %% valid define only works on an executed statement
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    ?assertEqual(ok,
        dpiCall(
            TestCtx, stmt_defineValue,
            [
                Stmt, 1, 'DPI_ORACLE_TYPE_NATIVE_INT', 'DPI_NATIVE_TYPE_INT64',
                0, false, null
            ]
        )
    ),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
%% Test dpi:stmt_close/2: bad statement ref and non-binary tag are
%% rejected; closing a valid statement with an empty tag returns ok.
stmtClose(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource statement from arg0",
        dpiCall(TestCtx, stmt_close, [?BAD_REF, <<>>])
    ),
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select 1 from dual">>, <<>>]
    ),
    ?ASSERT_EX(
        "Unable to retrieve string tag from arg1",
        dpiCall(TestCtx, stmt_close, [Stmt, badBinary])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, stmt_close, [Stmt, <<>>])).
%% Test dpi:var_setNumElementsInArray/2: bad var ref / bad count are
%% rejected; a valid call on an array var returns ok.
varSetNumElementsInArray(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource var from arg0",
        dpiCall(TestCtx, var_setNumElementsInArray, [?BAD_REF, 100])
    ),
    #{var := Var, data := Data} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_VARCHAR', 'DPI_NATIVE_TYPE_BYTES', 100, 100,
            true, true, null
        ]
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint numElements from arg1",
        dpiCall(TestCtx, var_setNumElementsInArray, [Var, ?BAD_INT])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, var_setNumElementsInArray, [Var, 100])),
    [dpiCall(TestCtx, data_release, [X]) || X <- Data],
    dpiCall(TestCtx, var_release, [Var]).
%% Test dpi:var_setFromBytes/3: argument validation, DPI-1009 for a
%% position beyond the array size, and ok for a valid write.
varSetFromBytes(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource var from arg0",
        dpiCall(TestCtx, var_setFromBytes, [?BAD_REF, 0, <<"abc">>])
    ),
    #{var := Var, data := Data} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_VARCHAR', 'DPI_NATIVE_TYPE_BYTES', 100, 100,
            true, true, null
        ]
    ),
    ?ASSERT_EX(
        "Unable to retrieve uint pos from arg1",
        dpiCall(TestCtx, var_setFromBytes, [Var, ?BAD_INT, <<"abc">>])
    ),
    ?ASSERT_EX(
        "Unable to retrieve binary/string value from arg2",
        dpiCall(TestCtx, var_setFromBytes, [Var, 0, badBinary])
    ),
    ?ASSERT_EX(
        #{message :=
            "DPI-1009: zero-based position 1000 is not valid with max array"
            " size of 100"
        },
        dpiCall(TestCtx, var_setFromBytes, [Var, 1000, <<"abc">>])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, var_setFromBytes, [Var, 0, <<"abc">>])),
    [dpiCall(TestCtx, data_release, [X]) || X <- Data],
    dpiCall(TestCtx, var_release, [Var]).
%% Test dpi:var_release/1: bad ref rejected; releasing a valid var
%% (after freeing its data handles) returns ok.
varRelease(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource var from arg0",
        dpiCall(TestCtx, var_release, [?BAD_REF])
    ),
    #{var := Var, data := Data} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_VARCHAR', 'DPI_NATIVE_TYPE_BYTES', 100, 100,
            true, true, null
        ]
    ),
    [dpiCall(TestCtx, data_release, [X]) || X <- Data],
    ?assertEqual(ok, dpiCall(TestCtx, var_release, [Var])).
%% Test dpi:data_setTimestamp/10: every one of the nine integer fields
%% (year..tzMinuteOffset) is validated individually; then the setter is
%% exercised on both a standalone data handle and one owned by a
%% TIMESTAMP_TZ variable.
dataSetTimestamp(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(
            TestCtx, data_setTimestamp, [?BAD_REF, 1, 2, 3, 4, 5, 6, 7, 8, 9]
        )
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve int year from arg1",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, ?BAD_INT, 2, 3, 4, 5, 6, 7, 8, 9]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve int month from arg2",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, 1, ?BAD_INT, 3, 4, 5, 6, 7, 8, 9]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve int day from arg3",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, 1, 2, ?BAD_INT, 4, 5, 6, 7, 8, 9]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve int hour from arg4",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, 1, 2, 3, ?BAD_INT, 5, 6, 7, 8, 9]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve int minute from arg5",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, 1, 2, 3, 4, ?BAD_INT, 6, 7, 8, 9]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve int second from arg6",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, 1, 2, 3, 4, 5, ?BAD_INT, 7, 8, 9]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve int fsecond from arg7",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, 1, 2, 3, 4, 5, 6, ?BAD_INT, 8, 9]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve int tzHourOffset from arg8",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, 1, 2, 3, 4, 5, 6, 7, ?BAD_INT, 9]
        )
    ),
    ?ASSERT_EX(
        "Unable to retrieve int tzMinuteOffset from arg9",
        dpiCall(
            TestCtx, data_setTimestamp, [Data, 1, 2, 3, 4, 5, 6, 7, 8, ?BAD_INT]
        )
    ),
    ?assertEqual(
        ok,
        dpiCall(TestCtx, data_setTimestamp, [Data, 1, 2, 3, 4, 5, 6, 7, 8, 9])
    ),
    dpiCall(TestCtx, data_release, [Data]),
    %% repeat the happy path on a var-owned data handle
    #{var := Var, data := [Data1]} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_TIMESTAMP_TZ', 'DPI_NATIVE_TYPE_TIMESTAMP',
            1, 1, true, true, null
        ]
    ),
    ?assertEqual(ok,
        dpiCall(TestCtx, data_setTimestamp, [Data1, 1, 2, 3, 4, 5, 6, 7, 8, 9])
    ),
    dpiCall(TestCtx, data_release, [Data1]),
    dpiCall(TestCtx, var_release, [Var]).
%% Test dpi:data_setIntervalDS/6: per-argument validation of the five
%% interval fields, then the setter on both a standalone data handle and
%% a var-owned INTERVAL_DS handle.
dataSetIntervalDS(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_setIntervalDS, [?BAD_REF, 1, 2, 3, 4, 5])
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve int days from arg1",
        dpiCall(TestCtx, data_setIntervalDS, [Data, ?BAD_INT, 2, 3, 4, 5])
    ),
    ?ASSERT_EX(
        "Unable to retrieve int hours from arg2",
        dpiCall(TestCtx, data_setIntervalDS, [Data, 1, ?BAD_INT, 3, 4, 5])
    ),
    ?ASSERT_EX(
        "Unable to retrieve int minutes from arg3",
        dpiCall(TestCtx, data_setIntervalDS, [Data, 1, 2, ?BAD_INT, 4, 5])
    ),
    ?ASSERT_EX(
        "Unable to retrieve int seconds from arg4",
        dpiCall(TestCtx, data_setIntervalDS, [Data, 1, 2, 3, ?BAD_INT, 5])
    ),
    ?ASSERT_EX(
        "Unable to retrieve int fseconds from arg5",
        dpiCall(TestCtx, data_setIntervalDS, [Data, 1, 2, 3, 4, ?BAD_INT])
    ),
    ?assertEqual(
        ok,
        dpiCall(TestCtx, data_setIntervalDS, [Data, 1, 2, 3, 4, 5])
    ),
    dpiCall(TestCtx, data_release, [Data]),
    #{var := Var, data := [Data1]} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_INTERVAL_DS', 'DPI_NATIVE_TYPE_INTERVAL_DS',
            1, 1, true, true, null
        ]
    ),
    ?assertEqual(ok,
        dpiCall(TestCtx, data_setIntervalDS, [Data1, 1, 2, 3, 4, 5])
    ),
    dpiCall(TestCtx, data_release, [Data1]),
    dpiCall(TestCtx, var_release, [Var]).
%% Test dpi:data_setIntervalYM/3: same pattern for years/months.
dataSetIntervalYM(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_setIntervalYM, [?BAD_REF, 1, 2])
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve int years from arg1",
        dpiCall(TestCtx, data_setIntervalYM, [Data, ?BAD_INT, 2])
    ),
    ?ASSERT_EX(
        "Unable to retrieve int months from arg2",
        dpiCall(TestCtx, data_setIntervalYM, [Data, 1, ?BAD_INT])
    ),
    ?assertEqual(ok,
        dpiCall(TestCtx, data_setIntervalYM, [Data, 1, 2])
    ),
    dpiCall(TestCtx, data_release, [Data]),
    #{var := Var, data := [Data1]} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_INTERVAL_YM', 'DPI_NATIVE_TYPE_INTERVAL_YM',
            1, 1, true, true, null
        ]
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_setIntervalYM, [Data1, 1, 2])),
    dpiCall(TestCtx, data_release, [Data1]),
    dpiCall(TestCtx, var_release, [Var]).
%% Test dpi:data_setInt64/2 on a standalone and a var-owned data handle.
dataSetInt64(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_setInt64, [?BAD_REF, 1])
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve int amount from arg1",
        dpiCall(TestCtx, data_setInt64, [Data, ?BAD_INT])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_setInt64, [Data, 1])),
    dpiCall(TestCtx, data_release, [Data]),
    #{var := Var, data := [Data1]} = dpiCall(
        TestCtx, conn_newVar, [
            Conn, 'DPI_ORACLE_TYPE_INTERVAL_YM', 'DPI_NATIVE_TYPE_INTERVAL_YM',
            1, 1, true, true, null
        ]
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_setInt64, [Data1, 1])),
    dpiCall(TestCtx, data_release, [Data1]),
    dpiCall(TestCtx, var_release, [Var]).
%% Test dpi:data_setDouble/2: same pattern with a float payload.
dataSetDouble(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_setDouble, [?BAD_REF, 1])
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve double amount from arg1",
        dpiCall(TestCtx, data_setDouble, [Data, ?BAD_FLOAT])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_setDouble, [Data, 1.0])),
    dpiCall(TestCtx, data_release, [Data]),
    #{var := Var, data := [Data1]} = dpiCall(
        TestCtx, conn_newVar, [
            Conn, 'DPI_ORACLE_TYPE_INTERVAL_YM', 'DPI_NATIVE_TYPE_INTERVAL_YM',
            1, 1, true, true, null
        ]
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_setDouble, [Data1, 1.0])),
    dpiCall(TestCtx, data_release, [Data1]),
    dpiCall(TestCtx, var_release, [Var]).
%% Test dpi:data_setBytes/2: only needs a standalone data handle.
dataSetBytes(TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_setBytes, [?BAD_REF, <<"my string">>])
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve binary data from arg1",
        dpiCall(TestCtx, data_setBytes, [Data, badBinary])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_setBytes, [Data, <<"my string">>])),
    dpiCall(TestCtx, data_release, [Data]).
%% Test dpi:data_setIsNull/2: the flag must be a boolean atom; both true
%% and false succeed on a standalone handle and on a var-owned one.
dataSetIsNull(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_setIsNull, [?BAD_REF, 1])
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    ?ASSERT_EX(
        "Unable to retrieve bool/atom isNull from arg1",
        dpiCall(TestCtx, data_setIsNull, [Data, "not an atom"])
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_setIsNull, [Data, true])),
    ?assertEqual(ok, dpiCall(TestCtx, data_setIsNull, [Data, false])),
    dpiCall(TestCtx, data_release, [Data]),
    #{var := Var, data := [Data1]} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_INTERVAL_YM', 'DPI_NATIVE_TYPE_INTERVAL_YM',
            1, 1, true, true, null
        ]
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_setIsNull, [Data1, true])),
    dpiCall(TestCtx, data_release, [Data1]),
    dpiCall(TestCtx, var_release, [Var]).
%% Test dpi:data_get/1 across native types: for each {test-tag, oracle
%% type, native type} triple a fresh var/data pair is created and the
%% returned Erlang term's shape is checked (null, integer, float,
%% timestamp map, interval maps, or an error for unsupported LOB types).
dataGet(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data from arg0",
        dpiCall(TestCtx, data_get, [?BAD_REF])
    ),
    Types = [
        {null, 'DPI_ORACLE_TYPE_INTERVAL_YM', 'DPI_NATIVE_TYPE_INTERVAL_YM'},
        {int, 'DPI_ORACLE_TYPE_NATIVE_INT', 'DPI_NATIVE_TYPE_INT64'},
        {int, 'DPI_ORACLE_TYPE_NATIVE_UINT', 'DPI_NATIVE_TYPE_UINT64'},
        {float, 'DPI_ORACLE_TYPE_NATIVE_FLOAT', 'DPI_NATIVE_TYPE_FLOAT'},
        {float, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE'},
        {double, 'DPI_ORACLE_TYPE_NATIVE_FLOAT', 'DPI_NATIVE_TYPE_FLOAT'},
        {double, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE'},
        {ts, 'DPI_ORACLE_TYPE_TIMESTAMP_TZ', 'DPI_NATIVE_TYPE_TIMESTAMP'},
        {intvlds, 'DPI_ORACLE_TYPE_INTERVAL_DS', 'DPI_NATIVE_TYPE_INTERVAL_DS'},
        {intvlym, 'DPI_ORACLE_TYPE_INTERVAL_YM', 'DPI_NATIVE_TYPE_INTERVAL_YM'},
        {unsupported, 'DPI_ORACLE_TYPE_CLOB', 'DPI_NATIVE_TYPE_LOB'}
    ],
    lists:foreach(
        fun({Test, OraType, NativeType}) ->
            #{var := Var, data := [Data]} = dpiCall(
                TestCtx, conn_newVar,
                [Conn, OraType, NativeType, 1, 0, false, false, null]
            ),
            %% the null case checks the isNull short-circuit of data_get
            if Test == null -> dpiCall(TestCtx, data_setIsNull, [Data, true]);
                true -> dpiCall(TestCtx, data_setIsNull, [Data, false])
            end,
            case Test of
                null -> ?assertEqual(null, dpiCall(TestCtx, data_get, [Data]));
                int ->
                    ?assert(
                        is_integer(dpiCall(TestCtx, data_getInt64, [Data]))
                    ),
                    ?assert(is_integer(dpiCall(TestCtx, data_get, [Data])));
                float -> ?assert(is_float(dpiCall(TestCtx, data_get, [Data])));
                double ->
                    ?assert(
                        is_float(dpiCall(TestCtx, data_getDouble, [Data]))
                    );
                ts ->
                    #{
                        year := Year, month := Month, day := Day, hour := Hour,
                        minute := Minute, second := Second, fsecond := Fsecond,
                        tzHourOffset := TzHourOffset,
                        tzMinuteOffset := TzMinuteOffset
                    } = dpiCall(TestCtx, data_get, [Data]),
                    ?assert(is_integer(Year)),
                    ?assert(is_integer(Month)),
                    ?assert(is_integer(Day)),
                    ?assert(is_integer(Hour)),
                    ?assert(is_integer(Minute)),
                    ?assert(is_integer(Second)),
                    ?assert(is_integer(Fsecond)),
                    ?assert(is_integer(TzHourOffset)),
                    ?assert(is_integer(TzMinuteOffset));
                intvlds ->
                    #{
                        days := Days, hours := Hours, minutes := Minutes,
                        seconds := Seconds, fseconds := Fseconds
                    } = dpiCall(TestCtx, data_get, [Data]),
                    ?assert(is_integer(Days)),
                    ?assert(is_integer(Hours)),
                    ?assert(is_integer(Minutes)),
                    ?assert(is_integer(Seconds)),
                    ?assert(is_integer(Fseconds));
                intvlym ->
                    #{
                        years := Years,
                        months := Months
                    } = dpiCall(TestCtx, data_get, [Data]),
                    ?assert(is_integer(Years)),
                    ?assert(is_integer(Months));
                unsupported ->
                    ?ASSERT_EX(
                        "Unsupported nativeTypeNum",
                        dpiCall(TestCtx, data_get, [Data])
                    )
            end,
            dpiCall(TestCtx, data_release, [Data]),
            dpiCall(TestCtx, var_release, [Var])
        end,
        Types
    ).
%% Test dpi:data_get/1 on a BYTES-typed var: after writing via
%% var_setFromBytes the read-back term must be a binary.
dataGetBinary(#{session := Conn} = TestCtx) ->
    #{var := Var, data := [Data]} = dpiCall(
        TestCtx, conn_newVar, [
            Conn, 'DPI_ORACLE_TYPE_NCHAR', 'DPI_NATIVE_TYPE_BYTES', 1, 100,
            true, true, null
        ]
    ),
    ?assertEqual(ok,
        dpiCall(TestCtx, var_setFromBytes, [Var, 0, <<"my string">>])
    ),
    dpiCall(TestCtx, data_setIsNull, [Data, false]),
    ?assert(is_binary(dpiCall(TestCtx, data_get, [Data]))),
    dpiCall(TestCtx, data_release, [Data]),
    dpiCall(TestCtx, var_release, [Var]).
%% Test dpi:data_get/1 on a fetched ROWID column: result is a binary.
dataGetRowid(#{session := Conn} = TestCtx) ->
    Stmt = dpiCall(
        TestCtx, conn_prepareStmt,
        [Conn, false, <<"select rowid from dual">>, <<>>]
    ),
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    dpiCall(TestCtx, stmt_fetch, [Stmt]),
    #{data := Data} = dpiCall(TestCtx, stmt_getQueryValue, [Stmt, 1]),
    ?assert(is_binary(dpiCall(TestCtx, data_get, [Data]))),
    dpiCall(TestCtx, data_release, [Data]),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
%% Test dpi:data_get/1 on a statement (ref cursor) out-bind: a PL/SQL
%% block opens one of two cursors depending on :choice and assigns it to
%% the :cursor bind variable; data_get on the stmt data handle must
%% yield a resource reference after each execute.
%% Fix: the inline note on the first data_get round-trip had lost its
%% comment marker ("first - time get" as bare code), which breaks
%% compilation; restored as a proper comment.
dataGetStmt(#{session := Conn} = TestCtx) ->
    SQL = <<"
        DECLARE
            p_cursor SYS_REFCURSOR;
        BEGIN
            IF :choice > 0 THEN
                OPEN p_cursor FOR SELECT 1 FROM dual;
                :cursor := p_cursor;
            ELSE
                OPEN p_cursor FOR SELECT 2 FROM dual;
                :cursor := p_cursor;
            END IF;
        END;
    ">>,
    #{var := VarChoice, data := [DataChoice]} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_NATIVE_INT', 'DPI_NATIVE_TYPE_INT64', 1, 0,
            false, false, null
        ]
    ),
    #{var := VarStmt, data := [DataStmt]} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_STMT', 'DPI_NATIVE_TYPE_STMT', 1, 0,
            false, false, null
        ]
    ),
    Stmt = dpiCall(TestCtx, conn_prepareStmt, [Conn, false, SQL, <<>>]),
    ok = dpiCall(TestCtx, stmt_bindByName, [Stmt, <<"choice">>, VarChoice]),
    ok = dpiCall(TestCtx, stmt_bindByName, [Stmt, <<"cursor">>, VarStmt]),
    %% first-time get (choice = 0 branch)
    ok = dpiCall(TestCtx, data_setInt64, [DataChoice, 0]),
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    ?assert(is_reference(dpiCall(TestCtx, data_get, [DataStmt]))),
    %% second get: re-execute taking the other branch
    ok = dpiCall(TestCtx, data_setInt64, [DataChoice, 1]),
    dpiCall(TestCtx, stmt_execute, [Stmt, []]),
    ?assert(is_reference(dpiCall(TestCtx, data_get, [DataStmt]))),
    dpiCall(TestCtx, data_release, [DataChoice]),
    dpiCall(TestCtx, var_release, [VarChoice]),
    dpiCall(TestCtx, data_release, [DataStmt]),
    dpiCall(TestCtx, var_release, [VarStmt]),
    dpiCall(TestCtx, stmt_close, [Stmt, <<>>]).
%% Test dpi:data_getInt64/1: null flag yields the atom `null`, otherwise
%% an integer is returned.
dataGetInt64(TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_getInt64, [?BAD_REF])
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    dpiCall(TestCtx, data_setIsNull, [Data, true]),
    ?assertEqual(null, dpiCall(TestCtx, data_getInt64, [Data])),
    dpiCall(TestCtx, data_setIsNull, [Data, false]),
    ?assert(is_integer(dpiCall(TestCtx, data_getInt64, [Data]))),
    dpiCall(TestCtx, data_release, [Data]).
%% Test dpi:data_getDouble/1: same pattern with a float result.
dataGetDouble(TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_getDouble, [?BAD_REF])
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    dpiCall(TestCtx, data_setIsNull, [Data, true]),
    ?assertEqual(null, dpiCall(TestCtx, data_getDouble, [Data])),
    dpiCall(TestCtx, data_setIsNull, [Data, false]),
    ?assert(is_float(dpiCall(TestCtx, data_getDouble, [Data]))),
    dpiCall(TestCtx, data_release, [Data]).
%% no non-pointer test for this one
%% Test dpi:data_getBytes/1: binary result on a var-owned BYTES handle,
%% `null` when the null flag is set on a standalone handle.
dataGetBytes(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data/ptr from arg0",
        dpiCall(TestCtx, data_getBytes, [?BAD_REF])
    ),
    #{var := Var, data := [Data]} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_VARCHAR', 'DPI_NATIVE_TYPE_BYTES', 1, 1,
            true, true, null
        ]
    ),
    dpiCall(TestCtx, data_setIsNull, [Data, false]),
    ?assert(is_binary(dpiCall(TestCtx, data_getBytes, [Data]))),
    dpiCall(TestCtx, data_release, [Data]),
    dpiCall(TestCtx, var_release, [Var]),
    Data1 = dpiCall(TestCtx, data_ctor, []),
    dpiCall(TestCtx, data_setIsNull, [Data1, true]),
    ?assertEqual(null, dpiCall(TestCtx, data_getBytes, [Data1])),
    dpiCall(TestCtx, data_release, [Data1]).
%% Test dpi:data_release/1: bad refs (including a non-data resource)
%% rejected; releasing standalone and var-owned handles returns ok.
dataRelease(#{session := Conn} = TestCtx) ->
    ?ASSERT_EX(
        "Unable to retrieve resource data from arg0",
        dpiCall(TestCtx, data_release, [?BAD_REF])
    ),
    ?ASSERT_EX(
        "Unable to retrieve resource data from arg0",
        dpiCall(TestCtx, data_release, [Conn])
    ),
    Data = dpiCall(TestCtx, data_ctor, []),
    ?assertEqual(ok, dpiCall(TestCtx, data_release, [Data])),
    #{var := Var, data := [Data1]} = dpiCall(
        TestCtx, conn_newVar,
        [
            Conn, 'DPI_ORACLE_TYPE_NATIVE_INT', 'DPI_NATIVE_TYPE_INT64', 1, 1,
            true, true, null
        ]
    ),
    ?assertEqual(ok, dpiCall(TestCtx, data_release, [Data1])),
    dpiCall(TestCtx, var_release, [Var]).
%% Verify the NIF's internal resource accounting: snapshot the counters,
%% create 5 of each resource kind (context, connection, statement,
%% variable, data), check each counter rose by exactly 5, release
%% everything, and check the counters return to the initial snapshot.
resourceCounting(#{context := Context, session := Conn} = TestCtx) ->
    #{tns := Tns, user := User, password := Password} = getConfig(),
    Indices = lists:seq(1, 5),
    #{
        context := ICtxs,
        variable := IVars,
        connection := IConns,
        data := IDatas,
        statement := IStmts,
        datapointer := IDataPtrs
    } = InitialRC = dpiCall(TestCtx, resource_count, []),
    Resources = [{
        dpiCall(
            TestCtx, context_create, [?DPI_MAJOR_VERSION, ?DPI_MINOR_VERSION]
        ),
        dpiCall(
            TestCtx, conn_create, [
                Context, User, Password, Tns,
                #{encoding => "AL32UTF8", nencoding => "AL32UTF8"}, #{}
            ]
        ),
        dpiCall(
            TestCtx, conn_prepareStmt,
            [Conn, false, <<"select * from dual">>, <<>>]
        ),
        dpiCall(
            TestCtx, conn_newVar,
            [Conn, 'DPI_ORACLE_TYPE_NATIVE_DOUBLE', 'DPI_NATIVE_TYPE_DOUBLE',
            1, 0, false, false, null]
        ),
        dpiCall(TestCtx, data_ctor, [])
    } || _ <- Indices],
    #{
        context := Ctxs,
        variable := Vars,
        connection := Conns,
        data := Datas,
        statement := Stmts,
        datapointer := DataPtrs
    } = dpiCall(TestCtx, resource_count, []),
    ?assertEqual(5, Ctxs - ICtxs),
    ?assertEqual(5, Vars - IVars),
    ?assertEqual(5, Conns - IConns),
    ?assertEqual(5, Stmts - IStmts),
    ?assertEqual(5, Datas - IDatas),
    ?assertEqual(5, DataPtrs - IDataPtrs),
    lists:foreach(
        fun({Ctx, LConn, Stmt, #{var := Var}, Data}) ->
            ok = dpiCall(TestCtx, var_release, [Var]),
            ok = dpiCall(TestCtx, stmt_close, [Stmt, <<>>]),
            ok = dpiCall(TestCtx, conn_close, [LConn, [], <<>>]),
            ok = dpiCall(TestCtx, context_destroy, [Ctx]),
            ok = dpiCall(TestCtx, data_release, [Data])
        end,
        Resources
    ),
    %% after full cleanup the counters must match the initial snapshot
    ?assertEqual(InitialRC, dpiCall(TestCtx, resource_count, [])).
%% Name of the slave node used for the "safe" (remote-NIF) test runs.
-define(SLAVE, oranif_slave).
%% Fixture setup: load the NIF either in-process (unsafe) or on a slave
%% node (safe) and return the context map the tests thread through.
setup(#{safe := false}) ->
    ok = dpi:load_unsafe(),
    #{safe => false};
setup(#{safe := true}) ->
    SlaveNode = dpi:load(?SLAVE),
    pong = net_adm:ping(SlaveNode),
    #{safe => true, node => SlaveNode}.
%% Fixture setup: base setup plus a dpi context; logs any resources that
%% are unexpectedly still alive before the context is created.
setup_context(TestCtx) ->
    SlaveCtx = setup(TestCtx),
    maps:fold(
        fun(K, V, _) ->
            if V > 0 -> ?debugFmt("~p ~p = ~p", [?FUNCTION_NAME, K, V]);
                true -> ok
            end
        end,
        noacc,
        dpiCall(SlaveCtx, resource_count, [])
    ),
    SlaveCtx#{
        context => dpiCall(
            SlaveCtx, context_create, [?DPI_MAJOR_VERSION, ?DPI_MINOR_VERSION]
        )
    }.
%% Fixture setup: context plus a database session from connect.config.
%% NOTE(review): the name has a typo ("connecion"); kept as-is because
%% the test generators call it under this name.
setup_connecion(TestCtx) ->
    ContextCtx = #{context := Context} = setup_context(TestCtx),
    #{tns := Tns, user := User, password := Password} = getConfig(),
    %% before connecting, only the single context resource may exist
    maps:fold(
        fun
            (_K, 0, _) -> ok;
            (context, 1, _) -> ok;
            (K, V, _) -> ?assertEqual({K, 0}, {K, V})
        end,
        noacc,
        dpiCall(ContextCtx, resource_count, [])
    ),
    ContextCtx#{
        session => dpiCall(
            ContextCtx, conn_create,
            [
                Context, User, Password, Tns,
                #{encoding => "AL32UTF8", nencoding => "AL32UTF8"}, #{}
            ]
        )
    }.
%% Fixture teardown, peeling the context map layer by layer: close the
%% session (logging any leftover resources), destroy the context
%% (asserting nothing but it remained), then unload the slave node for
%% safe runs; anything else is a no-op.
cleanup(#{session := Conn} = Ctx) ->
    dpiCall(Ctx, conn_close, [Conn, [], <<>>]),
    lists:foreach(
        fun
            ({_Kind, 0}) -> ok;
            ({context, 1}) -> ok;
            ({Kind, Count}) ->
                ?debugFmt("~p ~p = ~p", [?FUNCTION_NAME, Kind, Count])
        end,
        maps:to_list(dpiCall(Ctx, resource_count, []))
    ),
    cleanup(maps:without([session], Ctx));
cleanup(#{context := Context} = Ctx) ->
    dpiCall(Ctx, context_destroy, [Context]),
    lists:foreach(
        fun
            ({_Kind, 0}) -> ok;
            ({Kind, Count}) -> ?assertEqual({Kind, 0}, {Kind, Count})
        end,
        maps:to_list(dpiCall(Ctx, resource_count, []))
    ),
    cleanup(maps:without([context], Ctx));
cleanup(#{safe := true, node := SlaveNode}) ->
    unloaded = dpi:unload(SlaveNode);
cleanup(_) -> ok.
%% Internal functions
%% Dispatch one dpi API call: directly via apply/3 in unsafe mode, or
%% through the slave node in safe mode, where an {error, _, _, _} tuple
%% coming back is raised as an Erlang error.
dpiCall(#{safe := false}, F, A) -> apply(dpi, F, A);
dpiCall(#{safe := true, node := Node}, F, A) ->
    Result = dpi:safe(Node, dpi, F, A),
    case Result of
        {error, _, _, _} -> error(Result);
        _ -> Result
    end.
%% Read test/connect.config (resolved relative to the current working
%% directory) and return its single term, expected to be a map with at
%% least tns/user/password keys. Errors are logged and re-raised.
getConfig() ->
    case file:get_cwd() of
        {ok, Cwd} ->
            %% Append "test/connect.config" to the cwd path components.
            ConnectConfigFile = filename:join(
                lists:reverse(
                    ["connect.config", "test"
                     | lists:reverse(filename:split(Cwd))]
                )
            ),
            case file:consult(ConnectConfigFile) of
                {ok, [Params]} when is_map(Params) -> Params;
                {ok, Params} ->
                    ?debugFmt("bad config (expected map) ~p", [Params]),
                    error(badconfig);
                {error, Reason} ->
                    ?debugFmt("~p", [Reason]),
                    error(Reason)
            end;
        {error, Reason} ->
            ?debugFmt("~p", [Reason]),
            error(Reason)
    end.
%% Test functions that can run before any dpi context exists.
-define(NO_CONTEXT_TESTS, [
    ?F(contextCreate),
    ?F(contextDestroy),
    ?F(contextGetClientVersion),
    ?F(connCreate)
]).

%% Test functions that require an established connection (`session`).
-define(AFTER_CONNECTION_TESTS, [
    ?F(connPrepareStmt),
    ?F(connNewVar),
    ?F(connCommit),
    ?F(connRollback),
    ?F(connPing),
    ?F(connClose),
    ?F(connGetServerVersion),
    ?F(connSetClientIdentifier),
    ?F(stmtExecute),
    ?F(stmtExecuteMany_varGetReturnedData),
    ?F(stmtFetch),
    ?F(stmtFetchRows),
    ?F(stmtSetFetchArraySize),
    ?F(stmtGetQueryValue),
    ?F(stmtGetQueryInfo),
    ?F(stmtGetInfo),
    ?F(stmtGetNumQueryColumns),
    ?F(stmtBindValueByPos),
    ?F(stmtBindValueByName),
    ?F(stmtBindByPos),
    ?F(stmtBindByName),
    ?F(stmtDefine),
    ?F(stmtDefineValue),
    ?F(stmtClose),
    ?F(varSetNumElementsInArray),
    ?F(varSetFromBytes),
    ?F(varRelease),
    ?F(dataSetTimestamp),
    ?F(dataSetIntervalDS),
    ?F(dataSetIntervalYM),
    ?F(dataSetInt64),
    ?F(dataSetDouble),
    ?F(dataSetBytes),
    ?F(dataSetIsNull),
    ?F(dataGet),
    ?F(dataGetBinary),
    ?F(dataGetRowid),
    ?F(dataGetStmt),
    ?F(dataGetInt64),
    ?F(dataGetDouble),
    ?F(dataGetBytes),
    ?F(dataRelease),
    ?F(resourceCounting)
]).
%% EUnit fixture generators: each pairs setup/cleanup with one of the
%% test lists above, once with the NIF loaded in-process ("unsafe") and
%% once via the slave node ("safe").
unsafe_no_context_test_() ->
    {
        setup,
        fun() -> setup(#{safe => false}) end,
        fun cleanup/1,
        ?W(?NO_CONTEXT_TESTS)
    }.

unsafe_session_test_() ->
    {
        setup,
        fun() -> setup_connecion(#{safe => false}) end,
        fun cleanup/1,
        ?W(?AFTER_CONNECTION_TESTS)
    }.

no_context_test_() ->
    {
        setup,
        fun() -> setup(#{safe => true}) end,
        fun cleanup/1,
        ?W(?NO_CONTEXT_TESTS)
    }.

session_test_() ->
    {
        setup,
        fun() -> setup_connecion(#{safe => true}) end,
        fun cleanup/1,
        ?W(?AFTER_CONNECTION_TESTS)
    }.
%% Trigger the upgrade and unload callbacks of the NIF code by loading,
%% recompiling and purging the dpi module. This doesn't test anything
%% beyond successful loads; it only ensures code coverage.
%% (The explanatory text previously lost its `%` markers, which made the
%% function body syntactically invalid; restored as comments.)
load_test() ->
    ?assertEqual(ok, dpi:load_unsafe()),
    c:c(dpi),
    ?assertEqual(ok, dpi:load_unsafe()),
    code:purge(dpi),
    code:delete(dpi),
    code:purge(dpi).
%% Verify that one slave node is shared by multiple client processes and
%% only goes away once the last user unloads (or a console cleanup runs).
%% Repairs: inline comments had lost their `%` markers, and the
%% `lists:foreach(` opener before the liveness assertions was missing.
slave_reuse_test() ->
    Node = dpi:load(?SLAVE),
    ?assertEqual([Node], nodes(hidden)),
    ?assertEqual([self()], reg_pids(Node)),
    ?assertEqual(unloaded, dpi:unload(Node)),
    ?assertEqual([], reg_pids(Node)),
    RxTO = 1000,
    %% first process which creates the slave node
    Self = self(),
    Pid1 = spawn(fun() -> slave_client_proc(Self) end),
    Pid1 ! load,
    ?assertEqual(ok, receive {Pid1, loaded} -> ok after RxTO -> timeout end),
    ?assertEqual([Node], nodes(hidden)),
    %% create three more processes sharing the same slave node
    Pids0 = [spawn(fun() -> slave_client_proc(Self) end) || _ <- lists:seq(1, 3)],
    ok = lists:foreach(fun(Pid) -> Pid ! load end, Pids0),
    ?assertEqual(done,
        (fun
            WaitLoad([]) -> done;
            WaitLoad(Workers) when length(Workers) > 0 ->
                receive {Pid, loaded} -> WaitLoad(Workers -- [Pid])
                after RxTO -> timeout
                end
        end)(Pids0)
    ),
    Pids = [P1, P2, P3, P4] = lists:usort([Pid1 | Pids0]),
    ?assertEqual(Pids, lists:usort(reg_pids(Node))),
    %% slave is still running after first process calls dpi:unload/1
    P1 ! {unload, Node},
    ?assertEqual(ok, receive {P1, unloaded} -> ok after RxTO -> timeout end),
    ?assertEqual(lists:usort(Pids -- [P1]), lists:usort(reg_pids(Node))),
    ?assertEqual([Node], nodes(hidden)),
    %% slave is still running after second process exits without unload
    P2 ! exit,
    ?assertEqual(ok, receive {P2, exited} -> ok after RxTO -> timeout end),
    ?assertEqual(lists:usort(Pids -- [P1, P2]), lists:usort(reg_pids(Node))),
    ?assertEqual([Node], nodes(hidden)),
    %% slave is still running after third process calls dpi:unload/1
    P3 ! {unload, Node},
    ?assertEqual(ok, receive {P3, unloaded} -> ok after RxTO -> timeout end),
    ?assertEqual(
        lists:usort(Pids -- [P1, P2, P3]),
        lists:usort(reg_pids(Node))
    ),
    ?assertEqual([Node], nodes(hidden)),
    P4 ! exit,
    ?assertEqual(ok, receive {P4, exited} -> ok after RxTO -> timeout end),
    ok = lists:foreach(
        fun(Pid) -> ?assertEqual(false, is_process_alive(Pid)) end,
        Pids
    ),
    ?assertEqual([Node], nodes(hidden)),
    %% console cleanup simulation after last process crash
    ?assertEqual(unloaded, dpi:unload(Node)),
    ?assertEqual([], reg_pids(Node)),
    ?assertEqual([], nodes(hidden)).
%% Worker used by slave_reuse_test/0: loads the slave node on demand,
%% optionally unloads it, and reports each step back to the test process.
slave_client_proc(TestPid) ->
    receive
        load ->
            dpi:load(?SLAVE),
            TestPid ! {self(), loaded},
            slave_client_proc(TestPid);
        {unload, Node} ->
            ok = dpi:unload(Node),
            TestPid ! {self(), unloaded};
        exit ->
            TestPid ! {self(), exited}
    end.
%% All pids registered for the given slave node in the global registry:
%% entries of shape {dpi, SlaveNode, ThisNode, _} that resolve to a pid.
reg_pids(Node) ->
    lists:filtermap(
        fun
            ({dpi, N, SN, _} = Name) when N == Node, SN == node() ->
                case global:whereis_name(Name) of
                    Pid when is_pid(Pid) -> {true, Pid};
                    _ -> false
                end;
            (_) -> false
        end,
        global:registered_names()
    ).
|
9df1715af62d22644082f56204c3bd2c4f0706bb8b64ab3bf06c2cb7f915b640 | adamschoenemann/clofrp | Fixtures.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE QuasiQuotes #
# LANGUAGE NamedFieldPuns #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE TypeApplications #
module Fixtures where
import CloFRP.QuasiQuoter
import NeatInterpolation
import qualified CloFRP.Parser.Expr as P
-- import qualified CloFRP.Parser.Type as P
import qualified CloFRP.Parser.Decl as P
import qualified CloFRP.Parser.Prog as P
import Data.Text (Text)
-- | A small lambda/annotation expression fixture for the expression parser.
expr01 :: P.Expr
expr01 = [unsafeExpr|\x -> \y -> the (Nat) (x y True)|]

-- | A single datatype-declaration fixture for the declaration parser.
decl01 :: P.Decl
decl01 = [unsafeDecl|data Tree a = Leaf | Branch a (Tree a) (Tree a).|]
-- | Program fixtures: 'prog01' exercises assorted declarations,
-- 'prog02' defines Peano naturals with addition. (The quasiquote bodies
-- are parsed test data and must not be edited.)
prog01, prog02 :: P.Prog
prog01 = [unsafeProg|
id : a -> a.
id = \x -> x.
twice : Nat -> Tuple Nat Nat.
twice = \x -> (x, x).
data Maybe a = Nothing | Just a.
data List a = Nil | Cons a (List a).
head : List a -> Maybe a.
head = \xs -> xs.
|]
prog02 = [unsafeProg|
data N = Z | S N.
plus : N -> N -> N.
plus = \m -> \n ->
case m of
| Z -> n
| S m' -> S (plus m' n)
end.
|]
-- | CloFRP source text for the replaceMin example (replace every leaf of
-- a tree by the tree's minimum in one pass, via clocked feedback). The
-- [text| ... |] body is a runtime 'Text' fixture and is kept verbatim —
-- including two lines whose leading code appears to have been lost in
-- transit (": ( Delay ( ) k , )"); TODO confirm against upstream source.
replaceMin :: Text
replaceMin =
  [text|
-- applicative structure
pure : forall (k : Clock) a. a -> |>k a.
pure = \x -> \\(af : k) -> x.
app : forall (k : Clock) a b. |>k (a -> b) -> |>k a -> |>k b.
app = \lf la -> \\(af : k) ->
let f = lf [af] in
let a = la [af] in
f a.
-- functor
map : forall (k : Clock) a b. (a -> b) -> |>k a -> |>k b.
map = \f la -> app (pure f) la.
fst : forall a b. (a, b) -> a.
fst = \x -> case x of | (y, z) -> y end.
snd : forall a b. (a, b) -> b.
snd = \x -> case x of | (y, z) -> z end.
feedback : forall (k : Clock) (b : Clock -> *) u. (|>k u -> (b k, u)) -> b k.
feedback = \f -> fst (fix (\x -> f (map snd x))). -- x has type |>k (b k, u)
data NatF f = Z | S f deriving Functor.
type Nat = Fix NatF.
z : Nat.
z = fold Z.
s : Nat -> Nat.
s = \x -> fold (S x).
data TreeF a f = Leaf a | Br f f deriving Functor.
type Tree a = Fix (TreeF a).
min : Nat -> Nat -> Nat.
min = primRec {NatF} (\m n ->
case m of
| Z -> fold Z
| S (m', r) -> fold (S (r n))
end
).
leaf : forall a. a -> Tree a.
leaf = \x -> fold (Leaf x).
br : forall a. Tree a -> Tree a -> Tree a.
br = \l r -> fold (Br l r).
data Delay a (k : Clock) = Delay (|>k a).
replaceMinBody : forall (k : Clock). Tree Nat -> |>k Nat -> (Delay (Tree Nat) k, Nat).
replaceMinBody = primRec {TreeF Nat} (\t m ->
case t of
| Leaf x -> (Delay (map leaf m), x)
| Br (l, lrec) (r, rrec) ->
: ( Delay ( ) k , )
: ( Delay ( ) k , )
let m' = min ml mr in
(Delay (app (map br l') r'), m')
end
).
replaceMinK : forall (k : Clock). Tree Nat -> Delay (Tree Nat) k.
replaceMinK = \t -> feedback (replaceMinBody t).
replaceMin' : Tree Nat -> Tree Nat.
replaceMin' = \t ->
let Delay t' = feedback (replaceMinBody t)
in t' [<>].
replaceMin : Tree Nat -> Tree Nat.
replaceMin = \t ->
let Delay t' = replaceMinK {K0} t
in t' [<>].
ofHeight : Nat -> Tree Nat.
ofHeight = \nat ->
fst (primRec {NatF} (\m n ->
case m of
| Z -> (leaf z, s n)
| S (m', r) ->
let (t1, n1) = r n in
let (t2, n2) = r n1
in (br t1 t2, n2)
end
) nat z).
main : Tree Nat.
main =
let five = s (s (s (s (s z))))
in replaceMin (ofHeight five).
|]
-- | CloFRP source text for the stream-processor example (Get/Put
-- processors applied to clocked streams). Runtime 'Text' fixture; the
-- quasiquote body is kept verbatim, including the commented-out
-- 'applyfix' sketch whose lines were partially garbled in transit.
streamProcessing :: Text
streamProcessing =
  [text|
data SPF i o (k : Clock) f
= Get (i -> f)
| Put o (|>k f)
deriving Functor.
type SP i o (k : Clock) = Fix (SPF i o k).
data CoSP i o = CoSP (forall (k : Clock). SP i o k).
step : forall (k : Clock) i o. SP i o k -> SPF i o k (Fix (SPF i o k)).
step = unfold.
data StreamF (k : Clock) a f = Cons a (|>k f).
type Stream (k : Clock) a = Fix (StreamF k a).
data CoStream a = Cos (forall (k : Clock). Stream k a).
hd : forall a. CoStream a -> a.
hd = \xs ->
let Cos s = xs in
let Cons x xs' = unfold s
in x.
-- see if you can do this better with let generalization
tl : forall a. CoStream a -> CoStream a.
tl = \x ->
let Cos s = x in
let r = (case unfold s of
| Cons x xs' -> xs'
end) : forall (k : Clock). |>k (Stream k a)
in Cos (r [<>]).
fst : forall a b. (a, b) -> a.
fst = \x -> case x of | (y, z) -> y end.
snd : forall a b. (a, b) -> b.
snd = \x -> case x of | (y, z) -> z end.
-- applicative structure
pure : forall (k : Clock) a. a -> |>k a.
pure = \x -> \\(af : k) -> x.
app : forall (k : Clock) a b. |>k (a -> b) -> |>k a -> |>k b.
app = \lf la -> \\(af : k) ->
let f = lf [af] in
let a = la [af] in
f a.
-- |>k functor
dmap : forall (k : Clock) a b. (a -> b) -> |>k a -> |>k b.
dmap = \f la -> app (pure f) la.
-- fixpoint above with full types
applyfix : forall ( k : Clock ) i ( SP i o k - > CoStream i - > CoStream o ) - > SP i o k - > CoStream i - > CoStream o.
-- applyfix = \rec ->
primRec { SPF i o k } ( \x s - >
-- case x of
| Get f - > let ( sp ' , ) = f ( hd s ) in g ( tl s )
-- | Put b sp ->
-- let sp1 = dmap fst sp in
cos b ( app ( app rec sp1 ) ( pure s ) )
-- end
-- ).
uncosp : forall i o. CoSP i o -> forall (k : Clock). SP i o k.
uncosp = \cosp -> let CoSP x = cosp in x.
-- it even works without annotations!
applyk : forall (k : Clock) i o. SP i o k -> CoStream i -> Stream k o.
applyk = fix (\rec ->
primRec {SPF i o k} (\x s ->
case x of
| Get f -> (snd (f (hd s))) (tl s)
| Put b sp ->
let sp1 = dmap fst sp in
fold (Cons b (app (app rec sp1) (pure s)))
end
)).
apply : forall i o. CoSP i o -> CoStream i -> CoStream o.
apply = \cosp xs ->
let CoSP sp = cosp
in Cos (applyk sp xs).
uncos : forall (k : Clock) a. CoStream a -> Stream k a.
uncos = \xs -> case xs of | Cos xs' -> xs' end.
spid : forall i. CoSP i i.
spid = CoSP (fix (\f -> fold (Get (\i -> fold (Put i f))))).
const : forall (k : Clock) a. a -> Stream k a.
const = \x -> fix (\f -> fold (Cons x f)).
data Unit = MkUnit.
main : Stream K0 Unit.
main = uncos (apply spid (Cos (const MkUnit))).
|]
| null | https://raw.githubusercontent.com/adamschoenemann/clofrp/c26f86aec2cdb8fa7fd317acd13f7d77af984bd3/test-suite/Fixtures.hs | haskell | # LANGUAGE OverloadedStrings #
import qualified CloFRP.Parser.Type as P
applicative structure
functor
x has type |>k (b k, u)
see if you can do this better with let generalization
applicative structure
|>k functor
fixpoint above with full types
applyfix = \rec ->
case x of
| Put b sp ->
let sp1 = dmap fst sp in
end
).
it even works without annotations! | # LANGUAGE QuasiQuotes #
# LANGUAGE NamedFieldPuns #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE TypeApplications #
module Fixtures where
import CloFRP.QuasiQuoter
import NeatInterpolation
import qualified CloFRP.Parser.Expr as P
import qualified CloFRP.Parser.Decl as P
import qualified CloFRP.Parser.Prog as P
import Data.Text (Text)
expr01 :: P.Expr
expr01 = [unsafeExpr|\x -> \y -> the (Nat) (x y True)|]
decl01 :: P.Decl
decl01 = [unsafeDecl|data Tree a = Leaf | Branch a (Tree a) (Tree a).|]
prog01, prog02 :: P.Prog
prog01 = [unsafeProg|
id : a -> a.
id = \x -> x.
twice : Nat -> Tuple Nat Nat.
twice = \x -> (x, x).
data Maybe a = Nothing | Just a.
data List a = Nil | Cons a (List a).
head : List a -> Maybe a.
head = \xs -> xs.
|]
prog02 = [unsafeProg|
data N = Z | S N.
plus : N -> N -> N.
plus = \m -> \n ->
case m of
| Z -> n
| S m' -> S (plus m' n)
end.
|]
replaceMin :: Text
replaceMin =
[text|
pure : forall (k : Clock) a. a -> |>k a.
pure = \x -> \\(af : k) -> x.
app : forall (k : Clock) a b. |>k (a -> b) -> |>k a -> |>k b.
app = \lf la -> \\(af : k) ->
let f = lf [af] in
let a = la [af] in
f a.
map : forall (k : Clock) a b. (a -> b) -> |>k a -> |>k b.
map = \f la -> app (pure f) la.
fst : forall a b. (a, b) -> a.
fst = \x -> case x of | (y, z) -> y end.
snd : forall a b. (a, b) -> b.
snd = \x -> case x of | (y, z) -> z end.
feedback : forall (k : Clock) (b : Clock -> *) u. (|>k u -> (b k, u)) -> b k.
data NatF f = Z | S f deriving Functor.
type Nat = Fix NatF.
z : Nat.
z = fold Z.
s : Nat -> Nat.
s = \x -> fold (S x).
data TreeF a f = Leaf a | Br f f deriving Functor.
type Tree a = Fix (TreeF a).
min : Nat -> Nat -> Nat.
min = primRec {NatF} (\m n ->
case m of
| Z -> fold Z
| S (m', r) -> fold (S (r n))
end
).
leaf : forall a. a -> Tree a.
leaf = \x -> fold (Leaf x).
br : forall a. Tree a -> Tree a -> Tree a.
br = \l r -> fold (Br l r).
data Delay a (k : Clock) = Delay (|>k a).
replaceMinBody : forall (k : Clock). Tree Nat -> |>k Nat -> (Delay (Tree Nat) k, Nat).
replaceMinBody = primRec {TreeF Nat} (\t m ->
case t of
| Leaf x -> (Delay (map leaf m), x)
| Br (l, lrec) (r, rrec) ->
: ( Delay ( ) k , )
: ( Delay ( ) k , )
let m' = min ml mr in
(Delay (app (map br l') r'), m')
end
).
replaceMinK : forall (k : Clock). Tree Nat -> Delay (Tree Nat) k.
replaceMinK = \t -> feedback (replaceMinBody t).
replaceMin' : Tree Nat -> Tree Nat.
replaceMin' = \t ->
let Delay t' = feedback (replaceMinBody t)
in t' [<>].
replaceMin : Tree Nat -> Tree Nat.
replaceMin = \t ->
let Delay t' = replaceMinK {K0} t
in t' [<>].
ofHeight : Nat -> Tree Nat.
ofHeight = \nat ->
fst (primRec {NatF} (\m n ->
case m of
| Z -> (leaf z, s n)
| S (m', r) ->
let (t1, n1) = r n in
let (t2, n2) = r n1
in (br t1 t2, n2)
end
) nat z).
main : Tree Nat.
main =
let five = s (s (s (s (s z))))
in replaceMin (ofHeight five).
|]
streamProcessing :: Text
streamProcessing =
[text|
data SPF i o (k : Clock) f
= Get (i -> f)
| Put o (|>k f)
deriving Functor.
type SP i o (k : Clock) = Fix (SPF i o k).
data CoSP i o = CoSP (forall (k : Clock). SP i o k).
step : forall (k : Clock) i o. SP i o k -> SPF i o k (Fix (SPF i o k)).
step = unfold.
data StreamF (k : Clock) a f = Cons a (|>k f).
type Stream (k : Clock) a = Fix (StreamF k a).
data CoStream a = Cos (forall (k : Clock). Stream k a).
hd : forall a. CoStream a -> a.
hd = \xs ->
let Cos s = xs in
let Cons x xs' = unfold s
in x.
tl : forall a. CoStream a -> CoStream a.
tl = \x ->
let Cos s = x in
let r = (case unfold s of
| Cons x xs' -> xs'
end) : forall (k : Clock). |>k (Stream k a)
in Cos (r [<>]).
fst : forall a b. (a, b) -> a.
fst = \x -> case x of | (y, z) -> y end.
snd : forall a b. (a, b) -> b.
snd = \x -> case x of | (y, z) -> z end.
pure : forall (k : Clock) a. a -> |>k a.
pure = \x -> \\(af : k) -> x.
app : forall (k : Clock) a b. |>k (a -> b) -> |>k a -> |>k b.
app = \lf la -> \\(af : k) ->
let f = lf [af] in
let a = la [af] in
f a.
dmap : forall (k : Clock) a b. (a -> b) -> |>k a -> |>k b.
dmap = \f la -> app (pure f) la.
applyfix : forall ( k : Clock ) i ( SP i o k - > CoStream i - > CoStream o ) - > SP i o k - > CoStream i - > CoStream o.
primRec { SPF i o k } ( \x s - >
| Get f - > let ( sp ' , ) = f ( hd s ) in g ( tl s )
cos b ( app ( app rec sp1 ) ( pure s ) )
uncosp : forall i o. CoSP i o -> forall (k : Clock). SP i o k.
uncosp = \cosp -> let CoSP x = cosp in x.
applyk : forall (k : Clock) i o. SP i o k -> CoStream i -> Stream k o.
applyk = fix (\rec ->
primRec {SPF i o k} (\x s ->
case x of
| Get f -> (snd (f (hd s))) (tl s)
| Put b sp ->
let sp1 = dmap fst sp in
fold (Cons b (app (app rec sp1) (pure s)))
end
)).
apply : forall i o. CoSP i o -> CoStream i -> CoStream o.
apply = \cosp xs ->
let CoSP sp = cosp
in Cos (applyk sp xs).
uncos : forall (k : Clock) a. CoStream a -> Stream k a.
uncos = \xs -> case xs of | Cos xs' -> xs' end.
spid : forall i. CoSP i i.
spid = CoSP (fix (\f -> fold (Get (\i -> fold (Put i f))))).
const : forall (k : Clock) a. a -> Stream k a.
const = \x -> fix (\f -> fold (Cons x f)).
data Unit = MkUnit.
main : Stream K0 Unit.
main = uncos (apply spid (Cos (const MkUnit))).
|]
|
f856858c064984cd2c2dca6ba5ef9b6851d692bebccedbd832fe4db231ad0d9b | reflectionalist/S9fES | soccat.scm | #! /usr/local/bin/s9 -f
; soccat -- connect to remote hosts
By , 2010
; Placed in the Public Domain
;
; Usage: soccat [-i] host port/service
;
; Read a request from default input and send it to the specific
; remote host. Simultaneously pass input from the remote host to
; the default output. In interactive mode reconnect automatically
; when the remote side breaks the connection.
;
; Options:
;
; -i interactive mode (reconnect automatically)
(load-from-library "read-line.scm")
(load-from-library "displaystar.scm")
(load-from-library "flush-output-port.scm")
(load-from-library "parse-optionsb.scm")
; Connect to HOST:PORT, fork, and shuttle lines in both directions:
; the child copies remote output to the default output, the parent
; copies default input to the remote side. When RECONNECT is true the
; parent dials again after the peer closes the connection.
(define (soccat reconnect host port)
  (let* ((s (sys:inet-connect host port))
         (in (sys:make-input-port s))
         (out (sys:make-output-port s)))
    (let ((pid (sys:fork)))
      (if (not (zero? pid))
          ; Parent: poll the child; when it exits, close the ports and
          ; either reconnect or quit. Otherwise forward one stdin line.
          (let out-loop ()
            (if (sys:waitpid pid)
                (begin (close-input-port in)
                       (close-output-port out)
                       (if reconnect
                           (soccat reconnect host port)
                           (sys:exit))))
            ; 100ms select timeout keeps the waitpid poll responsive.
            (if (sys:select '(0 100000) '(0) '())
                (let ((line (read-line)))
                  (if (eof-object? line)
                      (begin (sys:wait)
                             (sys:exit))
                      (begin (display* out line #\newline)
                             (flush-output-port out)))))
            (out-loop))
          ; Child: copy remote lines to the default output until EOF.
          (let in-loop ((line (read-line in)))
            (if (eof-object? line)
                (sys:exit)
                (begin (display* line #\newline)
                       (flush-output-port)
                       (in-loop (read-line in)))))))))
; Command-line option descriptors (see parse-optionsb.scm).
(define show-help (option #\h #f))
(define interactive-mode (option #\i #f))
(define options `(,show-help
                  ,interactive-mode))

; Print the one-line usage summary.
(define (usage)
  (display* "Usage: soccat [-i] host port" #\newline))
; Entry point: parse options, handle -h, validate the two positional
; arguments, then hand off to soccat.
(let ((args (parse-options! (sys:command-line) options usage)))
  (if (opt-val show-help)
      (begin (display-usage
              `(""
                ,usage
                ""
                "Connect to remote hosts"
                ""
                "-i  interactive mode (reconnect automatically)"
                ""))
             (sys:exit)))
  (if (not (= 2 (length args)))
      (begin (usage)
             (sys:exit 1)))
  (if (opt-val interactive-mode)
      (display* "Interactive mode, send INTR to exit" #\newline))
  ; args is (host port); interactive-mode becomes the reconnect flag.
  (apply soccat (opt-val interactive-mode) args))
| null | https://raw.githubusercontent.com/reflectionalist/S9fES/0ade11593cf35f112e197026886fc819042058dd/prog/soccat.scm | scheme | soccat -- connect to remote hosts
Placed in the Public Domain
Usage: soccat [-i] host port/service
Read a request from default input and send it to the specific
remote host. Simultaneously pass input from the remote host to
the default output. In interactive mode reconnect automatically
when the remote side breaks the connection.
Options:
-i interactive mode (reconnect automatically) | #! /usr/local/bin/s9 -f
By , 2010
(load-from-library "read-line.scm")
(load-from-library "displaystar.scm")
(load-from-library "flush-output-port.scm")
(load-from-library "parse-optionsb.scm")
(define (soccat reconnect host port)
(let* ((s (sys:inet-connect host port))
(in (sys:make-input-port s))
(out (sys:make-output-port s)))
(let ((pid (sys:fork)))
(if (not (zero? pid))
(let out-loop ()
(if (sys:waitpid pid)
(begin (close-input-port in)
(close-output-port out)
(if reconnect
(soccat reconnect host port)
(sys:exit))))
(if (sys:select '(0 100000) '(0) '())
(let ((line (read-line)))
(if (eof-object? line)
(begin (sys:wait)
(sys:exit))
(begin (display* out line #\newline)
(flush-output-port out)))))
(out-loop))
(let in-loop ((line (read-line in)))
(if (eof-object? line)
(sys:exit)
(begin (display* line #\newline)
(flush-output-port)
(in-loop (read-line in)))))))))
(define show-help (option #\h #f))
(define interactive-mode (option #\i #f))
(define options `(,show-help
,interactive-mode))
(define (usage)
(display* "Usage: soccat [-i] host port" #\newline))
(let ((args (parse-options! (sys:command-line) options usage)))
(if (opt-val show-help)
(begin (display-usage
`(""
,usage
""
"Connect to remote hosts"
""
"-i interactive mode (reconnect automatically)"
""))
(sys:exit)))
(if (not (= 2 (length args)))
(begin (usage)
(sys:exit 1)))
(if (opt-val interactive-mode)
(display* "Interactive mode, send INTR to exit" #\newline))
(apply soccat (opt-val interactive-mode) args))
|
1490bc88b4512ae2214e4646926f3a1610cfd08441edd61d4af895e1bb159de0 | gsakkas/rite | 3573.ml |
(* Dataset sample kept verbatim: an (intentionally ill-typed) student
   submission of [pipe]; the fix and type-error slices follow in the
   comment block below. Do not "repair" this definition. *)
let pipe fs =
  let f a x = a x in let base x y = x y in List.fold_left f base fs;;
fix
let pipe fs = let f a x = x in let base x y = x y in List.fold_left f base fs ; ;
let pipe fs = let f a x = x in let base x y = x y in List.fold_left f base fs;;
*)
changed spans
( 3,15)-(3,18 )
x
VarG
(3,15)-(3,18)
x
VarG
*)
type error slice
( 3,3)-(3,68 )
( 3,9)-(3,18 )
( 3,11)-(3,18 )
( 3,15)-(3,16 )
( 3,15)-(3,18 )
( 3,44)-(3,58 )
( 3,44)-(3,68 )
( 3,59)-(3,60 )
(3,3)-(3,68)
(3,9)-(3,18)
(3,11)-(3,18)
(3,15)-(3,16)
(3,15)-(3,18)
(3,44)-(3,58)
(3,44)-(3,68)
(3,59)-(3,60)
*)
| null | https://raw.githubusercontent.com/gsakkas/rite/958a0ad2460e15734447bc07bd181f5d35956d3b/data/sp14/3573.ml | ocaml |
let pipe fs =
let f a x = a x in let base x y = x y in List.fold_left f base fs;;
fix
let pipe fs = let f a x = x in let base x y = x y in List.fold_left f base fs ; ;
let pipe fs = let f a x = x in let base x y = x y in List.fold_left f base fs;;
*)
changed spans
( 3,15)-(3,18 )
x
VarG
(3,15)-(3,18)
x
VarG
*)
type error slice
( 3,3)-(3,68 )
( 3,9)-(3,18 )
( 3,11)-(3,18 )
( 3,15)-(3,16 )
( 3,15)-(3,18 )
( 3,44)-(3,58 )
( 3,44)-(3,68 )
( 3,59)-(3,60 )
(3,3)-(3,68)
(3,9)-(3,18)
(3,11)-(3,18)
(3,15)-(3,16)
(3,15)-(3,18)
(3,44)-(3,58)
(3,44)-(3,68)
(3,59)-(3,60)
*)
| |
170b1cfd4132ee0556302984810fcfa79186309c712c2ea55d95bafe9add8386 | nmunro/cl-tutorials | main.lisp | (defpackage hangman
(:use :cl))
(in-package :hangman)
(defun pick-sitcom (sitcoms)
  "Pick one entry of SITCOMS uniformly at random, using a freshly
seeded random state so repeated runs differ."
  (let ((index (random (length sitcoms) (make-random-state t))))
    (nth index sitcoms)))
(defun get-letter (guessed-letters)
  "Prompt until the user enters a line whose first character has not
already been guessed, then return that character. Empty input and
repeated letters re-prompt."
  (format t "Please enter a letter: ")
  (let ((in (read-line)))
    (cond
      ;; The user just hit enter: ask again.
      ;; (Previously recursed with the misspelled, unbound symbol
      ;; G:UESSED-LETTERS, which signalled an error on empty input.)
      ((= 0 (length in))
       (get-letter guessed-letters))
      ;; The first character entered has already been used: ask again.
      ((member (char in 0) guessed-letters)
       (get-letter guessed-letters))
      ;; Return the first character.
      (t (char in 0)))))
(defun scramble (sitcom guessed-letters)
  "Return SITCOM with every letter not yet in GUESSED-LETTERS masked
as #\\_. Spaces are always shown."
  (map 'string
       (lambda (ch)
         (if (or (member ch guessed-letters) (char= ch #\Space))
             ch
             #\_))
       sitcom))
(defun game-over (lives scrambled-sitcom)
  "Return \"CPU\" when the player is out of lives (or there is nothing
to guess), \"Player\" when no blanks remain, and NIL while the game is
still in progress."
  (if (or (string= scrambled-sitcom "") (<= lives 0))
      "CPU"
      (if (find #\_ scrambled-sitcom)
          nil
          "Player")))
(defun info (scrambled-sitcom lives guessed-letters)
  "Render the status banner: remaining lives, the comma-separated
guessed letters, and the currently revealed title."
  (with-output-to-string (banner)
    (format banner "Lives: ~A~%" lives)
    (format banner "Letters: ~{~A~^, ~}~%" guessed-letters)
    (format banner "~A~%" scrambled-sitcom)))
(defun game (&key (sitcom nil) (lives 10) (guessed-letters '()))
  "Run hangman. With no :SITCOM, pick one at random first. Returns the
final game-over message naming the winner."
  (if (null sitcom)
      ;; No sitcom chosen yet: pick one and restart with it. (The old
      ;; UNLESS form discarded the recursive call's value and fell
      ;; through with SITCOM still NIL, crashing in SCRAMBLE.)
      (game :sitcom (pick-sitcom '("friends" "the big bang theory"
                                   "frasier" "cheers"
                                   "how i met your mother" "the it crowd"))
            :lives lives
            :guessed-letters guessed-letters)
      (let ((winner (game-over lives (scramble sitcom guessed-letters))))
        (if winner
            ;; Report the winner; the previous control string had no
            ;; directive, so the winner argument was silently dropped.
            (format nil "Game over! ~A wins!" winner)
            (progn
              (format t "~A~%" (info (scramble sitcom guessed-letters)
                                     lives guessed-letters))
              (let ((letter (get-letter guessed-letters)))
                ;; A miss costs a life; a hit does not.
                (if (position letter sitcom)
                    (game :sitcom sitcom
                          :lives lives
                          :guessed-letters (cons letter guessed-letters))
                    (game :sitcom sitcom
                          :lives (1- lives)
                          :guessed-letters (cons letter guessed-letters)))))))))
| null | https://raw.githubusercontent.com/nmunro/cl-tutorials/e42f879edb01456f3cf0d159b0042e8e61f1b02e/3-hangman/src/main.lisp | lisp | If the user just hit enter | (defpackage hangman
(:use :cl))
(in-package :hangman)
(defun pick-sitcom (sitcoms)
(nth (random (length sitcoms) (make-random-state t)) sitcoms))
(defun get-letter (guessed-letters)
(format t "Please enter a letter: ")
(let ((in (read-line)))
(cond
((= 0 (length in))
(get-letter g:uessed-letters))
If the first character entered has already been used
((member (char in 0) guessed-letters)
(get-letter guessed-letters))
Return the first character
(t (char in 0)))))
(defun scramble (sitcom guessed-letters)
(flet ((letter-or-underscore (letter)
(if (or (member letter guessed-letters) (equal letter #\Space))
letter
#\_)))
(map 'string #'letter-or-underscore sitcom)))
(defun game-over (lives scrambled-sitcom)
(cond
((or (string= scrambled-sitcom "") (<= lives 0)) "CPU")
((eq nil (position #\_ scrambled-sitcom)) "Player")
(t nil)))
(defun info (scrambled-sitcom lives guessed-letters)
(format nil "Lives: ~A~%Letters: ~{~A~^, ~}~%~A~%" lives guessed-letters scrambled-sitcom))
(defun game (&key (sitcom nil) (lives 10) (guessed-letters'()))
(unless sitcom
(let ((sitcom (pick-sitcom '("friends" "the big bang theory" "frasier" "cheers" "how i met your mother" "the it crowd"))))
(game :sitcom sitcom :lives lives :guessed-letters guessed-letters)))
(let ((game-over (game-over lives (scramble sitcom guessed-letters))))
(when game-over
(return-from game (format nil "Game over!" game-over)))
(format t "~A~%" (info (scramble sitcom guessed-letters) lives guessed-letters))
(let ((letter (get-letter guessed-letters)))
(if (equal nil (position letter sitcom))
(game :sitcom sitcom :lives (1- lives) :guessed-letters (cons letter guessed-letters))
(game :sitcom sitcom :lives lives :guessed-letters (cons letter guessed-letters))))))
|
c24b2fbe19671098af55095433220af36d74d5a53e8116b47f8c3890e6a73609 | luqui/manifesto | Nav.hs | {-# LANGUAGE ConstraintKinds #-}
# LANGUAGE DataKinds #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PolyKinds #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeOperators #
module Nav where
import Grammar hiding (Tree)
import qualified Rank2
import qualified Data.Functor.Differentiable.Rank2 as D
import Data.Monoid (First(..))
-- | Node functors must be differentiable (to build one-hole contexts)
-- and Rank2-foldable (to enumerate children).
type IsNode h = (D.Differentiable h, Rank2.Foldable h)

-- | A syntax tree indexed by its label; each node stores the children
-- functor of some locus @h@ of the grammar @g@.
data Tree g l where
    Tree :: (Locus h g, IsNode h) => h (Tree g) -> Tree g (L h)

-- Each level of the context stores the grammar associated with that level.
data Context g l h where
    CxNil :: g h -> Context g (L h) h
    CxCons :: (Locus h g, IsNode h) => Context g l h -> g h' -> D.D h (L h') (Tree g) -> Context g l h'

-- | The grammar fragment attached to the innermost context frame.
getContext :: Context g l h -> g h
getContext (CxNil g) = g
getContext (CxCons _ g _) = g

-- | A focused subtree together with its one-hole context back to the root.
data Zipper g l where
    Zipper :: Context g l h -> Tree g (L h) -> Zipper g l

-- | All zippers focused on an immediate child of the current focus.
down :: (Closed g) => Zipper g l -> [Zipper g l]
down (Zipper cx (Tree h)) = Rank2.foldMap (\(Pair loc (OnLabel g)) -> [Zipper (CxCons cx g (D.context loc)) (D.focus loc)])
                                          (fromFirst (closed (getContext cx) (D.withLocs h)))
    where
    fromFirst (First (Just x)) = x
    fromFirst _ = error "Incomplete grammar" -- There is probably a missing case in a chain of ≪|≫s.

-- | Refocus on the parent node, or 'Nothing' at the root.
up :: Zipper g l -> Maybe (Zipper g l)
up (Zipper (CxNil _) _) = Nothing
up (Zipper (CxCons cx _ d) t) = Just (Zipper cx (Tree (D.fill d t)))
| null | https://raw.githubusercontent.com/luqui/manifesto/09a70581376295c14d24f8cd0771941d7a613361/Nav.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE GADTs #
# LANGUAGE RankNTypes #
Each level of the context stores the grammar associated with that level.
There is probably a missing case in a chain of ≪|≫s. | # LANGUAGE DataKinds #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PolyKinds #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeOperators #
module Nav where
import Grammar hiding (Tree)
import qualified Rank2
import qualified Data.Functor.Differentiable.Rank2 as D
import Data.Monoid (First(..))
type IsNode h = (D.Differentiable h, Rank2.Foldable h)
data Tree g l where
Tree :: (Locus h g, IsNode h) => h (Tree g) -> Tree g (L h)
data Context g l h where
CxNil :: g h -> Context g (L h) h
CxCons :: (Locus h g, IsNode h) => Context g l h -> g h' -> D.D h (L h') (Tree g) -> Context g l h'
getContext :: Context g l h -> g h
getContext (CxNil g) = g
getContext (CxCons _ g _) = g
data Zipper g l where
Zipper :: Context g l h -> Tree g (L h) -> Zipper g l
down :: (Closed g) => Zipper g l -> [Zipper g l]
down (Zipper cx (Tree h)) = Rank2.foldMap (\(Pair loc (OnLabel g)) -> [Zipper (CxCons cx g (D.context loc)) (D.focus loc)])
(fromFirst (closed (getContext cx) (D.withLocs h)))
where
fromFirst (First (Just x)) = x
up :: Zipper g l -> Maybe (Zipper g l)
up (Zipper (CxNil _) _) = Nothing
up (Zipper (CxCons cx _ d) t) = Just (Zipper cx (Tree (D.fill d t)))
|
2ec15c95e477f694474152d3ec073268883a4cdc941ac6a625eba17bb82f9575 | drewnoff/openintro-gorilla-incanter | project.clj | (defproject inf-for-categorical-data "0.1.0-SNAPSHOT"
:description "OenIntro lab 'Inference for Categorical data'"
:url "-repl.org/view.html?source=github&user=drewnoff&repo=openintro-gorilla-incanter&path=/inf-for-categorical-data/src/openintro/inf-for-categorical-data.clj"
:license {:name "CC-BY-SA"
:url "-sa/3.0/"}
:dependencies [[org.clojure/clojure "1.7.0"]
[incanter "1.5.5"]
[incanter-gorilla "0.1.0"]]
:plugins [[lein-gorilla "0.3.5"]])
| null | https://raw.githubusercontent.com/drewnoff/openintro-gorilla-incanter/cb9688d5624bb54649073fdf0e1672047dbff391/inf-for-categorical-data/project.clj | clojure | (defproject inf-for-categorical-data "0.1.0-SNAPSHOT"
:description "OenIntro lab 'Inference for Categorical data'"
:url "-repl.org/view.html?source=github&user=drewnoff&repo=openintro-gorilla-incanter&path=/inf-for-categorical-data/src/openintro/inf-for-categorical-data.clj"
:license {:name "CC-BY-SA"
:url "-sa/3.0/"}
:dependencies [[org.clojure/clojure "1.7.0"]
[incanter "1.5.5"]
[incanter-gorilla "0.1.0"]]
:plugins [[lein-gorilla "0.3.5"]])
| |
6163ac7cf0e86e751e04ad498f2f8f66e31a6a5b8a34353d0721eb94fd10281a | herd/herdtools7 | testHash.mli | (****************************************************************************)
(* the diy toolsuite *)
(* *)
(* Jade Alglave, University College London, UK.                             *)
(* Luc Maranget, INRIA Paris-Rocquencourt, France.                          *)
(*                                                                          *)
(* Copyright 2013-present Institut National de Recherche en Informatique et *)
(* en Automatique and the authors. All rights reserved.                     *)
(*                                                                          *)
(* This software is governed by the CeCILL-B license under French law and   *)
(* abiding by the rules of distribution of free software. You can use,      *)
(* modify and/or redistribute the software under the terms of the CeCILL-B  *)
(* license as circulated by CEA, CNRS and INRIA at the following URL        *)
(* "http://www.cecill.info". We also give a copy in LICENSE.txt.            *)
(****************************************************************************)
(** Generate hash from a litmus test *)

(* Several section comments below had lost their delimiters, making the
   interface syntactically invalid; restored. *)

(**************)
(* Digest Env *)
(**************)

(* A digest plus the file it was computed from. *)
type hinfo = { hash : string ; filename : string; }

(* Maps test names to their digest information. *)
type env = hinfo StringMap.t

exception Seen

(* [check_env env name filename hash] records the digest, raising on
   inconsistent duplicates — behaviour defined in the implementation. *)
val check_env : env -> string -> string -> string -> env

(*******************)
(* Compute digests *)
(*******************)

(* Digest of init (shared with C digests) *)
val digest_init :
  (string -> string -> unit) (* debug *) -> MiscParser.state -> string

(* Digest of meta-data (shared with C digests) *)
val digest_info : MiscParser.info -> string

module Make :
  functor (A:ArchBase.S) ->
    sig
      type init = MiscParser.state
      type prog = (MiscParser.proc * A.pseudo list) list
      type rlocations = MiscParser.RLocSet.t
      val refresh_labels : string -> prog -> prog
      val digest : MiscParser.info -> init -> prog -> rlocations -> string
    end
| null | https://raw.githubusercontent.com/herd/herdtools7/c3b5079aed4bf9d92a5c7de04ef3638d6af0f8c0/lib/testHash.mli | ocaml | **************************************************************************
the diy toolsuite
en Automatique and the authors. All rights reserved.
abiding by the rules of distribution of free software. You can use,
**************************************************************************
* Generate hash from a litmus test
************
************
*****************
Compute digests
*****************
debug | , University College London , UK .
, INRIA Paris - Rocquencourt , France .
Copyright 2013 - present Institut National de Recherche en Informatique et
This software is governed by the CeCILL - B license under French law and
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
Digest Env
type hinfo = { hash : string ; filename : string; }
type env = hinfo StringMap.t
exception Seen
val check_env : env -> string -> string -> string -> env
Digest of init ( shared with C digests )
val digest_init :
Digest of meta - data ( shared with C digests )
val digest_info : MiscParser.info -> string
module Make :
functor (A:ArchBase.S) ->
sig
type init = MiscParser.state
type prog = (MiscParser.proc * A.pseudo list) list
type rlocations = MiscParser.RLocSet.t
val refresh_labels : string -> prog -> prog
val digest : MiscParser.info -> init -> prog -> rlocations -> string
end
|
43baa4f17578b3fe6deff1638ab8588388dd4fa9a39272e20fc4f1599f04be80 | lilyball/projecteuler-ocaml | sieve.mli | type sieve
type primality = Prime | Composite | Unknown
val make : int -> sieve
val size : sieve -> int
val primality : int -> sieve -> primality
val mark_prime : int -> sieve -> unit
val find_prime : int -> sieve -> int
val iter : (int -> unit) -> sieve -> unit
(** iterates over all the primes in the sieve *)
val fold : (int -> 'a -> 'a) -> 'a -> sieve -> 'a
(** equivalent to a List.fold_left with a list of all primes in the sieve *)
exception Out_of_bounds
| null | https://raw.githubusercontent.com/lilyball/projecteuler-ocaml/a88ed8355b565ad0726cfcac4916d2b80512da7a/sieve.mli | ocaml | * iterates over all the primes in the sieve
* equivalent to a List.fold_left with a list of all primes in the sieve | type sieve
type primality = Prime | Composite | Unknown
val make : int -> sieve
val size : sieve -> int
val primality : int -> sieve -> primality
val mark_prime : int -> sieve -> unit
val find_prime : int -> sieve -> int
val iter : (int -> unit) -> sieve -> unit
val fold : (int -> 'a -> 'a) -> 'a -> sieve -> 'a
exception Out_of_bounds
|
7b645cc967bdf505fdf5ae329faab953f6b58806e1aa5cd8f55ab8f9fd2dae4b | racket/gui | gl-refresh.rkt | #lang racket/gui
(require sgl)
(define c%
(class canvas%
(inherit with-gl-context swap-gl-buffers)
(define/override (on-paint)
(with-gl-context
(lambda ()
(gl-clear-color (random) (random) (random) 1)
(gl-clear 'color-buffer-bit)
(swap-gl-buffers)
(gl-flush))))
(super-new (style '(gl no-autoclear)))))
(define f (new frame% [label ""] [width 100] [height 300]))
(define c (new c% [parent f]))
(new message% [parent f] [label "Canvas changes color on refresh,"])
(new message% [parent f] [label "so check that it's not too often."])
(new button% [parent f] [label "Refresh"] [callback (lambda (b e) (send c refresh))])
(send f show #t)
(module test racket/base)
| null | https://raw.githubusercontent.com/racket/gui/d1fef7a43a482c0fdd5672be9a6e713f16d8be5c/gui-test/tests/gracket/gl-refresh.rkt | racket | #lang racket/gui
(require sgl)
(define c%
(class canvas%
(inherit with-gl-context swap-gl-buffers)
(define/override (on-paint)
(with-gl-context
(lambda ()
(gl-clear-color (random) (random) (random) 1)
(gl-clear 'color-buffer-bit)
(swap-gl-buffers)
(gl-flush))))
(super-new (style '(gl no-autoclear)))))
(define f (new frame% [label ""] [width 100] [height 300]))
(define c (new c% [parent f]))
(new message% [parent f] [label "Canvas changes color on refresh,"])
(new message% [parent f] [label "so check that it's not too often."])
(new button% [parent f] [label "Refresh"] [callback (lambda (b e) (send c refresh))])
(send f show #t)
(module test racket/base)
| |
0ed5f12c48925210c59050ec573f24a3a60c845f5dc81d8b0178d27b7a8dadf4 | rtrusso/scp | build.scm | (need build/rules)
(define output-root "out/")
(define (out . args)
(path-append output-root (apply string-append args)))
(define (scm-command command)
(string-append "scm -b -r5 -q -lrun-scm.scm "
command))
(define (scm-interp-command input output)
(scm-command (string-append "scheme-interpreter.scm --conspiracy --run="
input
" --out="
output)))
(define (scm-ut-command ut-name)
(scm-interp-command (string-append "tests/" ut-name ".ss")
(out ut-name "-unittest.actual")))
(define (scm-ut-rule ut-name)
(new-rule (list (out ut-name "-unittest.actual"))
(list (string-append "tests/" ut-name ".ss")
"scheme-interpreter.scm")
(list (scm-ut-command ut-name))))
(define (diff-command before after output)
(string-append "diff --strip-trailing-cr "
before
" "
after
" >"
output))
(define (ut-diff-command ut-name)
(diff-command (out ut-name "-unittest.actual")
(string-append "tests/baseline/" ut-name "-unittest.actual")
(out ut-name "-unittest.test")))
(define (scm-ut-diff-rule ut-name)
(new-rule (list (out ut-name "-unittest.test"))
(list (string-append "tests/baseline/" ut-name "-unittest.actual")
(out ut-name "-unittest.actual"))
(list (ut-diff-command ut-name))))
(define (new-unit-test-rules ut-name)
(list (scm-ut-diff-rule ut-name)
(scm-ut-rule ut-name)))
(define (compile-java-command class-name)
(scm-command (string-append "java-compiler.scm --main="
class-name
" rtl/JavaRtl.java tests/"
class-name
".java --out="
(out class-name ".sasm"))))
(define (sasm-opt-command sasm-input)
(scm-command (string-append "sasm-opt.scm "
sasm-input
" --out="
sasm-input
"-opt")))
(define (java-sasm-opt-command class-name)
(sasm-opt-command (out class-name ".sasm")))
(define (new-compile-java-rule class-name)
(list
(new-rule (list (out class-name ".sasm"))
(list "rtl/JavaRtl.java"
(string-append "tests/" class-name ".java"))
(list (compile-java-command class-name)))
(new-rule (list (out class-name ".sasm-opt"))
(list (out class-name ".sasm"))
(list (java-sasm-opt-command class-name)))
))
(define (map-rules function objects)
(let loop ((objects objects)
(rules '()))
(if (null? objects)
rules
(loop (cdr objects)
(append rules (function (car objects)))))))
(define unit-tests
(list "list"
"fastset"
"genparse"
"genproc"
"pat"
"regex"
"sasm-insn"))
(define ut-rules
(map-rules new-unit-test-rules unit-tests))
(define java-class-names
(list "Arrays"
"Arrays"
"BinarySearch"
"BinaryTree"
"Bitwise"
"BubbleSort"
"CharString"
"Count"
"CtorTest"
"Factorial"
"LinearSearch"
"LinkedList"
"Messy"
"MyFactorial"
"NumberToString"
"ObjArray"
"OpEquals"
"OverrideTest"
"QuickSort"
"Rectangles"
"StaticMembers"
"StaticMethods"
"SubExp"
"TreeVisitor"
"TwoArgs"
))
(define java-rules
(map-rules new-compile-java-rule java-class-names))
(define project
(new-project
(append ut-rules
java-rules
'())))
(if (not (fs-exists? (read-fs "out")))
(begin (display ";; creating output directory")
(newline)
(create-directory "out")))
(build-project project)
| null | https://raw.githubusercontent.com/rtrusso/scp/2051e76df14bd36aef81aba519ffafa62b260f5c/src/build.scm | scheme | (need build/rules)
(define output-root "out/")
(define (out . args)
(path-append output-root (apply string-append args)))
(define (scm-command command)
(string-append "scm -b -r5 -q -lrun-scm.scm "
command))
(define (scm-interp-command input output)
(scm-command (string-append "scheme-interpreter.scm --conspiracy --run="
input
" --out="
output)))
(define (scm-ut-command ut-name)
(scm-interp-command (string-append "tests/" ut-name ".ss")
(out ut-name "-unittest.actual")))
(define (scm-ut-rule ut-name)
(new-rule (list (out ut-name "-unittest.actual"))
(list (string-append "tests/" ut-name ".ss")
"scheme-interpreter.scm")
(list (scm-ut-command ut-name))))
(define (diff-command before after output)
(string-append "diff --strip-trailing-cr "
before
" "
after
" >"
output))
(define (ut-diff-command ut-name)
(diff-command (out ut-name "-unittest.actual")
(string-append "tests/baseline/" ut-name "-unittest.actual")
(out ut-name "-unittest.test")))
(define (scm-ut-diff-rule ut-name)
(new-rule (list (out ut-name "-unittest.test"))
(list (string-append "tests/baseline/" ut-name "-unittest.actual")
(out ut-name "-unittest.actual"))
(list (ut-diff-command ut-name))))
(define (new-unit-test-rules ut-name)
(list (scm-ut-diff-rule ut-name)
(scm-ut-rule ut-name)))
(define (compile-java-command class-name)
(scm-command (string-append "java-compiler.scm --main="
class-name
" rtl/JavaRtl.java tests/"
class-name
".java --out="
(out class-name ".sasm"))))
(define (sasm-opt-command sasm-input)
(scm-command (string-append "sasm-opt.scm "
sasm-input
" --out="
sasm-input
"-opt")))
(define (java-sasm-opt-command class-name)
(sasm-opt-command (out class-name ".sasm")))
(define (new-compile-java-rule class-name)
(list
(new-rule (list (out class-name ".sasm"))
(list "rtl/JavaRtl.java"
(string-append "tests/" class-name ".java"))
(list (compile-java-command class-name)))
(new-rule (list (out class-name ".sasm-opt"))
(list (out class-name ".sasm"))
(list (java-sasm-opt-command class-name)))
))
(define (map-rules function objects)
(let loop ((objects objects)
(rules '()))
(if (null? objects)
rules
(loop (cdr objects)
(append rules (function (car objects)))))))
(define unit-tests
(list "list"
"fastset"
"genparse"
"genproc"
"pat"
"regex"
"sasm-insn"))
(define ut-rules
(map-rules new-unit-test-rules unit-tests))
(define java-class-names
(list "Arrays"
"Arrays"
"BinarySearch"
"BinaryTree"
"Bitwise"
"BubbleSort"
"CharString"
"Count"
"CtorTest"
"Factorial"
"LinearSearch"
"LinkedList"
"Messy"
"MyFactorial"
"NumberToString"
"ObjArray"
"OpEquals"
"OverrideTest"
"QuickSort"
"Rectangles"
"StaticMembers"
"StaticMethods"
"SubExp"
"TreeVisitor"
"TwoArgs"
))
(define java-rules
(map-rules new-compile-java-rule java-class-names))
(define project
(new-project
(append ut-rules
java-rules
'())))
(if (not (fs-exists? (read-fs "out")))
(begin (display ";; creating output directory")
(newline)
(create-directory "out")))
(build-project project)
| |
6c72bf060853ccacd1ddab7ad34c3cd4fb55b3bd68f310727dc3ef800e55bea3 | tel/saltine | ChaCha20Poly1305.hs | # LANGUAGE DeriveDataTypeable , , DeriveGeneric , ForeignFunctionInterface #
-- |
-- Module : Crypto.Saltine.Internal.AEAD.ChaCha20Poly1305
Copyright : ( c ) 2021
License : MIT
--
-- Maintainer :
-- Stability : experimental
-- Portability : non-portable
--
module Crypto.Saltine.Internal.AEAD.ChaCha20Poly1305 (
aead_chacha20poly1305_keybytes
, aead_chacha20poly1305_npubbytes
, aead_chacha20poly1305_abytes
, c_aead
, c_aead_open
, c_aead_detached
, c_aead_open_detached
, Key(..)
, Nonce(..)
) where
import Control.DeepSeq
import Crypto.Saltine.Class
import Crypto.Saltine.Core.Hash (shorthash)
import Crypto.Saltine.Internal.Hash (nullShKey)
import Crypto.Saltine.Internal.Util as U
import Data.ByteString (ByteString)
import Data.Data (Data, Typeable)
import Data.Hashable (Hashable)
import Data.Monoid
import Foreign.C
import Foreign.Ptr
import GHC.Generics (Generic)
import qualified Data.ByteString as S
-- | An opaque 'ChaCha20Poly1305' cryptographic key.
newtype Key = Key { unKey :: ByteString } deriving (Ord, Hashable, Data, Typeable, Generic, NFData)
instance Eq Key where
Key a == Key b = U.compare a b
instance Show Key where
show k = "AEAD.ChaCha20Poly1305.Key {hashesTo = \"" <> (bin2hex . shorthash nullShKey $ encode k) <> "\"}"
instance IsEncoding Key where
decode v = if S.length v == aead_chacha20poly1305_keybytes
then Just (Key v)
else Nothing
# INLINE decode #
encode (Key v) = v
# INLINE encode #
-- | An opaque 'ChaCha20Poly1305' nonce.
newtype Nonce = Nonce { unNonce :: ByteString } deriving (Eq, Ord, Hashable, Data, Typeable, Generic, NFData)
instance Show Nonce where
show k = "AEAD.ChaCha20Poly1305.Nonce " <> bin2hex (encode k)
instance IsEncoding Nonce where
decode v = if S.length v == aead_chacha20poly1305_npubbytes
then Just (Nonce v)
else Nothing
# INLINE decode #
encode (Nonce v) = v
# INLINE encode #
instance IsNonce Nonce where
zero = Nonce (S.replicate aead_chacha20poly1305_npubbytes 0)
nudge (Nonce n) = Nonce (nudgeBS n)
aead_chacha20poly1305_keybytes, aead_chacha20poly1305_abytes, aead_chacha20poly1305_npubbytes :: Int
-- | Size of a ChaCha20-Poly1305 key
aead_chacha20poly1305_keybytes = fromIntegral c_crypto_aead_chacha20poly1305_keybytes
-- | Size of a ChaCha20-Poly1305 nonce
aead_chacha20poly1305_npubbytes = fromIntegral c_crypto_aead_chacha20poly1305_npubbytes
-- | Size of a ChaCha20-Poly1305 authentication tag
aead_chacha20poly1305_abytes = fromIntegral c_crypto_aead_chacha20poly1305_abytes
-- src/libsodium/crypto_aead/xchacha20poly1305/sodium/aead_xchacha20poly1305.c
-- src/libsodium/include/sodium/crypto_aead_xchacha20poly1305.h
foreign import ccall "crypto_aead_chacha20poly1305_keybytes"
c_crypto_aead_chacha20poly1305_keybytes :: CSize
foreign import ccall "crypto_aead_chacha20poly1305_npubbytes"
c_crypto_aead_chacha20poly1305_npubbytes:: CSize
foreign import ccall "crypto_aead_chacha20poly1305_abytes"
c_crypto_aead_chacha20poly1305_abytes :: CSize
-- | The aead C API uses C strings. Always returns 0.
foreign import ccall "crypto_aead_chacha20poly1305_encrypt"
c_aead
:: Ptr CChar
-- ^ Cipher output buffer
-> Ptr CULLong
-- ^ Cipher output bytes used
-> Ptr CChar
-- ^ Constant message input buffer
-> CULLong
-- ^ Length of message input buffer
-> Ptr CChar
-- ^ Constant aad input buffer
-> CULLong
-- ^ Length of aad input buffer
-> Ptr CChar
^ Unused ' nsec ' value ( must be NULL )
-> Ptr CChar
-- ^ Constant nonce buffer
-> Ptr CChar
-- ^ Constant key buffer
-> IO CInt
| The aead open C API uses C strings . Returns 0 if successful .
foreign import ccall "crypto_aead_chacha20poly1305_decrypt"
c_aead_open
:: Ptr CChar
-- ^ Message output buffer
-> Ptr CULLong
-- ^ Message output bytes used
-> Ptr CChar
^ Unused ' nsec ' value ( must be NULL )
-> Ptr CChar
-- ^ Constant ciphertext input buffer
-> CULLong
-- ^ Length of ciphertext input buffer
-> Ptr CChar
-- ^ Constant aad input buffer
-> CULLong
-- ^ Length of aad input buffer
-> Ptr CChar
-- ^ Constant nonce buffer
-> Ptr CChar
-- ^ Constant key buffer
-> IO CInt
-- | The aead C API uses C strings. Always returns 0.
foreign import ccall "crypto_aead_chacha20poly1305_encrypt_detached"
c_aead_detached
:: Ptr CChar
-- ^ Cipher output buffer
-> Ptr CChar
-- ^ Tag output buffer
-> Ptr CULLong
-- ^ Tag bytes used
-> Ptr CChar
-- ^ Constant message input buffer
-> CULLong
-- ^ Length of message input buffer
-> Ptr CChar
-- ^ Constant aad input buffer
-> CULLong
-- ^ Length of aad input buffer
-> Ptr CChar
^ Unused ' nsec ' value ( must be NULL )
-> Ptr CChar
-- ^ Constant nonce buffer
-> Ptr CChar
-- ^ Constant key buffer
-> IO CInt
| The aead open C API uses C strings . Returns 0 if successful .
foreign import ccall "crypto_aead_chacha20poly1305_decrypt_detached"
c_aead_open_detached
:: Ptr CChar
-- ^ Message output buffer
-> Ptr CChar
^ Unused ' nsec ' value ( must be NULL )
-> Ptr CChar
-- ^ Constant ciphertext input buffer
-> CULLong
-- ^ Length of ciphertext input buffer
-> Ptr CChar
-- ^ Constant tag input buffer
-> Ptr CChar
-- ^ Constant aad input buffer
-> CULLong
-- ^ Length of aad input buffer
-> Ptr CChar
-- ^ Constant nonce buffer
-> Ptr CChar
-- ^ Constant key buffer
-> IO CInt
| null | https://raw.githubusercontent.com/tel/saltine/531997fb4b884bbc15eda56cca88d0f207c4329e/src/Crypto/Saltine/Internal/AEAD/ChaCha20Poly1305.hs | haskell | |
Module : Crypto.Saltine.Internal.AEAD.ChaCha20Poly1305
Maintainer :
Stability : experimental
Portability : non-portable
| An opaque 'ChaCha20Poly1305' cryptographic key.
| An opaque 'ChaCha20Poly1305' nonce.
| Size of a ChaCha20-Poly1305 key
| Size of a ChaCha20-Poly1305 nonce
| Size of a ChaCha20-Poly1305 authentication tag
src/libsodium/crypto_aead/xchacha20poly1305/sodium/aead_xchacha20poly1305.c
src/libsodium/include/sodium/crypto_aead_xchacha20poly1305.h
| The aead C API uses C strings. Always returns 0.
^ Cipher output buffer
^ Cipher output bytes used
^ Constant message input buffer
^ Length of message input buffer
^ Constant aad input buffer
^ Length of aad input buffer
^ Constant nonce buffer
^ Constant key buffer
^ Message output buffer
^ Message output bytes used
^ Constant ciphertext input buffer
^ Length of ciphertext input buffer
^ Constant aad input buffer
^ Length of aad input buffer
^ Constant nonce buffer
^ Constant key buffer
| The aead C API uses C strings. Always returns 0.
^ Cipher output buffer
^ Tag output buffer
^ Tag bytes used
^ Constant message input buffer
^ Length of message input buffer
^ Constant aad input buffer
^ Length of aad input buffer
^ Constant nonce buffer
^ Constant key buffer
^ Message output buffer
^ Constant ciphertext input buffer
^ Length of ciphertext input buffer
^ Constant tag input buffer
^ Constant aad input buffer
^ Length of aad input buffer
^ Constant nonce buffer
^ Constant key buffer | # LANGUAGE DeriveDataTypeable , , DeriveGeneric , ForeignFunctionInterface #
Copyright : ( c ) 2021
License : MIT
module Crypto.Saltine.Internal.AEAD.ChaCha20Poly1305 (
aead_chacha20poly1305_keybytes
, aead_chacha20poly1305_npubbytes
, aead_chacha20poly1305_abytes
, c_aead
, c_aead_open
, c_aead_detached
, c_aead_open_detached
, Key(..)
, Nonce(..)
) where
import Control.DeepSeq
import Crypto.Saltine.Class
import Crypto.Saltine.Core.Hash (shorthash)
import Crypto.Saltine.Internal.Hash (nullShKey)
import Crypto.Saltine.Internal.Util as U
import Data.ByteString (ByteString)
import Data.Data (Data, Typeable)
import Data.Hashable (Hashable)
import Data.Monoid
import Foreign.C
import Foreign.Ptr
import GHC.Generics (Generic)
import qualified Data.ByteString as S
newtype Key = Key { unKey :: ByteString } deriving (Ord, Hashable, Data, Typeable, Generic, NFData)
instance Eq Key where
Key a == Key b = U.compare a b
instance Show Key where
show k = "AEAD.ChaCha20Poly1305.Key {hashesTo = \"" <> (bin2hex . shorthash nullShKey $ encode k) <> "\"}"
instance IsEncoding Key where
decode v = if S.length v == aead_chacha20poly1305_keybytes
then Just (Key v)
else Nothing
# INLINE decode #
encode (Key v) = v
# INLINE encode #
newtype Nonce = Nonce { unNonce :: ByteString } deriving (Eq, Ord, Hashable, Data, Typeable, Generic, NFData)
instance Show Nonce where
show k = "AEAD.ChaCha20Poly1305.Nonce " <> bin2hex (encode k)
instance IsEncoding Nonce where
decode v = if S.length v == aead_chacha20poly1305_npubbytes
then Just (Nonce v)
else Nothing
# INLINE decode #
encode (Nonce v) = v
# INLINE encode #
instance IsNonce Nonce where
zero = Nonce (S.replicate aead_chacha20poly1305_npubbytes 0)
nudge (Nonce n) = Nonce (nudgeBS n)
aead_chacha20poly1305_keybytes, aead_chacha20poly1305_abytes, aead_chacha20poly1305_npubbytes :: Int
aead_chacha20poly1305_keybytes = fromIntegral c_crypto_aead_chacha20poly1305_keybytes
aead_chacha20poly1305_npubbytes = fromIntegral c_crypto_aead_chacha20poly1305_npubbytes
aead_chacha20poly1305_abytes = fromIntegral c_crypto_aead_chacha20poly1305_abytes
foreign import ccall "crypto_aead_chacha20poly1305_keybytes"
c_crypto_aead_chacha20poly1305_keybytes :: CSize
foreign import ccall "crypto_aead_chacha20poly1305_npubbytes"
c_crypto_aead_chacha20poly1305_npubbytes:: CSize
foreign import ccall "crypto_aead_chacha20poly1305_abytes"
c_crypto_aead_chacha20poly1305_abytes :: CSize
foreign import ccall "crypto_aead_chacha20poly1305_encrypt"
c_aead
:: Ptr CChar
-> Ptr CULLong
-> Ptr CChar
-> CULLong
-> Ptr CChar
-> CULLong
-> Ptr CChar
^ Unused ' nsec ' value ( must be NULL )
-> Ptr CChar
-> Ptr CChar
-> IO CInt
| The aead open C API uses C strings . Returns 0 if successful .
foreign import ccall "crypto_aead_chacha20poly1305_decrypt"
c_aead_open
:: Ptr CChar
-> Ptr CULLong
-> Ptr CChar
^ Unused ' nsec ' value ( must be NULL )
-> Ptr CChar
-> CULLong
-> Ptr CChar
-> CULLong
-> Ptr CChar
-> Ptr CChar
-> IO CInt
foreign import ccall "crypto_aead_chacha20poly1305_encrypt_detached"
c_aead_detached
:: Ptr CChar
-> Ptr CChar
-> Ptr CULLong
-> Ptr CChar
-> CULLong
-> Ptr CChar
-> CULLong
-> Ptr CChar
^ Unused ' nsec ' value ( must be NULL )
-> Ptr CChar
-> Ptr CChar
-> IO CInt
| The aead open C API uses C strings . Returns 0 if successful .
foreign import ccall "crypto_aead_chacha20poly1305_decrypt_detached"
c_aead_open_detached
:: Ptr CChar
-> Ptr CChar
^ Unused ' nsec ' value ( must be NULL )
-> Ptr CChar
-> CULLong
-> Ptr CChar
-> Ptr CChar
-> CULLong
-> Ptr CChar
-> Ptr CChar
-> IO CInt
|
12b5118ad64d782681d0dc28c035df21c881731932361abca88d716275dadf18 | tonyg/kali-scheme | graph.scm | Copyright ( c ) 1993 , 1994 by and .
Copyright ( c ) 1996 by NEC Research Institute , Inc. See file COPYING .
; Code to print out module dependencies in a format readable by the
graph layout program AT&T DOT Release 1.0 . ( for information on DOT call
the AT&T Software Technology Center Common Support Hotline ( 908 ) 582 - 7009 )
; Follow link script up to the actual linking
;(load-configuration "scheme/interfaces.scm")
;(load-configuration "scheme/packages.scm")
;(flatload initial-structures)
;(load "build/initial.scm")
;
; Load this and run it
;(load "scheme/debug/graph.scm")
;(dependency-graph (initial-packages)
; (map structure-package (list scheme-level-1 scheme-level-0))
; "graph.dot")
;
; Run the graph layout program
setenv SDE_LICENSE_FILE /pls / local / lib / DOT / LICENSE.dot
/pls / local / lib / DOT / dot -Tps graph.dot -o graph.ps
; Returns a list of the packages in the initial system.
(define (initial-packages)
(map (lambda (p)
(structure-package (cdr p)))
(append (struct-list scheme
environments
module-system
ensures-loaded
packages
packages-internal)
(desirable-structures))))
; Write the dependency graph found by rooting from PACKAGES to FILENAME.
Packages in the list IGNORE are ignored .
;
; Each configuration file's packages are done as a separate subgraph.
(define (dependency-graph packages ignore filename)
(call-with-output-file filename
(lambda (out)
(display prelude out)
(newline out)
(let ((subgraphs (do-next-package packages ignore '() ignore out)))
(for-each (lambda (sub)
(note-subgraph sub out))
subgraphs)
(display "}" out)
(newline out)))))
Do the first not - yet - done package , returning the subgraphs if there are
no packages left . TO - DO , DONE , and IGNORE are lists of packages .
SUBGRAPHS is an a - list indexed by source - file - name .
(define (do-next-package to-do done subgraphs ignore out)
(let loop ((to-do to-do))
(if (null? to-do)
subgraphs
(let ((package (car to-do)))
(if (memq package done)
(loop (cdr to-do))
(do-package package (cdr to-do) (cons package done)
subgraphs ignore out))))))
; Find the correct subgraph, add PACKAGE to it, note any edges, and continue
; with the rest of the graph.
(define (do-package package to-do done subgraphs ignore out)
(let* ((source-file (package-file-name package))
(opens (map structure-package
((package-opens-thunk package))))
(old-subgraph (assq source-file subgraphs))
(subgraph (or old-subgraph
(list source-file))))
(set-cdr! subgraph (cons package (cdr subgraph)))
(do-edges package opens source-file ignore out)
(do-next-package (append opens to-do)
done
(if old-subgraph
subgraphs
(cons subgraph subgraphs))
ignore
out)))
; Add an edge from each package in OPENS to PACKAGE, provided that the
two were defined in the same file .
(define (do-edges package opens source-file ignore out)
(let loop ((opens opens) (done ignore))
(if (not (null? opens))
(loop (cdr opens)
(let ((p (car opens)))
(if (or (memq p done)
(not (string=? source-file (package-file-name p))))
done
(begin
(note-edge p package out)
(cons p done))))))))
; Writing out the package name as a string (actually, its the name of
the first of the package 's clients ) .
(define (package-name package out)
(let ((clients (population->list (package-clients package))))
(write-char #\" out)
(display (structure-name (car clients)) out)
(write-char #\" out)))
Header for DOT files
(define prelude
"digraph G {
orientation=landscape;
size =\"10,7.5\";
page =\"8.5,11\";
ratio =fill;")
; Writing out edges and subgraphs
(define (note-edge from to out)
(display " " out)
(package-name from out)
(display " -> " out)
(package-name to out)
(write-char #\; out)
(newline out))
(define (note-subgraph subgraph out)
(display " subgraph \"cluster_" out)
(display (car subgraph) out)
(display "\" { label=\"" out)
(display (car subgraph) out)
(display "\"; " out)
(for-each (lambda (p)
(package-name p out)
(display "; " out))
(cdr subgraph))
(display "}" out)
(newline out))
| null | https://raw.githubusercontent.com/tonyg/kali-scheme/79bf76b4964729b63fce99c4d2149b32cb067ac0/scheme/debug/graph.scm | scheme | Code to print out module dependencies in a format readable by the
Follow link script up to the actual linking
(load-configuration "scheme/interfaces.scm")
(load-configuration "scheme/packages.scm")
(flatload initial-structures)
(load "build/initial.scm")
Load this and run it
(load "scheme/debug/graph.scm")
(dependency-graph (initial-packages)
(map structure-package (list scheme-level-1 scheme-level-0))
"graph.dot")
Run the graph layout program
Returns a list of the packages in the initial system.
Write the dependency graph found by rooting from PACKAGES to FILENAME.
Each configuration file's packages are done as a separate subgraph.
Find the correct subgraph, add PACKAGE to it, note any edges, and continue
with the rest of the graph.
Add an edge from each package in OPENS to PACKAGE, provided that the
Writing out the package name as a string (actually, its the name of
")
Writing out edges and subgraphs
out)
" out) | Copyright ( c ) 1993 , 1994 by and .
Copyright ( c ) 1996 by NEC Research Institute , Inc. See file COPYING .
graph layout program AT&T DOT Release 1.0 . ( for information on DOT call
the AT&T Software Technology Center Common Support Hotline ( 908 ) 582 - 7009 )
setenv SDE_LICENSE_FILE /pls / local / lib / DOT / LICENSE.dot
/pls / local / lib / DOT / dot -Tps graph.dot -o graph.ps
(define (initial-packages)
(map (lambda (p)
(structure-package (cdr p)))
(append (struct-list scheme
environments
module-system
ensures-loaded
packages
packages-internal)
(desirable-structures))))
Packages in the list IGNORE are ignored .
(define (dependency-graph packages ignore filename)
(call-with-output-file filename
(lambda (out)
(display prelude out)
(newline out)
(let ((subgraphs (do-next-package packages ignore '() ignore out)))
(for-each (lambda (sub)
(note-subgraph sub out))
subgraphs)
(display "}" out)
(newline out)))))
Do the first not - yet - done package , returning the subgraphs if there are
no packages left . TO - DO , DONE , and IGNORE are lists of packages .
SUBGRAPHS is an a - list indexed by source - file - name .
(define (do-next-package to-do done subgraphs ignore out)
(let loop ((to-do to-do))
(if (null? to-do)
subgraphs
(let ((package (car to-do)))
(if (memq package done)
(loop (cdr to-do))
(do-package package (cdr to-do) (cons package done)
subgraphs ignore out))))))
(define (do-package package to-do done subgraphs ignore out)
(let* ((source-file (package-file-name package))
(opens (map structure-package
((package-opens-thunk package))))
(old-subgraph (assq source-file subgraphs))
(subgraph (or old-subgraph
(list source-file))))
(set-cdr! subgraph (cons package (cdr subgraph)))
(do-edges package opens source-file ignore out)
(do-next-package (append opens to-do)
done
(if old-subgraph
subgraphs
(cons subgraph subgraphs))
ignore
out)))
two were defined in the same file .
(define (do-edges package opens source-file ignore out)
(let loop ((opens opens) (done ignore))
(if (not (null? opens))
(loop (cdr opens)
(let ((p (car opens)))
(if (or (memq p done)
(not (string=? source-file (package-file-name p))))
done
(begin
(note-edge p package out)
(cons p done))))))))
the first of the package 's clients ) .
(define (package-name package out)
(let ((clients (population->list (package-clients package))))
(write-char #\" out)
(display (structure-name (car clients)) out)
(write-char #\" out)))
Header for DOT files
(define prelude
"digraph G {
(define (note-edge from to out)
(display " " out)
(package-name from out)
(display " -> " out)
(package-name to out)
(newline out))
(define (note-subgraph subgraph out)
(display " subgraph \"cluster_" out)
(display (car subgraph) out)
(display "\" { label=\"" out)
(display (car subgraph) out)
(for-each (lambda (p)
(package-name p out)
(display "; " out))
(cdr subgraph))
(display "}" out)
(newline out))
|
68f3822b5cbb87c4463e0c9bf44fafe09bb7a098f8f9fb7a8cf1e890cb4c17d8 | uim/uim | annotation-filter.scm | ;;; annotation-filter.scm: generic filter for uim
;;;
Copyright ( c ) 2010 - 2013 uim Project
;;;
;;; All rights reserved.
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
1 . Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
2 . Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
3 . Neither the name of authors nor the names of its contributors
;;; may be used to endorse or promote products derived from this software
;;; without specific prior written permission.
;;;
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ` ` AS IS '' AND
;;; ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR LIABLE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
;;; OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
;;; LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
;;; OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
;;; SUCH DAMAGE.
;;;;
(require-extension (srfi 1 2))
(require "socket.scm")
(require "fileio.scm")
(require "process.scm")
;;
;; annotation-filter format
;;
;; query_message = "GET\t" query "\t" charset "\n"
;; result_messages = ( result_messages result_message | result_message ) ".\n"
;; result_message = <any characters> "\n"
;; quit_message = "QUIT\n"
;;
(define annotation-filter-socket-pair #f)
(define (annotation-filter-open-with-unix-domain-socket)
(and-let* ((fd (unix-domain-socket-connect annotation-filter-unix-domain-socket-path)))
(cons fd fd)))
(define (annotation-filter-open-with-tcp-socket)
(and-let* ((fd (tcp-connect annotation-filter-tcpserver-name
annotation-filter-tcpserver-port)))
(cons fd fd)))
(define (annotation-filter-open-with-pipe)
(process-io annotation-filter-command))
(define (annotation-filter-init)
(and (not (string=? "" annotation-filter-command))
(let ((fds (cond ((eq? annotation-filter-server-setting? 'unixdomain)
(annotation-filter-open-with-unix-domain-socket))
((eq? annotation-filter-server-setting? 'tcpserver)
(annotation-filter-open-with-tcp-socket))
((eq? annotation-filter-server-setting? 'pipe)
(annotation-filter-open-with-pipe))
(else
(uim-notify-fatal (N_ "Custom filter connection is not defined"))
#f))))
(if fds
(set! annotation-filter-socket-pair (cons
(open-file-port (car fds))
(open-file-port (cdr fds))))
(set! annotation-filter-socket-pair #f)))))
(define (annotation-filter-read-message iport)
(let loop ((line (file-read-line iport))
(rest ""))
(if (or (not line)
(eof-object? line)
(string=? "." line))
rest
(loop (file-read-line iport) (string-append rest line)))))
(define (annotation-filter-get-text text enc)
(or (and annotation-filter-socket-pair
(and-let* ((iport (car annotation-filter-socket-pair))
(oport (cdr annotation-filter-socket-pair)))
(file-display (format "GET\t~a\t~a\n" text enc) oport)
(annotation-filter-read-message iport)))
""))
;; Shut down the connection to the annotation filter, if one is open:
;; send the QUIT message and close the port(s).  Always returns #t.
(define (annotation-filter-release)
  (and annotation-filter-socket-pair
       (and-let* ((iport (car annotation-filter-socket-pair))
                  (oport (cdr annotation-filter-socket-pair)))
         (file-display "QUIT\n" oport)
         (if (not (equal? iport oport))
             (close-file-port oport))
         (close-file-port iport)))
  ;; Bug fix: drop the stale pair.  Without this, a later
  ;; annotation-filter-get-text (or a second release) would operate on
  ;; already-closed ports.
  (set! annotation-filter-socket-pair #f)
  #t)
| null | https://raw.githubusercontent.com/uim/uim/d1ac9d9315ff8c57c713b502544fef9b3a83b3e5/scm/annotation-filter.scm | scheme | annotation-filter.scm: generic filter for uim
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
may be used to endorse or promote products derived from this software
without specific prior written permission.
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
annotation-filter format
query_message = "GET\t" query "\t" charset "\n"
result_messages = ( result_messages result_message | result_message ) ".\n"
result_message = <any characters> "\n"
quit_message = "QUIT\n"
| Copyright ( c ) 2010 - 2013 uim Project
1 . Redistributions of source code must retain the above copyright
2 . Redistributions in binary form must reproduce the above copyright
3 . Neither the name of authors nor the names of its contributors
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ` ` AS IS '' AND
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR LIABLE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
(require-extension (srfi 1 2))
(require "socket.scm")
(require "fileio.scm")
(require "process.scm")
;; Cached (input-port . output-port) pair for the running filter, or #f.
(define annotation-filter-socket-pair #f)
;; Connect over a unix-domain socket; one descriptor serves both directions.
(define (annotation-filter-open-with-unix-domain-socket)
  (and-let* ((fd (unix-domain-socket-connect annotation-filter-unix-domain-socket-path)))
    (cons fd fd)))
;; Connect over TCP; one descriptor serves both directions.
(define (annotation-filter-open-with-tcp-socket)
  (and-let* ((fd (tcp-connect annotation-filter-tcpserver-name
                              annotation-filter-tcpserver-port)))
    (cons fd fd)))
;; Spawn the filter command as a child process and talk over its stdio.
(define (annotation-filter-open-with-pipe)
  (process-io annotation-filter-command))
;; Open the configured connection kind and cache the resulting port pair
;; in annotation-filter-socket-pair (or #f on failure).
(define (annotation-filter-init)
  (and (not (string=? "" annotation-filter-command))
       (let ((fds (cond ((eq? annotation-filter-server-setting? 'unixdomain)
                         (annotation-filter-open-with-unix-domain-socket))
                        ((eq? annotation-filter-server-setting? 'tcpserver)
                         (annotation-filter-open-with-tcp-socket))
                        ((eq? annotation-filter-server-setting? 'pipe)
                         (annotation-filter-open-with-pipe))
                        (else
                         (uim-notify-fatal (N_ "Custom filter connection is not defined"))
                         #f))))
         (if fds
             (set! annotation-filter-socket-pair (cons
                                                  (open-file-port (car fds))
                                                  (open-file-port (cdr fds))))
             (set! annotation-filter-socket-pair #f)))))
;; Accumulate reply lines until a lone "." terminator, EOF or read failure.
(define (annotation-filter-read-message iport)
  (let loop ((line (file-read-line iport))
             (rest ""))
    (if (or (not line)
            (eof-object? line)
            (string=? "." line))
        rest
        (loop (file-read-line iport) (string-append rest line)))))
;; Send a GET request for TEXT/ENC and return the reply ("" if unavailable).
(define (annotation-filter-get-text text enc)
  (or (and annotation-filter-socket-pair
           (and-let* ((iport (car annotation-filter-socket-pair))
                      (oport (cdr annotation-filter-socket-pair)))
             (file-display (format "GET\t~a\t~a\n" text enc) oport)
             (annotation-filter-read-message iport)))
      ""))
;; Send QUIT and close the port(s); always returns #t.
(define (annotation-filter-release)
  (and annotation-filter-socket-pair
       (and-let* ((iport (car annotation-filter-socket-pair))
                  (oport (cdr annotation-filter-socket-pair)))
         (file-display "QUIT\n" oport)
         (if (not (equal? iport oport))
             (close-file-port oport))
         (close-file-port iport)))
  #t)
|
efadc40d66ead249dc1e6cbf0676b29b39770721cf5a2deb7a77d6eba95a1883 | RBornat/jape | sequent.mli |
Copyright ( C ) 2003 - 19
This file is part of the proof engine , which is part of .
is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
( or look at ) .
Copyright (C) 2003-19 Richard Bornat & Bernard Sufrin
This file is part of the jape proof engine, which is part of jape.
Jape is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
Jape is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with jape; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
(or look at ).
*)
open Symboltype
open Mappingfuns
open Termtype
open Idclass
(** A sequent: the turnstile (stile) string together with its left- and
    right-hand terms. *)
type seq = Seq of (string * term * term) (* stile, left, right *)

(** Register the sequent syntaxes; each triple gives the idclass of the
    left-hand side, the turnstile string, and the idclass of the
    right-hand side. *)
val describeSeqs : (idclass * string * idclass) list -> unit

(* Syntactic <-> semantic turnstile association. *)
val getsemanticturnstile : string -> string option
val setsemanticturnstile : string -> string -> unit

(* Parsing entry points. *)
val parseSeq : unit -> seq
val canstartSeq : symbol -> bool
val resetsyntaxandturnstiles : unit -> unit

(* Pretty-printers; the catelim_* variants thread a string-list
   accumulator instead of building one string. *)
val string_of_seq : seq -> string
val invisbracketedstring_of_seq : bool -> seq -> string
val debugstring_of_seq : seq -> string
val elementstring_of_seq : seq -> string
val catelim_string_of_seq : seq -> string list -> string list
val catelim_invisbracketedstring_of_seq : bool -> seq -> string list -> string list
val catelim_debugstring_of_seq : seq -> string list -> string list
val catelim_elementstring_of_seq : seq -> string list -> string list
val catelim_separatedstring_of_seq : string -> seq -> string list -> string list
val alwaysshowturnstile : bool ref

(* Construction, parsing from text, and decomposition. *)
val sequent_of_string : string -> seq
val seqexplode : seq -> string * term * term
val seq_entrails: seq -> string * idclass * element list * idclass * element list
val seq_lefts: seq -> element list
val seq_rights: seq -> element list

(* Variable collection, equality and matching under term mappings. *)
val seqvars : (term -> 'a list) -> ('a list -> 'a list -> 'a list) -> seq -> 'a list
val seqVIDs : seq -> vid list
val eqseqs : seq * seq -> bool
val seqmatch : bool -> seq -> seq -> (term, term) mapping -> (term, term) mapping option
val seqmatchvars : bool -> (term -> bool) -> seq -> seq -> (term, term) mapping
                     -> (term, term) mapping option
val remapseq : (term, term) mapping -> seq -> seq
val mkSeq : string * element list * element list -> seq
val maxseqresnum : seq -> int

(* Syntax-stack management. *)
val getsyntacticturnstiles : unit -> string list
val pushSyntax : string -> unit
val popSyntax : unit -> unit
val popAllSyntaxes : unit -> unit
| null | https://raw.githubusercontent.com/RBornat/jape/afe9f207e89e965636b43ef8fad38fd1f69737ae/distrib/camlengine/sequent.mli | ocaml | stile, left, right |
Copyright ( C ) 2003 - 19
This file is part of the proof engine , which is part of .
is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
( or look at ) .
Copyright (C) 2003-19 Richard Bornat & Bernard Sufrin
This file is part of the jape proof engine, which is part of jape.
Jape is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
Jape is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with jape; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
(or look at ).
*)
open Symboltype
open Mappingfuns
open Termtype
open Idclass
val describeSeqs : (idclass * string * idclass) list -> unit
val getsemanticturnstile : string -> string option
val setsemanticturnstile : string -> string -> unit
val parseSeq : unit -> seq
val canstartSeq : symbol -> bool
val resetsyntaxandturnstiles : unit -> unit
val string_of_seq : seq -> string
val invisbracketedstring_of_seq : bool -> seq -> string
val debugstring_of_seq : seq -> string
val elementstring_of_seq : seq -> string
val catelim_string_of_seq : seq -> string list -> string list
val catelim_invisbracketedstring_of_seq : bool -> seq -> string list -> string list
val catelim_debugstring_of_seq : seq -> string list -> string list
val catelim_elementstring_of_seq : seq -> string list -> string list
val catelim_separatedstring_of_seq : string -> seq -> string list -> string list
val alwaysshowturnstile : bool ref
val sequent_of_string : string -> seq
val seqexplode : seq -> string * term * term
val seq_entrails: seq -> string * idclass * element list * idclass * element list
val seq_lefts: seq -> element list
val seq_rights: seq -> element list
val seqvars : (term -> 'a list) -> ('a list -> 'a list -> 'a list) -> seq -> 'a list
val seqVIDs : seq -> vid list
val eqseqs : seq * seq -> bool
val seqmatch : bool -> seq -> seq -> (term, term) mapping -> (term, term) mapping option
val seqmatchvars : bool -> (term -> bool) -> seq -> seq -> (term, term) mapping
-> (term, term) mapping option
val remapseq : (term, term) mapping -> seq -> seq
val mkSeq : string * element list * element list -> seq
val maxseqresnum : seq -> int
val getsyntacticturnstiles : unit -> string list
val pushSyntax : string -> unit
val popSyntax : unit -> unit
val popAllSyntaxes : unit -> unit
|
bfcde5393c88562698ab7e2b812afe1b9188e95475fdd362bc7d332896c037cc | Vaguery/klapaucius | string_test.clj | (ns push.type.base.string_test
(:use midje.sweet)
(:use [push.util.test-helpers])
(:use [push.type.item.string])
)
;; --- type metadata ------------------------------------------------------
(fact "string-type has :name ':string'"
  (:name string-type) => :string)
(fact "string-type has the correct :recognizer"
  (:recognizer (:router string-type)) => (exactly string?))
(fact "string-type has the expected :attributes"
  (:attributes string-type) =>
  (contains #{:equatable :comparable :movable :string :visible}))
;; --- instruction groups registered on the type --------------------------
(fact "string-type knows the :equatable instructions"
  (keys (:instructions string-type)) =>
  (contains [:string-equal? :string-notequal?] :in-any-order :gaps-ok))
(fact "string-type knows the :visible instructions"
  (keys (:instructions string-type)) =>
  (contains [:string-stackdepth :string-empty?] :in-any-order :gaps-ok))
(fact "string-type knows the :movable instructions"
  (keys (:instructions string-type)) =>
  (contains [:string-shove :string-pop :string-dup :string-rotate :string-yank :string-yankdup :string-flush :string-swap] :in-any-order :gaps-ok))
(fact "string-type knows the :printable instructions"
  (keys (:instructions string-type)) => (contains [:string-print]))
(fact "string-type knows the :returnable instructions"
  (keys (:instructions string-type)) => (contains [:string-return]))
;;; utilities
;; str-to-pattern must escape regex metacharacters so that arbitrary
;; strings become literal patterns: each pair below shows the raw string
;; failing re-pattern and the escaped form compiling.
(fact "I can escape a whole bunch of bad characters using `str-to-pattern`"
  (re-pattern "Ǚ(ͧȈȊȣ͵·ċ(") => (throws #"Unclosed group near")
  (re-pattern (str-to-pattern "Ǚ(ͧȈȊȣ͵·ċ(")) => #"Ǚ\(ͧȈȊȣ͵·ċ\("
  (re-pattern "ƥ{Ƀί") => (throws #"Illegal repetition near index")
  (re-pattern (str-to-pattern "ƥ{Ƀί")) => #"ƥ\{Ƀί"
  (re-pattern "ʦ͌̀ĩȌϗE̜Ɓ[ÃǶϞǼ͐÷") => (throws #"Unclosed character class near")
  (re-pattern (str-to-pattern "ʦ͌̀ĩȌϗE̜Ɓ[ÃǶϞǼ͐÷")) => #"ʦ͌̀ĩȌϗE̜Ɓ\[ÃǶϞǼ͐÷"
  (re-pattern "+̠Sʠńə˧¶˧ſǺε") => (throws #"Dangling")
  (re-pattern (str-to-pattern "+̠Sʠńə˧¶˧ſǺε")) => #"\+̠Sʠńə˧¶˧ſǺε"
  (re-pattern "́\\") => (throws #"Unexpected internal error")
  (re-pattern (str-to-pattern "́\\")) => #"́\\"
  )
;; explosive-replacement? reports whether performing the replacement left
;; MORE occurrences of the pattern than before (presumably a guard
;; against runaway rewrites -- confirm in push.type.item.string).
(fact "`explosive-replacement?` checks for more patterns after replacement than before"
  (explosive-replacement? "abc" "abcabc" "ab") => true
  (explosive-replacement? "abc" "abcabc" "x") => false
  (explosive-replacement? "abc" "aabbcc" "abc") => false
  (explosive-replacement? "aaa" "aaaa" "a") => true
  (explosive-replacement? "aaa" "aaa" "a") => false
  (explosive-replacement? "aaa" "aa" "a") => false
  )
| null | https://raw.githubusercontent.com/Vaguery/klapaucius/17b55eb76feaa520a85d4df93597cccffe6bdba4/test/push/type/base/string_test.clj | clojure | utilities | (ns push.type.base.string_test
(:use midje.sweet)
(:use [push.util.test-helpers])
(:use [push.type.item.string])
)
(fact "string-type has :name ':string'"
(:name string-type) => :string)
(fact "string-type has the correct :recognizer"
(:recognizer (:router string-type)) => (exactly string?))
(fact "string-type has the expected :attributes"
(:attributes string-type) =>
(contains #{:equatable :comparable :movable :string :visible}))
(fact "string-type knows the :equatable instructions"
(keys (:instructions string-type)) =>
(contains [:string-equal? :string-notequal?] :in-any-order :gaps-ok))
(fact "string-type knows the :visible instructions"
(keys (:instructions string-type)) =>
(contains [:string-stackdepth :string-empty?] :in-any-order :gaps-ok))
(fact "string-type knows the :movable instructions"
(keys (:instructions string-type)) =>
(contains [:string-shove :string-pop :string-dup :string-rotate :string-yank :string-yankdup :string-flush :string-swap] :in-any-order :gaps-ok))
(fact "string-type knows the :printable instructions"
(keys (:instructions string-type)) => (contains [:string-print]))
(fact "string-type knows the :returnable instructions"
(keys (:instructions string-type)) => (contains [:string-return]))
(fact "I can escape a whole bunch of bad characters using `str-to-pattern`"
(re-pattern "Ǚ(ͧȈȊȣ͵·ċ(") => (throws #"Unclosed group near")
(re-pattern (str-to-pattern "Ǚ(ͧȈȊȣ͵·ċ(")) => #"Ǚ\(ͧȈȊȣ͵·ċ\("
(re-pattern "ƥ{Ƀί") => (throws #"Illegal repetition near index")
(re-pattern (str-to-pattern "ƥ{Ƀί")) => #"ƥ\{Ƀί"
(re-pattern "ʦ͌̀ĩȌϗE̜Ɓ[ÃǶϞǼ͐÷") => (throws #"Unclosed character class near")
(re-pattern (str-to-pattern "ʦ͌̀ĩȌϗE̜Ɓ[ÃǶϞǼ͐÷")) => #"ʦ͌̀ĩȌϗE̜Ɓ\[ÃǶϞǼ͐÷"
(re-pattern "+̠Sʠńə˧¶˧ſǺε") => (throws #"Dangling")
(re-pattern (str-to-pattern "+̠Sʠńə˧¶˧ſǺε")) => #"\+̠Sʠńə˧¶˧ſǺε"
(re-pattern "́\\") => (throws #"Unexpected internal error")
(re-pattern (str-to-pattern "́\\")) => #"́\\"
)
(fact "`explosive-replacement?` checks for more patterns after replacement than before"
(explosive-replacement? "abc" "abcabc" "ab") => true
(explosive-replacement? "abc" "abcabc" "x") => false
(explosive-replacement? "abc" "aabbcc" "abc") => false
(explosive-replacement? "aaa" "aaaa" "a") => true
(explosive-replacement? "aaa" "aaa" "a") => false
(explosive-replacement? "aaa" "aa" "a") => false
)
|
00ae1abd4638a7127878490e52d2253e562441c2e86d0dc5687b65b2d4031e2f | bobzhang/fan | metafn.ml |
(* NOTE(review): this file is written in the Fan preprocessor dialect of
   OCaml; %fans{...}, %ocaml{...}, %include{...} and %ep{...} are Fan
   quotations, not standard OCaml syntax.  Several lines below also look
   damaged by text extraction (stray spaces, an apparently unclosed
   `derive (` form) -- compare with the original repository before
   editing. *)
open Astfn
%fans{keep off;
derive
(MetaObj
MetaExpr
(* Primitive meta-lifters: each method quotes a base value as an
   Astf.ep expression at the given source location. *)
class primitive = object
  method int _loc (i:int) = %ep{$int':i}
  method int32 _loc (i:int32) = %ep{$int32':i}
  method int64 _loc (i:int64) = %ep{$int64':i}
  method nativeint _loc (i:nativeint) = %ep{$nativeint':i}
  method float _loc (i:float) = %ep{$flo':i}
  method string _loc (i:string) = %ep{$str':i}
  method char _loc (i:char) = %ep{$chr':i}
  method unit _loc (_:unit) = (`Unit _loc : Astf.ep)
  (*default use [meta_loc] for expession*)
  (* NOTE(review): the next line appears mangled by extraction. *)
  method loc _ loc ( _ l : ) : ep= ` Lid ( _ loc , ! Locf.name )
  method ant (_loc:loc) (x:ant) = (x:> Astf.ep)
  method bool _loc x = (`Bool (_loc,x) : Astf.ep)
end;;
(* FIXME -- nested include the error message is confusing *)
%ocaml{ %include{ "astfn.ml" };; };;
(* local variables: *)
compile - command : " cd .. & & pmake main_annot / fanAstN.cmo "
(* end: *)
| null | https://raw.githubusercontent.com/bobzhang/fan/7ed527d96c5a006da43d3813f32ad8a5baa31b7f/src/main/metafn.ml | ocaml | default use [meta_loc] for expession
FIXME -- nested include the error message is confusing
local variables:
end: |
open Astfn
%fans{keep off;
derive
(MetaObj
MetaExpr
class primitive = object
method int _loc (i:int) = %ep{$int':i}
method int32 _loc (i:int32) = %ep{$int32':i}
method int64 _loc (i:int64) = %ep{$int64':i}
method nativeint _loc (i:nativeint) = %ep{$nativeint':i}
method float _loc (i:float) = %ep{$flo':i}
method string _loc (i:string) = %ep{$str':i}
method char _loc (i:char) = %ep{$chr':i}
method unit _loc (_:unit) = (`Unit _loc : Astf.ep)
method loc _ loc ( _ l : ) : ep= ` Lid ( _ loc , ! Locf.name )
method ant (_loc:loc) (x:ant) = (x:> Astf.ep)
method bool _loc x = (`Bool (_loc,x) : Astf.ep)
end;;
%ocaml{ %include{ "astfn.ml" };; };;
compile - command : " cd .. & & pmake main_annot / fanAstN.cmo "
|
2a46b362e61720e2d2f6a6a8f9bbbb24ac57b2c72b874cb91d1a1b6efc690a9e | gfour/gic | add90.hs | result :: Int
result = repeat2 10000 addsx2 30;
-- Runs the higher-order accumulator f with the squaring function as the
-- initial continuation.
addsx2 :: Int -> Int
addsx2 z = f sq z ;
-- Square of an integer.
sq :: Int -> Int
sq n = n * n ;
-- Curried integer addition.
add :: Int -> Int -> Int
add x y = x + y ;
-- Higher-order accumulator: while x > 0 it wraps the current continuation
-- s so that later results are incremented by (s x), then recurses on x-1;
-- at x <= 0 the accumulated continuation is applied to x.
f :: (Int -> Int) -> Int -> Int
f s x = if (x <= 0) then s x else f (add (s x)) (x-1) ;
-- Evaluates (f a) n times via repeat_aux, starting the accumulator at 0,
-- and returns the final value.
repeat2 :: Int -> (Int -> Int) -> Int -> Int
repeat2 n f a = repeat_aux n f a 0 ;
-- Benchmark loop: each iteration replaces the accumulator b with
-- b + f a - b, which is arithmetically just (f a) but presumably forces a
-- fresh evaluation of (f a) every round -- TODO confirm this is the
-- intended benchmarking idiom rather than an error.
repeat_aux :: Int -> (Int -> Int) -> Int -> Int -> Int
repeat_aux n f a b = if n > 0 then repeat_aux (n-1) f a (b + f a - b) else b
| null | https://raw.githubusercontent.com/gfour/gic/d5f2e506b31a1a28e02ca54af9610b3d8d618e9a/Examples/Data/add90.hs | haskell | result :: Int
result = repeat2 10000 addsx2 30;
addsx2 :: Int -> Int
addsx2 z = f sq z ;
sq :: Int -> Int
sq c = c * c ;
add :: Int -> Int -> Int
add a b = a + b ;
f :: (Int -> Int) -> Int -> Int
f s x = if (x <= 0) then s x else f (add (s x)) (x-1) ;
repeat2 :: Int -> (Int -> Int) -> Int -> Int
repeat2 n f a = repeat_aux n f a 0 ;
repeat_aux :: Int -> (Int -> Int) -> Int -> Int -> Int
repeat_aux n f a b = if n > 0 then repeat_aux (n-1) f a (b + f a - b) else b
| |
a455def0f03b507bff901d855a5057c07fb1be90bf6b1c408557d7f0b274a2e7 | egison/typed-egison | MathOutput.hs | |
Module : Language . . MathOutput
Copyright : : MIT
This module provides utility functions .
Module : Language.Egison.MathOutput
Copyright : Satoshi Egi
Licence : MIT
This module provides utility functions.
-}
module Language.Egison.MathOutput (mathExprToHaskell, mathExprToAsciiMath, mathExprToLatex) where
import Control.Monad
import System.Environment
import Text.ParserCombinators.Parsec hiding (spaces)
-- Render a math-expr string for Haskell output.  On a parse failure the
-- input is passed through unchanged (same policy in all three renderers).
mathExprToHaskell :: String -> String
mathExprToHaskell input = case parse parseExpr "math-expr" input of
                            Left _ -> input
                            Right val -> "#haskell\"" ++ show val ++ "\""

-- Render a math-expr string as AsciiMath; parse failures pass through.
mathExprToAsciiMath :: String -> String
mathExprToAsciiMath input = case parse parseExpr "math-expr" input of
                              Left _ -> input
                              Right val -> "#asciimath\"" ++ showMathExprAsciiMath val ++ "\""

-- Render a math-expr string as LaTeX; parse failures pass through.
mathExprToLatex :: String -> String
mathExprToLatex input = case parse parseExpr "math-expr" input of
                          Left _ -> input
                          Right val -> "#latex|" ++ showMathExprLatex val ++ "|#"
-- Abstract syntax of the math expressions this module renders.
data MathExpr = Atom String               -- symbol or numeric literal
              | NegativeAtom String       -- negated literal
              | Plus [MathExpr]           -- n-ary sum
              | Multiply [MathExpr]       -- n-ary product
              | Power MathExpr MathExpr   -- base ^ exponent
              | Func MathExpr [MathExpr]  -- function application
              | Tensor [MathExpr] [MathIndex]  -- tensor with index scripts
              | Tuple [MathExpr]
              | Collection [MathExpr]
              | Exp MathExpr              -- e ^ expr
              | Quote MathExpr
              | Partial String [String]   -- symbol with '|'-separated indices
                                          -- (rendered as a subscript)
              deriving (Eq, Show)

-- One index script attached to a tensor or symbol.
data MathIndex = Super MathExpr           -- superscript (parsed from '~')
               | Sub MathExpr             -- subscript (parsed from '_')
              deriving (Eq, Show)
--
-- Show (AsciiMath)
--
-- Render one index script; super/sub placement is chosen by the caller,
-- so both cases print the bare expression.
showMathIndexAsciiMath :: MathIndex -> String
showMathIndexAsciiMath (Super a) = showMathExprAsciiMath a
showMathIndexAsciiMath (Sub a) = showMathExprAsciiMath a

-- Render a MathExpr in AsciiMath notation.
showMathExprAsciiMath :: MathExpr -> String
showMathExprAsciiMath (Atom func) = func
showMathExprAsciiMath (NegativeAtom func) = "-" ++ func
showMathExprAsciiMath (Plus []) = ""
showMathExprAsciiMath (Plus (x:xs)) = showMathExprAsciiMath x ++ showMathExprAsciiMathForPlus xs
  where
    -- Render the tail of a sum, folding negated terms into binary minus.
    -- Bug fix: the separator is now emitted BEFORE each term (the
    -- original appended " + " after it, producing e.g. "ab + " for a+b),
    -- mirroring the LaTeX implementation below.
    showMathExprAsciiMathForPlus :: [MathExpr] -> String
    showMathExprAsciiMathForPlus [] = ""
    showMathExprAsciiMathForPlus ((NegativeAtom a):ys) = " - " ++ a ++ showMathExprAsciiMathForPlus ys
    showMathExprAsciiMathForPlus ((Multiply (NegativeAtom a:zs)):ys) = " - " ++ showMathExprAsciiMath (Multiply ((Atom a):zs)) ++ showMathExprAsciiMathForPlus ys
    showMathExprAsciiMathForPlus (y:ys) = " + " ++ showMathExprAsciiMath y ++ showMathExprAsciiMathForPlus ys
showMathExprAsciiMath (Multiply []) = ""
showMathExprAsciiMath (Multiply [a]) = showMathExprAsciiMath a
showMathExprAsciiMath (Multiply (NegativeAtom "1":lvs)) = "-" ++ showMathExprAsciiMath (Multiply lvs)
showMathExprAsciiMath (Multiply (x:xs)) = showMathExprAsciiMath' x ++ " " ++ showMathExprAsciiMath (Multiply xs)
showMathExprAsciiMath (Power lv1 lv2) = showMathExprAsciiMath lv1 ++ "^" ++ showMathExprAsciiMath lv2
-- Binary division renders as a fraction; everything else as f(args).
showMathExprAsciiMath (Func (Atom "/") [x, y]) = "frac{" ++ showMathExprAsciiMath x ++ "}{" ++ showMathExprAsciiMath y ++ "}"
showMathExprAsciiMath (Func f lvs) = showMathExprAsciiMath f ++ "(" ++ showMathExprAsciiMathArg lvs ++ ")"
showMathExprAsciiMath (Tensor lvs mis)
  | null mis = "(" ++ showMathExprAsciiMathArg lvs ++ ")"
  | all (not . isSub) mis = "(" ++ showMathExprAsciiMathArg lvs ++ ")^(" ++ showMathExprAsciiMathIndices mis ++ ")"
  | all isSub mis = "(" ++ showMathExprAsciiMathArg lvs ++ ")_(" ++ showMathExprAsciiMathIndices mis ++ ")"
  | otherwise = "(" ++ showMathExprAsciiMathArg lvs ++ ")_(" ++ showMathExprAsciiMathIndices (filter isSub mis) ++ ")^(" ++ showMathExprAsciiMathIndices (filter (not . isSub) mis) ++ ")"
showMathExprAsciiMath (Tuple lvs) = "(" ++ showMathExprAsciiMathArg lvs ++ ")"
showMathExprAsciiMath (Collection lvs) = "{" ++ showMathExprAsciiMathArg lvs ++ "}"
showMathExprAsciiMath (Exp x) = "e^(" ++ showMathExprAsciiMath x ++ ")"
-- Robustness fix: Quote and Partial previously had no equations here and
-- crashed with a pattern-match failure; render them in the same spirit
-- as the LaTeX printer.
showMathExprAsciiMath (Quote x) = "(" ++ showMathExprAsciiMath x ++ ")"
showMathExprAsciiMath (Partial a is) = a ++ "_(" ++ concat is ++ ")"

-- True for subscript indices.
isSub :: MathIndex -> Bool
isSub (Sub _) = True
isSub _ = False

-- Parenthesise sums when they occur as factors.
showMathExprAsciiMath' :: MathExpr -> String
showMathExprAsciiMath' (Plus lvs) = "(" ++ showMathExprAsciiMath (Plus lvs) ++ ")"
showMathExprAsciiMath' val = showMathExprAsciiMath val

-- Comma-separated argument list (pattern matching replaces head/tail).
showMathExprAsciiMathArg :: [MathExpr] -> String
showMathExprAsciiMathArg [] = ""
showMathExprAsciiMathArg [a] = showMathExprAsciiMath a
showMathExprAsciiMathArg (x:xs) = showMathExprAsciiMath x ++ ", " ++ showMathExprAsciiMathArg xs

-- Concatenated index scripts; total, unlike the original which failed
-- on the empty list.
showMathExprAsciiMathIndices :: [MathIndex] -> String
showMathExprAsciiMathIndices = concatMap showMathIndexAsciiMath
--
-- Show (Latex)
--
-- Render a MathExpr in LaTeX notation.
showMathExprLatex :: MathExpr -> String
showMathExprLatex (Atom a) = a
showMathExprLatex (Partial a is) = a ++ "_{" ++ concat is ++ "}"
showMathExprLatex (NegativeAtom a) = "-" ++ a
showMathExprLatex (Plus []) = ""
showMathExprLatex (Plus (x:xs)) = showMathExprLatex x ++ showMathExprLatexForPlus xs
  where
    -- Render the tail of a sum, folding negated terms into binary minus.
    showMathExprLatexForPlus :: [MathExpr] -> String
    showMathExprLatexForPlus [] = ""
    showMathExprLatexForPlus ((NegativeAtom a):ys) = " - " ++ a ++ showMathExprLatexForPlus ys
    showMathExprLatexForPlus ((Multiply (NegativeAtom a:zs)):ys) = " - " ++ showMathExprLatex (Multiply ((Atom a):zs)) ++ showMathExprLatexForPlus ys
    showMathExprLatexForPlus (y:ys) = " + " ++ showMathExprLatex y ++ showMathExprLatexForPlus ys
showMathExprLatex (Multiply []) = ""
showMathExprLatex (Multiply [x]) = showMathExprLatex x
showMathExprLatex (Multiply (Atom "1":xs)) = showMathExprLatex (Multiply xs)
showMathExprLatex (Multiply (NegativeAtom "1":xs)) = "-" ++ showMathExprLatex (Multiply xs)
showMathExprLatex (Multiply (x:xs)) = showMathExprLatex' x ++ " " ++ showMathExprLatex (Multiply xs)
showMathExprLatex (Power lv1 lv2) = showMathExprLatex lv1 ++ "^" ++ showMathExprLatex lv2
showMathExprLatex (Func (Atom "sqrt") [x]) = "\\sqrt{" ++ showMathExprLatex x ++ "}"
showMathExprLatex (Func (Atom "rt") [x, y]) = "\\sqrt[" ++ showMathExprLatex x ++ "]{" ++ showMathExprLatex y ++ "}"
showMathExprLatex (Func (Atom "/") [x, y]) = "\\frac{" ++ showMathExprLatex x ++ "}{" ++ showMathExprLatex y ++ "}"
showMathExprLatex (Func f xs) = showMathExprLatex f ++ "(" ++ showMathExprLatexArg xs ", " ++ ")"
-- Bug fix: the original scrutinised (head xs) here only to return the
-- SAME string in both branches, which additionally crashed on an empty
-- tensor; the dead case is dropped.
showMathExprLatex (Tensor xs mis) = "\\begin{pmatrix} " ++ showMathExprLatexVectors xs ++ "\\end{pmatrix}" ++ showMathExprLatexScript mis
showMathExprLatex (Tuple xs) = "(" ++ showMathExprLatexArg xs ", " ++ ")"
showMathExprLatex (Collection xs) = "\\{" ++ showMathExprLatexArg xs ", " ++ "\\}"
showMathExprLatex (Exp x) = "e^{" ++ showMathExprLatex x ++ "}"
showMathExprLatex (Quote x) = "(" ++ showMathExprLatex x ++ ")"

-- Parenthesise sums when they occur as factors.
showMathExprLatex' :: MathExpr -> String
showMathExprLatex' (Plus xs) = "(" ++ showMathExprLatex (Plus xs) ++ ")"
showMathExprLatex' x = showMathExprLatex x

-- Join rendered expressions with the given separator (pattern matching
-- replaces the partial head/tail of the original).
showMathExprLatexArg :: [MathExpr] -> String -> String
showMathExprLatexArg [] _ = ""
showMathExprLatexArg [x] _ = showMathExprLatex x
showMathExprLatexArg (x:xs) sep = showMathExprLatex x ++ sep ++ showMathExprLatexArg xs sep

-- Superscript slot of one index; subscripts occupy the slot with "\;".
showMathExprLatexSuper :: MathIndex -> String
showMathExprLatexSuper (Super (Atom "#")) = "\\#"
showMathExprLatexSuper (Super x) = showMathExprLatex x
showMathExprLatexSuper (Sub _) = "\\;"

-- Subscript slot of one index; superscripts occupy the slot with "\;".
showMathExprLatexSub :: MathIndex -> String
showMathExprLatexSub (Sub (Atom "#")) = "\\#"
showMathExprLatexSub (Sub x) = showMathExprLatex x
showMathExprLatexSub (Super x) = "\\;"

-- Combined _{...}^{...} script block for a tensor's indices.
showMathExprLatexScript :: [MathIndex] -> String
showMathExprLatexScript [] = ""
showMathExprLatexScript is = "_{" ++ concatMap showMathExprLatexSub is ++ "}^{" ++ concatMap showMathExprLatexSuper is ++ "}"

-- Matrix rows: nested Tensor rows are joined with '&', flat vectors with
-- LaTeX row breaks.
showMathExprLatexVectors :: [MathExpr] -> String
showMathExprLatexVectors [] = ""
showMathExprLatexVectors (Tensor lvs []:r) = showMathExprLatexArg lvs " & " ++ " \\\\ " ++ showMathExprLatexVectors r
showMathExprLatexVectors lvs = showMathExprLatexArg lvs " \\\\ " ++ "\\\\ "
--
Parser
--
-- One-or-more whitespace characters (result discarded).
spaces :: Parser ()
spaces = skipMany1 space

-- Zero-or-more whitespace characters (result discarded).
spaces0 :: Parser ()
spaces0 = skipMany space

-- Characters accepted in atom names besides letters and digits.
symbol :: Parser Char
symbol = oneOf "!$%&*+-/:<=>?@#"
-- Shared lexer for atom names: a letter/symbol/digit head followed by
-- any number of letters, digits or symbols.  Extracted to remove the
-- duplication between parseAtom and parseNegativeAtom.
parseAtomName :: Parser String
parseAtomName = do
  first <- letter <|> symbol <|> digit
  rest <- many (letter <|> digit <|> symbol)
  return (first : rest)

-- An atom, optionally carrying '|'-separated Partial indices.
parseAtom :: Parser MathExpr
parseAtom = do
  atom <- parseAtomName
  option (Atom atom) $ do is <- many1 (char '|' >> many digit)
                          return $ Partial atom is

-- A '-'-prefixed atom (no Partial indices, matching the original).
parseNegativeAtom :: Parser MathExpr
parseNegativeAtom = do
  char '-'
  NegativeAtom <$> parseAtomName
-- Whitespace-separated list of expressions (a trailing space is allowed).
parseList :: Parser [MathExpr]
parseList = sepEndBy parseExpr spaces

-- One index script: '_' introduces a subscript, '~' a superscript.
parseScript :: Parser MathIndex
parseScript = (Sub <$> (char '_' >> parseExpr)) <|> (Super <$> (char '~' >> parseExpr))
-- Shared parser for the "(<op> e1 e2 ...)" forms; extracted to remove
-- the duplication between parsePlus and parseMultiply.
parseNaryOp :: String -> ([MathExpr] -> MathExpr) -> Parser MathExpr
parseNaryOp op con = do
  string ("(" ++ op)
  spaces
  xs <- parseList
  char ')'
  return $ con xs

-- "(+ e1 e2 ...)" as an n-ary sum.
parsePlus :: Parser MathExpr
parsePlus = parseNaryOp "+" Plus

-- "(* e1 e2 ...)" as an n-ary product.
parseMultiply :: Parser MathExpr
parseMultiply = parseNaryOp "*" Multiply
-- Parse "(f arg1 arg2 ...)" as a general function application.
parseFunction :: Parser MathExpr
parseFunction = do
  char '('
  func <- parseAtom
  spaces
  xs <- parseList
  char ')'
  return $ Func func xs

-- Parse "[| e1 e2 ... |]" with optional trailing _sub / ~super scripts.
parseTensor :: Parser MathExpr
parseTensor = do
  string "[|"
  spaces0
  xs <- parseList
  spaces0
  string "|]"
  ys <- many parseScript
  return $ Tensor xs ys

-- Parse "[e1 e2 ...]" as a tuple.
parseTuple :: Parser MathExpr
parseTuple = do
  char '['
  xs <- parseList
  char ']'
  return $ Tuple xs

-- Parse "{e1 e2 ...}" as a collection.
parseCollection :: Parser MathExpr
parseCollection = do
  char '{'
  xs <- parseList
  char '}'
  return $ Collection xs

-- Parse "(exp e)" as the exponential form.
parseExp :: Parser MathExpr
parseExp = do
  string "(exp"
  spaces
  x <- parseExpr
  char ')'
  return $ Exp x

-- Parse "'e" as a quoted expression (quote binds to a primary
-- expression, not to a trailing power).
parseQuote :: Parser MathExpr
parseQuote = do
  char '\''
  x <- parseExpr'
  return $ Quote x
-- A primary expression (no trailing power).  Alternatives are tried in
-- order; `try` is required where alternatives share a leading '(' or '['
-- so a failed branch can backtrack.
parseExpr' :: Parser MathExpr
parseExpr' = parseNegativeAtom
         <|> parseAtom
         <|> parseQuote
         <|> try parseExp
         <|> try parsePlus
         <|> try parseMultiply
         <|> try parseFunction
         <|> try parseTensor
         <|> try parseTuple
         <|> try parseCollection

-- A full expression: a primary expression, optionally raised to a power
-- introduced by '^'.
parseExpr :: Parser MathExpr
parseExpr = do
  x <- parseExpr'
  option x $ Power x <$> try (char '^' >> parseExpr')
| null | https://raw.githubusercontent.com/egison/typed-egison/42c94a916226436b65b623fdcb8a8d3b7010c984/hs-src/Language/Egison/MathOutput.hs | haskell |
Show (AsciiMath)
Show (Latex)
| |
Module : Language . . MathOutput
Copyright : : MIT
This module provides utility functions .
Module : Language.Egison.MathOutput
Copyright : Satoshi Egi
Licence : MIT
This module provides utility functions.
-}
module Language.Egison.MathOutput (mathExprToHaskell, mathExprToAsciiMath, mathExprToLatex) where
import Control.Monad
import System.Environment
import Text.ParserCombinators.Parsec hiding (spaces)
mathExprToHaskell :: String -> String
mathExprToHaskell input = case parse parseExpr "math-expr" input of
Left err -> input
Right val -> "#haskell\"" ++ show val ++ "\""
mathExprToAsciiMath :: String -> String
mathExprToAsciiMath input = case parse parseExpr "math-expr" input of
Left err -> input
Right val -> "#asciimath\"" ++ showMathExprAsciiMath val ++ "\""
mathExprToLatex :: String -> String
mathExprToLatex input = case parse parseExpr "math-expr" input of
Left err -> input
Right val -> "#latex|" ++ showMathExprLatex val ++ "|#"
data MathExpr = Atom String
| NegativeAtom String
| Plus [MathExpr]
| Multiply [MathExpr]
| Power MathExpr MathExpr
| Func MathExpr [MathExpr]
| Tensor [MathExpr] [MathIndex]
| Tuple [MathExpr]
| Collection [MathExpr]
| Exp MathExpr
| Quote MathExpr
| Partial String [String]
deriving (Eq, Show)
data MathIndex = Super MathExpr
| Sub MathExpr
deriving (Eq, Show)
showMathIndexAsciiMath :: MathIndex -> String
showMathIndexAsciiMath (Super a) = showMathExprAsciiMath a
showMathIndexAsciiMath (Sub a) = showMathExprAsciiMath a
showMathExprAsciiMath :: MathExpr -> String
showMathExprAsciiMath (Atom func) = func
showMathExprAsciiMath (NegativeAtom func) = "-" ++ func
showMathExprAsciiMath (Plus []) = ""
showMathExprAsciiMath (Plus (x:xs)) = showMathExprAsciiMath x ++ showMathExprAsciiMathForPlus xs
where
showMathExprAsciiMathForPlus :: [MathExpr] -> String
showMathExprAsciiMathForPlus [] = ""
showMathExprAsciiMathForPlus ((NegativeAtom a):xs) = " - " ++ a ++ showMathExprAsciiMathForPlus xs
showMathExprAsciiMathForPlus ((Multiply (NegativeAtom a:ys)):xs) = " - " ++ showMathExprAsciiMath (Multiply ((Atom a):ys)) ++ " " ++ showMathExprAsciiMathForPlus xs
showMathExprAsciiMathForPlus (x:xs) = showMathExprAsciiMath x ++ " + " ++ showMathExprAsciiMathForPlus xs
showMathExprAsciiMath (Multiply []) = ""
showMathExprAsciiMath (Multiply [a]) = showMathExprAsciiMath a
showMathExprAsciiMath (Multiply (NegativeAtom "1":lvs)) = "-" ++ showMathExprAsciiMath (Multiply lvs)
showMathExprAsciiMath (Multiply lvs) = showMathExprAsciiMath' (head lvs) ++ " " ++ showMathExprAsciiMath (Multiply (tail lvs))
showMathExprAsciiMath (Power lv1 lv2) = showMathExprAsciiMath lv1 ++ "^" ++ showMathExprAsciiMath lv2
showMathExprAsciiMath (Func f lvs) = case f of
Atom "/" -> if length lvs == 2 then "frac{" ++ showMathExprAsciiMath (head lvs) ++ "}{" ++ showMathExprAsciiMath (lvs !! 1) ++ "}"
else showMathExprAsciiMath f ++ "(" ++ showMathExprAsciiMathArg lvs ++ ")"
_ -> showMathExprAsciiMath f ++ "(" ++ showMathExprAsciiMathArg lvs ++ ")"
showMathExprAsciiMath (Tensor lvs mis)
| null mis = "(" ++ showMathExprAsciiMathArg lvs ++ ")"
| not (any isSub mis) = "(" ++ showMathExprAsciiMathArg lvs ++ ")^(" ++ showMathExprAsciiMathIndices mis ++ ")"
| not (any (not . isSub) mis) = "(" ++ showMathExprAsciiMathArg lvs ++ ")_(" ++ showMathExprAsciiMathIndices mis ++ ")"
| otherwise = "(" ++ showMathExprAsciiMathArg lvs ++ ")_(" ++ showMathExprAsciiMathIndices (filter isSub mis) ++ ")^(" ++ showMathExprAsciiMathIndices (filter (not . isSub) mis) ++ ")"
showMathExprAsciiMath (Tuple lvs) = "(" ++ showMathExprAsciiMathArg lvs ++ ")"
showMathExprAsciiMath (Collection lvs) = "{" ++ showMathExprAsciiMathArg lvs ++ "}"
showMathExprAsciiMath (Exp x) = "e^(" ++ showMathExprAsciiMath x ++ ")"
isSub :: MathIndex -> Bool
isSub x = case x of
Sub _ -> True
_ -> False
showMathExprAsciiMath' :: MathExpr -> String
showMathExprAsciiMath' (Plus lvs) = "(" ++ showMathExprAsciiMath (Plus lvs) ++ ")"
showMathExprAsciiMath' val = showMathExprAsciiMath val
showMathExprAsciiMathArg :: [MathExpr] -> String
showMathExprAsciiMathArg [] = ""
showMathExprAsciiMathArg [a] = showMathExprAsciiMath a
showMathExprAsciiMathArg lvs = showMathExprAsciiMath (head lvs) ++ ", " ++ (showMathExprAsciiMathArg (tail lvs))
showMathExprAsciiMathIndices :: [MathIndex] -> String
showMathExprAsciiMathIndices [a] = showMathIndexAsciiMath a
showMathExprAsciiMathIndices lvs = showMathIndexAsciiMath (head lvs) ++ showMathExprAsciiMathIndices (tail lvs)
showMathExprLatex :: MathExpr -> String
showMathExprLatex (Atom a) = a
showMathExprLatex (Partial a is) = a ++ "_{" ++ concat is ++ "}"
showMathExprLatex (NegativeAtom a) = "-" ++ a
showMathExprLatex (Plus []) = ""
showMathExprLatex (Plus (x:xs)) = showMathExprLatex x ++ showMathExprLatexForPlus xs
where
showMathExprLatexForPlus :: [MathExpr] -> String
showMathExprLatexForPlus [] = ""
showMathExprLatexForPlus ((NegativeAtom a):xs) = " - " ++ a ++ showMathExprLatexForPlus xs
showMathExprLatexForPlus ((Multiply (NegativeAtom a:ys)):xs) = " - " ++ showMathExprLatex (Multiply ((Atom a):ys)) ++ showMathExprLatexForPlus xs
showMathExprLatexForPlus (x:xs) = " + " ++ showMathExprLatex x ++ showMathExprLatexForPlus xs
showMathExprLatex (Multiply []) = ""
showMathExprLatex (Multiply [x]) = showMathExprLatex x
showMathExprLatex (Multiply (Atom "1":xs)) = showMathExprLatex (Multiply xs)
showMathExprLatex (Multiply (NegativeAtom "1":xs)) = "-" ++ showMathExprLatex (Multiply xs)
showMathExprLatex (Multiply (x:xs)) = showMathExprLatex' x ++ " " ++ showMathExprLatex (Multiply xs)
showMathExprLatex (Power lv1 lv2) = showMathExprLatex lv1 ++ "^" ++ showMathExprLatex lv2
showMathExprLatex (Func (Atom "sqrt") [x]) = "\\sqrt{" ++ showMathExprLatex x ++ "}"
showMathExprLatex (Func (Atom "rt") [x, y]) = "\\sqrt[" ++ showMathExprLatex x ++ "]{" ++ showMathExprLatex y ++ "}"
showMathExprLatex (Func (Atom "/") [x, y]) = "\\frac{" ++ showMathExprLatex x ++ "}{" ++ showMathExprLatex y ++ "}"
showMathExprLatex (Func f xs) = showMathExprLatex f ++ "(" ++ showMathExprLatexArg xs ", " ++ ")"
showMathExprLatex (Tensor xs mis) = case head xs of
Tensor _ _ -> "\\begin{pmatrix} " ++ showMathExprLatexVectors xs ++ "\\end{pmatrix}" ++ showMathExprLatexScript mis
_ -> "\\begin{pmatrix} " ++ showMathExprLatexVectors xs ++ "\\end{pmatrix}" ++ showMathExprLatexScript mis
showMathExprLatex (Tuple xs) = "(" ++ showMathExprLatexArg xs ", " ++ ")"
showMathExprLatex (Collection xs) = "\\{" ++ showMathExprLatexArg xs ", " ++ "\\}"
showMathExprLatex (Exp x) = "e^{" ++ showMathExprLatex x ++ "}"
showMathExprLatex (Quote x) = "(" ++ showMathExprLatex x ++ ")"
showMathExprLatex' :: MathExpr -> String
showMathExprLatex' (Plus xs) = "(" ++ showMathExprLatex (Plus xs) ++ ")"
showMathExprLatex' x = showMathExprLatex x
showMathExprLatexArg :: [MathExpr] -> String -> String
showMathExprLatexArg [] _ = ""
showMathExprLatexArg [a] _ = showMathExprLatex a
showMathExprLatexArg lvs s = showMathExprLatex (head lvs) ++ s ++ showMathExprLatexArg (tail lvs) s
showMathExprLatexSuper :: MathIndex -> String
showMathExprLatexSuper (Super (Atom "#")) = "\\#"
showMathExprLatexSuper (Super x) = showMathExprLatex x
showMathExprLatexSuper (Sub x) = "\\;"
showMathExprLatexSub :: MathIndex -> String
showMathExprLatexSub (Sub (Atom "#")) = "\\#"
showMathExprLatexSub (Sub x) = showMathExprLatex x
showMathExprLatexSub (Super x) = "\\;"
showMathExprLatexScript :: [MathIndex] -> String
showMathExprLatexScript [] = ""
showMathExprLatexScript is = "_{" ++ concat (map showMathExprLatexSub is) ++ "}^{" ++ concat (map showMathExprLatexSuper is) ++ "}"
showMathExprLatexVectors :: [MathExpr] -> String
showMathExprLatexVectors [] = ""
showMathExprLatexVectors (Tensor lvs []:r) = showMathExprLatexArg lvs " & " ++ " \\\\ " ++ showMathExprLatexVectors r
showMathExprLatexVectors lvs = showMathExprLatexArg lvs " \\\\ " ++ "\\\\ "
Parser
spaces :: Parser ()
spaces = skipMany1 space
spaces0 :: Parser ()
spaces0 = skipMany space
symbol :: Parser Char
symbol = oneOf "!$%&*+-/:<=>?@#"
parseAtom :: Parser MathExpr
parseAtom = do
first <- letter <|> symbol <|> digit
rest <- many (letter <|> digit <|> symbol)
let atom = first : rest
option (Atom atom) $ do is <- many1 (char '|' >> many digit)
return $ Partial atom is
parseNegativeAtom :: Parser MathExpr
parseNegativeAtom = do
char '-'
first <- letter <|> symbol <|> digit
rest <- many (letter <|> digit <|> symbol)
let atom = first : rest
return $ NegativeAtom atom
parseList :: Parser [MathExpr]
parseList = sepEndBy parseExpr spaces
parseScript :: Parser MathIndex
parseScript = (Sub <$> (char '_' >> parseExpr)) <|> (Super <$> (char '~' >> parseExpr))
parsePlus :: Parser MathExpr
parsePlus = do
string "(+"
spaces
xs <- parseList
char ')'
return $ Plus xs
parseMultiply :: Parser MathExpr
parseMultiply = do
string "(*"
spaces
xs <- parseList
char ')'
return $ Multiply xs
parseFunction :: Parser MathExpr
parseFunction = do
char '('
func <- parseAtom
spaces
xs <- parseList
char ')'
return $ Func func xs
parseTensor :: Parser MathExpr
parseTensor = do
string "[|"
spaces0
xs <- parseList
spaces0
string "|]"
ys <- many parseScript
return $ Tensor xs ys
parseTuple :: Parser MathExpr
parseTuple = do
char '['
xs <- parseList
char ']'
return $ Tuple xs
parseCollection :: Parser MathExpr
parseCollection = do
char '{'
xs <- parseList
char '}'
return $ Collection xs
parseExp :: Parser MathExpr
parseExp = do
string "(exp"
spaces
x <- parseExpr
char ')'
return $ Exp x
parseQuote :: Parser MathExpr
parseQuote = do
char '\''
x <- parseExpr'
return $ Quote x
parseExpr' :: Parser MathExpr
parseExpr' = parseNegativeAtom
<|> parseAtom
<|> parseQuote
<|> try parseExp
<|> try parsePlus
<|> try parseMultiply
<|> try parseFunction
<|> try parseTensor
<|> try parseTuple
<|> try parseCollection
parseExpr :: Parser MathExpr
parseExpr = do
x <- parseExpr'
option x $ Power x <$> try (char '^' >> parseExpr')
|
d9947abb164a87e9606c08dadffedf0d366d6ef2aba2c78f797fa44d095ce89e | kappelmann/eidi2_repetitorium_tum | SparseVectorTest.ml | open SparseVector
let _ = if empty<>[] then failwith "Your empty method is not working correctly"
let v = [1;2;3;0;1;0;5;(-10)]
let v = sb_vektor v
let expected = [(0,1);(1,2);(2,3);(4,1);(6,5);(7,(-10))]
let b = v=expected
let _ = if not b then failwith "Your sb_vektor method is not working correctly"
let t = set 3 0 v
let _ = if t<>v then failwith "Your set method is not working correctly"
let t = set 3 17 v
let expected = [(0,1);(1,2);(2,3);(4,1);(6,5);(7,(-10));(3,17)]
let _ = if t<>expected then failwith "Your set method is not working correctly"
let t = set 0 0 v
let expected = [(1,2);(2,3);(4,1);(6,5);(7,(-10))]
let _ = if t<>expected then failwith "Your set method is not working correctly"
let t = mul 0 v
let _ = if t<>[] then failwith "Your mul method is not working correctly"
let t = mul 2 v
let expected = [(0,2);(1,4);(2,6);(4,2);(6,10);(7,(-20))]
let _ = if t<>expected then failwith "Your mul method is not working correctly"
let w = sb_vektor [(-1);9;3;0;1;0]
let res = add_sb_vektor v w
let expected = [(1,11);(2,6);(4,2);(6,5);(7,(-10))]
let b = res=expected
let _ = if not b then failwith "Your add_sb_vektor method is not working correctly"
let res = mul_sb_vektor v w
let expected = 1*(-1)+2*9+3*3+1*1
let b = res=expected
let _ = if b then print_string "Everything is working. Nice job! ;) +5 points" else failwith "Your add_sb_vektor method is not working correctly"
| null | https://raw.githubusercontent.com/kappelmann/eidi2_repetitorium_tum/1d16bbc498487a85960e0d83152249eb13944611/2016/sparse_vector/exercises/SparseVectorTest.ml | ocaml | open SparseVector
let _ = if empty<>[] then failwith "Your empty method is not working correctly"
let v = [1;2;3;0;1;0;5;(-10)]
let v = sb_vektor v
let expected = [(0,1);(1,2);(2,3);(4,1);(6,5);(7,(-10))]
let b = v=expected
let _ = if not b then failwith "Your sb_vektor method is not working correctly"
let t = set 3 0 v
let _ = if t<>v then failwith "Your set method is not working correctly"
let t = set 3 17 v
let expected = [(0,1);(1,2);(2,3);(4,1);(6,5);(7,(-10));(3,17)]
let _ = if t<>expected then failwith "Your set method is not working correctly"
let t = set 0 0 v
let expected = [(1,2);(2,3);(4,1);(6,5);(7,(-10))]
let _ = if t<>expected then failwith "Your set method is not working correctly"
let t = mul 0 v
let _ = if t<>[] then failwith "Your mul method is not working correctly"
let t = mul 2 v
let expected = [(0,2);(1,4);(2,6);(4,2);(6,10);(7,(-20))]
let _ = if t<>expected then failwith "Your mul method is not working correctly"
let w = sb_vektor [(-1);9;3;0;1;0]
let res = add_sb_vektor v w
let expected = [(1,11);(2,6);(4,2);(6,5);(7,(-10))]
let b = res=expected
let _ = if not b then failwith "Your add_sb_vektor method is not working correctly"
let res = mul_sb_vektor v w
let expected = 1*(-1)+2*9+3*3+1*1
let b = res=expected
let _ = if b then print_string "Everything is working. Nice job! ;) +5 points" else failwith "Your add_sb_vektor method is not working correctly"
| |
0164ce3125c241773fb0fc3f0e68f577522f9ca03d3773f350341a98cdf178da | rabbitmq/rabbitmq-management | rabbit_mgmt_wm_topic_permission.erl | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%
Copyright ( c ) 2007 - 2020 VMware , Inc. or its affiliates . All rights reserved .
%%
-module(rabbit_mgmt_wm_topic_permission).
-export([init/2, resource_exists/2, to_json/2,
content_types_provided/2, content_types_accepted/2,
is_authorized/2, allowed_methods/2, accept_content/2,
delete_resource/2]).
-export([variances/2]).
-include_lib("rabbitmq_management_agent/include/rabbit_mgmt_records.hrl").
-include_lib("rabbit_common/include/rabbit.hrl").
%%--------------------------------------------------------------------
init(Req, _State) ->
{cowboy_rest, rabbit_mgmt_headers:set_common_permission_headers(Req, ?MODULE), #context{}}.
variances(Req, Context) ->
{[<<"accept-encoding">>, <<"origin">>], Req, Context}.
content_types_provided(ReqData, Context) ->
{[{<<"application/json">>, to_json}], ReqData, Context}.
content_types_accepted(ReqData, Context) ->
{[{'*', accept_content}], ReqData, Context}.
allowed_methods(ReqData, Context) ->
{[<<"HEAD">>, <<"GET">>, <<"PUT">>, <<"DELETE">>, <<"OPTIONS">>], ReqData, Context}.
resource_exists(ReqData, Context) ->
{case topic_perms(ReqData) of
none -> false;
not_found -> false;
_ -> true
end, ReqData, Context}.
to_json(ReqData, Context) ->
rabbit_mgmt_util:reply(topic_perms(ReqData), ReqData, Context).
accept_content(ReqData0, Context = #context{user = #user{username = Username}}) ->
case topic_perms(ReqData0) of
not_found ->
rabbit_mgmt_util:bad_request(vhost_or_user_not_found,
ReqData0, Context);
_ ->
User = rabbit_mgmt_util:id(user, ReqData0),
VHost = rabbit_mgmt_util:id(vhost, ReqData0),
rabbit_mgmt_util:with_decode(
[exchange, write, read], ReqData0, Context,
fun([Exchange, Write, Read], _, ReqData) ->
rabbit_auth_backend_internal:set_topic_permissions(
User, VHost, Exchange, Write, Read, Username),
{true, ReqData, Context}
end)
end.
delete_resource(ReqData, Context = #context{user = #user{username = Username}}) ->
User = rabbit_mgmt_util:id(user, ReqData),
VHost = rabbit_mgmt_util:id(vhost, ReqData),
case rabbit_mgmt_util:id(exchange, ReqData) of
none ->
rabbit_auth_backend_internal:clear_topic_permissions(User, VHost, Username);
Exchange ->
rabbit_auth_backend_internal:clear_topic_permissions(User, VHost, Exchange,
Username)
end,
{true, ReqData, Context}.
is_authorized(ReqData, Context) ->
rabbit_mgmt_util:is_authorized_admin(ReqData, Context).
%%--------------------------------------------------------------------
topic_perms(ReqData) ->
User = rabbit_mgmt_util:id(user, ReqData),
case rabbit_auth_backend_internal:lookup_user(User) of
{ok, _} ->
case rabbit_mgmt_util:vhost(ReqData) of
not_found ->
not_found;
VHost ->
rabbit_mgmt_util:catch_no_such_user_or_vhost(
fun() ->
Perms =
rabbit_auth_backend_internal:list_user_vhost_topic_permissions(
User, VHost),
case Perms of
[] -> none;
TopicPermissions -> [[{user, User}, {vhost, VHost} | TopicPermission]
|| TopicPermission <- TopicPermissions]
end
end,
fun() -> not_found end)
end;
{error, _} ->
not_found
end.
| null | https://raw.githubusercontent.com/rabbitmq/rabbitmq-management/543906f01ccd0344aff648f21bb6b5156b2a2ca2/src/rabbit_mgmt_wm_topic_permission.erl | erlang |
--------------------------------------------------------------------
-------------------------------------------------------------------- | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
Copyright ( c ) 2007 - 2020 VMware , Inc. or its affiliates . All rights reserved .
-module(rabbit_mgmt_wm_topic_permission).
-export([init/2, resource_exists/2, to_json/2,
content_types_provided/2, content_types_accepted/2,
is_authorized/2, allowed_methods/2, accept_content/2,
delete_resource/2]).
-export([variances/2]).
-include_lib("rabbitmq_management_agent/include/rabbit_mgmt_records.hrl").
-include_lib("rabbit_common/include/rabbit.hrl").
init(Req, _State) ->
{cowboy_rest, rabbit_mgmt_headers:set_common_permission_headers(Req, ?MODULE), #context{}}.
variances(Req, Context) ->
{[<<"accept-encoding">>, <<"origin">>], Req, Context}.
content_types_provided(ReqData, Context) ->
{[{<<"application/json">>, to_json}], ReqData, Context}.
content_types_accepted(ReqData, Context) ->
{[{'*', accept_content}], ReqData, Context}.
allowed_methods(ReqData, Context) ->
{[<<"HEAD">>, <<"GET">>, <<"PUT">>, <<"DELETE">>, <<"OPTIONS">>], ReqData, Context}.
resource_exists(ReqData, Context) ->
{case topic_perms(ReqData) of
none -> false;
not_found -> false;
_ -> true
end, ReqData, Context}.
to_json(ReqData, Context) ->
rabbit_mgmt_util:reply(topic_perms(ReqData), ReqData, Context).
accept_content(ReqData0, Context = #context{user = #user{username = Username}}) ->
case topic_perms(ReqData0) of
not_found ->
rabbit_mgmt_util:bad_request(vhost_or_user_not_found,
ReqData0, Context);
_ ->
User = rabbit_mgmt_util:id(user, ReqData0),
VHost = rabbit_mgmt_util:id(vhost, ReqData0),
rabbit_mgmt_util:with_decode(
[exchange, write, read], ReqData0, Context,
fun([Exchange, Write, Read], _, ReqData) ->
rabbit_auth_backend_internal:set_topic_permissions(
User, VHost, Exchange, Write, Read, Username),
{true, ReqData, Context}
end)
end.
delete_resource(ReqData, Context = #context{user = #user{username = Username}}) ->
User = rabbit_mgmt_util:id(user, ReqData),
VHost = rabbit_mgmt_util:id(vhost, ReqData),
case rabbit_mgmt_util:id(exchange, ReqData) of
none ->
rabbit_auth_backend_internal:clear_topic_permissions(User, VHost, Username);
Exchange ->
rabbit_auth_backend_internal:clear_topic_permissions(User, VHost, Exchange,
Username)
end,
{true, ReqData, Context}.
is_authorized(ReqData, Context) ->
rabbit_mgmt_util:is_authorized_admin(ReqData, Context).
topic_perms(ReqData) ->
User = rabbit_mgmt_util:id(user, ReqData),
case rabbit_auth_backend_internal:lookup_user(User) of
{ok, _} ->
case rabbit_mgmt_util:vhost(ReqData) of
not_found ->
not_found;
VHost ->
rabbit_mgmt_util:catch_no_such_user_or_vhost(
fun() ->
Perms =
rabbit_auth_backend_internal:list_user_vhost_topic_permissions(
User, VHost),
case Perms of
[] -> none;
TopicPermissions -> [[{user, User}, {vhost, VHost} | TopicPermission]
|| TopicPermission <- TopicPermissions]
end
end,
fun() -> not_found end)
end;
{error, _} ->
not_found
end.
|
ce75e23ac67b4cea35d81fc4fbfce30bad1b0d0f0f5e0364a1fa374d78b8accb | jordanthayer/ocaml-search | restart_chooser.ml |
little module for choosing actions . Extremely basic value
iteration . Useful for solving a K - arm bandit problem , used here to
select restart predicates for a beam search .
Randomly selects actions for the first k*5 actions , then selects
randomly with chance 1 / n otherwise selects greedily .
little module for choosing actions. Extremely basic value
iteration. Useful for solving a K-arm bandit problem, used here to
select restart predicates for a beam search.
Randomly selects actions for the first k*5 actions, then selects
randomly with chance 1/n otherwise selects greedily.
*)
type action_chooser =
{ num_actions : int;
mutable random_tries : int;
mutable total_calls : float;
action_counts : float array;
action_results : float array;
action_average : float array;
mutable best_action : int; }
let create k =
{num_actions = k;
random_tries = 0;
total_calls = 0.;
action_counts = Array.make k 0.0;
action_results = Array.make k 0.0;
action_average = Array.make k 0.0;
best_action = 0; }
let choose_action ac =
if(ac.random_tries < ac.num_actions * 5) then
(ac.random_tries <- ac.random_tries + 1;(**)
(Math.random_int ()) mod ac.num_actions)
else if (Math.true_with_prob (1. /. ac.total_calls)) then
(ac.random_tries <- ac.random_tries + 1;
(Math.random_int ()) mod ac.num_actions)
else
ac.best_action
let register_result ac id value =
assert (id < ac.num_actions);
assert (id >= 0);
ac.total_calls <- ac.total_calls +. 1.;
ac.action_counts.(id) <- ac.action_counts.(id) +. 1.0;
ac.action_results.(id) <- ac.action_results.(id) +. value;
ac.action_average.(id) <- ac.action_results.(id) /. ac.action_counts.(id);
ac.best_action <- Wrarray.max_index_by (fun n -> n) ac.action_results
| null | https://raw.githubusercontent.com/jordanthayer/ocaml-search/57cfc85417aa97ee5d8fbcdb84c333aae148175f/search/beam/restart_chooser.ml | ocaml |
little module for choosing actions . Extremely basic value
iteration . Useful for solving a K - arm bandit problem , used here to
select restart predicates for a beam search .
Randomly selects actions for the first k*5 actions , then selects
randomly with chance 1 / n otherwise selects greedily .
little module for choosing actions. Extremely basic value
iteration. Useful for solving a K-arm bandit problem, used here to
select restart predicates for a beam search.
Randomly selects actions for the first k*5 actions, then selects
randomly with chance 1/n otherwise selects greedily.
*)
type action_chooser =
{ num_actions : int;
mutable random_tries : int;
mutable total_calls : float;
action_counts : float array;
action_results : float array;
action_average : float array;
mutable best_action : int; }
let create k =
{num_actions = k;
random_tries = 0;
total_calls = 0.;
action_counts = Array.make k 0.0;
action_results = Array.make k 0.0;
action_average = Array.make k 0.0;
best_action = 0; }
let choose_action ac =
if(ac.random_tries < ac.num_actions * 5) then
(Math.random_int ()) mod ac.num_actions)
else if (Math.true_with_prob (1. /. ac.total_calls)) then
(ac.random_tries <- ac.random_tries + 1;
(Math.random_int ()) mod ac.num_actions)
else
ac.best_action
let register_result ac id value =
assert (id < ac.num_actions);
assert (id >= 0);
ac.total_calls <- ac.total_calls +. 1.;
ac.action_counts.(id) <- ac.action_counts.(id) +. 1.0;
ac.action_results.(id) <- ac.action_results.(id) +. value;
ac.action_average.(id) <- ac.action_results.(id) /. ac.action_counts.(id);
ac.best_action <- Wrarray.max_index_by (fun n -> n) ac.action_results
| |
2dd4c8b02bc4ac043d36e090e77faddc66bdc63c2fa17ab133f83a8634eec503 | racket/libs | info.rkt | #lang setup/infotab
SPDX - License - Identifier : ( Apache-2.0 OR MIT )
;; THIS FILE IS AUTO-GENERATED FROM racket/src/native-libs/install.rkt
(define install-platform "ppc-macosx")
(define copy-foreign-libs '("PSMTabBarControl.framework" "libatk-1.0.0.dylib"))
(define compile-omit-paths '("PSMTabBarControl.framework"))
| null | https://raw.githubusercontent.com/racket/libs/ebcea119197dc0cb86be1ccbbfbe5806f7280976/gui-ppc-macosx/racket/gui/info.rkt | racket | THIS FILE IS AUTO-GENERATED FROM racket/src/native-libs/install.rkt | #lang setup/infotab
SPDX - License - Identifier : ( Apache-2.0 OR MIT )
(define install-platform "ppc-macosx")
(define copy-foreign-libs '("PSMTabBarControl.framework" "libatk-1.0.0.dylib"))
(define compile-omit-paths '("PSMTabBarControl.framework"))
|
069ffb1afe9843a27c06a47c25b568032211ad19339846d75554884af3e12213 | xapi-project/ocaml-qmp | common.ml |
* Copyright ( C ) Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
type t = {
verbose: bool;
debug: bool;
socket: string;
}
let make verbose debug socket = { verbose; debug; socket }
let to_string x = Printf.sprintf "{ verbose = %b; debug = %b; socket = %s }" x.verbose x.debug x.socket
let print oc x = output_string oc (to_string x)
| null | https://raw.githubusercontent.com/xapi-project/ocaml-qmp/1036d976dc0ef8ca75507fed195a1c03e4091fe9/cli/common.ml | ocaml |
* Copyright ( C ) Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
type t = {
verbose: bool;
debug: bool;
socket: string;
}
let make verbose debug socket = { verbose; debug; socket }
let to_string x = Printf.sprintf "{ verbose = %b; debug = %b; socket = %s }" x.verbose x.debug x.socket
let print oc x = output_string oc (to_string x)
| |
47dae7da76413d097c2d780fd876538b61d1fe414b8c9ac2fdb62d081df4d561 | GaloisInc/macaw | ARMSemantics.hs | {-# OPTIONS_GHC -w -ddump-splices -ddump-to-file -dth-dec-file #-}
# LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# OPTIONS_GHC -fno - warn - unused - matches #
module Data.Macaw.ARM.Semantics.ARMSemantics
( execInstruction
)
where
import qualified Data.ByteString as BS
import qualified Data.List as L
import Data.Macaw.ARM.ARMReg ( locToRegTH )
import Data.Macaw.ARM.Arch ( a32InstructionMatcher )
import Data.Macaw.ARM.Semantics.TH ( armAppEvaluator, armNonceAppEval, loadSemantics, armTranslateType )
import qualified Data.Macaw.CFG as MC
import Data.Macaw.SemMC.Generator ( Generator )
import Data.Macaw.SemMC.TH ( MacawTHConfig(..), genExecInstruction, MacawSemMC(..) )
import qualified Data.Macaw.Types as MT
import Data.Parameterized.Classes ( showF )
import qualified Data.Parameterized.Map as MapF
import qualified Data.Parameterized.Nonce as PN
import Data.Parameterized.Some ( Some(..), mapSome )
import Data.Proxy ( Proxy(..) )
must be present to supply definitions for genExecInstruction output
import qualified What4.Expr.Builder as WEB
import qualified Language.Haskell.TH as TH
import qualified SemMC.Architecture.AArch32 as ARMSem
import SemMC.Architecture.ARM.Opcodes ( ASLSemantics(..), allA32OpcodeInfo )
import qualified SemMC.Formula as SF
import qualified What4.Expr.Builder as WEB
import qualified What4.Interface as S
execInstruction :: MC.Value ARMSem.AArch32 ids (MT.BVType 32)
-> Instruction
-> Maybe (Generator ARMSem.AArch32 ids s ())
execInstruction =
$(do Some ng <- TH.runIO PN.newIONonceGenerator
sym <- TH.runIO (WEB.newExprBuilder WEB.FloatIEEERepr MacawSemMC ng)
sem <- TH.runIO (loadSemantics sym)
let
aconv :: (MapF.Pair (Opcode Operand) x)
-> (MapF.Pair (ARMSem.ARMOpcode ARMSem.ARMOperand) x)
aconv (MapF.Pair o b) = MapF.Pair (ARMSem.A32Opcode o) b
let notVecOpc :: forall tps . ARMSem.ARMOpcode ARMSem.ARMOperand tps -> Bool
notVecOpc opc = not ("V" `L.isPrefixOf` showF opc)
let notVecLib :: forall sym . Some (SF.FunctionFormula sym) -> Bool
notVecLib (Some lf) =
case lf of
SF.FunctionFormula { SF.ffName = nm } ->
not ("df_V" `L.isInfixOf` nm)
let thConf = MacawTHConfig { locationTranslator = locToRegTH
, nonceAppTranslator = armNonceAppEval
, appTranslator = armAppEvaluator MC.LittleEndian
, instructionMatchHook = 'a32InstructionMatcher
, archEndianness = MC.LittleEndian
, operandTypeQ = [t| Operand |]
, archTypeQ = [t| ARMSem.AArch32 |]
, genLibraryFunction = notVecLib
, genOpcodeCase = notVecOpc
, archTranslateType = armTranslateType
}
genExecInstruction (Proxy @ARMSem.AArch32)
thConf
(MapF.fromList (fmap aconv (MapF.toList (a32Semantics sem))))
allA32OpcodeInfo
(funSemantics sem)
)
| null | https://raw.githubusercontent.com/GaloisInc/macaw/d1d71fd973f802483e93dffc968dfbdde12fab59/macaw-aarch32/src/Data/Macaw/ARM/Semantics/ARMSemantics.hs | haskell | # OPTIONS_GHC -w -ddump-splices -ddump-to-file -dth-dec-file #
# LANGUAGE GADTs #
# LANGUAGE RankNTypes # | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# OPTIONS_GHC -fno - warn - unused - matches #
module Data.Macaw.ARM.Semantics.ARMSemantics
( execInstruction
)
where
import qualified Data.ByteString as BS
import qualified Data.List as L
import Data.Macaw.ARM.ARMReg ( locToRegTH )
import Data.Macaw.ARM.Arch ( a32InstructionMatcher )
import Data.Macaw.ARM.Semantics.TH ( armAppEvaluator, armNonceAppEval, loadSemantics, armTranslateType )
import qualified Data.Macaw.CFG as MC
import Data.Macaw.SemMC.Generator ( Generator )
import Data.Macaw.SemMC.TH ( MacawTHConfig(..), genExecInstruction, MacawSemMC(..) )
import qualified Data.Macaw.Types as MT
import Data.Parameterized.Classes ( showF )
import qualified Data.Parameterized.Map as MapF
import qualified Data.Parameterized.Nonce as PN
import Data.Parameterized.Some ( Some(..), mapSome )
import Data.Proxy ( Proxy(..) )
must be present to supply definitions for genExecInstruction output
import qualified What4.Expr.Builder as WEB
import qualified Language.Haskell.TH as TH
import qualified SemMC.Architecture.AArch32 as ARMSem
import SemMC.Architecture.ARM.Opcodes ( ASLSemantics(..), allA32OpcodeInfo )
import qualified SemMC.Formula as SF
import qualified What4.Expr.Builder as WEB
import qualified What4.Interface as S
execInstruction :: MC.Value ARMSem.AArch32 ids (MT.BVType 32)
-> Instruction
-> Maybe (Generator ARMSem.AArch32 ids s ())
execInstruction =
$(do Some ng <- TH.runIO PN.newIONonceGenerator
sym <- TH.runIO (WEB.newExprBuilder WEB.FloatIEEERepr MacawSemMC ng)
sem <- TH.runIO (loadSemantics sym)
let
aconv :: (MapF.Pair (Opcode Operand) x)
-> (MapF.Pair (ARMSem.ARMOpcode ARMSem.ARMOperand) x)
aconv (MapF.Pair o b) = MapF.Pair (ARMSem.A32Opcode o) b
let notVecOpc :: forall tps . ARMSem.ARMOpcode ARMSem.ARMOperand tps -> Bool
notVecOpc opc = not ("V" `L.isPrefixOf` showF opc)
let notVecLib :: forall sym . Some (SF.FunctionFormula sym) -> Bool
notVecLib (Some lf) =
case lf of
SF.FunctionFormula { SF.ffName = nm } ->
not ("df_V" `L.isInfixOf` nm)
let thConf = MacawTHConfig { locationTranslator = locToRegTH
, nonceAppTranslator = armNonceAppEval
, appTranslator = armAppEvaluator MC.LittleEndian
, instructionMatchHook = 'a32InstructionMatcher
, archEndianness = MC.LittleEndian
, operandTypeQ = [t| Operand |]
, archTypeQ = [t| ARMSem.AArch32 |]
, genLibraryFunction = notVecLib
, genOpcodeCase = notVecOpc
, archTranslateType = armTranslateType
}
genExecInstruction (Proxy @ARMSem.AArch32)
thConf
(MapF.fromList (fmap aconv (MapF.toList (a32Semantics sem))))
allA32OpcodeInfo
(funSemantics sem)
)
|
d09d3484e779ffdb4b7e50b202f187a4c5744f95daf40e461abaa6ee266c641c | hstreamdb/hstream | Error.hs | module DiffFlow.Error where
import Control.Exception
import Data.Text (Text)
data DiffFlowError
= BasicTypesError Text
| BuildGraphError Text
| RunShardError Text
| ImpossibleError
| UnknownError Text
deriving Show
instance Exception DiffFlowError
| null | https://raw.githubusercontent.com/hstreamdb/hstream/95d4c142cce13c595716ce05d1a8ed3a7e417e51/hstream-diffflow/src/DiffFlow/Error.hs | haskell | module DiffFlow.Error where
import Control.Exception
import Data.Text (Text)
data DiffFlowError
= BasicTypesError Text
| BuildGraphError Text
| RunShardError Text
| ImpossibleError
| UnknownError Text
deriving Show
instance Exception DiffFlowError
| |
0e4f3763460f92bc18f94b3b57ef37d84ce8f4dfc1b302fef9922778881f38e4 | janestreet/shexp | prim.ml | open Import
module Args = struct
module Spec = struct
module Arg = struct
type 'a t =
| A of ('a -> Sexp.t)
| L of string * ('a -> Sexp.t)
| O of string * ('a -> Sexp.t) * 'a
end
type 'a t =
| [] : 'a t
| ( :: ) : 'a Arg.t * 'b t -> ('a -> 'b) t
end
We use this rather than a generic list like structure to make [ apply ] faster . [ A0 ] is
not a constant for optimization purposes : matching on a [ t ] generates one less
conditional jump .
not a constant for optimization purposes: matching on a [t] generates one less
conditional jump. *)
type ('a, 'b) t =
| A0 : unit -> ('a, 'a) t
| A1 : 'a -> ('a -> 'b, 'b) t
| A2 : 'a * 'b -> ('a -> 'b -> 'c, 'c) t
| A3 : 'a * 'b * 'c -> ('a -> 'b -> 'c -> 'd, 'd) t
| A4 : 'a * 'b * 'c * 'd -> ('a -> 'b -> 'c -> 'd -> 'e, 'e) t
| A5 : 'a * 'b * 'c * 'd * 'e -> ('a -> 'b -> 'c -> 'd -> 'e -> 'f, 'f) t
let apply : type env a b. (env -> a) -> env -> (a, b) t -> b =
fun f env t ->
match t with
| A0 () -> f env
| A1 a -> f env a
| A2 (a, b) -> f env a b
| A3 (a, b, c) -> f env a b c
| A4 (a, b, c, d) -> f env a b c d
| A5 (a, b, c, d, e) -> f env a b c d e
;;
let sexps : type a b. a Spec.t -> (a, b) t -> Sexp.t list =
fun spec args ->
let open Spec in
let arg (arg : _ Spec.Arg.t) x acc : Sexp.t list =
match arg with
| A f -> f x :: acc
| L (name, f) -> List [ Atom name; f x ] :: acc
| O (name, f, default) ->
if x = default then acc else List [ Atom name; f x ] :: acc
in
match spec, args with
| [], A0 () -> []
| [ sa ], A1 a -> arg sa a []
| [ sa; sb ], A2 (a, b) -> arg sa a @@ arg sb b []
| [ sa; sb; sc ], A3 (a, b, c) -> arg sa a @@ arg sb b @@ arg sc c []
| [ sa; sb; sc; sd ], A4 (a, b, c, d) ->
arg sa a @@ arg sb b @@ arg sc c @@ arg sd d []
| [ sa; sb; sc; sd; se ], A5 (a, b, c, d, e) ->
arg sa a @@ arg sb b @@ arg sc c @@ arg sd d @@ arg se e []
| _ -> invalid_arg "Shexp_process.Prim.Args.sexps"
;;
end
module Result_spec = struct
type 'a t =
| Unit : unit t
| Env : Env.t t
| F : ('a -> Sexp.t) -> 'a t
let sexp : type a. a t -> a -> Sexp.t option =
fun t x ->
match t with
| Unit -> None
| Env -> None
| F f -> Some (f x)
;;
end
type ('a, 'b) t =
{ name : string
; args : 'a Args.Spec.t
; result : 'b Result_spec.t
; run : Env.t -> 'a
}
let make name args result run = { name; args; result; run }
let run t env args = Args.apply t.run env args
let sexp_of_call t args = Sexp.List (Atom t.name :: Args.sexps t.args args)
let sexp_of_result t x = Result_spec.sexp t.result x
| null | https://raw.githubusercontent.com/janestreet/shexp/635989a9065f94e309707f113d6647dc62d6932f/process-lib/src/prim.ml | ocaml | open Import
module Args = struct
module Spec = struct
module Arg = struct
type 'a t =
| A of ('a -> Sexp.t)
| L of string * ('a -> Sexp.t)
| O of string * ('a -> Sexp.t) * 'a
end
type 'a t =
| [] : 'a t
| ( :: ) : 'a Arg.t * 'b t -> ('a -> 'b) t
end
We use this rather than a generic list like structure to make [ apply ] faster . [ A0 ] is
not a constant for optimization purposes : matching on a [ t ] generates one less
conditional jump .
not a constant for optimization purposes: matching on a [t] generates one less
conditional jump. *)
type ('a, 'b) t =
| A0 : unit -> ('a, 'a) t
| A1 : 'a -> ('a -> 'b, 'b) t
| A2 : 'a * 'b -> ('a -> 'b -> 'c, 'c) t
| A3 : 'a * 'b * 'c -> ('a -> 'b -> 'c -> 'd, 'd) t
| A4 : 'a * 'b * 'c * 'd -> ('a -> 'b -> 'c -> 'd -> 'e, 'e) t
| A5 : 'a * 'b * 'c * 'd * 'e -> ('a -> 'b -> 'c -> 'd -> 'e -> 'f, 'f) t
let apply : type env a b. (env -> a) -> env -> (a, b) t -> b =
fun f env t ->
match t with
| A0 () -> f env
| A1 a -> f env a
| A2 (a, b) -> f env a b
| A3 (a, b, c) -> f env a b c
| A4 (a, b, c, d) -> f env a b c d
| A5 (a, b, c, d, e) -> f env a b c d e
;;
let sexps : type a b. a Spec.t -> (a, b) t -> Sexp.t list =
fun spec args ->
let open Spec in
let arg (arg : _ Spec.Arg.t) x acc : Sexp.t list =
match arg with
| A f -> f x :: acc
| L (name, f) -> List [ Atom name; f x ] :: acc
| O (name, f, default) ->
if x = default then acc else List [ Atom name; f x ] :: acc
in
match spec, args with
| [], A0 () -> []
| [ sa ], A1 a -> arg sa a []
| [ sa; sb ], A2 (a, b) -> arg sa a @@ arg sb b []
| [ sa; sb; sc ], A3 (a, b, c) -> arg sa a @@ arg sb b @@ arg sc c []
| [ sa; sb; sc; sd ], A4 (a, b, c, d) ->
arg sa a @@ arg sb b @@ arg sc c @@ arg sd d []
| [ sa; sb; sc; sd; se ], A5 (a, b, c, d, e) ->
arg sa a @@ arg sb b @@ arg sc c @@ arg sd d @@ arg se e []
| _ -> invalid_arg "Shexp_process.Prim.Args.sexps"
;;
end
module Result_spec = struct
type 'a t =
| Unit : unit t
| Env : Env.t t
| F : ('a -> Sexp.t) -> 'a t
let sexp : type a. a t -> a -> Sexp.t option =
fun t x ->
match t with
| Unit -> None
| Env -> None
| F f -> Some (f x)
;;
end
type ('a, 'b) t =
{ name : string
; args : 'a Args.Spec.t
; result : 'b Result_spec.t
; run : Env.t -> 'a
}
let make name args result run = { name; args; result; run }
let run t env args = Args.apply t.run env args
let sexp_of_call t args = Sexp.List (Atom t.name :: Args.sexps t.args args)
let sexp_of_result t x = Result_spec.sexp t.result x
| |
a9c5ba7f20f10d3e233523dbe91eabeb927cf391058c5594f4a3a4b29db1eff4 | rpeszek/typed-encoding | UTF8.hs | # LANGUAGE DataKinds #
--{-# LANGUAGE KindSignatures #-}
# LANGUAGE PolyKinds #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeOperators #
# LANGUAGE FlexibleInstances #
# LANGUAGE FlexibleContexts #
--{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE PartialTypeSignatures #
{ - # LANGUAGE TypeApplications # - }
| ' UTF-8 ' encoding with additional assumption of conforming to Unicode . D76 .
--
@"r - UTF-8"@ basically defines restriction on @ByteString@ that is needed for
-- conversion to @Text@ to work.
--
@since 0.1.0.0
module Data.TypedEncoding.Instances.Restriction.UTF8 (
module Data.TypedEncoding.Instances.Restriction.UTF8
-- * reexported for backward compatibility, will be removed in the future
, implVerifyR
) where
import Data.TypedEncoding.Instances.Support
import Data.Proxy
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Lazy.Encoding as TEL
import Data.Either
-- $setup
-- >>> :set -XScopedTypeVariables -XKindSignatures -XMultiParamTypeClasses -XDataKinds -XPolyKinds -XPartialTypeSignatures -XFlexibleInstances -XTypeApplications
-- >>> import Test.QuickCheck
-- >>> import Test.QuickCheck.Instances.Text()
-- >>> import Test.QuickCheck.Instances.ByteString()
> > > import Data .
-- >>> let emptyUTF8B = unsafeSetPayload () "" :: Enc '["r-UTF8"] () B.ByteString
-- >>> :{
-- instance Arbitrary (Enc '["r-UTF8"] () B.ByteString) where
-- arbitrary = fmap (fromRight emptyUTF8B)
. flip suchThat isRight
. fmap ( encodeFAll @'["r - UTF8 " ] ) @ ( ) . toEncoding ( ) ) $ arbitrary
-- :}
-----------------
-- Encodings --
-----------------
prxyUtf8 = Proxy :: Proxy "r-UTF8"
| UTF8 encodings are defined for ByteString only as that would not make much sense for Text
--
> > > _ encodings . toEncoding ( ) $ " \xc3\xb1 " : : Either EncodeEx ( Enc ' [ " r - UTF8 " ] ( ) B.ByteString )
Right ( UnsafeMkEnc Proxy ( ) " \195\177 " )
--
> > > _ encodings . toEncoding ( ) $ " \xc3\x28 " : : Either EncodeEx ( Enc ' [ " r - UTF8 " ] ( ) B.ByteString )
-- Left (EncodeEx "r-UTF8" (Cannot decode byte '\xc3': ...
--
-- Following test uses 'verEncoding' helper that checks that bytes are encoded as Right iff they are valid UTF8 bytes
--
-- >>> :{
-- quickCheck $ \(b :: B.ByteString) -> verEncoding b $ fmap (
-- fromEncoding
. decodeAll @'["r - UTF8 " ]
) . encodeFAll @'["r - UTF8 " ] )
-- . toEncoding () $ b
-- :}
+ + + OK , passed 100 tests .
instance Encode (Either EncodeEx) "r-UTF8" "r-UTF8" c B.ByteString where
encoding = encUTF8B
instance Encode (Either EncodeEx) "r-UTF8" "r-UTF8" c BL.ByteString where
encoding = encUTF8BL :: Encoding (Either EncodeEx) "r-UTF8" "r-UTF8" c BL.ByteString
-- using lazy decoding to detect errors seems to be the fastest option that is not super hard to code
encUTF8B :: Encoding (Either EncodeEx) "r-UTF8" "r-UTF8" c B.ByteString
encUTF8B = _implEncodingEx (implVerifyR (TEL.decodeUtf8' . BL.fromStrict))
encUTF8BL :: Encoding (Either EncodeEx) "r-UTF8" "r-UTF8" c BL.ByteString
encUTF8BL = _implEncodingEx (implVerifyR TEL.decodeUtf8')
-- * Decoding
instance (Applicative f) => Decode f "r-UTF8" "r-UTF8" c str where
decoding = decAnyR
instance (RecreateErr f, Applicative f) => Validate f "r-UTF8" "r-UTF8" c B.ByteString where
validation = validR encUTF8B
instance (RecreateErr f, Applicative f) => Validate f "r-UTF8" "r-UTF8" c BL.ByteString where
validation = validR encUTF8BL
--- Utilities ---
| helper function checks that given ByteString ,
-- if is encoded as Left is must be not Utf8 decodable
is is encoded as Right is must be Utf8 encodable
verEncoding :: B.ByteString -> Either err B.ByteString -> Bool
verEncoding bs (Left _) = isLeft . TE.decodeUtf8' $ bs
verEncoding bs (Right _) = isRight . TE.decodeUtf8' $ bs
| null | https://raw.githubusercontent.com/rpeszek/typed-encoding/441f9f3bbf849f485f82eae66402ee2fd7b47a34/src/Data/TypedEncoding/Instances/Restriction/UTF8.hs | haskell | {-# LANGUAGE KindSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
conversion to @Text@ to work.
* reexported for backward compatibility, will be removed in the future
$setup
>>> :set -XScopedTypeVariables -XKindSignatures -XMultiParamTypeClasses -XDataKinds -XPolyKinds -XPartialTypeSignatures -XFlexibleInstances -XTypeApplications
>>> import Test.QuickCheck
>>> import Test.QuickCheck.Instances.Text()
>>> import Test.QuickCheck.Instances.ByteString()
>>> let emptyUTF8B = unsafeSetPayload () "" :: Enc '["r-UTF8"] () B.ByteString
>>> :{
instance Arbitrary (Enc '["r-UTF8"] () B.ByteString) where
arbitrary = fmap (fromRight emptyUTF8B)
:}
---------------
Encodings --
---------------
Left (EncodeEx "r-UTF8" (Cannot decode byte '\xc3': ...
Following test uses 'verEncoding' helper that checks that bytes are encoded as Right iff they are valid UTF8 bytes
>>> :{
quickCheck $ \(b :: B.ByteString) -> verEncoding b $ fmap (
fromEncoding
. toEncoding () $ b
:}
using lazy decoding to detect errors seems to be the fastest option that is not super hard to code
* Decoding
- Utilities ---
if is encoded as Left is must be not Utf8 decodable | # LANGUAGE DataKinds #
# LANGUAGE PolyKinds #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeOperators #
# LANGUAGE FlexibleInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE PartialTypeSignatures #
{ - # LANGUAGE TypeApplications # - }
| ' UTF-8 ' encoding with additional assumption of conforming to Unicode . D76 .
@"r - UTF-8"@ basically defines restriction on @ByteString@ that is needed for
@since 0.1.0.0
module Data.TypedEncoding.Instances.Restriction.UTF8 (
module Data.TypedEncoding.Instances.Restriction.UTF8
, implVerifyR
) where
import Data.TypedEncoding.Instances.Support
import Data.Proxy
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Lazy.Encoding as TEL
import Data.Either
> > > import Data .
. flip suchThat isRight
. fmap ( encodeFAll @'["r - UTF8 " ] ) @ ( ) . toEncoding ( ) ) $ arbitrary
prxyUtf8 = Proxy :: Proxy "r-UTF8"
| UTF8 encodings are defined for ByteString only as that would not make much sense for Text
> > > _ encodings . toEncoding ( ) $ " \xc3\xb1 " : : Either EncodeEx ( Enc ' [ " r - UTF8 " ] ( ) B.ByteString )
Right ( UnsafeMkEnc Proxy ( ) " \195\177 " )
> > > _ encodings . toEncoding ( ) $ " \xc3\x28 " : : Either EncodeEx ( Enc ' [ " r - UTF8 " ] ( ) B.ByteString )
. decodeAll @'["r - UTF8 " ]
) . encodeFAll @'["r - UTF8 " ] )
+ + + OK , passed 100 tests .
instance Encode (Either EncodeEx) "r-UTF8" "r-UTF8" c B.ByteString where
encoding = encUTF8B
instance Encode (Either EncodeEx) "r-UTF8" "r-UTF8" c BL.ByteString where
encoding = encUTF8BL :: Encoding (Either EncodeEx) "r-UTF8" "r-UTF8" c BL.ByteString
encUTF8B :: Encoding (Either EncodeEx) "r-UTF8" "r-UTF8" c B.ByteString
encUTF8B = _implEncodingEx (implVerifyR (TEL.decodeUtf8' . BL.fromStrict))
encUTF8BL :: Encoding (Either EncodeEx) "r-UTF8" "r-UTF8" c BL.ByteString
encUTF8BL = _implEncodingEx (implVerifyR TEL.decodeUtf8')
instance (Applicative f) => Decode f "r-UTF8" "r-UTF8" c str where
decoding = decAnyR
instance (RecreateErr f, Applicative f) => Validate f "r-UTF8" "r-UTF8" c B.ByteString where
validation = validR encUTF8B
instance (RecreateErr f, Applicative f) => Validate f "r-UTF8" "r-UTF8" c BL.ByteString where
validation = validR encUTF8BL
| helper function checks that given ByteString ,
is is encoded as Right is must be Utf8 encodable
verEncoding :: B.ByteString -> Either err B.ByteString -> Bool
verEncoding bs (Left _) = isLeft . TE.decodeUtf8' $ bs
verEncoding bs (Right _) = isRight . TE.decodeUtf8' $ bs
|
196193f8dffe6b52ec869727f28f70e23a63dd22bfd3db1228afcab8d534d714 | thoughtstem/racket-blocks | main.rkt | #lang racket
(module reader syntax/module-reader
racket-bricks/racket-bricks-module
#:wrapper1 (lambda (t)
(define exp-t (t))
(define (brick-snip? b)
(and
(object? b)
(member 'code-s (field-names b))))
(define (replace-easter-eggs2 syn)
(define thing (if (syntax? syn)
(syntax->datum syn)
syn))
;is-a? doesn't work? For some reason the class is returned as #f from
; (object-info thing)
(cond [(brick-snip? thing)
(datum->syntax (list-ref exp-t 0)
(read (open-input-string (get-field code-s thing))))]
[(list? thing) (map replace-easter-eggs2 thing)]
[else syn]))
(map replace-easter-eggs2 exp-t))
(require 2htdp/image
racket/class))
| null | https://raw.githubusercontent.com/thoughtstem/racket-blocks/e3653cac8500883e91feefd6462cb413751d0c16/racket-bricks/main.rkt | racket | is-a? doesn't work? For some reason the class is returned as #f from
(object-info thing) | #lang racket
(module reader syntax/module-reader
racket-bricks/racket-bricks-module
#:wrapper1 (lambda (t)
(define exp-t (t))
(define (brick-snip? b)
(and
(object? b)
(member 'code-s (field-names b))))
(define (replace-easter-eggs2 syn)
(define thing (if (syntax? syn)
(syntax->datum syn)
syn))
(cond [(brick-snip? thing)
(datum->syntax (list-ref exp-t 0)
(read (open-input-string (get-field code-s thing))))]
[(list? thing) (map replace-easter-eggs2 thing)]
[else syn]))
(map replace-easter-eggs2 exp-t))
(require 2htdp/image
racket/class))
|
3551fc36fc28c5d89c786c9bddc765c726e16371e5d911644d6dd81ae2a6a217 | unnohideyuki/bunny | sample321.hs | main = print $ 2^3 `div` 2^2 `div` 2
| null | https://raw.githubusercontent.com/unnohideyuki/bunny/501856ff48f14b252b674585f25a2bf3801cb185/compiler/test/samples/sample321.hs | haskell | main = print $ 2^3 `div` 2^2 `div` 2
| |
957ee7be39bb4aef1e90273bc127683b15680f38f3f5fef9a8bf60501dc76fce | VisionsGlobalEmpowerment/webchange | onset_and_rime.clj | (ns webchange.templates.library.onset-and-rime
(:require
[webchange.templates.core :as core]
[webchange.templates.utils.common :as common]))
(def template-options
[{:type "note"
:text "Bring together onset and rime to form a word. Choose the words and writing used below along with an image for the word. Create as many rounds as you’d like."}
{:type "onset-rime-rounds"}])
(def m {:id 25
:name "Onset and rime"
:tags ["Guided Practice" "onset and rime" "blending"]
:description "Orally blend the onsets, rimes, and phonemes of words and orally delete the onsets of words, with the support of pictures or objects."
:options {:left-text {:label "Left cloud"
:placeholder "Left cloud"
:type "string"}
:right-text {:label "Right cloud"
:placeholder "Right cloud"
:type "string"}
:whole-text {:label "Whole word cloud"
:placeholder "Whole word cloud"
:type "string"}
:image {:label "Image at result"
:type "image"
:options {:max-width 100
:max-height 100
:min-height 50
:min-width 50}}}
:actions {:add-ball {:title "Add word",
:options {:left-text {:label "Left cloud"
:placeholder "Left cloud"
:type "string"}
:right-text {:label "Right cloud"
:placeholder "Right cloud"
:type "string"}
:whole-text {:label "Whole word cloud"
:placeholder "Whole word cloud"
:type "string"}
:image {:label "Image at result"
:type "image"
:options {:max-width 100
:max-height 100
:min-height 50
:min-width 50}}}}
:template-options {:title "Template Options"
:options template-options}}})
(def glow-color 0x2a76ff)
(def glow-strength 2)
(def cloud-states {:highlighted-0 {:glow-pulsation {:duration 200
:min-value 0
:max-value glow-strength
:color glow-color}}
:highlighted-1 {:glow-pulsation {:duration 160
:min-value 0
:max-value glow-strength
:color glow-color}}
:highlighted-2 {:glow-pulsation {:duration 130
:min-value 0
:max-value glow-strength
:color glow-color}}
:highlighted-3 {:glow-pulsation {:duration 100
:min-value 0
:max-value glow-strength
:color glow-color}}
:not-highlighted {:glow-pulsation false}})
(def t {:assets [{:url "/raw/img/onset-and-rime/background.png", :size 10 :type "image"}
{:url "/raw/img/onset-and-rime/cloud.png", :size 1, :type "image"}],
:objects {:background {:type "background", :src "/raw/img/onset-and-rime/background.png"},
:senoravaca {:type "animation"
:x 1100
:y 970
:name "senoravaca"
:anim "idle"
:speed 0.3
:skin "vaca"
:scale-x 0.75 :scale-y 0.75
:editable? {:select true :drag true :show-in-tree? true}
:start true
:scene-name "senoravaca"}
:cloud-left-img--1 {:type "image"
:src "/raw/img/onset-and-rime/cloud.png"
:x 0
:y 0
:states cloud-states}
:cloud-left-text--1 {:type "text"
:text ""
:x 320
:y 220
:align "center"
:vertical-align "bottom"
:font-family "Lexend Deca"
:font-size 110
:fill "black"}
:cloud-left--1 {:type "group"
:x 25
:y 176
:transition "cloud-left--1"
:children ["cloud-left-img--1"
"cloud-left-text--1"]},
:cloud-right-img--1 {:type "image"
:src "/raw/img/onset-and-rime/cloud.png"
:transition "cloud-right-img"
:states cloud-states
:x 0
:y 0}
:cloud-right-text--1 {:type "text"
:text ""
:x 320
:y 220
:align "center"
:vertical-align "bottom"
:font-family "Lexend Deca"
:font-size 110
:fill "black"}
:cloud-right--1 {:type "group"
:x 1214
:y 176
:transition "cloud-right--1"
:children ["cloud-right-img--1"
"cloud-right-text--1"]}
:moving-clouds--1 {:type "group"
:visible true
:transition "moving-clouds--1"
:children ["cloud-right--1"
"cloud-left--1"]
:states {:hide {:visible false} :show {:visible true}}}
:cloud-center-img--1 {:type "image"
:src "/raw/img/onset-and-rime/cloud.png"
:transition "cloud-center-img"
:x 0
:y 0}
:cloud-center-text-img--1 {:type "image"
:src ""
:transition "cloud-center-text-img"
:x 400
:y 120}
:cloud-center-text--1 {:type "text"
:text ""
:x 250
:y 220
:align "center"
:vertical-align "bottom"
:font-family "Lexend Deca"
:font-size 110
:fill "black"}
:cloud-center--1 {:type "group"
:x 600
:y 176
:transition "cloud-center--1"
:visible false
:opacity 0
:children ["cloud-center-img--1"
"cloud-center-text--1"
"cloud-center-text-img--1"]
:states {:hide {:visible false} :show {:visible true}}}}
:scene-objects [["background"] ["senoravaca"] ["cloud-center--1" "moving-clouds--1"]],
:actions {:sync-highlights {:type "parallel"
:data [{:type "state"
:from-var [{:var-name "next-cloud-img" :action-property "target"}
{:template "highlighted-%" :var-name "step-counter" :action-property "id"}]}
{:type "state" :id "not-highlighted"
:from-var [{:var-name "prev-cloud-img" :action-property "target"}]}
{:type "state" :id "disable"
:from-var [{:var-name "prev-cloud" :action-property "target"}]}
{:type "state" :id "enable"
:from-var [{:var-name "next-cloud" :action-property "target"}]}]}
:init-scene-cloud-vars {:type "sequence-data"
:data [{:type "set-variable" :var-name "cloud-left-x" :var-value 25}
{:type "set-variable" :var-name "cloud-right-x" :var-value 1214}
{:type "set-variable" :var-name "next-cloud"
:from-var [{:template "cloud-left-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "prev-cloud"
:from-var [{:template "cloud-right-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "prev-cloud-img"
:from-var [{:template "cloud-right-img-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "next-cloud-img"
:from-var [{:template "cloud-left-img-%" :var-name "unique-suffix" :action-property "var-value"}]}]}
:update-left {:type "sequence-data"
:data [{:type "calc" :var-name "cloud-left-x" :operation "plus" :value-1 140
:from-var [{:var-name "cloud-left-x" :action-property "value-2"}]}
{:type "set-variable" :var-name "next-cloud" :var-value "cloud-right"
:from-var [{:template "cloud-right-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "prev-cloud" :var-value "cloud-left"
:from-var [{:template "cloud-left-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "prev-cloud-img"
:from-var [{:template "cloud-left-img-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "next-cloud-img"
:from-var [{:template "cloud-right-img-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "action" :id "sync-highlights"}]}
:update-right {:type "sequence-data"
:data [{:type "calc" :var-name "cloud-right-x" :operation "plus" :value-1 -140
:from-var [{:var-name "cloud-right-x" :action-property "value-2"}]}
{:type "set-variable" :var-name "next-cloud"
:from-var [{:template "cloud-left-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "prev-cloud"
:from-var [{:template "cloud-right-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "prev-cloud-img"
:from-var [{:template "cloud-right-img-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "next-cloud-img"
:from-var [{:template "cloud-left-img-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "action" :id "sync-highlights"}]}
:animate-clouds {:type "sequence-data"
:data [{:type "transition"
:to {:duration 0.5}
:from-var [{:var-name "cloud-left-x" :action-property "to.x"}
{:template "cloud-left-%" :var-name "unique-suffix" :action-property "transition-id"}]}
{:type "transition"
:transition-id "cloud-right"
:to {:duration 0.5}
:from-var [{:var-name "cloud-right-x" :action-property "to.x"}
{:template "cloud-right-%" :var-name "unique-suffix" :action-property "transition-id"}]}]}
:shake-step-left {:type "sequence-data"
:data [{:type "transition",
:from-var [{:var-name "cloud-left-x"
:action-property "to.x"
:offset 20}]
:to {:duration 0.1},
:from-params [{:param-property "target", :action-property "transition-id"}]}
{:type "transition",
:from-var [{:var-name "cloud-left-x"
:action-property "to.x"
:offset 0}]
:to {:duration 0.1},
:from-params [{:param-property "target", :action-property "transition-id"}]}]}
:shake-left {:type "sequence-data"
:data [{:type "action" :id "shake-step-left"}
{:type "action" :id "shake-step-left"}
{:type "action" :id "shake-step-left"}]}
:cloud-left-click-check {:type "test-var-scalar",
:success "cloud-left-clicked",
:var-name "left-click-unlocked"
:value true}
:cloud-left-clicked {:type "test-var-scalar",
:success "cloud-left-clicked-correct",
:fail "shake-left",
:var-name "next-cloud"
:from-params [{:param-property "target", :action-property "value"}]}
:cloud-left-clicked-correct {:type "sequence-data"
:data [{:type "action" :id "update-left"}
{:type "action"
:from-var [{:template "cloud-left-dialog-%" :var-name "unique-suffix" :action-property "id"}]}
{:type "action" :id "animate-clouds"}]}
:shake-step-right {:type "sequence-data"
:data [{:type "transition",
:from-var [{:var-name "cloud-right-x"
:action-property "to.x"
:offset 20}]
:to {:duration 0.1},
:from-params [{:param-property "target", :action-property "transition-id"}]}
{:type "transition",
:from-var [{:var-name "cloud-right-x"
:action-property "to.x"
:offset 0}]
:to {:duration 0.1},
:from-params [{:param-property "target", :action-property "transition-id"}]}]}
:shake-right {:type "sequence-data"
:data [{:type "action" :id "shake-step-right"}
{:type "action" :id "shake-step-right"}
{:type "action" :id "shake-step-right"}]}
:cloud-right-clicked {:type "test-var-scalar",
:success "cloud-right-clicked-correct",
:fail "shake-right",
:var-name "next-cloud"
:from-params [{:param-property "target", :action-property "value"}]}
:cloud-right-clicked-correct {:type "parallel"
:data [{:type "action"
:from-var [{:template "cloud-right-dialog-%" :var-name "unique-suffix" :action-property "id"}]}
{:type "set-variable" :var-name "left-click-unlocked" :var-value false}
{:type "sequence-data"
:data [{:type "action" :id "update-right"}
{:type "counter" :counter-action "increase" :counter-id "step-counter"}
{:type "action" :id "animate-clouds"}]}
{:type "sequence-data"
:data [{:type "test-var-inequality"
:var-name "step-counter",
:value 3,
:inequality ">=",
:success "finish-step",}
{:type "set-variable" :var-name "left-click-unlocked" :var-value true}]}]}
:finish-step {:type "sequence-data"
:unique-tag "finish-step"
:data [{:type "action" :id "animate-finish-step"}
{:type "action"
:from-var [{:template "correct-answer-dialog-%"
:var-name "unique-suffix"
:action-property "id"}]}
{:type "action" :id "next-step"}]}
:animate-finish-step {:type "sequence-data"
:data [{:type "transition" :to {:opacity 0 :duration 0.1}
:from-var [{:template "moving-clouds-%" :var-name "unique-suffix" :action-property "transition-id"}]}
{:type "state" :id "hide"
:from-var [{:template "moving-clouds-%" :var-name "unique-suffix" :action-property "target"}]}
{:type "state" :id "show"
:from-var [{:template "cloud-center-%" :var-name "unique-suffix" :action-property "target"}]}
{:type "transition" :to {:opacity 1 :duration 1}
:from-var [{:template "cloud-center-%" :var-name "unique-suffix" :action-property "transition-id"}]}
{:type "action"
:from-var [{:template "cloud-center-dialog-%" :var-name "unique-suffix" :action-property "id"}]}
{:type "empty" :duration 2000}
{:type "transition" :to {:opacity 0 :duration 0.01}
:from-var [{:template "cloud-center-%" :var-name "unique-suffix" :action-property "transition-id"}]}
{:type "state" :id "hide"
:from-var [{:template "cloud-center-%" :var-name "unique-suffix" :action-property "target"}]}]}
:next-step {:type "sequence-data"
:data [{:type "counter" :counter-action "reset" :counter-value 0 :counter-id "step-counter"}
{:type "counter" :counter-action "increase" :counter-id "goal-counter"}
{:type "action" :id "check-scene-finished"}]}
:check-scene-finished {:type "test-var-inequality"
:var-name "goal-counter",
:value 0,
:inequality ">=",
:success "finish-scene",
:fail "init-next",}
:init-next {:type "sequence-data"
:data [{:type "counter" :counter-action "increase" :counter-id "unique-suffix"}
{:type "action" :id "init-scene-cloud-vars"}
{:type "action" :id "sync-highlights"}
{:type "set-variable" :var-name "left-click-unlocked" :var-value true}
{:type "state" :id "show"
:from-var [{:template "moving-clouds-%" :var-name "unique-suffix" :action-property "target"}]}]}
:introduce-task {:type "parallel"
:data [{:type "action" :id "intro-dialog"}
{:type "sequence-data"
:data [{:type "set-variable" :var-name "unique-suffix" :var-value -1}
{:type "action" :id "init-scene-cloud-vars"}
{:type "set-variable" :var-name "step-counter" :var-value 0}
{:type "action" :id "sync-highlights"}
{:type "empty" :duration 500}
{:type "set-variable" :var-name "step-counter" :var-value 0}
{:type "action" :id "update-left"}
{:type "action" :id "animate-clouds"}
{:type "empty" :duration 410}
{:type "action" :id "update-right"}
{:type "action" :id "animate-clouds"}
{:type "empty" :duration 320}
{:type "set-variable" :var-name "step-counter" :var-value 1}
{:type "action" :id "update-left"}
{:type "action" :id "animate-clouds"}
{:type "empty" :duration 230}
{:type "action" :id "update-right"}
{:type "action" :id "animate-clouds"}
{:type "empty" :duration 140}
{:type "set-variable" :var-name "step-counter" :var-value 2}
{:type "action" :id "update-left"}
{:type "action" :id "animate-clouds"}
{:type "empty" :duration 50}
{:type "action" :id "update-right"}
{:type "action" :id "animate-clouds"}
{:type "empty" :duration 10}
{:type "action" :id "animate-finish-step"}]}]}
:cloud-center-dialog--1 {:type "empty" :duration 0}
:init-scene {:type "sequence-data"
:data [{:type "start-activity"},
{:type "action" :id "introduce-task"}
{:type "set-variable" :var-name "left-click-unlocked" :var-value true}
{:type "set-variable" :var-name "unique-suffix" :var-value 0}
{:type "action" :id "init-scene-cloud-vars"}
{:type "counter" :counter-action "reset" :counter-value 0 :counter-id "goal-counter"}
{:type "counter" :counter-action "reset" :counter-value 0 :counter-id "step-counter"}
{:type "action" :id "sync-highlights"}
{:type "state" :id "show"
:from-var [{:template "moving-clouds-%" :var-name "unique-suffix" :action-property "target"}]}]}
:intro-dialog {:type "sequence-data",
:editor-type "dialog",
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence", :phrase-text "New action", :audio nil}]}],
:phrase "intro",
:phrase-description "Activity Introduction"}
:finish-scene {:type "sequence-data",
:data [{:type "action" :id "finish-dialog"}
{:type "finish-activity"}]}
:finish-dialog {:type "sequence-data",
:editor-type "dialog",
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence", :phrase-text "New action", :audio nil}]}],
:phrase "Task Completion",
:phrase-description "Task Completion"}}
:triggers {:start {:on "start" :action "init-scene"}}
:metadata {:autostart true}})
(defn add-cloud
  "Builds all scene data for a single onset-and-rime round.

  `suffix` makes every generated action/object name unique for the round;
  `args` supplies :left-text, :right-text, :whole-text and :image.
  Returns [actions-map objects-map scene-object-names]."
  [suffix args]
  (let [uniq (fn [base] (common/make-name-unique-by-suffix base suffix))
        ;; A standard editable dialog action wrapping a single phrase.
        dialog (fn [phrase description phrase-text]
                 {:type "sequence-data",
                  :editor-type "dialog",
                  :data [{:type "sequence-data"
                          :data [{:type "empty" :duration 0}
                                 {:type "animation-sequence", :phrase-text phrase-text, :audio nil}]}],
                  :phrase phrase,
                  :phrase-description description})
        ;; Text label drawn on top of a cloud image.
        cloud-label (fn [text x]
                      {:type "text"
                       :text text
                       :x x
                       :y 220
                       :align "center"
                       :vertical-align "bottom"
                       :font-family "Lexend Deca"
                       :font-size 110
                       :fill "black"})
        ;; Clickable cloud group (image + label) dispatching `click-action`.
        cloud-group (fn [x img-name text-name self-name click-action]
                      {:type "group"
                       :x x
                       :y 176
                       :transition self-name
                       :children [img-name text-name]
                       :states {:disable {:interactive false}
                                :enable {:interactive true}}
                       :actions {:click {:type "action"
                                         :id click-action
                                         :on "click"
                                         :params {:target self-name
                                                  :target-img img-name}}}})
        actions {(uniq "cloud-left-dialog")     (dialog "cloud-left-dialog" "Cloud left dialog" (:left-text args))
                 (uniq "cloud-right-dialog")    (dialog "cloud-right-dialog" "Cloud right dialog" (:right-text args))
                 (uniq "cloud-center-dialog")   (dialog "cloud-center-dialog" "Cloud center dialog" (:whole-text args))
                 (uniq "correct-answer-dialog") (dialog "correct-answer" "Correct answer" "New action")}
        objects {(uniq "cloud-left-img")        {:type "image"
                                                 :src "/raw/img/onset-and-rime/cloud.png"
                                                 :x 0
                                                 :states cloud-states,
                                                 :y 0}
                 (uniq "cloud-left-text")       (cloud-label (:left-text args) 320)
                 (uniq "cloud-left")            (cloud-group 25
                                                             (uniq "cloud-left-img")
                                                             (uniq "cloud-left-text")
                                                             (uniq "cloud-left")
                                                             "cloud-left-click-check"),
                 (uniq "cloud-right-img")       {:type "image"
                                                 :src "/raw/img/onset-and-rime/cloud.png"
                                                 :transition (uniq "cloud-right-img")
                                                 :states cloud-states
                                                 :x 0
                                                 :y 0}
                 (uniq "cloud-right-text")      (cloud-label (:right-text args) 320)
                 (uniq "cloud-right")           (cloud-group 1214
                                                             (uniq "cloud-right-img")
                                                             (uniq "cloud-right-text")
                                                             (uniq "cloud-right")
                                                             "cloud-right-clicked")
                 ;; Wrapper for the two clickable clouds; toggled per round.
                 (uniq "moving-clouds")         {:type "group"
                                                 :visible false
                                                 :transition (uniq "moving-clouds")
                                                 :children [(uniq "cloud-right")
                                                            (uniq "cloud-left")]
                                                 :states {:hide {:visible false} :show {:visible true}}}
                 (uniq "cloud-center-img")      {:type "image"
                                                 :src "/raw/img/onset-and-rime/cloud.png"
                                                 :transition (uniq "cloud-center-img")
                                                 :x 0
                                                 :y 0}
                 (uniq "cloud-center-text-img") {:type "image"
                                                 :src (get-in args [:image :src])
                                                 :transition (uniq "cloud-center-text-img")
                                                 :x 400
                                                 :y 120}
                 (uniq "cloud-center-text")     (cloud-label (:whole-text args) 250)
                 ;; Result cloud with the whole word and its picture; shown
                 ;; only once the round is solved.
                 (uniq "cloud-center")          {:type "group"
                                                 :x 600
                                                 :y 176
                                                 :transition (uniq "cloud-center")
                                                 :visible false
                                                 :opacity 0
                                                 :children [(uniq "cloud-center-img")
                                                            (uniq "cloud-center-text")
                                                            (uniq "cloud-center-text-img")]
                                                 :states {:hide {:visible false} :show {:visible true}}}}]
    [actions objects [(uniq "cloud-center") (uniq "moving-clouds")]]))
(defn- set-data
  "Writes the round texts and the result image from `args` into the
  built-in first round's objects (the `--1` suffixed ones) of `activity-data`."
  [activity-data args]
  (let [field-values {[:objects :cloud-left-text--1 :text]      (:left-text args)
                      [:objects :cloud-right-text--1 :text]     (:right-text args)
                      [:objects :cloud-center-text--1 :text]    (:whole-text args)
                      [:objects :cloud-center-text-img--1 :src] (get-in args [:image :src])}]
    (reduce-kv assoc-in activity-data field-values)))
(defn create-activity
  "Creates a fresh onset-and-rime activity from template `args` and stores
  the supplied options under saved-props so they can be edited later."
  [args]
  (let [saved-options (select-keys args [:left-text :right-text :whole-text :image])]
    (-> (common/init-metadata m t args)
        (set-data args)
        (assoc-in [:metadata :saved-props :template-options] saved-options))))
(defn- add-word
  "Adds one round to `scene`: merges in the cloud objects and dialog actions
  built by `add-cloud` for the scene's current unique suffix, bumps the
  finish counter, registers the dialog track, then advances the suffix."
  [scene args]
  (let [suffix (common/get-unique-suffix scene)
        [actions objects scene-objects] (add-cloud suffix args)]
    (-> scene
        (update-in [:objects] merge objects)
        (update-in [:actions] merge actions)
        ;; One more round must be completed before the activity finishes.
        (update-in [:actions :check-scene-finished :value] inc)
        (common/add-scene-object scene-objects)
        ;; Reuse the already-read suffix instead of querying the scene again.
        (common/add-track-actions (vec (map name (keys actions))) "dialog" (str "Word " (inc suffix)))
        (common/update-unique-suffix))))
(defn- add-word-action
  "Handles the \"add word\" editor action: adds the round to the scene and
  appends it, tagged with its suffix id, to the saved template options."
  [scene args]
  (let [round (assoc args :id (common/get-unique-suffix scene))]
    (-> scene
        (add-word args)
        (update-in [:metadata :saved-props :template-options :rounds] concat [round]))))
(defn- delete-round
  "Removes the actions and objects that `add-cloud` created for `round-id`.

  `add-cloud` is called only to regenerate the round's names, so its args
  map can be empty; the returned scene-object names are not needed here."
  [activity-data round-id]
  (let [[actions objects _scene-objects] (add-cloud round-id {})]
    (-> activity-data
        (common/remove-objects (keys objects))
        (common/remove-actions (keys actions)))))
(defn- delete-rounds
  "Deletes from `activity-data` every round whose id is in `rounds`."
  [activity-data rounds]
  (->> rounds
       (reduce delete-round activity-data)))
(defn- edit-round
  "Rewrites the texts and the result image of the existing round identified
  by the :id of `round` inside `activity-data`."
  [activity-data {:keys [id left-text right-text whole-text image]}]
  (let [obj-name (fn [base] (common/make-name-unique-by-suffix base id))]
    (-> activity-data
        (assoc-in [:objects (obj-name "cloud-left-text") :text] left-text)
        (assoc-in [:objects (obj-name "cloud-right-text") :text] right-text)
        (assoc-in [:objects (obj-name "cloud-center-text") :text] whole-text)
        (assoc-in [:objects (obj-name "cloud-center-text-img") :src] (:src image)))))
(defn- edit-rounds
  "Applies `edit-round` to `activity-data` for every round map in `rounds`."
  [activity-data rounds]
  (->> rounds
       (reduce edit-round activity-data)))
(defn- add-rounds
  "Adds every round map in `rounds` to `activity-data` via `add-word`."
  [activity-data rounds]
  (->> rounds
       (reduce add-word activity-data)))
(defn- process-rounds
  "Reconciles the previously saved rounds with the freshly submitted
  `rounds` list: deletes rounds that no longer exist, edits the ones that
  remain, adds the new ones, then refreshes the counters and saved props."
  [activity-data {:keys [rounds delete-last-round] :as args}]
  (let [prev-rounds-number (-> activity-data
                               (get-in [:metadata :saved-props :template-options :rounds])
                               (count))
        new-rounds-number (count rounds)
        ;; Ids of rounds to remove. NOTE(review): these values are used as
        ;; round ids/suffixes by delete-round; this assumes round ids equal
        ;; their ordinal position in the saved list — TODO confirm against
        ;; common/get-unique-suffix numbering.
        rounds-to-delete (concat (if delete-last-round
                                   [prev-rounds-number]
                                   [])
                                 (range new-rounds-number prev-rounds-number))
        ;; Rounds that already exist and are edited in place (minus the
        ;; explicitly deleted last one, if any).
        rounds-to-edit (->> rounds
                            (take prev-rounds-number)
                            (drop-last (if delete-last-round 1 0)))
        ;; Everything past the edited prefix is brand new.
        rounds-to-add (->> rounds
                           (drop (count rounds-to-edit)))]
    (-> activity-data
        (delete-rounds rounds-to-delete)
        (edit-rounds rounds-to-edit)
        (add-rounds rounds-to-add)
        ;; Finishing requires exactly one completion per round.
        (assoc-in [:actions :check-scene-finished :value] new-rounds-number)
        ;; Reset the suffix counter to match the new round count, overriding
        ;; whatever add-rounds advanced it to.
        (assoc-in [:metadata :unique-suffix] new-rounds-number)
        (assoc-in [:metadata :saved-props :template-options :rounds] rounds))))
(defn- template-options
  "Applies edited template options: refreshes the built-in first round's
  data, then reconciles the rounds list."
  [activity-data args]
  (process-rounds (set-data activity-data args) args))
(defn- update-activity
  "Dispatches an editor action on an existing activity by its :action-name.

  Throws IllegalArgumentException when the action name is unknown."
  [old-data {:keys [action-name] :as args}]
  (condp = (keyword action-name)
    :add-ball         (add-word-action old-data args)
    :template-options (template-options old-data args)))
;; Register this template with the engine: `m` is the template descriptor,
;; `create-activity` builds new activities, `update-activity` applies
;; editor actions to existing ones.
(core/register-template
  m create-activity update-activity)
| null | https://raw.githubusercontent.com/VisionsGlobalEmpowerment/webchange/118ba5ee407ba1261bac40a6ba5729ccda6e8150/src/clj/webchange/templates/library/onset_and_rime.clj | clojure | (ns webchange.templates.library.onset-and-rime
(:require
[webchange.templates.core :as core]
[webchange.templates.utils.common :as common]))
(def template-options
[{:type "note"
:text "Bring together onset and rime to form a word. Choose the words and writing used below along with an image for the word. Create as many rounds as you’d like."}
{:type "onset-rime-rounds"}])
(def m {:id 25
:name "Onset and rime"
:tags ["Guided Practice" "onset and rime" "blending"]
:description "Orally blend the onsets, rimes, and phonemes of words and orally delete the onsets of words, with the support of pictures or objects."
:options {:left-text {:label "Left cloud"
:placeholder "Left cloud"
:type "string"}
:right-text {:label "Right cloud"
:placeholder "Right cloud"
:type "string"}
:whole-text {:label "Whole word cloud"
:placeholder "Whole word cloud"
:type "string"}
:image {:label "Image at result"
:type "image"
:options {:max-width 100
:max-height 100
:min-height 50
:min-width 50}}}
:actions {:add-ball {:title "Add word",
:options {:left-text {:label "Left cloud"
:placeholder "Left cloud"
:type "string"}
:right-text {:label "Right cloud"
:placeholder "Right cloud"
:type "string"}
:whole-text {:label "Whole word cloud"
:placeholder "Whole word cloud"
:type "string"}
:image {:label "Image at result"
:type "image"
:options {:max-width 100
:max-height 100
:min-height 50
:min-width 50}}}}
:template-options {:title "Template Options"
:options template-options}}})
(def glow-color 0x2a76ff)
(def glow-strength 2)
(def cloud-states {:highlighted-0 {:glow-pulsation {:duration 200
:min-value 0
:max-value glow-strength
:color glow-color}}
:highlighted-1 {:glow-pulsation {:duration 160
:min-value 0
:max-value glow-strength
:color glow-color}}
:highlighted-2 {:glow-pulsation {:duration 130
:min-value 0
:max-value glow-strength
:color glow-color}}
:highlighted-3 {:glow-pulsation {:duration 100
:min-value 0
:max-value glow-strength
:color glow-color}}
:not-highlighted {:glow-pulsation false}})
(def t {:assets [{:url "/raw/img/onset-and-rime/background.png", :size 10 :type "image"}
{:url "/raw/img/onset-and-rime/cloud.png", :size 1, :type "image"}],
:objects {:background {:type "background", :src "/raw/img/onset-and-rime/background.png"},
:senoravaca {:type "animation"
:x 1100
:y 970
:name "senoravaca"
:anim "idle"
:speed 0.3
:skin "vaca"
:scale-x 0.75 :scale-y 0.75
:editable? {:select true :drag true :show-in-tree? true}
:start true
:scene-name "senoravaca"}
:cloud-left-img--1 {:type "image"
:src "/raw/img/onset-and-rime/cloud.png"
:x 0
:y 0
:states cloud-states}
:cloud-left-text--1 {:type "text"
:text ""
:x 320
:y 220
:align "center"
:vertical-align "bottom"
:font-family "Lexend Deca"
:font-size 110
:fill "black"}
:cloud-left--1 {:type "group"
:x 25
:y 176
:transition "cloud-left--1"
:children ["cloud-left-img--1"
"cloud-left-text--1"]},
:cloud-right-img--1 {:type "image"
:src "/raw/img/onset-and-rime/cloud.png"
:transition "cloud-right-img"
:states cloud-states
:x 0
:y 0}
:cloud-right-text--1 {:type "text"
:text ""
:x 320
:y 220
:align "center"
:vertical-align "bottom"
:font-family "Lexend Deca"
:font-size 110
:fill "black"}
:cloud-right--1 {:type "group"
:x 1214
:y 176
:transition "cloud-right--1"
:children ["cloud-right-img--1"
"cloud-right-text--1"]}
:moving-clouds--1 {:type "group"
:visible true
:transition "moving-clouds--1"
:children ["cloud-right--1"
"cloud-left--1"]
:states {:hide {:visible false} :show {:visible true}}}
:cloud-center-img--1 {:type "image"
:src "/raw/img/onset-and-rime/cloud.png"
:transition "cloud-center-img"
:x 0
:y 0}
:cloud-center-text-img--1 {:type "image"
:src ""
:transition "cloud-center-text-img"
:x 400
:y 120}
:cloud-center-text--1 {:type "text"
:text ""
:x 250
:y 220
:align "center"
:vertical-align "bottom"
:font-family "Lexend Deca"
:font-size 110
:fill "black"}
:cloud-center--1 {:type "group"
:x 600
:y 176
:transition "cloud-center--1"
:visible false
:opacity 0
:children ["cloud-center-img--1"
"cloud-center-text--1"
"cloud-center-text-img--1"]
:states {:hide {:visible false} :show {:visible true}}}}
:scene-objects [["background"] ["senoravaca"] ["cloud-center--1" "moving-clouds--1"]],
:actions {:sync-highlights {:type "parallel"
:data [{:type "state"
:from-var [{:var-name "next-cloud-img" :action-property "target"}
{:template "highlighted-%" :var-name "step-counter" :action-property "id"}]}
{:type "state" :id "not-highlighted"
:from-var [{:var-name "prev-cloud-img" :action-property "target"}]}
{:type "state" :id "disable"
:from-var [{:var-name "prev-cloud" :action-property "target"}]}
{:type "state" :id "enable"
:from-var [{:var-name "next-cloud" :action-property "target"}]}]}
:init-scene-cloud-vars {:type "sequence-data"
:data [{:type "set-variable" :var-name "cloud-left-x" :var-value 25}
{:type "set-variable" :var-name "cloud-right-x" :var-value 1214}
{:type "set-variable" :var-name "next-cloud"
:from-var [{:template "cloud-left-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "prev-cloud"
:from-var [{:template "cloud-right-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "prev-cloud-img"
:from-var [{:template "cloud-right-img-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "next-cloud-img"
:from-var [{:template "cloud-left-img-%" :var-name "unique-suffix" :action-property "var-value"}]}]}
:update-left {:type "sequence-data"
:data [{:type "calc" :var-name "cloud-left-x" :operation "plus" :value-1 140
:from-var [{:var-name "cloud-left-x" :action-property "value-2"}]}
{:type "set-variable" :var-name "next-cloud" :var-value "cloud-right"
:from-var [{:template "cloud-right-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "prev-cloud" :var-value "cloud-left"
:from-var [{:template "cloud-left-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "prev-cloud-img"
:from-var [{:template "cloud-left-img-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "next-cloud-img"
:from-var [{:template "cloud-right-img-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "action" :id "sync-highlights"}]}
:update-right {:type "sequence-data"
:data [{:type "calc" :var-name "cloud-right-x" :operation "plus" :value-1 -140
:from-var [{:var-name "cloud-right-x" :action-property "value-2"}]}
{:type "set-variable" :var-name "next-cloud"
:from-var [{:template "cloud-left-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "prev-cloud"
:from-var [{:template "cloud-right-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "prev-cloud-img"
:from-var [{:template "cloud-right-img-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "set-variable" :var-name "next-cloud-img"
:from-var [{:template "cloud-left-img-%" :var-name "unique-suffix" :action-property "var-value"}]}
{:type "action" :id "sync-highlights"}]}
:animate-clouds {:type "sequence-data"
:data [{:type "transition"
:to {:duration 0.5}
:from-var [{:var-name "cloud-left-x" :action-property "to.x"}
{:template "cloud-left-%" :var-name "unique-suffix" :action-property "transition-id"}]}
{:type "transition"
:transition-id "cloud-right"
:to {:duration 0.5}
:from-var [{:var-name "cloud-right-x" :action-property "to.x"}
{:template "cloud-right-%" :var-name "unique-suffix" :action-property "transition-id"}]}]}
:shake-step-left {:type "sequence-data"
:data [{:type "transition",
:from-var [{:var-name "cloud-left-x"
:action-property "to.x"
:offset 20}]
:to {:duration 0.1},
:from-params [{:param-property "target", :action-property "transition-id"}]}
{:type "transition",
:from-var [{:var-name "cloud-left-x"
:action-property "to.x"
:offset 0}]
:to {:duration 0.1},
:from-params [{:param-property "target", :action-property "transition-id"}]}]}
:shake-left {:type "sequence-data"
:data [{:type "action" :id "shake-step-left"}
{:type "action" :id "shake-step-left"}
{:type "action" :id "shake-step-left"}]}
:cloud-left-click-check {:type "test-var-scalar",
:success "cloud-left-clicked",
:var-name "left-click-unlocked"
:value true}
:cloud-left-clicked {:type "test-var-scalar",
:success "cloud-left-clicked-correct",
:fail "shake-left",
:var-name "next-cloud"
:from-params [{:param-property "target", :action-property "value"}]}
:cloud-left-clicked-correct {:type "sequence-data"
:data [{:type "action" :id "update-left"}
{:type "action"
:from-var [{:template "cloud-left-dialog-%" :var-name "unique-suffix" :action-property "id"}]}
{:type "action" :id "animate-clouds"}]}
:shake-step-right {:type "sequence-data"
:data [{:type "transition",
:from-var [{:var-name "cloud-right-x"
:action-property "to.x"
:offset 20}]
:to {:duration 0.1},
:from-params [{:param-property "target", :action-property "transition-id"}]}
{:type "transition",
:from-var [{:var-name "cloud-right-x"
:action-property "to.x"
:offset 0}]
:to {:duration 0.1},
:from-params [{:param-property "target", :action-property "transition-id"}]}]}
:shake-right {:type "sequence-data"
:data [{:type "action" :id "shake-step-right"}
{:type "action" :id "shake-step-right"}
{:type "action" :id "shake-step-right"}]}
:cloud-right-clicked {:type "test-var-scalar",
:success "cloud-right-clicked-correct",
:fail "shake-right",
:var-name "next-cloud"
:from-params [{:param-property "target", :action-property "value"}]}
:cloud-right-clicked-correct {:type "parallel"
:data [{:type "action"
:from-var [{:template "cloud-right-dialog-%" :var-name "unique-suffix" :action-property "id"}]}
{:type "set-variable" :var-name "left-click-unlocked" :var-value false}
{:type "sequence-data"
:data [{:type "action" :id "update-right"}
{:type "counter" :counter-action "increase" :counter-id "step-counter"}
{:type "action" :id "animate-clouds"}]}
{:type "sequence-data"
:data [{:type "test-var-inequality"
:var-name "step-counter",
:value 3,
:inequality ">=",
:success "finish-step",}
{:type "set-variable" :var-name "left-click-unlocked" :var-value true}]}]}
:finish-step {:type "sequence-data"
:unique-tag "finish-step"
:data [{:type "action" :id "animate-finish-step"}
{:type "action"
:from-var [{:template "correct-answer-dialog-%"
:var-name "unique-suffix"
:action-property "id"}]}
{:type "action" :id "next-step"}]}
:animate-finish-step {:type "sequence-data"
:data [{:type "transition" :to {:opacity 0 :duration 0.1}
:from-var [{:template "moving-clouds-%" :var-name "unique-suffix" :action-property "transition-id"}]}
{:type "state" :id "hide"
:from-var [{:template "moving-clouds-%" :var-name "unique-suffix" :action-property "target"}]}
{:type "state" :id "show"
:from-var [{:template "cloud-center-%" :var-name "unique-suffix" :action-property "target"}]}
{:type "transition" :to {:opacity 1 :duration 1}
:from-var [{:template "cloud-center-%" :var-name "unique-suffix" :action-property "transition-id"}]}
{:type "action"
:from-var [{:template "cloud-center-dialog-%" :var-name "unique-suffix" :action-property "id"}]}
{:type "empty" :duration 2000}
{:type "transition" :to {:opacity 0 :duration 0.01}
:from-var [{:template "cloud-center-%" :var-name "unique-suffix" :action-property "transition-id"}]}
{:type "state" :id "hide"
:from-var [{:template "cloud-center-%" :var-name "unique-suffix" :action-property "target"}]}]}
:next-step {:type "sequence-data"
:data [{:type "counter" :counter-action "reset" :counter-value 0 :counter-id "step-counter"}
{:type "counter" :counter-action "increase" :counter-id "goal-counter"}
{:type "action" :id "check-scene-finished"}]}
:check-scene-finished {:type "test-var-inequality"
:var-name "goal-counter",
:value 0,
:inequality ">=",
:success "finish-scene",
:fail "init-next",}
:init-next {:type "sequence-data"
:data [{:type "counter" :counter-action "increase" :counter-id "unique-suffix"}
{:type "action" :id "init-scene-cloud-vars"}
{:type "action" :id "sync-highlights"}
{:type "set-variable" :var-name "left-click-unlocked" :var-value true}
{:type "state" :id "show"
:from-var [{:template "moving-clouds-%" :var-name "unique-suffix" :action-property "target"}]}]}
:introduce-task {:type "parallel"
:data [{:type "action" :id "intro-dialog"}
{:type "sequence-data"
:data [{:type "set-variable" :var-name "unique-suffix" :var-value -1}
{:type "action" :id "init-scene-cloud-vars"}
{:type "set-variable" :var-name "step-counter" :var-value 0}
{:type "action" :id "sync-highlights"}
{:type "empty" :duration 500}
{:type "set-variable" :var-name "step-counter" :var-value 0}
{:type "action" :id "update-left"}
{:type "action" :id "animate-clouds"}
{:type "empty" :duration 410}
{:type "action" :id "update-right"}
{:type "action" :id "animate-clouds"}
{:type "empty" :duration 320}
{:type "set-variable" :var-name "step-counter" :var-value 1}
{:type "action" :id "update-left"}
{:type "action" :id "animate-clouds"}
{:type "empty" :duration 230}
{:type "action" :id "update-right"}
{:type "action" :id "animate-clouds"}
{:type "empty" :duration 140}
{:type "set-variable" :var-name "step-counter" :var-value 2}
{:type "action" :id "update-left"}
{:type "action" :id "animate-clouds"}
{:type "empty" :duration 50}
{:type "action" :id "update-right"}
{:type "action" :id "animate-clouds"}
{:type "empty" :duration 10}
{:type "action" :id "animate-finish-step"}]}]}
:cloud-center-dialog--1 {:type "empty" :duration 0}
:init-scene {:type "sequence-data"
:data [{:type "start-activity"},
{:type "action" :id "introduce-task"}
{:type "set-variable" :var-name "left-click-unlocked" :var-value true}
{:type "set-variable" :var-name "unique-suffix" :var-value 0}
{:type "action" :id "init-scene-cloud-vars"}
{:type "counter" :counter-action "reset" :counter-value 0 :counter-id "goal-counter"}
{:type "counter" :counter-action "reset" :counter-value 0 :counter-id "step-counter"}
{:type "action" :id "sync-highlights"}
{:type "state" :id "show"
:from-var [{:template "moving-clouds-%" :var-name "unique-suffix" :action-property "target"}]}]}
:intro-dialog {:type "sequence-data",
:editor-type "dialog",
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence", :phrase-text "New action", :audio nil}]}],
:phrase "intro",
:phrase-description "Activity Introduction"}
:finish-scene {:type "sequence-data",
:data [{:type "action" :id "finish-dialog"}
{:type "finish-activity"}]}
:finish-dialog {:type "sequence-data",
:editor-type "dialog",
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence", :phrase-text "New action", :audio nil}]}],
:phrase "Task Completion",
:phrase-description "Task Completion"}}
:triggers {:start {:on "start" :action "init-scene"}}
:metadata {:autostart true}})
(defn add-cloud
[suffix args]
[{(common/make-name-unique-by-suffix "cloud-left-dialog" suffix) {:type "sequence-data",
:editor-type "dialog",
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence", :phrase-text (:left-text args), :audio nil}]}],
:phrase "cloud-left-dialog",
:phrase-description "Cloud left dialog"}
(common/make-name-unique-by-suffix "cloud-right-dialog" suffix) {:type "sequence-data",
:editor-type "dialog",
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence", :phrase-text (:right-text args), :audio nil}]}],
:phrase "cloud-right-dialog",
:phrase-description "Cloud right dialog"}
(common/make-name-unique-by-suffix "cloud-center-dialog" suffix) {:type "sequence-data",
:editor-type "dialog",
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence", :phrase-text (:whole-text args), :audio nil}]}],
:phrase "cloud-center-dialog",
:phrase-description "Cloud center dialog"}
(common/make-name-unique-by-suffix "correct-answer-dialog" suffix) {:type "sequence-data",
:editor-type "dialog",
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence",
:phrase-text "New action",
:audio nil}]}],
:phrase "correct-answer",
:phrase-description "Correct answer"}}
{(common/make-name-unique-by-suffix "cloud-left-img" suffix) {:type "image"
:src "/raw/img/onset-and-rime/cloud.png"
:x 0
:states cloud-states,
:y 0}
(common/make-name-unique-by-suffix "cloud-left-text" suffix) {:type "text"
:text (:left-text args)
:x 320
:y 220
:align "center"
:vertical-align "bottom"
:font-family "Lexend Deca"
:font-size 110
:fill "black"}
(common/make-name-unique-by-suffix "cloud-left" suffix) {:type "group"
:x 25
:y 176
:transition (common/make-name-unique-by-suffix "cloud-left" suffix)
:children [(common/make-name-unique-by-suffix "cloud-left-img" suffix)
(common/make-name-unique-by-suffix "cloud-left-text" suffix)]
:states {:disable {:interactive false}
:enable {:interactive true}}
:actions {:click {:type "action"
:id "cloud-left-click-check"
:on "click"
:params {:target (common/make-name-unique-by-suffix "cloud-left" suffix)
:target-img (common/make-name-unique-by-suffix "cloud-left-img" suffix)}}}},
(common/make-name-unique-by-suffix "cloud-right-img" suffix) {:type "image"
:src "/raw/img/onset-and-rime/cloud.png"
:transition (common/make-name-unique-by-suffix "cloud-right-img" suffix)
:states cloud-states
:x 0
:y 0}
(common/make-name-unique-by-suffix "cloud-right-text" suffix) {:type "text"
:text (:right-text args)
:x 320
:y 220
:align "center"
:vertical-align "bottom"
:font-family "Lexend Deca"
:font-size 110
:fill "black"}
(common/make-name-unique-by-suffix "cloud-right" suffix) {:type "group"
:x 1214
:y 176
:transition (common/make-name-unique-by-suffix "cloud-right" suffix)
:children [(common/make-name-unique-by-suffix "cloud-right-img" suffix)
(common/make-name-unique-by-suffix "cloud-right-text" suffix)]
:states {:disable {:interactive false}
:enable {:interactive true}}
:actions {:click {:type "action"
:id "cloud-right-clicked"
:on "click"
:params {:target (common/make-name-unique-by-suffix "cloud-right" suffix)
:target-img (common/make-name-unique-by-suffix "cloud-right-img" suffix)}}}}
(common/make-name-unique-by-suffix "moving-clouds" suffix) {:type "group"
:visible false
:transition (common/make-name-unique-by-suffix "moving-clouds" suffix)
:children [(common/make-name-unique-by-suffix "cloud-right" suffix)
(common/make-name-unique-by-suffix "cloud-left" suffix)]
:states {:hide {:visible false} :show {:visible true}}}
(common/make-name-unique-by-suffix "cloud-center-img" suffix) {:type "image"
:src "/raw/img/onset-and-rime/cloud.png"
:transition (common/make-name-unique-by-suffix "cloud-center-img" suffix)
:x 0
:y 0}
(common/make-name-unique-by-suffix "cloud-center-text-img" suffix) {:type "image"
:src (get-in args [:image :src])
:transition (common/make-name-unique-by-suffix "cloud-center-text-img" suffix)
:x 400
:y 120}
(common/make-name-unique-by-suffix "cloud-center-text" suffix) {:type "text"
:text (:whole-text args)
:x 250
:y 220
:align "center"
:vertical-align "bottom"
:font-family "Lexend Deca"
:font-size 110
:fill "black"}
(common/make-name-unique-by-suffix "cloud-center" suffix) {:type "group"
:x 600
:y 176
:transition (common/make-name-unique-by-suffix "cloud-center" suffix)
:visible false
:opacity 0
:children [(common/make-name-unique-by-suffix "cloud-center-img" suffix)
(common/make-name-unique-by-suffix "cloud-center-text" suffix)
(common/make-name-unique-by-suffix "cloud-center-text-img" suffix)]
:states {:hide {:visible false} :show {:visible true}}}}
[(common/make-name-unique-by-suffix "cloud-center" suffix) (common/make-name-unique-by-suffix "moving-clouds" suffix)]])
(defn- set-data
[activity-data args]
(-> activity-data
(assoc-in [:objects :cloud-left-text--1 :text] (:left-text args))
(assoc-in [:objects :cloud-right-text--1 :text] (:right-text args))
(assoc-in [:objects :cloud-center-text--1 :text] (:whole-text args))
(assoc-in [:objects :cloud-center-text-img--1 :src] (get-in args [:image :src]))))
(defn create-activity
[args]
(-> (common/init-metadata m t args)
(set-data args)
(assoc-in [:metadata :saved-props :template-options] (select-keys args [:left-text :right-text :whole-text :image]))))
(defn- add-word
[scene args]
(let [suffix (common/get-unique-suffix scene)
[actions objects scene-objects] (add-cloud suffix args)]
(-> scene
(update-in [:objects] merge objects)
(update-in [:actions] merge actions)
(update-in [:actions :check-scene-finished :value] inc)
(common/add-scene-object scene-objects)
(common/add-track-actions (vec (map name (keys actions))) "dialog" (str "Word " (inc (common/get-unique-suffix scene))))
(common/update-unique-suffix))))
(defn- add-word-action
[scene args]
(let [suffix (common/get-unique-suffix scene)]
(-> scene
(add-word args)
(update-in [:metadata :saved-props :template-options :rounds] concat [(assoc args
:id suffix)]))))
(defn- delete-round
[activity-data round-id]
(let [[actions objects scene-objects] (add-cloud round-id {})
action-names (keys actions)
object-names (keys objects)]
(-> activity-data
(common/remove-objects object-names)
(common/remove-actions action-names))))
(defn- delete-rounds
[activity-data rounds]
(reduce delete-round activity-data rounds))
(defn- edit-round
[activity-data {:keys [id left-text right-text whole-text image] :as round}]
(let [left-text-name (common/make-name-unique-by-suffix "cloud-left-text" id)
right-text-name (common/make-name-unique-by-suffix "cloud-right-text" id)
center-img-name (common/make-name-unique-by-suffix "cloud-center-text-img" id)
center-text-name (common/make-name-unique-by-suffix "cloud-center-text" id)]
(-> activity-data
(assoc-in [:objects left-text-name :text] left-text)
(assoc-in [:objects right-text-name :text] right-text)
(assoc-in [:objects center-text-name :text] whole-text)
(assoc-in [:objects center-img-name :src] (:src image)))))
(defn- edit-rounds
[activity-data rounds]
(reduce edit-round activity-data rounds))
(defn- add-rounds
[activity-data rounds]
(reduce add-word activity-data rounds))
(defn- process-rounds
[activity-data {:keys [rounds delete-last-round] :as args}]
(let [prev-rounds-number (-> activity-data
(get-in [:metadata :saved-props :template-options :rounds])
(count))
new-rounds-number (count rounds)
rounds-to-delete (concat (if delete-last-round
[prev-rounds-number]
[])
(range new-rounds-number prev-rounds-number))
rounds-to-edit (->> rounds
(take prev-rounds-number)
(drop-last (if delete-last-round 1 0)))
rounds-to-add (->> rounds
(drop (count rounds-to-edit)))]
(-> activity-data
(delete-rounds rounds-to-delete)
(edit-rounds rounds-to-edit)
(add-rounds rounds-to-add)
(assoc-in [:actions :check-scene-finished :value] new-rounds-number)
(assoc-in [:metadata :unique-suffix] new-rounds-number)
(assoc-in [:metadata :saved-props :template-options :rounds] rounds))))
(defn- template-options
[activity-data args]
(-> activity-data
(set-data args)
(process-rounds args)))
(defn- update-activity
[old-data {:keys [action-name] :as args}]
(case (keyword action-name)
:add-ball (add-word-action old-data args)
:template-options (template-options old-data args)))
(core/register-template
m create-activity update-activity)
| |
67741cea72d92a8541391531b2b0edb854df019092e483fcdabef72ad090b5e0 | hemmi/coq2scala | btermdn.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Term
open Pattern
open Names
(** Discrimination nets with bounded depth. *)
module Make :
functor (Z : Map.OrderedType) ->
sig
type t
val create : unit -> t
val add : transparent_state option -> t -> (constr_pattern * Z.t) -> t
val rmv : transparent_state option -> t -> (constr_pattern * Z.t) -> t
val lookup : transparent_state option -> t -> constr -> (constr_pattern * Z.t) list
val app : ((constr_pattern * Z.t) -> unit) -> t -> unit
end
val dnet_depth : int ref
| null | https://raw.githubusercontent.com/hemmi/coq2scala/d10f441c18146933a99bf2088116bd213ac3648d/coq-8.4pl2-old/tactics/btermdn.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* Discrimination nets with bounded depth. | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Term
open Pattern
open Names
module Make :
functor (Z : Map.OrderedType) ->
sig
type t
val create : unit -> t
val add : transparent_state option -> t -> (constr_pattern * Z.t) -> t
val rmv : transparent_state option -> t -> (constr_pattern * Z.t) -> t
val lookup : transparent_state option -> t -> constr -> (constr_pattern * Z.t) list
val app : ((constr_pattern * Z.t) -> unit) -> t -> unit
end
val dnet_depth : int ref
|
094b87e36c90b8927122e82480c11ab9e593ba3e3fd504bfa273b1b4416a94b8 | maximedenes/native-coq | refine.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
JCF -- 6 janvier 1998 EXPERIMENTAL
* , en quelque sorte , d'avoir de " vraies " métavariables
* dans Coq , c'est - à - dire
* où les trous sont typés -- et que les sous - buts correspondants
* soient engendrés pour finir la preuve .
*
* :
* J'ai le but
* forall ( x : ) , { y : | ( minus y x ) = x }
* et
* fun ( x : ) = > exist [ y : nat]((minus y x)=x ) ( plus x x ) ?
* ce qui engendre le but
* ( minus ( plus x x ) x ) = x
* L'idée est, en quelque sorte, d'avoir de "vraies" métavariables
* dans Coq, c'est-à-dire de donner des preuves incomplètes -- mais
* où les trous sont typés -- et que les sous-buts correspondants
* soient engendrés pour finir la preuve.
*
* Exemple :
* J'ai le but
* forall (x:nat), { y:nat | (minus y x) = x }
* et je donne la preuve incomplète
* fun (x:nat) => exist nat [y:nat]((minus y x)=x) (plus x x) ?
* ce qui engendre le but
* (minus (plus x x) x) = x
*)
, on procède de la manière suivante :
*
* 1 . terme contenant des variables
* existentielles Evar i.e. " _ " en syntaxe concrète .
* La résolution de ces variables n'est plus nécessairement totale
* ( ise_resolve called with fail_evar = false ) et les variables
* existentielles restantes sont remplacées par des méta - variables
* types ( celui est connu : , soit trouvé
* pendant la phase de résolution ) .
*
* 2 . On met ensuite le terme " à plat " i.e. on n'autorise des MV qu'au
* , si nécessaire , on donne
* à son tour pour .
* : le terme ( f a _ ( fun ( x : ) = > e _ ) ) donne
* ( f a ? 1 ? 2 ) avec :
* - ? 2 : = fun ( x : ) = > ? 3
* - ? 3 : = e ? 4
* ? 1 et ? 4 donneront des buts
*
* 3 . On écrit ensuite une tactique tcc qui engendre les sous - buts
* à partir d'une preuve incomplète .
*
* 1. Un terme de preuve incomplet est un terme contenant des variables
* existentielles Evar i.e. "_" en syntaxe concrète.
* La résolution de ces variables n'est plus nécessairement totale
* (ise_resolve called with fail_evar=false) et les variables
* existentielles restantes sont remplacées par des méta-variables
* castées par leur types (celui est connu : soit donné, soit trouvé
* pendant la phase de résolution).
*
* 2. On met ensuite le terme "à plat" i.e. on n'autorise des MV qu'au
* permier niveau et pour chacune d'elles, si nécessaire, on donne
* à son tour un terme de preuve incomplet pour la résoudre.
* Exemple: le terme (f a _ (fun (x:nat) => e _)) donne
* (f a ?1 ?2) avec:
* - ?2 := fun (x:nat) => ?3
* - ?3 := e ?4
* ?1 et ?4 donneront des buts
*
* 3. On écrit ensuite une tactique tcc qui engendre les sous-buts
* à partir d'une preuve incomplète.
*)
open Pp
open Errors
open Util
open Names
open Term
open Termops
open Namegen
open Tacmach
open Sign
open Environ
open Reduction
open Typing
open Tactics
open Tacticals
open Printer
type term_with_holes = TH of constr * meta_type_map * sg_proofs
and sg_proofs = (term_with_holes option) list
(* pour debugger *)
let rec pp_th (TH(c,mm,sg)) =
(str"TH=[ " ++ hov 0 (pr_lconstr c ++ fnl () ++
(* pp_mm mm ++ fnl () ++ *)
pp_sg sg) ++ str "]")
and pp_mm l =
hov 0 (prlist_with_sep (fun _ -> (fnl ()))
(fun (n,c) -> (int n ++ str" --> " ++ pr_lconstr c)) l)
and pp_sg sg =
hov 0 (prlist_with_sep (fun _ -> (fnl ()))
(function None -> (str"None") | Some th -> (pp_th th)) sg)
compute_metamap : constr - > ' a evar_map - > term_with_holes
* réalise le 2 .
*
* Pour cela , on renvoie une meta_map qui indique pour chaque meta - variable
* si elle correspond à un but ( None ) ou si elle réduite à son tour
* par ( Some c ) .
*
* On a donc l'INVARIANT suivant : le terme c rendu est " de niveau 1 "
* -- i.e. à plat -- et la meta_map contient autant d'éléments qu'il y
* a de meta - variables dans c. On suppose de plus que l'ordre dans la
* meta_map correspond à celui des buts qui seront engendrés par le refine .
* réalise le 2. ci-dessus
*
* Pour cela, on renvoie une meta_map qui indique pour chaque meta-variable
* si elle correspond à un but (None) ou si elle réduite à son tour
* par un terme de preuve incomplet (Some c).
*
* On a donc l'INVARIANT suivant : le terme c rendu est "de niveau 1"
* -- i.e. à plat -- et la meta_map contient autant d'éléments qu'il y
* a de meta-variables dans c. On suppose de plus que l'ordre dans la
* meta_map correspond à celui des buts qui seront engendrés par le refine.
*)
let replace_by_meta env sigma = function
| TH (m, mm, sgp) when isMeta (strip_outer_cast m) -> m,mm,sgp
| (TH (c,mm,_)) as th ->
let n = Evarutil.new_meta() in
let m = mkMeta n in
quand on introduit une mv on son type
let ty = match kind_of_term c with
| Lambda (Name id,c1,c2) when isCast c2 ->
let _,_,t = destCast c2 in mkNamedProd id c1 t
| Lambda (Anonymous,c1,c2) when isCast c2 ->
let _,_,t = destCast c2 in mkArrow c1 t
| _ -> (* (App _ | Case _) -> *)
let sigma' =
List.fold_right (fun (m,t) sigma -> Evd.meta_declare m t sigma)
mm sigma in
Retyping.get_type_of env sigma' c
| Fix ( ( _ , j),(v , _ , _ ) ) - >
) ( * en pleine confiance !
| Fix ((_,j),(v,_,_)) ->
v.(j) (* en pleine confiance ! *)
| _ -> invalid_arg "Tcc.replace_by_meta (TO DO)"
*)
in
mkCast (m,DEFAULTcast, ty),[n,ty],[Some th]
exception NoMeta
let replace_in_array keep_length env sigma a =
if array_for_all (function (TH (_,_,[])) -> true | _ -> false) a then
raise NoMeta;
let a' = Array.map (function
| (TH (c,mm,[])) when not keep_length -> c,mm,[]
| th -> replace_by_meta env sigma th) a
in
let v' = Array.map pi1 a' in
let mm = Array.fold_left (@) [] (Array.map pi2 a') in
let sgp = Array.fold_left (@) [] (Array.map pi3 a') in
v',mm,sgp
let fresh env n =
let id = match n with Name x -> x | _ -> id_of_string "_H" in
next_ident_away_in_goal id (ids_of_named_context (named_context env))
let rec compute_metamap env sigma c = match kind_of_term c with
(* le terme est directement une preuve *)
| (Const _ | Evar _ | Ind _ | Construct _ |
Sort _ | Var _ | Rel _ | NativeInt _) ->
TH (c,[],[])
le terme est une mv = > un but
| Meta n ->
TH (c,[],[None])
| Cast (m,_, ty) when isMeta m ->
TH (c,[destMeta m,ty],[None])
abstraction = > n'est pas pur
* attention : dans ce cas il faut remplacer ( Rel 1 ) par ( )
* où x est une variable FRAICHE
* attention : dans ce cas il faut remplacer (Rel 1) par (Var x)
* où x est une variable FRAICHE *)
| Lambda (name,c1,c2) ->
let v = fresh env name in
let env' = push_named (v,None,c1) env in
begin match compute_metamap env' sigma (subst1 (mkVar v) c2) with
(* terme de preuve complet *)
| TH (_,_,[]) -> TH (c,[],[])
(* terme de preuve incomplet *)
| th ->
let m,mm,sgp = replace_by_meta env' sigma th in
TH (mkLambda (Name v,c1,m), mm, sgp)
end
| LetIn (name, c1, t1, c2) ->
let v = fresh env name in
let th1 = compute_metamap env sigma c1 in
let env' = push_named (v,Some c1,t1) env in
let th2 = compute_metamap env' sigma (subst1 (mkVar v) c2) in
begin match th1,th2 with
(* terme de preuve complet *)
| TH (_,_,[]), TH (_,_,[]) -> TH (c,[],[])
(* terme de preuve incomplet *)
| TH (c1,mm1,sgp1), TH (c2,mm2,sgp2) ->
let m1,mm1,sgp1 =
if sgp1=[] then (c1,mm1,[])
else replace_by_meta env sigma th1 in
let m2,mm2,sgp2 =
if sgp2=[] then (c2,mm2,[])
else replace_by_meta env' sigma th2 in
TH (mkNamedLetIn v m1 t1 m2, mm1@mm2, sgp1@sgp2)
end
4 . Application
| App (f,v) ->
let a = Array.map (compute_metamap env sigma) (Array.append [|f|] v) in
begin
try
let v',mm,sgp = replace_in_array false env sigma a in
let v'' = Array.sub v' 1 (Array.length v) in
TH (mkApp(v'.(0), v''),mm,sgp)
with NoMeta ->
TH (c,[],[])
end
| Case (ci,p,cc,v) ->
(* bof... *)
let nbr = Array.length v in
let v = Array.append [|p;cc|] v in
let a = Array.map (compute_metamap env sigma) v in
begin
try
let v',mm,sgp = replace_in_array false env sigma a in
let v'' = Array.sub v' 2 nbr in
TH (mkCase (ci,v'.(0),v'.(1),v''),mm,sgp)
with NoMeta ->
TH (c,[],[])
end
5 . Fix .
| Fix ((ni,i),(fi,ai,v)) ->
(* TODO: use a fold *)
let vi = Array.map (fresh env) fi in
let fi' = Array.map (fun id -> Name id) vi in
let env' = push_named_rec_types (fi',ai,v) env in
let a = Array.map
(compute_metamap env' sigma)
(Array.map (substl (List.map mkVar (Array.to_list vi))) v)
in
begin
try
let v',mm,sgp = replace_in_array true env' sigma a in
let fix = mkFix ((ni,i),(fi',ai,v')) in
TH (fix,mm,sgp)
with NoMeta ->
TH (c,[],[])
end
Cast . Est - ce bien exact ?
| Cast (c,_,t) -> compute_metamap env sigma c
let TH ( c',mm , sgp ) = compute_metamap sign c in
TH ( ( c',t),mm , sgp )
TH (mkCast (c',t),mm,sgp) *)
Produit . Est - ce bien exact ?
| Prod (_,_,_) ->
if occur_meta c then
error "refine: proof term contains metas in a product."
else
TH (c,[],[])
(* Cofix. *)
| CoFix (i,(fi,ai,v)) ->
let vi = Array.map (fresh env) fi in
let fi' = Array.map (fun id -> Name id) vi in
let env' = push_named_rec_types (fi',ai,v) env in
let a = Array.map
(compute_metamap env' sigma)
(Array.map (substl (List.map mkVar (Array.to_list vi))) v)
in
begin
try
let v',mm,sgp = replace_in_array true env' sigma a in
let cofix = mkCoFix (i,(fi',ai,v')) in
TH (cofix,mm,sgp)
with NoMeta ->
TH (c,[],[])
end
| NativeArr (t,p) ->
(* Est Correct ? *)
(* Do not use the same hack than for app p can be to large *)
let ta = compute_metamap env sigma t in
let a = Array.map (compute_metamap env sigma) p in
begin
try
let p', mm, sgp = replace_in_array false env sigma a in
let t', mmt,sgpt =
match ta with
| TH(c,mm,[]) -> c,mm,[]
| _ -> replace_by_meta env sigma ta in
TH(mkArray(t',p'), mmt@mm, sgpt@sgp)
with NoMeta ->
match ta with
| TH(_,_,[]) -> TH(c,[],[])
| _ ->
let t',mmt,sgpt = replace_by_meta env sigma ta in
TH (mkArray(t',p),mmt,sgpt)
end
tcc_aux : term_with_holes - > tactic
*
* Réalise le 3 .
*
* Réalise le 3. ci-dessus
*)
let ensure_products n =
let p = ref 0 in
let rec aux n gl =
if n = 0 then tclFAIL 0 (mt()) gl
else
tclTHEN
(tclORELSE intro (fun gl -> incr p; introf gl))
(aux (n-1)) gl in
tclORELSE
(aux n)
(* Now we know how many red are needed *)
(fun gl -> tclDO !p red_in_concl gl)
let rec tcc_aux subst (TH (c,mm,sgp) as _th) gl =
let c = substl subst c in
match (kind_of_term c,sgp) with
mv = > sous - but : on
| Meta _ , _ ->
tclIDTAC gl
| Cast (c,_,_), _ when isMeta c ->
tclIDTAC gl
(* terme pur => refine *)
| _,[] ->
refine c gl
(* abstraction => intro *)
| Lambda (Name id,_,m), _ ->
assert (isMeta (strip_outer_cast m));
begin match sgp with
| [None] -> intro_mustbe_force id gl
| [Some th] ->
tclTHEN (introduction id)
(onLastHypId (fun id -> tcc_aux (mkVar id::subst) th)) gl
| _ -> assert false
end
| Lambda (Anonymous,_,m), _ -> (* if anon vars are allowed in evars *)
assert (isMeta (strip_outer_cast m));
begin match sgp with
| [None] -> tclTHEN intro (onLastHypId (fun id -> clear [id])) gl
| [Some th] ->
tclTHEN
intro
(onLastHypId (fun id ->
tclTHEN
(clear [id])
(tcc_aux (mkVar (*dummy*) id::subst) th))) gl
| _ -> assert false
end
(* let in without holes in the body => possibly dependent intro *)
| LetIn (Name id,c1,t1,c2), _ when not (isMeta (strip_outer_cast c1)) ->
let c = pf_concl gl in
let newc = mkNamedLetIn id c1 t1 c in
tclTHEN
(change_in_concl None newc)
(match sgp with
| [None] -> introduction id
| [Some th] ->
tclTHEN (introduction id)
(onLastHypId (fun id -> tcc_aux (mkVar id::subst) th))
| _ -> assert false)
gl
(* let in with holes in the body => unable to handle dependency
because of evars limitation, use non dependent assert instead *)
| LetIn (Name id,c1,t1,c2), _ ->
tclTHENS
(assert_tac (Name id) t1)
[(match List.hd sgp with
| None -> tclIDTAC
| Some th -> onLastHypId (fun id -> tcc_aux (mkVar id::subst) th));
(match List.tl sgp with
| [] -> refine (subst1 (mkVar id) c2) (* a complete proof *)
| [None] -> tclIDTAC (* a meta *)
| [Some th] -> (* a partial proof *)
onLastHypId (fun id -> tcc_aux (mkVar id::subst) th)
| _ -> assert false)]
gl
(* fix => tactique Fix *)
| Fix ((ni,j),(fi,ai,_)) , _ ->
let out_name = function
| Name id -> id
| _ -> error "Recursive functions must have names."
in
let fixes = array_map3 (fun f n c -> (out_name f,succ n,c)) fi ni ai in
let firsts,lasts = list_chop j (Array.to_list fixes) in
tclTHENS
(tclTHEN
(ensure_products (succ ni.(j)))
(mutual_fix (out_name fi.(j)) (succ ni.(j)) ( lasts) j))
(List.map (function
| None -> tclIDTAC
| Some th -> tcc_aux subst th) sgp)
gl
cofix = > tactique CoFix
| CoFix (j,(fi,ai,_)) , _ ->
let out_name = function
| Name id -> id
| _ -> error "Recursive functions must have names."
in
let cofixes = array_map2 (fun f c -> (out_name f,c)) fi ai in
let firsts,lasts = list_chop j (Array.to_list cofixes) in
tclTHENS
(mutual_cofix (out_name fi.(j)) ( lasts) j)
(List.map (function
| None -> tclIDTAC
| Some th -> tcc_aux subst th) sgp)
gl
sinon on fait refine appels rec . sur les sous - buts .
* c'est le cas pour App et MutCase .
* c'est le cas pour App et MutCase. *)
| _ ->
tclTHENS
(refine c)
(List.map
(function None -> tclIDTAC | Some th -> tcc_aux subst th) sgp)
gl
Et finalement la tactique refine elle - même :
let refine (evd,c) gl =
let sigma = project gl in
let evd = Typeclasses.resolve_typeclasses ~with_goals:false (pf_env gl) evd in
let c = Evarutil.nf_evar evd c in
let (evd,c) = Evarutil.evars_to_metas sigma (evd,c) in
Relies on Cast 's put on Meta 's by evars_to_metas , because it is otherwise
complicated to update meta types when passing through a binder
complicated to update meta types when passing through a binder *)
let th = compute_metamap (pf_env gl) evd c in
tclTHEN (Refiner.tclEVARS evd) (tcc_aux [] th) gl
| null | https://raw.githubusercontent.com/maximedenes/native-coq/3623a4d9fe95c165f02f7119c0e6564a83a9f4c9/tactics/refine.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
pour debugger
pp_mm mm ++ fnl () ++
(App _ | Case _) ->
en pleine confiance !
le terme est directement une preuve
terme de preuve complet
terme de preuve incomplet
terme de preuve complet
terme de preuve incomplet
bof...
TODO: use a fold
Cofix.
Est Correct ?
Do not use the same hack than for app p can be to large
Now we know how many red are needed
terme pur => refine
abstraction => intro
if anon vars are allowed in evars
dummy
let in without holes in the body => possibly dependent intro
let in with holes in the body => unable to handle dependency
because of evars limitation, use non dependent assert instead
a complete proof
a meta
a partial proof
fix => tactique Fix | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
JCF -- 6 janvier 1998 EXPERIMENTAL
* , en quelque sorte , d'avoir de " vraies " métavariables
* dans Coq , c'est - à - dire
* où les trous sont typés -- et que les sous - buts correspondants
* soient engendrés pour finir la preuve .
*
* :
* J'ai le but
* forall ( x : ) , { y : | ( minus y x ) = x }
* et
* fun ( x : ) = > exist [ y : nat]((minus y x)=x ) ( plus x x ) ?
* ce qui engendre le but
* ( minus ( plus x x ) x ) = x
* L'idée est, en quelque sorte, d'avoir de "vraies" métavariables
* dans Coq, c'est-à-dire de donner des preuves incomplètes -- mais
* où les trous sont typés -- et que les sous-buts correspondants
* soient engendrés pour finir la preuve.
*
* Exemple :
* J'ai le but
* forall (x:nat), { y:nat | (minus y x) = x }
* et je donne la preuve incomplète
* fun (x:nat) => exist nat [y:nat]((minus y x)=x) (plus x x) ?
* ce qui engendre le but
* (minus (plus x x) x) = x
*)
, on procède de la manière suivante :
*
* 1 . terme contenant des variables
* existentielles Evar i.e. " _ " en syntaxe concrète .
* La résolution de ces variables n'est plus nécessairement totale
* ( ise_resolve called with fail_evar = false ) et les variables
* existentielles restantes sont remplacées par des méta - variables
* types ( celui est connu : , soit trouvé
* pendant la phase de résolution ) .
*
* 2 . On met ensuite le terme " à plat " i.e. on n'autorise des MV qu'au
* , si nécessaire , on donne
* à son tour pour .
* : le terme ( f a _ ( fun ( x : ) = > e _ ) ) donne
* ( f a ? 1 ? 2 ) avec :
* - ? 2 : = fun ( x : ) = > ? 3
* - ? 3 : = e ? 4
* ? 1 et ? 4 donneront des buts
*
* 3 . On écrit ensuite une tactique tcc qui engendre les sous - buts
* à partir d'une preuve incomplète .
*
* 1. Un terme de preuve incomplet est un terme contenant des variables
* existentielles Evar i.e. "_" en syntaxe concrète.
* La résolution de ces variables n'est plus nécessairement totale
* (ise_resolve called with fail_evar=false) et les variables
* existentielles restantes sont remplacées par des méta-variables
* castées par leur types (celui est connu : soit donné, soit trouvé
* pendant la phase de résolution).
*
* 2. On met ensuite le terme "à plat" i.e. on n'autorise des MV qu'au
* permier niveau et pour chacune d'elles, si nécessaire, on donne
* à son tour un terme de preuve incomplet pour la résoudre.
* Exemple: le terme (f a _ (fun (x:nat) => e _)) donne
* (f a ?1 ?2) avec:
* - ?2 := fun (x:nat) => ?3
* - ?3 := e ?4
* ?1 et ?4 donneront des buts
*
* 3. On écrit ensuite une tactique tcc qui engendre les sous-buts
* à partir d'une preuve incomplète.
*)
open Pp
open Errors
open Util
open Names
open Term
open Termops
open Namegen
open Tacmach
open Sign
open Environ
open Reduction
open Typing
open Tactics
open Tacticals
open Printer
type term_with_holes = TH of constr * meta_type_map * sg_proofs
and sg_proofs = (term_with_holes option) list
let rec pp_th (TH(c,mm,sg)) =
(str"TH=[ " ++ hov 0 (pr_lconstr c ++ fnl () ++
pp_sg sg) ++ str "]")
and pp_mm l =
hov 0 (prlist_with_sep (fun _ -> (fnl ()))
(fun (n,c) -> (int n ++ str" --> " ++ pr_lconstr c)) l)
and pp_sg sg =
hov 0 (prlist_with_sep (fun _ -> (fnl ()))
(function None -> (str"None") | Some th -> (pp_th th)) sg)
compute_metamap : constr - > ' a evar_map - > term_with_holes
* réalise le 2 .
*
* Pour cela , on renvoie une meta_map qui indique pour chaque meta - variable
* si elle correspond à un but ( None ) ou si elle réduite à son tour
* par ( Some c ) .
*
* On a donc l'INVARIANT suivant : le terme c rendu est " de niveau 1 "
* -- i.e. à plat -- et la meta_map contient autant d'éléments qu'il y
* a de meta - variables dans c. On suppose de plus que l'ordre dans la
* meta_map correspond à celui des buts qui seront engendrés par le refine .
* réalise le 2. ci-dessus
*
* Pour cela, on renvoie une meta_map qui indique pour chaque meta-variable
* si elle correspond à un but (None) ou si elle réduite à son tour
* par un terme de preuve incomplet (Some c).
*
* On a donc l'INVARIANT suivant : le terme c rendu est "de niveau 1"
* -- i.e. à plat -- et la meta_map contient autant d'éléments qu'il y
* a de meta-variables dans c. On suppose de plus que l'ordre dans la
* meta_map correspond à celui des buts qui seront engendrés par le refine.
*)
let replace_by_meta env sigma = function
| TH (m, mm, sgp) when isMeta (strip_outer_cast m) -> m,mm,sgp
| (TH (c,mm,_)) as th ->
let n = Evarutil.new_meta() in
let m = mkMeta n in
quand on introduit une mv on son type
let ty = match kind_of_term c with
| Lambda (Name id,c1,c2) when isCast c2 ->
let _,_,t = destCast c2 in mkNamedProd id c1 t
| Lambda (Anonymous,c1,c2) when isCast c2 ->
let _,_,t = destCast c2 in mkArrow c1 t
let sigma' =
List.fold_right (fun (m,t) sigma -> Evd.meta_declare m t sigma)
mm sigma in
Retyping.get_type_of env sigma' c
| Fix ( ( _ , j),(v , _ , _ ) ) - >
) ( * en pleine confiance !
| Fix ((_,j),(v,_,_)) ->
| _ -> invalid_arg "Tcc.replace_by_meta (TO DO)"
*)
in
mkCast (m,DEFAULTcast, ty),[n,ty],[Some th]
exception NoMeta
let replace_in_array keep_length env sigma a =
if array_for_all (function (TH (_,_,[])) -> true | _ -> false) a then
raise NoMeta;
let a' = Array.map (function
| (TH (c,mm,[])) when not keep_length -> c,mm,[]
| th -> replace_by_meta env sigma th) a
in
let v' = Array.map pi1 a' in
let mm = Array.fold_left (@) [] (Array.map pi2 a') in
let sgp = Array.fold_left (@) [] (Array.map pi3 a') in
v',mm,sgp
let fresh env n =
let id = match n with Name x -> x | _ -> id_of_string "_H" in
next_ident_away_in_goal id (ids_of_named_context (named_context env))
let rec compute_metamap env sigma c = match kind_of_term c with
| (Const _ | Evar _ | Ind _ | Construct _ |
Sort _ | Var _ | Rel _ | NativeInt _) ->
TH (c,[],[])
le terme est une mv = > un but
| Meta n ->
TH (c,[],[None])
| Cast (m,_, ty) when isMeta m ->
TH (c,[destMeta m,ty],[None])
abstraction = > n'est pas pur
* attention : dans ce cas il faut remplacer ( Rel 1 ) par ( )
* où x est une variable FRAICHE
* attention : dans ce cas il faut remplacer (Rel 1) par (Var x)
* où x est une variable FRAICHE *)
| Lambda (name,c1,c2) ->
let v = fresh env name in
let env' = push_named (v,None,c1) env in
begin match compute_metamap env' sigma (subst1 (mkVar v) c2) with
| TH (_,_,[]) -> TH (c,[],[])
| th ->
let m,mm,sgp = replace_by_meta env' sigma th in
TH (mkLambda (Name v,c1,m), mm, sgp)
end
| LetIn (name, c1, t1, c2) ->
let v = fresh env name in
let th1 = compute_metamap env sigma c1 in
let env' = push_named (v,Some c1,t1) env in
let th2 = compute_metamap env' sigma (subst1 (mkVar v) c2) in
begin match th1,th2 with
| TH (_,_,[]), TH (_,_,[]) -> TH (c,[],[])
| TH (c1,mm1,sgp1), TH (c2,mm2,sgp2) ->
let m1,mm1,sgp1 =
if sgp1=[] then (c1,mm1,[])
else replace_by_meta env sigma th1 in
let m2,mm2,sgp2 =
if sgp2=[] then (c2,mm2,[])
else replace_by_meta env' sigma th2 in
TH (mkNamedLetIn v m1 t1 m2, mm1@mm2, sgp1@sgp2)
end
4 . Application
| App (f,v) ->
let a = Array.map (compute_metamap env sigma) (Array.append [|f|] v) in
begin
try
let v',mm,sgp = replace_in_array false env sigma a in
let v'' = Array.sub v' 1 (Array.length v) in
TH (mkApp(v'.(0), v''),mm,sgp)
with NoMeta ->
TH (c,[],[])
end
| Case (ci,p,cc,v) ->
let nbr = Array.length v in
let v = Array.append [|p;cc|] v in
let a = Array.map (compute_metamap env sigma) v in
begin
try
let v',mm,sgp = replace_in_array false env sigma a in
let v'' = Array.sub v' 2 nbr in
TH (mkCase (ci,v'.(0),v'.(1),v''),mm,sgp)
with NoMeta ->
TH (c,[],[])
end
5 . Fix .
| Fix ((ni,i),(fi,ai,v)) ->
let vi = Array.map (fresh env) fi in
let fi' = Array.map (fun id -> Name id) vi in
let env' = push_named_rec_types (fi',ai,v) env in
let a = Array.map
(compute_metamap env' sigma)
(Array.map (substl (List.map mkVar (Array.to_list vi))) v)
in
begin
try
let v',mm,sgp = replace_in_array true env' sigma a in
let fix = mkFix ((ni,i),(fi',ai,v')) in
TH (fix,mm,sgp)
with NoMeta ->
TH (c,[],[])
end
Cast . Est - ce bien exact ?
| Cast (c,_,t) -> compute_metamap env sigma c
let TH ( c',mm , sgp ) = compute_metamap sign c in
TH ( ( c',t),mm , sgp )
TH (mkCast (c',t),mm,sgp) *)
Produit . Est - ce bien exact ?
| Prod (_,_,_) ->
if occur_meta c then
error "refine: proof term contains metas in a product."
else
TH (c,[],[])
| CoFix (i,(fi,ai,v)) ->
let vi = Array.map (fresh env) fi in
let fi' = Array.map (fun id -> Name id) vi in
let env' = push_named_rec_types (fi',ai,v) env in
let a = Array.map
(compute_metamap env' sigma)
(Array.map (substl (List.map mkVar (Array.to_list vi))) v)
in
begin
try
let v',mm,sgp = replace_in_array true env' sigma a in
let cofix = mkCoFix (i,(fi',ai,v')) in
TH (cofix,mm,sgp)
with NoMeta ->
TH (c,[],[])
end
| NativeArr (t,p) ->
let ta = compute_metamap env sigma t in
let a = Array.map (compute_metamap env sigma) p in
begin
try
let p', mm, sgp = replace_in_array false env sigma a in
let t', mmt,sgpt =
match ta with
| TH(c,mm,[]) -> c,mm,[]
| _ -> replace_by_meta env sigma ta in
TH(mkArray(t',p'), mmt@mm, sgpt@sgp)
with NoMeta ->
match ta with
| TH(_,_,[]) -> TH(c,[],[])
| _ ->
let t',mmt,sgpt = replace_by_meta env sigma ta in
TH (mkArray(t',p),mmt,sgpt)
end
tcc_aux : term_with_holes - > tactic
*
* Réalise le 3 .
*
* Réalise le 3. ci-dessus
*)
let ensure_products n =
let p = ref 0 in
let rec aux n gl =
if n = 0 then tclFAIL 0 (mt()) gl
else
tclTHEN
(tclORELSE intro (fun gl -> incr p; introf gl))
(aux (n-1)) gl in
tclORELSE
(aux n)
(fun gl -> tclDO !p red_in_concl gl)
let rec tcc_aux subst (TH (c,mm,sgp) as _th) gl =
let c = substl subst c in
match (kind_of_term c,sgp) with
mv = > sous - but : on
| Meta _ , _ ->
tclIDTAC gl
| Cast (c,_,_), _ when isMeta c ->
tclIDTAC gl
| _,[] ->
refine c gl
| Lambda (Name id,_,m), _ ->
assert (isMeta (strip_outer_cast m));
begin match sgp with
| [None] -> intro_mustbe_force id gl
| [Some th] ->
tclTHEN (introduction id)
(onLastHypId (fun id -> tcc_aux (mkVar id::subst) th)) gl
| _ -> assert false
end
assert (isMeta (strip_outer_cast m));
begin match sgp with
| [None] -> tclTHEN intro (onLastHypId (fun id -> clear [id])) gl
| [Some th] ->
tclTHEN
intro
(onLastHypId (fun id ->
tclTHEN
(clear [id])
| _ -> assert false
end
| LetIn (Name id,c1,t1,c2), _ when not (isMeta (strip_outer_cast c1)) ->
let c = pf_concl gl in
let newc = mkNamedLetIn id c1 t1 c in
tclTHEN
(change_in_concl None newc)
(match sgp with
| [None] -> introduction id
| [Some th] ->
tclTHEN (introduction id)
(onLastHypId (fun id -> tcc_aux (mkVar id::subst) th))
| _ -> assert false)
gl
| LetIn (Name id,c1,t1,c2), _ ->
tclTHENS
(assert_tac (Name id) t1)
[(match List.hd sgp with
| None -> tclIDTAC
| Some th -> onLastHypId (fun id -> tcc_aux (mkVar id::subst) th));
(match List.tl sgp with
onLastHypId (fun id -> tcc_aux (mkVar id::subst) th)
| _ -> assert false)]
gl
| Fix ((ni,j),(fi,ai,_)) , _ ->
let out_name = function
| Name id -> id
| _ -> error "Recursive functions must have names."
in
let fixes = array_map3 (fun f n c -> (out_name f,succ n,c)) fi ni ai in
let firsts,lasts = list_chop j (Array.to_list fixes) in
tclTHENS
(tclTHEN
(ensure_products (succ ni.(j)))
(mutual_fix (out_name fi.(j)) (succ ni.(j)) ( lasts) j))
(List.map (function
| None -> tclIDTAC
| Some th -> tcc_aux subst th) sgp)
gl
cofix = > tactique CoFix
| CoFix (j,(fi,ai,_)) , _ ->
let out_name = function
| Name id -> id
| _ -> error "Recursive functions must have names."
in
let cofixes = array_map2 (fun f c -> (out_name f,c)) fi ai in
let firsts,lasts = list_chop j (Array.to_list cofixes) in
tclTHENS
(mutual_cofix (out_name fi.(j)) ( lasts) j)
(List.map (function
| None -> tclIDTAC
| Some th -> tcc_aux subst th) sgp)
gl
sinon on fait refine appels rec . sur les sous - buts .
* c'est le cas pour App et MutCase .
* c'est le cas pour App et MutCase. *)
| _ ->
tclTHENS
(refine c)
(List.map
(function None -> tclIDTAC | Some th -> tcc_aux subst th) sgp)
gl
Et finalement la tactique refine elle - même :
let refine (evd,c) gl =
let sigma = project gl in
let evd = Typeclasses.resolve_typeclasses ~with_goals:false (pf_env gl) evd in
let c = Evarutil.nf_evar evd c in
let (evd,c) = Evarutil.evars_to_metas sigma (evd,c) in
Relies on Cast 's put on Meta 's by evars_to_metas , because it is otherwise
complicated to update meta types when passing through a binder
complicated to update meta types when passing through a binder *)
let th = compute_metamap (pf_env gl) evd c in
tclTHEN (Refiner.tclEVARS evd) (tcc_aux [] th) gl
|
f36ff6b39464b02f225ce898108cdc02cb1b7778c3effcf830b085af068e4712 | markhibberd/postmark | Request.hs | {-# LANGUAGE GADTs, GADTSyntax #-}
module Network.Api.Postmark.Request (
PostmarkRequest (..),
PostmarkRequest'
) where
import Network.Api.Postmark.Error
import Data.Aeson
import Data.Text
import Network.Api.Support
import Network.HTTP.Types
data PostmarkRequest e a where
PostmarkRequest :: (FromJSON e, FromJSON a) => StdMethod -> Text -> RequestTransformer -> PostmarkRequest e a
type PostmarkRequest' a =
PostmarkRequest PostmarkError a
| null | https://raw.githubusercontent.com/markhibberd/postmark/2eb6087bbb19421f1dd765b973702f37c1d386e9/src/Network/Api/Postmark/Request.hs | haskell | # LANGUAGE GADTs, GADTSyntax # | module Network.Api.Postmark.Request (
PostmarkRequest (..),
PostmarkRequest'
) where
import Network.Api.Postmark.Error
import Data.Aeson
import Data.Text
import Network.Api.Support
import Network.HTTP.Types
data PostmarkRequest e a where
PostmarkRequest :: (FromJSON e, FromJSON a) => StdMethod -> Text -> RequestTransformer -> PostmarkRequest e a
type PostmarkRequest' a =
PostmarkRequest PostmarkError a
|
55478682402a57a058ec3285bfc112042a8fc0c63dcb5bf43c27822c5fc5dac1 | DKurilo/hackerrank | solution.hs | # LANGUAGE FlexibleInstances , UndecidableInstances , DuplicateRecordFields #
module Main where
import Control.Monad
import Data.Array
import Data.Bits
import Data.List
import Data.List.Split
import Data.Set
import Debug.Trace
import System.Environment
import System.IO
import System.IO.Unsafe
import Debug.Trace
-- Complete the hourglassSum function below.
hourglassSum :: [[Int]] -> Int
hourglassSum arr = maximum $ Prelude.map (sum.(applyPattern [1,1,1,0,1,0,1,1,1])) $ trace (show q) $ q where q = breakSq 3 3 arr
applyPattern :: Num a => [a] -> [a] -> [a]
applyPattern ps = Prelude.map (\(x,y) -> x*y) . zip ps
breakSq :: Show a => Int -> Int -> [[a]] -> [[a]]
breakSq w h as = breakSq' w h [] [] as
breakSq' :: Show a => Int -> Int -> [[a]] -> [[[a]]] -> [[a]] -> [[a]]
breakSq' _ _ rs [] [] = rs -- error
breakSq' _ _ rs (c:cs) [] = rs ++ c
breakSq' w h rs cs (a:as)
| length cs >= h = breakSq' w h (rs ++ head cs) (applyLine . tail $ cs) as
| otherwise = breakSq' w h rs (applyLine cs) as
where sw = length a - w + 1
applyLine = \cs' -> Prelude.map (\l -> zipWith (++) l pa) $ cs' ++ [Prelude.take sw $ repeat []]
pa = reverse $ fst $ Prelude.foldr (\n (ls,(ns)) -> (Prelude.take w ns:ls, tail ns)) ([],a) [1..sw]
readMultipleLinesAsStringArray :: Int -> IO [String]
readMultipleLinesAsStringArray 0 = return []
readMultipleLinesAsStringArray n = do
line <- getLine
rest <- readMultipleLinesAsStringArray(n - 1)
return (line : rest)
main :: IO()
main = do
stdout <- getEnv "OUTPUT_PATH"
fptr <- openFile stdout WriteMode
arrTemp <- readMultipleLinesAsStringArray 6
let arr = Data.List.map (\x -> Data.List.map (read :: String -> Int) . words $ x) arrTemp
let result = hourglassSum arr
-- hPutStrLn fptr $ show result
putStrLn $ show result
hFlush fptr
hClose fptr
| null | https://raw.githubusercontent.com/DKurilo/hackerrank/37063170567b397b25a2b7123bc9c1299d34814a/2d-array/solution.hs | haskell | Complete the hourglassSum function below.
error
hPutStrLn fptr $ show result | # LANGUAGE FlexibleInstances , UndecidableInstances , DuplicateRecordFields #
module Main where
import Control.Monad
import Data.Array
import Data.Bits
import Data.List
import Data.List.Split
import Data.Set
import Debug.Trace
import System.Environment
import System.IO
import System.IO.Unsafe
import Debug.Trace
hourglassSum :: [[Int]] -> Int
hourglassSum arr = maximum $ Prelude.map (sum.(applyPattern [1,1,1,0,1,0,1,1,1])) $ trace (show q) $ q where q = breakSq 3 3 arr
applyPattern :: Num a => [a] -> [a] -> [a]
applyPattern ps = Prelude.map (\(x,y) -> x*y) . zip ps
breakSq :: Show a => Int -> Int -> [[a]] -> [[a]]
breakSq w h as = breakSq' w h [] [] as
breakSq' :: Show a => Int -> Int -> [[a]] -> [[[a]]] -> [[a]] -> [[a]]
breakSq' _ _ rs (c:cs) [] = rs ++ c
breakSq' w h rs cs (a:as)
| length cs >= h = breakSq' w h (rs ++ head cs) (applyLine . tail $ cs) as
| otherwise = breakSq' w h rs (applyLine cs) as
where sw = length a - w + 1
applyLine = \cs' -> Prelude.map (\l -> zipWith (++) l pa) $ cs' ++ [Prelude.take sw $ repeat []]
pa = reverse $ fst $ Prelude.foldr (\n (ls,(ns)) -> (Prelude.take w ns:ls, tail ns)) ([],a) [1..sw]
readMultipleLinesAsStringArray :: Int -> IO [String]
readMultipleLinesAsStringArray 0 = return []
readMultipleLinesAsStringArray n = do
line <- getLine
rest <- readMultipleLinesAsStringArray(n - 1)
return (line : rest)
main :: IO()
main = do
stdout <- getEnv "OUTPUT_PATH"
fptr <- openFile stdout WriteMode
arrTemp <- readMultipleLinesAsStringArray 6
let arr = Data.List.map (\x -> Data.List.map (read :: String -> Int) . words $ x) arrTemp
let result = hourglassSum arr
putStrLn $ show result
hFlush fptr
hClose fptr
|
4cb50a888879f6266049a4121e75d080cc3154d747ced6f3a8d27371ee037387 | uwplse/PUMPKIN-PATCH | expansion.ml | (* Expanding proof categories *)
open Stateutils
open Names
open Environ
open Evd
open Constr
open Proofcat
open Proofcatterms
open Evaluation
open Utilities
open Debruijn
open Declarations
open Indutils
open Contextutils
open Convertibility
open Envutils
open Inference
open Checking
(* --- Type definitions --- *)
type 'a expansion_strategy = 'a -> evar_map -> 'a state
(* --- Terms and types --- *)
(* Expand a product type exactly once *)
let expand_product (env : env) ((n, t, b) : Name.t * types * types) =
bind
(eval_theorem env t)
(fun t' ->
let env' = push_local (n, t) env in
bind
(bind (eval_theorem env' b) (substitute_categories t'))
(fun c ->
bind_cat c (initial c, LazyBinding (mkRel 1, env'), terminal t')))
(* Expand a lambda term exactly once *)
let expand_lambda (env : env) ((n, t, b) : Name.t * types * types) =
expand_product env (n, t, b)
(*
* Expand an inductive type
* This is unfinished, and currently unused for any benchmarks
*)
let expand_inductive (env : env) (((i, ii), u) : pinductive) =
let mbody = lookup_mind i env in
check_inductive_supported mbody;
let bodies = mbody.mind_packets in
let env_ind = push_rel_context (bindings_for_inductive env mbody bodies) env in
let body = bodies.(ii) in
let constrs =
List.map
(fun ci -> mkConstructU (((i, ii), ci), u))
(from_one_to (Array.length body.mind_consnames))
in
bind
(map_state (eval_proof env_ind) constrs)
(fun cs ->
fold_left_state
(fun cind c ->
let ms = List.append (morphisms c) (morphisms cind) in
bind
(bind (objects cind) (fun tl -> ret (terminal c :: tl)))
(fun os -> make_category os ms (initial_opt cind) None))
(List.hd cs)
(List.tl cs))
* Expand application exactly once
* Assumes there is at least one argument
* Expand application exactly once
* Assumes there is at least one argument
*)
let expand_app (env : env) ((f, args) : types * types array) =
assert (Array.length args > 0);
let arg = args.(0) in
bind
(eval_proof env (mkApp (f, Array.make 1 arg)))
(fun f' ->
bind
(bind (eval_proof env arg) (fun c -> substitute_categories c f'))
(bind_apply_function (LazyBinding (f, env)) 1))
(* --- Contexts --- *)
(*
* Expand a term exactly once
* Default to using f when it cannot be expanded further
* Error if the type context doesn't hold any terms
*)
let expand_term (default : env -> types -> evar_map -> proof_cat state) (o : context_object) =
let (trm, env) = dest_context_term o in
match kind trm with
| Prod (n, t, b) ->
expand_product env (n, t, b)
| Lambda (n, t, b) ->
expand_lambda env (n, t, b)
| Ind ((i, ii), u) ->
expand_inductive env ((i, ii), u)
| App (f, args) ->
(match kind f with
| Lambda (n, t, b) ->
(* Does not yet delta-reduce *)
if Array.length args > 0 then
expand_app env (f, args)
else
default env trm
| _ ->
default env trm)
| _ ->
default env trm
(* Expand a product type as far as its conclusion goes *)
let expand_product_fully (o : context_object) =
let rec expand_fully env (n, t, b) =
match kind b with
| Prod (n', t', b') ->
bind
(eval_theorem env t)
(fun t'' ->
let env' = push_local (n, t) env in
bind
(bind (expand_fully env' (n', t', b')) (substitute_categories t''))
(fun c ->
let init_o = initial c in
let term_o = terminal t'' in
bind_cat c (init_o, LazyBinding (mkRel 1, env'), term_o)))
| _ ->
expand_product env (n, t, b)
in expand_fully (context_env o) (destProd (fst (dest_context_term o)))
(* --- Categories --- *)
(*
* Expand the terminal object of c exactly once
* Return c if it cannot be expanded
*)
let expand_terminal (c : proof_cat) =
let t = terminal c in
match t with
| Context (Term (trm, env), i) ->
let ms = morphisms c in
bind
(arrows_with_dest t ms)
(fun concls ->
let binding =
if non_empty concls then
let (_, ext, _) = List.hd concls in (* arbitrary for now *)
ext
else
AnonymousBinding
in
bind
(expand_term (eval_theorem_bind binding) t)
(substitute_terminal c))
| _ ->
ret c
* Utility function for expanding inductive proofs
* Partition the morphisms of a category into two parts :
* 1 . that end in a product type that is not a hypothesis
* 2 . that do not
* Utility function for expanding inductive proofs
* Partition the morphisms of a category into two parts:
* 1. Morphisms that end in a product type that is not a hypothesis
* 2. Morphisms that do not
*)
let partition_expandable (c : proof_cat) =
partition_state
(map_dest
(fun o ->
and_state
(fun o -> ret (context_is_product o))
(is_not_hypothesis c)
o
o))
(morphisms c)
(*
* Utility function for expanding inductive proofs
* Expand conclusions of different cases of an inductive proof that are dependent
*)
let expand_inductive_conclusions (ms : arrow list) =
map_state
(fun (s, e, d) ->
bind
(expand_product_fully d)
(fun dc ->
let map_i_to_src =
branch_state (objects_equal (initial dc)) (fun _ -> ret s) ret
in
let arity = (List.length (morphisms dc)) - 1 in
let env = substitute_ext_env (context_env (terminal dc)) e in
bind
(apply_functor map_i_to_src (map_source_arrow map_i_to_src) dc)
(bind_apply_function (shift_ext_by arity env) arity)))
ms
* Expand all conclusions of an inductive proof fully
* ( Fully expand all product types in conclusions )
*
* If there 's a bug here , it might be because we need to
* substitute in an environment with the inductive bindings pushed
* ( see git history prior to July 2nd , 2017 ) . This is
* especially relevant when we add support for mutually
* inductive types .
* Expand all conclusions of an inductive proof fully
* (Fully expand all product types in conclusions)
*
* If there's a bug here, it might be because we need to
* substitute in an environment with the inductive bindings pushed
* (see git history prior to July 2nd, 2017). This is
* especially relevant when we add support for mutually
* inductive types.
*)
let expand_inductive_conclusions_fully (c : proof_cat) sigma =
let sigma, c_os = objects c sigma in
let sigma, (ms_to_expand, old_ms) = partition_expandable c sigma in
let sigma, old_os = all_objects_except_those_in (conclusions ms_to_expand) c_os sigma in
let sigma, expanded = expand_inductive_conclusions ms_to_expand sigma in
let sigma, new_os = flat_map_state (map_objects (all_objects_except_those_in c_os)) expanded sigma in
let new_ms = flat_map morphisms expanded in
let os = List.append old_os new_os in
let ms = List.append old_ms new_ms in
make_category os ms (initial_opt c) None sigma
(* For an inductive proof, expand n inductive parameters and the principle P *)
let expand_inductive_params (n : int) (c : proof_cat) =
let rec expand n' c' =
if n' < 0 || (not (context_is_product (terminal c'))) then
ret c'
else
bind (expand_terminal c') (expand (n' - 1))
in expand n c
(* Check if an o is the type of an applied inductive hypothesis in c *)
let applies_ih (env : env) (p : types) (c : proof_cat) (o : context_object) =
if context_is_app o then
let (f, _) = context_as_app o in
bind
(shortest_path_length c o)
(fun n ->
and_state
(is_hypothesis c)
(fun f sigma -> has_type env sigma p f)
o
(unshift_by n f))
else
ret false
* Bind the inductive hypotheses in an expanded constructor with parameters
*
* Assumes it 's an expanded constructor , but does n't check for structure
* This also may fail if the IH is applied to something when we expand
* So we should test for that case
* Bind the inductive hypotheses in an expanded constructor with parameters
*
* Assumes it's an expanded constructor, but doesn't check for structure
* This also may fail if the IH is applied to something when we expand
* So we should test for that case
*)
let bind_ihs (c : proof_cat) =
bind
(context_at_index c 1)
(fun context ->
let env_with_p = context_env context in
let (_, _, p) = CRD.to_tuple @@ lookup_rel 1 env_with_p in
let env = pop_rel_context 1 env_with_p in
apply_functor
(fun o -> ret o)
(branch_state
(map_dest (applies_ih env p c))
(map_ext_arrow (fun _ -> ret (fresh_ih ())))
ret)
c)
(*
* Expand an inductive constructor
* That is, expand its conclusion fully if it is dependent
* Then mark the edges that are inductive hypotheses
*)
let expand_constr (c : proof_cat) =
bind
(expand_inductive_conclusions_fully c)
(fun c ->
bind
(bind_ihs c)
(fun c_exp ->
let ms = morphisms c_exp in
let assums = hypotheses ms in
let concls = conclusions ms in
bind
(all_objects_except_those_in assums concls)
(fun trs ->
let tr = List.hd trs in
bind
(objects c_exp)
(fun os -> make_category os ms (initial_opt c_exp) (Some tr)))))
(*
* Expand the application of a constant function
* TODO, do we need this in expand_app? How is this used right now?
*)
let expand_const_app env (c, u) (f, args) default =
match inductive_of_elim env (c, u) with
| Some mutind ->
let mutind_body = lookup_mind mutind env in
bind
(bind
(eval_proof env f)
(expand_inductive_params mutind_body.mind_nparams))
(fun f_exp ->
eval_induction mutind_body f_exp args)
| None ->
bind
(eval_proof env (mkApp (f, args)))
(fun exp -> ret (exp, 0, default))
(*
* Expand an application arrow
*
* This assumes it's the only arrow in c
* Otherwise, there is an error
* Like the above, this will not work yet when induction is later in the proof
*)
let expand_application (c, n, l) =
map_ext
(fun e ->
match e with
| LazyBinding (trm, env) ->
let (f, args) = destApp trm in
(match kind f with
| Const (c, u) ->
expand_const_app env (c, u) (f, args) l
| _ ->
let c_trm = Context (Term (trm, env), fid ()) in
bind
(expand_term eval_theorem c_trm)
(fun exp -> ret (exp, 0, l)))
| _ -> assert false)
(only_arrow c)
| null | https://raw.githubusercontent.com/uwplse/PUMPKIN-PATCH/73fd77ba49388fdc72702a252a8fa8f071a8e1ea/plugin/src/compilation/expansion.ml | ocaml | Expanding proof categories
--- Type definitions ---
--- Terms and types ---
Expand a product type exactly once
Expand a lambda term exactly once
* Expand an inductive type
* This is unfinished, and currently unused for any benchmarks
--- Contexts ---
* Expand a term exactly once
* Default to using f when it cannot be expanded further
* Error if the type context doesn't hold any terms
Does not yet delta-reduce
Expand a product type as far as its conclusion goes
--- Categories ---
* Expand the terminal object of c exactly once
* Return c if it cannot be expanded
arbitrary for now
* Utility function for expanding inductive proofs
* Expand conclusions of different cases of an inductive proof that are dependent
For an inductive proof, expand n inductive parameters and the principle P
Check if an o is the type of an applied inductive hypothesis in c
* Expand an inductive constructor
* That is, expand its conclusion fully if it is dependent
* Then mark the edges that are inductive hypotheses
* Expand the application of a constant function
* TODO, do we need this in expand_app? How is this used right now?
* Expand an application arrow
*
* This assumes it's the only arrow in c
* Otherwise, there is an error
* Like the above, this will not work yet when induction is later in the proof
|
open Stateutils
open Names
open Environ
open Evd
open Constr
open Proofcat
open Proofcatterms
open Evaluation
open Utilities
open Debruijn
open Declarations
open Indutils
open Contextutils
open Convertibility
open Envutils
open Inference
open Checking
type 'a expansion_strategy = 'a -> evar_map -> 'a state
let expand_product (env : env) ((n, t, b) : Name.t * types * types) =
bind
(eval_theorem env t)
(fun t' ->
let env' = push_local (n, t) env in
bind
(bind (eval_theorem env' b) (substitute_categories t'))
(fun c ->
bind_cat c (initial c, LazyBinding (mkRel 1, env'), terminal t')))
let expand_lambda (env : env) ((n, t, b) : Name.t * types * types) =
expand_product env (n, t, b)
let expand_inductive (env : env) (((i, ii), u) : pinductive) =
let mbody = lookup_mind i env in
check_inductive_supported mbody;
let bodies = mbody.mind_packets in
let env_ind = push_rel_context (bindings_for_inductive env mbody bodies) env in
let body = bodies.(ii) in
let constrs =
List.map
(fun ci -> mkConstructU (((i, ii), ci), u))
(from_one_to (Array.length body.mind_consnames))
in
bind
(map_state (eval_proof env_ind) constrs)
(fun cs ->
fold_left_state
(fun cind c ->
let ms = List.append (morphisms c) (morphisms cind) in
bind
(bind (objects cind) (fun tl -> ret (terminal c :: tl)))
(fun os -> make_category os ms (initial_opt cind) None))
(List.hd cs)
(List.tl cs))
* Expand application exactly once
* Assumes there is at least one argument
* Expand application exactly once
* Assumes there is at least one argument
*)
let expand_app (env : env) ((f, args) : types * types array) =
assert (Array.length args > 0);
let arg = args.(0) in
bind
(eval_proof env (mkApp (f, Array.make 1 arg)))
(fun f' ->
bind
(bind (eval_proof env arg) (fun c -> substitute_categories c f'))
(bind_apply_function (LazyBinding (f, env)) 1))
let expand_term (default : env -> types -> evar_map -> proof_cat state) (o : context_object) =
let (trm, env) = dest_context_term o in
match kind trm with
| Prod (n, t, b) ->
expand_product env (n, t, b)
| Lambda (n, t, b) ->
expand_lambda env (n, t, b)
| Ind ((i, ii), u) ->
expand_inductive env ((i, ii), u)
| App (f, args) ->
(match kind f with
| Lambda (n, t, b) ->
if Array.length args > 0 then
expand_app env (f, args)
else
default env trm
| _ ->
default env trm)
| _ ->
default env trm
let expand_product_fully (o : context_object) =
let rec expand_fully env (n, t, b) =
match kind b with
| Prod (n', t', b') ->
bind
(eval_theorem env t)
(fun t'' ->
let env' = push_local (n, t) env in
bind
(bind (expand_fully env' (n', t', b')) (substitute_categories t''))
(fun c ->
let init_o = initial c in
let term_o = terminal t'' in
bind_cat c (init_o, LazyBinding (mkRel 1, env'), term_o)))
| _ ->
expand_product env (n, t, b)
in expand_fully (context_env o) (destProd (fst (dest_context_term o)))
let expand_terminal (c : proof_cat) =
let t = terminal c in
match t with
| Context (Term (trm, env), i) ->
let ms = morphisms c in
bind
(arrows_with_dest t ms)
(fun concls ->
let binding =
if non_empty concls then
ext
else
AnonymousBinding
in
bind
(expand_term (eval_theorem_bind binding) t)
(substitute_terminal c))
| _ ->
ret c
* Utility function for expanding inductive proofs
* Partition the morphisms of a category into two parts :
* 1 . that end in a product type that is not a hypothesis
* 2 . that do not
* Utility function for expanding inductive proofs
* Partition the morphisms of a category into two parts:
* 1. Morphisms that end in a product type that is not a hypothesis
* 2. Morphisms that do not
*)
let partition_expandable (c : proof_cat) =
partition_state
(map_dest
(fun o ->
and_state
(fun o -> ret (context_is_product o))
(is_not_hypothesis c)
o
o))
(morphisms c)
let expand_inductive_conclusions (ms : arrow list) =
map_state
(fun (s, e, d) ->
bind
(expand_product_fully d)
(fun dc ->
let map_i_to_src =
branch_state (objects_equal (initial dc)) (fun _ -> ret s) ret
in
let arity = (List.length (morphisms dc)) - 1 in
let env = substitute_ext_env (context_env (terminal dc)) e in
bind
(apply_functor map_i_to_src (map_source_arrow map_i_to_src) dc)
(bind_apply_function (shift_ext_by arity env) arity)))
ms
* Expand all conclusions of an inductive proof fully
* ( Fully expand all product types in conclusions )
*
* If there 's a bug here , it might be because we need to
* substitute in an environment with the inductive bindings pushed
* ( see git history prior to July 2nd , 2017 ) . This is
* especially relevant when we add support for mutually
* inductive types .
* Expand all conclusions of an inductive proof fully
* (Fully expand all product types in conclusions)
*
* If there's a bug here, it might be because we need to
* substitute in an environment with the inductive bindings pushed
* (see git history prior to July 2nd, 2017). This is
* especially relevant when we add support for mutually
* inductive types.
*)
let expand_inductive_conclusions_fully (c : proof_cat) sigma =
let sigma, c_os = objects c sigma in
let sigma, (ms_to_expand, old_ms) = partition_expandable c sigma in
let sigma, old_os = all_objects_except_those_in (conclusions ms_to_expand) c_os sigma in
let sigma, expanded = expand_inductive_conclusions ms_to_expand sigma in
let sigma, new_os = flat_map_state (map_objects (all_objects_except_those_in c_os)) expanded sigma in
let new_ms = flat_map morphisms expanded in
let os = List.append old_os new_os in
let ms = List.append old_ms new_ms in
make_category os ms (initial_opt c) None sigma
let expand_inductive_params (n : int) (c : proof_cat) =
let rec expand n' c' =
if n' < 0 || (not (context_is_product (terminal c'))) then
ret c'
else
bind (expand_terminal c') (expand (n' - 1))
in expand n c
let applies_ih (env : env) (p : types) (c : proof_cat) (o : context_object) =
if context_is_app o then
let (f, _) = context_as_app o in
bind
(shortest_path_length c o)
(fun n ->
and_state
(is_hypothesis c)
(fun f sigma -> has_type env sigma p f)
o
(unshift_by n f))
else
ret false
* Bind the inductive hypotheses in an expanded constructor with parameters
*
* Assumes it 's an expanded constructor , but does n't check for structure
* This also may fail if the IH is applied to something when we expand
* So we should test for that case
* Bind the inductive hypotheses in an expanded constructor with parameters
*
* Assumes it's an expanded constructor, but doesn't check for structure
* This also may fail if the IH is applied to something when we expand
* So we should test for that case
*)
let bind_ihs (c : proof_cat) =
bind
(context_at_index c 1)
(fun context ->
let env_with_p = context_env context in
let (_, _, p) = CRD.to_tuple @@ lookup_rel 1 env_with_p in
let env = pop_rel_context 1 env_with_p in
apply_functor
(fun o -> ret o)
(branch_state
(map_dest (applies_ih env p c))
(map_ext_arrow (fun _ -> ret (fresh_ih ())))
ret)
c)
let expand_constr (c : proof_cat) =
bind
(expand_inductive_conclusions_fully c)
(fun c ->
bind
(bind_ihs c)
(fun c_exp ->
let ms = morphisms c_exp in
let assums = hypotheses ms in
let concls = conclusions ms in
bind
(all_objects_except_those_in assums concls)
(fun trs ->
let tr = List.hd trs in
bind
(objects c_exp)
(fun os -> make_category os ms (initial_opt c_exp) (Some tr)))))
let expand_const_app env (c, u) (f, args) default =
match inductive_of_elim env (c, u) with
| Some mutind ->
let mutind_body = lookup_mind mutind env in
bind
(bind
(eval_proof env f)
(expand_inductive_params mutind_body.mind_nparams))
(fun f_exp ->
eval_induction mutind_body f_exp args)
| None ->
bind
(eval_proof env (mkApp (f, args)))
(fun exp -> ret (exp, 0, default))
let expand_application (c, n, l) =
map_ext
(fun e ->
match e with
| LazyBinding (trm, env) ->
let (f, args) = destApp trm in
(match kind f with
| Const (c, u) ->
expand_const_app env (c, u) (f, args) l
| _ ->
let c_trm = Context (Term (trm, env), fid ()) in
bind
(expand_term eval_theorem c_trm)
(fun exp -> ret (exp, 0, l)))
| _ -> assert false)
(only_arrow c)
|
adba6357e5df03c862e918d94ddd7fc6c1934f34467faa62c5d7432941b37708 | basho/riak_test | mapred_verify_rt.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2012 Basho Technologies , Inc.
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc Runs the mapred_verify tests from
%%
-module(mapred_verify_rt).
-behavior(riak_test).
-export([confirm/0]).
-define(NODE_COUNT, 3).
confirm() ->
lager:info("Build ~b node cluster", [?NODE_COUNT]),
Nodes = rt:build_cluster(?NODE_COUNT),
@todo longer term fix is probably one or more of :
1 ) add a mapred_veryify section to riak_test.config
2 ) learn to use this " inclextra " bit of rebar to package tests.def
in the : #L57
PrivDir = case code:priv_dir(mapred_verify) of
{error, bad_name} ->
erlang:error("Could not determine priv dir for mapred_verify. Make sure that your riak_test.config contains \"deps\"");
PD -> PD
end,
MRVProps = [{node, hd(Nodes)},
%% don't need 'path' because riak_test does that for us
{keycount, 1000},
{bodysize, 1},
{populate, true},
{runjobs, true},
{testdef, filename:join(PrivDir, "tests.def")}],
lager:info("Run mapred_verify"),
0 = mapred_verify:do_verification(MRVProps),
lager:info("~s: PASS", [atom_to_list(?MODULE)]),
pass.
| null | https://raw.githubusercontent.com/basho/riak_test/8170137b283061ba94bc85bf42575021e26c929d/tests/mapred_verify_rt.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
@doc Runs the mapred_verify tests from
don't need 'path' because riak_test does that for us | Copyright ( c ) 2012 Basho Technologies , Inc.
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(mapred_verify_rt).
-behavior(riak_test).
-export([confirm/0]).
-define(NODE_COUNT, 3).
confirm() ->
lager:info("Build ~b node cluster", [?NODE_COUNT]),
Nodes = rt:build_cluster(?NODE_COUNT),
@todo longer term fix is probably one or more of :
1 ) add a mapred_veryify section to riak_test.config
2 ) learn to use this " inclextra " bit of rebar to package tests.def
in the : #L57
PrivDir = case code:priv_dir(mapred_verify) of
{error, bad_name} ->
erlang:error("Could not determine priv dir for mapred_verify. Make sure that your riak_test.config contains \"deps\"");
PD -> PD
end,
MRVProps = [{node, hd(Nodes)},
{keycount, 1000},
{bodysize, 1},
{populate, true},
{runjobs, true},
{testdef, filename:join(PrivDir, "tests.def")}],
lager:info("Run mapred_verify"),
0 = mapred_verify:do_verification(MRVProps),
lager:info("~s: PASS", [atom_to_list(?MODULE)]),
pass.
|
a00b97e98f697ffde049af28c938db1173d7083021996cde69dc51d24bb89933 | sneeuwballen/benchpress | Task_queue.ml |
* { 1 Task queue for the server }
open Common
module M = CCLock
module Log = (val Logs.src_log (Logs.Src.create "benchpress.task-queue"))
type job = {
j_uuid: string;
j_action: Action.t;
j_task: Task.t; (* task this action comes from *)
j_interrupted: bool M.t;
mutable j_started_time: float; (* -1. if not started *)
mutable j_percent_completion: int;
mutable j_eta: float;
}
module Job = struct
type t = job
let task self = self.j_task
let pp out self =
Fmt.fprintf out "(@[task%s@ :uuid %s@ %a@])"
(if M.get self.j_interrupted then "[int]" else "")
self.j_uuid Action.pp self.j_action
let uuid self = self.j_uuid
let to_string = Fmt.to_string pp
let interrupt self = M.set self.j_interrupted true
let interrupted self = M.get self.j_interrupted
let time_elapsed self = Unix.gettimeofday() -. self.j_started_time
end
(* TODO: replace the blocking queue with a custom thing with priorities *)
type api_job = {
mutable aj_last_seen: float;
mutable aj_interrupted: bool;
}
type t = {
defs: Definitions.t M.t;
jobs: job CCBlockingQueue.t;
jobs_tbl: (string, Job.t) Hashtbl.t;
api_jobs: (string, api_job) Hashtbl.t; (* last seen+descr *)
cur: job option M.t;
}
let defs self = self.defs
let size self = CCBlockingQueue.size self.jobs
let cur_job self = M.get self.cur
let interrupt self ~uuid : bool =
match CCHashtbl.get self.jobs_tbl uuid, CCHashtbl.get self.api_jobs uuid with
| Some j, _ -> M.set j.j_interrupted true; true
| None, Some aj -> aj.aj_interrupted <- true; true
| None, None -> false
let create ?(defs=Definitions.empty) () : t =
{ jobs= CCBlockingQueue.create 64;
defs=M.create defs;
jobs_tbl=Hashtbl.create 8;
api_jobs=Hashtbl.create 8;
cur=M.create None;
}
let push self task : unit =
let j_uuid =
Uuidm.v4_gen (Random.State.make_self_init()) ()
|> Uuidm.to_string
in
let j = {
j_action=task.Task.action; j_task=task; j_uuid; j_eta=0.;
j_interrupted=M.create false; j_started_time= -1. ; j_percent_completion=0;
} in
Hashtbl.add self.jobs_tbl j_uuid j;
CCBlockingQueue.push self.jobs j
let loop self =
while true do
let job = CCBlockingQueue.take self.jobs in
Profile.with_ "task-queue.job" @@ fun () ->
job.j_started_time <- Unix.gettimeofday();
M.set self.cur (Some job);
Log.info (fun k->k "run job for task %s" job.j_task.Task.name);
let defs = M.get self.defs in
(* run the job *)
begin
let cb_progress = object
method on_progress ~percent ~elapsed_time:_ ~eta =
job.j_percent_completion <- percent;
job.j_eta <- eta;
method on_done = ()
end in
try
Exec_action.run defs job.j_action ~cb_progress
~interrupted:(fun () -> Job.interrupted job)
with
| Error.E e ->
Log.err
(fun k->k "error while running job %s:@ %a" job.j_task.Task.name Error.pp e);
| e ->
Log.err
(fun k->k "error while running job %s:@ %s"
job.j_task.Task.name (Printexc.to_string e));
end;
Hashtbl.remove self.jobs_tbl job.j_uuid; (* cleanup *)
M.set self.cur None;
done
| null | https://raw.githubusercontent.com/sneeuwballen/benchpress/180692844c7680d391fb97ea348084fe9b143ea1/src/core/Task_queue.ml | ocaml | task this action comes from
-1. if not started
TODO: replace the blocking queue with a custom thing with priorities
last seen+descr
run the job
cleanup |
* { 1 Task queue for the server }
open Common
module M = CCLock
module Log = (val Logs.src_log (Logs.Src.create "benchpress.task-queue"))
type job = {
j_uuid: string;
j_action: Action.t;
j_interrupted: bool M.t;
mutable j_percent_completion: int;
mutable j_eta: float;
}
module Job = struct
type t = job
let task self = self.j_task
let pp out self =
Fmt.fprintf out "(@[task%s@ :uuid %s@ %a@])"
(if M.get self.j_interrupted then "[int]" else "")
self.j_uuid Action.pp self.j_action
let uuid self = self.j_uuid
let to_string = Fmt.to_string pp
let interrupt self = M.set self.j_interrupted true
let interrupted self = M.get self.j_interrupted
let time_elapsed self = Unix.gettimeofday() -. self.j_started_time
end
type api_job = {
mutable aj_last_seen: float;
mutable aj_interrupted: bool;
}
type t = {
defs: Definitions.t M.t;
jobs: job CCBlockingQueue.t;
jobs_tbl: (string, Job.t) Hashtbl.t;
cur: job option M.t;
}
let defs self = self.defs
let size self = CCBlockingQueue.size self.jobs
let cur_job self = M.get self.cur
let interrupt self ~uuid : bool =
match CCHashtbl.get self.jobs_tbl uuid, CCHashtbl.get self.api_jobs uuid with
| Some j, _ -> M.set j.j_interrupted true; true
| None, Some aj -> aj.aj_interrupted <- true; true
| None, None -> false
let create ?(defs=Definitions.empty) () : t =
{ jobs= CCBlockingQueue.create 64;
defs=M.create defs;
jobs_tbl=Hashtbl.create 8;
api_jobs=Hashtbl.create 8;
cur=M.create None;
}
let push self task : unit =
let j_uuid =
Uuidm.v4_gen (Random.State.make_self_init()) ()
|> Uuidm.to_string
in
let j = {
j_action=task.Task.action; j_task=task; j_uuid; j_eta=0.;
j_interrupted=M.create false; j_started_time= -1. ; j_percent_completion=0;
} in
Hashtbl.add self.jobs_tbl j_uuid j;
CCBlockingQueue.push self.jobs j
let loop self =
while true do
let job = CCBlockingQueue.take self.jobs in
Profile.with_ "task-queue.job" @@ fun () ->
job.j_started_time <- Unix.gettimeofday();
M.set self.cur (Some job);
Log.info (fun k->k "run job for task %s" job.j_task.Task.name);
let defs = M.get self.defs in
begin
let cb_progress = object
method on_progress ~percent ~elapsed_time:_ ~eta =
job.j_percent_completion <- percent;
job.j_eta <- eta;
method on_done = ()
end in
try
Exec_action.run defs job.j_action ~cb_progress
~interrupted:(fun () -> Job.interrupted job)
with
| Error.E e ->
Log.err
(fun k->k "error while running job %s:@ %a" job.j_task.Task.name Error.pp e);
| e ->
Log.err
(fun k->k "error while running job %s:@ %s"
job.j_task.Task.name (Printexc.to_string e));
end;
M.set self.cur None;
done
|
2cf410b0b75daa9ecb9ebd463ffd399f647ee67319efb4393dff5bd0e46e5700 | cachix/cachix | SigningKeyCreate.hs | module Cachix.Types.SigningKeyCreate
( SigningKeyCreate (..),
)
where
import Data.Aeson
( FromJSON,
ToJSON,
)
import Data.Swagger
import Protolude
| that a signing secret key was created , by sharing the public key .
newtype SigningKeyCreate = SigningKeyCreate
{ publicKey :: Text
}
deriving (Show, Generic, FromJSON, ToJSON, ToSchema)
| null | https://raw.githubusercontent.com/cachix/cachix/9ab7727239262e3727081278c1a988245053f454/cachix-api/src/Cachix/Types/SigningKeyCreate.hs | haskell | module Cachix.Types.SigningKeyCreate
( SigningKeyCreate (..),
)
where
import Data.Aeson
( FromJSON,
ToJSON,
)
import Data.Swagger
import Protolude
| that a signing secret key was created , by sharing the public key .
newtype SigningKeyCreate = SigningKeyCreate
{ publicKey :: Text
}
deriving (Show, Generic, FromJSON, ToJSON, ToSchema)
| |
fc2f139a65b7ece36fdce10bfcb775dfcf24f8e568b1c10a663dbdc49d46acb8 | mzp/coq-ide-for-ios | tactic_debug.ml | (************************************************************************)
(*  v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, *   INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2010     *)
(*   \VV/  **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(*i $Id: tactic_debug.ml 13323 2010-07-24 15:57:30Z herbelin $ i*)
open Names
open Constrextern
open Pp
open Tacexpr
open Termops
(* Printer hooks.  Each ref below holds a printing function that is
   installed later through the corresponding setter — presumably to break
   a dependency cycle with the printing modules (TODO confirm at the call
   sites of the setters).  The [assert false] defaults make any use of a
   printer before installation fail loudly. *)
(* Printer for a tactic expression (one argument). *)
let prtac = ref (fun _ -> assert false)
let set_tactic_printer f = prtac := f
(* Printer for a match pattern (takes two arguments). *)
let prmatchpatt = ref (fun _ _ -> assert false)
let set_match_pattern_printer f = prmatchpatt := f
(* Printer for a match rule (one argument). *)
let prmatchrl = ref (fun _ -> assert false)
let set_match_rule_printer f = prmatchrl := f
(* This module intends to be a beginning of debugger for tactic expressions.
Currently, it is quite simple and we can hope to have, in the future, a more
complete panel of commands dedicated to a proof assistant framework *)
(* Debug information: the debugger state threaded through tactic
   interpretation.  [DebugOn n] means the debugger is active at nesting
   level [n] (the number shown in the "TcDebug (n) > " prompt);
   [DebugOff] disables debugging for the corresponding sub-execution. *)
type debug_info =
  | DebugOn of int
  | DebugOff
(* Exception handlers: each ref holds a function turning a caught
   exception into a printable explanation.  The defaults ignore the
   exception and return the empty message [mt()]; real implementations
   are installed elsewhere by assigning the refs.
   [explain_logic_error_no_anomaly] is presumably the variant that does
   not report anomalies — TODO confirm at the assignment site. *)
let explain_logic_error = ref (fun e -> mt())
let explain_logic_error_no_anomaly = ref (fun e -> mt())
(* Prints the goal *)
let db_pr_goal g =
msgnl (str "Goal:" ++ fnl () ++ Proof_trees.db_pr_goal (Refiner.sig_it g))
(* Prints the commands *)
let help () =
msgnl (str "Commands: <Enter>=Continue" ++ fnl() ++
str " h/?=Help" ++ fnl() ++
str " r<num>=Run <num> times" ++ fnl() ++
str " s=Skip" ++ fnl() ++
str " x=Exit")
(* Prints the goal and the command to be executed *)
let goal_com g tac =
begin
db_pr_goal g;
msg (str "Going to execute:" ++ fnl () ++ !prtac tac ++ fnl ())
end
(* Gives the number of a run command *)
let run_com inst =
if (String.get inst 0)='r' then
let num = int_of_string (String.sub inst 1 ((String.length inst)-1)) in
if num>0 then num
else raise (Invalid_argument "run_com")
else
raise (Invalid_argument "run_com")
let allskip = ref 0
let skip = ref 0
(* Prints the run counter *)
let run ini =
if not ini then
for i=1 to 2 do
print_char (Char.chr 8);print_char (Char.chr 13)
done;
msg (str "Executed expressions: " ++ int (!allskip - !skip) ++
fnl() ++ fnl())
(* Prints the prompt *)
let rec prompt level =
begin
msg (fnl () ++ str "TcDebug (" ++ int level ++ str ") > ");
flush stdout;
let exit () = skip:=0;allskip:=0;raise Sys.Break in
let inst = try read_line () with End_of_file -> exit () in
match inst with
| "" -> true
| "s" -> false
| "x" -> print_char (Char.chr 8); exit ()
| "h"| "?" ->
begin
help ();
prompt level
end
| _ ->
(try let ctr=run_com inst in skip:=ctr;allskip:=ctr;run true;true
with Failure _ | Invalid_argument _ -> prompt level)
end
(* Prints the state and waits for an instruction *)
let debug_prompt lev g tac f =
(* What to print and to do next *)
let continue =
if !skip = 0 then (goal_com g tac; prompt lev)
else (decr skip; run false; if !skip=0 then allskip:=0; true) in
(* What to execute *)
try f (if continue then DebugOn (lev+1) else DebugOff)
with e ->
skip:=0; allskip:=0;
if Logic.catchable_exception e then
ppnl (str "Level " ++ int lev ++ str ": " ++ !explain_logic_error e);
raise e
(* Prints a constr *)
let db_constr debug env c =
if debug <> DebugOff & !skip = 0 then
msgnl (str "Evaluated term: " ++ print_constr_env env c)
(* Prints the pattern rule *)
let db_pattern_rule debug num r =
if debug <> DebugOff & !skip = 0 then
begin
msgnl (str "Pattern rule " ++ int num ++ str ":");
msgnl (str "|" ++ spc () ++ !prmatchrl r)
end
(* Prints the hypothesis pattern identifier if it exists *)
let hyp_bound = function
| Anonymous -> " (unbound)"
| Name id -> " (bound to "^(Names.string_of_id id)^")"
(* Prints a matched hypothesis *)
let db_matched_hyp debug env (id,_,c) ido =
if debug <> DebugOff & !skip = 0 then
msgnl (str "Hypothesis " ++
str ((Names.string_of_id id)^(hyp_bound ido)^
" has been matched: ") ++ print_constr_env env c)
(* Prints the matched conclusion *)
let db_matched_concl debug env c =
if debug <> DebugOff & !skip = 0 then
msgnl (str "Conclusion has been matched: " ++ print_constr_env env c)
(* Prints a success message when the goal has been matched *)
let db_mc_pattern_success debug =
if debug <> DebugOff & !skip = 0 then
msgnl (str "The goal has been successfully matched!" ++ fnl() ++
str "Let us execute the right-hand side part..." ++ fnl())
let pp_match_pattern env = function
| Term c -> Term (extern_constr_pattern (names_of_rel_context env) c)
| Subterm (b,o,c) ->
Subterm (b,o,(extern_constr_pattern (names_of_rel_context env) c))
(* Prints a failure message for an hypothesis pattern *)
let db_hyp_pattern_failure debug env (na,hyp) =
if debug <> DebugOff & !skip = 0 then
msgnl (str ("The pattern hypothesis"^(hyp_bound na)^
" cannot match: ") ++
!prmatchpatt env hyp)
(* Prints a matching failure message for a rule *)
let db_matching_failure debug =
if debug <> DebugOff & !skip = 0 then
msgnl (str "This rule has failed due to matching errors!" ++ fnl() ++
str "Let us try the next one...")
(* Prints an evaluation failure message for a rule *)
let db_eval_failure debug s =
if debug <> DebugOff & !skip = 0 then
let s = str "message \"" ++ s ++ str "\"" in
msgnl
(str "This rule has failed due to \"Fail\" tactic (" ++
s ++ str ", level 0)!" ++ fnl() ++ str "Let us try the next one...")
(* Prints a logic failure message for a rule *)
let db_logic_failure debug err =
if debug <> DebugOff & !skip = 0 then
begin
msgnl (!explain_logic_error err);
msgnl (str "This rule has failed due to a logic error!" ++ fnl() ++
str "Let us try the next one...")
end
| null | https://raw.githubusercontent.com/mzp/coq-ide-for-ios/4cdb389bbecd7cdd114666a8450ecf5b5f0391d3/coqlib/proofs/tactic_debug.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
This module intends to be a beginning of debugger for tactic expressions.
Currently, it is quite simple and we can hope to have, in the future, a more
complete panel of commands dedicated to a proof assistant framework
Debug information
An exception handler
Prints the goal
Prints the commands
Prints the goal and the command to be executed
Gives the number of a run command
Prints the run counter
Prints the prompt
Prints the state and waits for an instruction
What to print and to do next
What to execute
Prints a constr
Prints the pattern rule
Prints the hypothesis pattern identifier if it exists
Prints a matched hypothesis
Prints the matched conclusion
Prints a success message when the goal has been matched
Prints a failure message for an hypothesis pattern
Prints a matching failure message for a rule
Prints an evaluation failure message for a rule
Prints a logic failure message for a rule | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
i $ I d : tactic_debug.ml 13323 2010 - 07 - 24 15:57:30Z herbelin $ i
open Names
open Constrextern
open Pp
open Tacexpr
open Termops
let prtac = ref (fun _ -> assert false)
let set_tactic_printer f = prtac := f
let prmatchpatt = ref (fun _ _ -> assert false)
let set_match_pattern_printer f = prmatchpatt := f
let prmatchrl = ref (fun _ -> assert false)
let set_match_rule_printer f = prmatchrl := f
type debug_info =
| DebugOn of int
| DebugOff
let explain_logic_error = ref (fun e -> mt())
let explain_logic_error_no_anomaly = ref (fun e -> mt())
let db_pr_goal g =
msgnl (str "Goal:" ++ fnl () ++ Proof_trees.db_pr_goal (Refiner.sig_it g))
let help () =
msgnl (str "Commands: <Enter>=Continue" ++ fnl() ++
str " h/?=Help" ++ fnl() ++
str " r<num>=Run <num> times" ++ fnl() ++
str " s=Skip" ++ fnl() ++
str " x=Exit")
let goal_com g tac =
begin
db_pr_goal g;
msg (str "Going to execute:" ++ fnl () ++ !prtac tac ++ fnl ())
end
let run_com inst =
if (String.get inst 0)='r' then
let num = int_of_string (String.sub inst 1 ((String.length inst)-1)) in
if num>0 then num
else raise (Invalid_argument "run_com")
else
raise (Invalid_argument "run_com")
let allskip = ref 0
let skip = ref 0
let run ini =
if not ini then
for i=1 to 2 do
print_char (Char.chr 8);print_char (Char.chr 13)
done;
msg (str "Executed expressions: " ++ int (!allskip - !skip) ++
fnl() ++ fnl())
let rec prompt level =
begin
msg (fnl () ++ str "TcDebug (" ++ int level ++ str ") > ");
flush stdout;
let exit () = skip:=0;allskip:=0;raise Sys.Break in
let inst = try read_line () with End_of_file -> exit () in
match inst with
| "" -> true
| "s" -> false
| "x" -> print_char (Char.chr 8); exit ()
| "h"| "?" ->
begin
help ();
prompt level
end
| _ ->
(try let ctr=run_com inst in skip:=ctr;allskip:=ctr;run true;true
with Failure _ | Invalid_argument _ -> prompt level)
end
let debug_prompt lev g tac f =
let continue =
if !skip = 0 then (goal_com g tac; prompt lev)
else (decr skip; run false; if !skip=0 then allskip:=0; true) in
try f (if continue then DebugOn (lev+1) else DebugOff)
with e ->
skip:=0; allskip:=0;
if Logic.catchable_exception e then
ppnl (str "Level " ++ int lev ++ str ": " ++ !explain_logic_error e);
raise e
let db_constr debug env c =
if debug <> DebugOff & !skip = 0 then
msgnl (str "Evaluated term: " ++ print_constr_env env c)
let db_pattern_rule debug num r =
if debug <> DebugOff & !skip = 0 then
begin
msgnl (str "Pattern rule " ++ int num ++ str ":");
msgnl (str "|" ++ spc () ++ !prmatchrl r)
end
let hyp_bound = function
| Anonymous -> " (unbound)"
| Name id -> " (bound to "^(Names.string_of_id id)^")"
let db_matched_hyp debug env (id,_,c) ido =
if debug <> DebugOff & !skip = 0 then
msgnl (str "Hypothesis " ++
str ((Names.string_of_id id)^(hyp_bound ido)^
" has been matched: ") ++ print_constr_env env c)
let db_matched_concl debug env c =
if debug <> DebugOff & !skip = 0 then
msgnl (str "Conclusion has been matched: " ++ print_constr_env env c)
let db_mc_pattern_success debug =
if debug <> DebugOff & !skip = 0 then
msgnl (str "The goal has been successfully matched!" ++ fnl() ++
str "Let us execute the right-hand side part..." ++ fnl())
let pp_match_pattern env = function
| Term c -> Term (extern_constr_pattern (names_of_rel_context env) c)
| Subterm (b,o,c) ->
Subterm (b,o,(extern_constr_pattern (names_of_rel_context env) c))
let db_hyp_pattern_failure debug env (na,hyp) =
if debug <> DebugOff & !skip = 0 then
msgnl (str ("The pattern hypothesis"^(hyp_bound na)^
" cannot match: ") ++
!prmatchpatt env hyp)
let db_matching_failure debug =
if debug <> DebugOff & !skip = 0 then
msgnl (str "This rule has failed due to matching errors!" ++ fnl() ++
str "Let us try the next one...")
let db_eval_failure debug s =
if debug <> DebugOff & !skip = 0 then
let s = str "message \"" ++ s ++ str "\"" in
msgnl
(str "This rule has failed due to \"Fail\" tactic (" ++
s ++ str ", level 0)!" ++ fnl() ++ str "Let us try the next one...")
let db_logic_failure debug err =
if debug <> DebugOff & !skip = 0 then
begin
msgnl (!explain_logic_error err);
msgnl (str "This rule has failed due to a logic error!" ++ fnl() ++
str "Let us try the next one...")
end
|
3c3229048d80c27a7aec73bd4d5d26cf89a53ae0b290b37ceed62d693d62f80d | craff/simple_httpd | compile_headers_tbl.ml |
let filename =
if Array.length Sys.argv <> 2 then
begin
Printf.eprintf "usage: %s path_to_field_names.csv" Sys.argv.(0);
exit 1
end;
Sys.argv.(1)
let ch = open_in filename
let _ = input_line ch
let rec fn acc =
try
let line = input_line ch in
if String.length line > 0 && Char.lowercase_ascii line.[0] <> Char.uppercase_ascii line.[0] then
begin
let line = String.split_on_char ',' line in
let line = List.map String.trim line in
fn (line :: acc)
end
else acc
with
End_of_file -> acc
let to_cstr = String.map (function '-' -> '_' | c -> c)
let lines = List.rev (fn [])
let fields = List.map (function [] -> assert false | (h::_) -> h) lines
let _ =
Printf.printf "type t =\n";
List.iter (fun h -> Printf.printf " | %s\n" (to_cstr h)) fields;
Printf.printf "\n%!"
let _ = Printf.printf "
let eq (h1:t) (h2:t) = h1 = h2
"
let _ =
Printf.printf {|
let to_string = function
|};
List.iter (fun h -> Printf.printf " | %s -> %S\n" (to_cstr h) h) fields;
Printf.printf "\n%!"
type tree = { leaf : string option; nodes : (char * (char * tree)) list }
let empty = { leaf = None; nodes = [] }
let rec sort tree =
let nodes = List.sort (fun (c1,_) (c2,_) -> compare c1 c2) tree.nodes in
let nodes = List.map (fun (c,(d,s)) -> (c,(d,sort s))) nodes in
{ tree with nodes }
let tree : tree =
let rec fn tree h0 h =
let len = String.length h in
if len = 0 then
{ tree with leaf = Some h0 }
else (
let c = Char.uppercase_ascii h.[0] in
let h' = String.sub h 1 (len - 1) in
let son = try snd (List.assoc c tree.nodes)
with Not_found -> empty
in
let nodes = List.filter (fun (c',_) -> c <> c') tree.nodes in
let son = fn son h0 h' in
{ tree with nodes = (c,(h.[0], son)) :: nodes })
in
let rec gn tree = function
| [] -> tree
| h :: l -> gn (fn tree h h) l
in
sort (gn empty fields)
type tree2 = { leaf2 : string option; nodes2 : tree2 option array;
id2 : int; offset2: int; acc2 : string }
let cid = ref 0
let rec compile_nodes acc l =
if l = [] then (0, [||]) else
let c0 = match l with [] -> assert false
| (c,_)::_ -> c in
let c1 = match List.rev l with [] -> assert false
| (c,_)::_ -> c in
let l = ref l in
let offset = Char.code c0 in
let size = Char.code c1 - offset + 1 in
(offset, Array.init size (fun i ->
match !l with
| [] -> assert false
| (c,(d,t)) :: r ->
if Char.code c - offset = i then (
l := r;
Some (compile_tree (acc ^ String.make 1 d) t))
else
None))
and compile_tree acc2 t =
let id2 = !cid in incr cid;
let (offset2, nodes2) = compile_nodes acc2 t.nodes in
{ id2; leaf2 = t.leaf; nodes2; acc2; offset2}
let ctree = compile_tree "" tree
let rec output_nodes r =
Array.iter (function
| None -> ()
| Some r -> output_nodes r) r.nodes2;
Printf.printf "let leaf_%d = %s\n"
r.id2 (match r.leaf2 with None -> Printf.sprintf "Bad %S" r.acc2
| Some c -> Printf.sprintf "Good %s" (to_cstr c));
Printf.printf "let offset_%d = %d\n"
r.id2 r.offset2;
Printf.printf "let tbl_%d = [|%s|]\n" r.id2
(String.concat ";" (Array.to_list
(Array.mapi (fun i -> function
| None -> Printf.sprintf "{leaf=Bad %S;tbl=[||];offset=0}" (r.acc2 ^ String.make 1 (Char.chr (i + r.offset2)))
| Some r -> Printf.sprintf "{leaf=leaf_%d;tbl=tbl_%d;offset=offset_%d}" r.id2 r.id2 r.id2)
r.nodes2)))
let _ =
Printf.printf "type leaf = Good of t | Bad of string\n
type cell = { leaf : leaf; tbl : cell array; offset: int }\n";
output_nodes ctree
let _ = Printf.printf "%s" {|
exception Invalid_header of string
exception End_of_headers
let cell0 = {leaf=leaf_0; tbl=tbl_0; offset=offset_0}
let fin_fn {leaf; _} =
match leaf with Good h -> h
| Bad s -> raise (Invalid_header s)
let fold_fn ({leaf; tbl; offset} as acc) c =
if c = '\r' && acc == cell0 then raise End_of_headers;
if c = ':' then raise Exit else
let i = Char.code (Char.uppercase_ascii c) - offset in
if i >= 0 && i < Array.length tbl then
tbl.(i)
else
raise (Invalid_header (
(match leaf with Good h -> to_string h
| Bad s -> s) ^ String.make 1 c))
|}
| null | https://raw.githubusercontent.com/craff/simple_httpd/fdd326dca29e1b3bd94b60291d38f829c8f8b9e2/src/gen/compile_headers_tbl.ml | ocaml |
let filename =
if Array.length Sys.argv <> 2 then
begin
Printf.eprintf "usage: %s path_to_field_names.csv" Sys.argv.(0);
exit 1
end;
Sys.argv.(1)
let ch = open_in filename
let _ = input_line ch
let rec fn acc =
try
let line = input_line ch in
if String.length line > 0 && Char.lowercase_ascii line.[0] <> Char.uppercase_ascii line.[0] then
begin
let line = String.split_on_char ',' line in
let line = List.map String.trim line in
fn (line :: acc)
end
else acc
with
End_of_file -> acc
let to_cstr = String.map (function '-' -> '_' | c -> c)
let lines = List.rev (fn [])
let fields = List.map (function [] -> assert false | (h::_) -> h) lines
let _ =
Printf.printf "type t =\n";
List.iter (fun h -> Printf.printf " | %s\n" (to_cstr h)) fields;
Printf.printf "\n%!"
let _ = Printf.printf "
let eq (h1:t) (h2:t) = h1 = h2
"
let _ =
Printf.printf {|
let to_string = function
|};
List.iter (fun h -> Printf.printf " | %s -> %S\n" (to_cstr h) h) fields;
Printf.printf "\n%!"
type tree = { leaf : string option; nodes : (char * (char * tree)) list }
let empty = { leaf = None; nodes = [] }
let rec sort tree =
let nodes = List.sort (fun (c1,_) (c2,_) -> compare c1 c2) tree.nodes in
let nodes = List.map (fun (c,(d,s)) -> (c,(d,sort s))) nodes in
{ tree with nodes }
let tree : tree =
let rec fn tree h0 h =
let len = String.length h in
if len = 0 then
{ tree with leaf = Some h0 }
else (
let c = Char.uppercase_ascii h.[0] in
let h' = String.sub h 1 (len - 1) in
let son = try snd (List.assoc c tree.nodes)
with Not_found -> empty
in
let nodes = List.filter (fun (c',_) -> c <> c') tree.nodes in
let son = fn son h0 h' in
{ tree with nodes = (c,(h.[0], son)) :: nodes })
in
let rec gn tree = function
| [] -> tree
| h :: l -> gn (fn tree h h) l
in
sort (gn empty fields)
type tree2 = { leaf2 : string option; nodes2 : tree2 option array;
id2 : int; offset2: int; acc2 : string }
let cid = ref 0
let rec compile_nodes acc l =
if l = [] then (0, [||]) else
let c0 = match l with [] -> assert false
| (c,_)::_ -> c in
let c1 = match List.rev l with [] -> assert false
| (c,_)::_ -> c in
let l = ref l in
let offset = Char.code c0 in
let size = Char.code c1 - offset + 1 in
(offset, Array.init size (fun i ->
match !l with
| [] -> assert false
| (c,(d,t)) :: r ->
if Char.code c - offset = i then (
l := r;
Some (compile_tree (acc ^ String.make 1 d) t))
else
None))
and compile_tree acc2 t =
let id2 = !cid in incr cid;
let (offset2, nodes2) = compile_nodes acc2 t.nodes in
{ id2; leaf2 = t.leaf; nodes2; acc2; offset2}
let ctree = compile_tree "" tree
let rec output_nodes r =
Array.iter (function
| None -> ()
| Some r -> output_nodes r) r.nodes2;
Printf.printf "let leaf_%d = %s\n"
r.id2 (match r.leaf2 with None -> Printf.sprintf "Bad %S" r.acc2
| Some c -> Printf.sprintf "Good %s" (to_cstr c));
Printf.printf "let offset_%d = %d\n"
r.id2 r.offset2;
Printf.printf "let tbl_%d = [|%s|]\n" r.id2
(String.concat ";" (Array.to_list
(Array.mapi (fun i -> function
| None -> Printf.sprintf "{leaf=Bad %S;tbl=[||];offset=0}" (r.acc2 ^ String.make 1 (Char.chr (i + r.offset2)))
| Some r -> Printf.sprintf "{leaf=leaf_%d;tbl=tbl_%d;offset=offset_%d}" r.id2 r.id2 r.id2)
r.nodes2)))
let _ =
Printf.printf "type leaf = Good of t | Bad of string\n
type cell = { leaf : leaf; tbl : cell array; offset: int }\n";
output_nodes ctree
let _ = Printf.printf "%s" {|
exception Invalid_header of string
exception End_of_headers
let cell0 = {leaf=leaf_0; tbl=tbl_0; offset=offset_0}
let fin_fn {leaf; _} =
match leaf with Good h -> h
| Bad s -> raise (Invalid_header s)
let fold_fn ({leaf; tbl; offset} as acc) c =
if c = '\r' && acc == cell0 then raise End_of_headers;
if c = ':' then raise Exit else
let i = Char.code (Char.uppercase_ascii c) - offset in
if i >= 0 && i < Array.length tbl then
tbl.(i)
else
raise (Invalid_header (
(match leaf with Good h -> to_string h
| Bad s -> s) ^ String.make 1 c))
|}
| |
0e3fce492b64430083b32edbca13d010aa026cb931aad5c397f99f5cecbc3799 | emina/rosette | websynthlib.rkt | #lang rosette
(require "dom.rkt")
(provide (except-out (all-defined-out) tags))
(define-syntax-rule (define-tags ts)
(tags ts))
(define tag? integer?)
(define tags (make-parameter (cons (hash "" 0) (vector ""))
(lambda (vs)
(cons
(for/hash ([(s i) (in-indexed (cons "" vs))])
(values s i))
(list->vector (cons "" vs))))))
(define (tag str) (hash-ref (car (tags)) str))
(define (label i) (vector-ref (cdr (tags)) i))
Maximum depth of the DOM , so we know how many variables
to allocate . ( Writen by )
(define (depth dom)
(if (DOMNode? dom)
(+ 1 (apply max (cons 0 (map depth (DOMNode-content dom)))))
0))
(define (size dom )
(if (DOMNode? dom)
(+ 1 (apply + (map size (DOMNode-content dom))))
0))
Checker function that returns true iff a prefix of the
given path connects the source node to the sink node . ( Writen by )
(define (path? path source sink)
(or (and (equal? source sink)
(andmap (lambda (p) (equal? p (tag ""))) path))
(and (DOMNode? source)
(not (null? path))
(equal? (car path) (tag (DOMNode-tagname source)))
(ormap (lambda (child) (path? (cdr path) child sink)) (DOMNode-content source)))))
; Convert the final evaluated solution into a zpath string
(define (synthsis_solution->zpath zpath_list)
;(string-append "/" (string-join (remove* (list "") (cdr zpath_list)) "/")))
(string-append "/" (string-join (remove* (list "") zpath_list) "/")))
; Mask function
(define (generate-mask zpath1 zpath2 mask depth)
(unless (= (length zpath1) 0)
(assert (eq? (car mask)
(eq? (car zpath1) (car zpath2))))
(generate-mask (cdr zpath1) (cdr zpath2) (cdr mask) depth)))
; Zip
; Found at -taste-of-racket/ in the comments.
(define (zip list . lists)
(apply map (cons (lambda (x . xs) (cons x xs)) (cons list lists))))
| null | https://raw.githubusercontent.com/emina/rosette/a64e2bccfe5876c5daaf4a17c5a28a49e2fbd501/sdsl/websynth/websynthlib.rkt | racket | Convert the final evaluated solution into a zpath string
(string-append "/" (string-join (remove* (list "") (cdr zpath_list)) "/")))
Mask function
Zip
Found at -taste-of-racket/ in the comments. | #lang rosette
(require "dom.rkt")
(provide (except-out (all-defined-out) tags))
(define-syntax-rule (define-tags ts)
(tags ts))
(define tag? integer?)
(define tags (make-parameter (cons (hash "" 0) (vector ""))
(lambda (vs)
(cons
(for/hash ([(s i) (in-indexed (cons "" vs))])
(values s i))
(list->vector (cons "" vs))))))
(define (tag str) (hash-ref (car (tags)) str))
(define (label i) (vector-ref (cdr (tags)) i))
Maximum depth of the DOM , so we know how many variables
to allocate . ( Writen by )
(define (depth dom)
(if (DOMNode? dom)
(+ 1 (apply max (cons 0 (map depth (DOMNode-content dom)))))
0))
(define (size dom )
(if (DOMNode? dom)
(+ 1 (apply + (map size (DOMNode-content dom))))
0))
Checker function that returns true iff a prefix of the
given path connects the source node to the sink node . ( Writen by )
(define (path? path source sink)
(or (and (equal? source sink)
(andmap (lambda (p) (equal? p (tag ""))) path))
(and (DOMNode? source)
(not (null? path))
(equal? (car path) (tag (DOMNode-tagname source)))
(ormap (lambda (child) (path? (cdr path) child sink)) (DOMNode-content source)))))
(define (synthsis_solution->zpath zpath_list)
(string-append "/" (string-join (remove* (list "") zpath_list) "/")))
(define (generate-mask zpath1 zpath2 mask depth)
(unless (= (length zpath1) 0)
(assert (eq? (car mask)
(eq? (car zpath1) (car zpath2))))
(generate-mask (cdr zpath1) (cdr zpath2) (cdr mask) depth)))
(define (zip list . lists)
(apply map (cons (lambda (x . xs) (cons x xs)) (cons list lists))))
|
665d82ca88760214ea66cfe68709e35b0634847fed7ef84e20120f5acb853144 | Average-user/gozar | views.cljs | (ns gozar.views
(:require [reagent.core :as r]
[gozar.util :as u]
[goog.events :as events]
[gozar.sgfparser :as parser]
[re-frame.core :as re-frame]))
(def r 0.46)
(defn distance [[a b] [c d]]
(let [abs (fn [x] (if (< x 0) (* -1 x) x))]
(+ (abs (- a c)) (abs (- b d)))))
(defn get-move []
(if @(re-frame/subscribe [:analyze-mode])
@(re-frame/subscribe [:custom-move])
@(re-frame/subscribe [:move])))
(defn get-moves []
(if @(re-frame/subscribe [:analyze-mode])
@(re-frame/subscribe [:custom-moves])
@(re-frame/subscribe [:moves])))
(defn draw-line [x1 y1 x2 y2]
[:line {:x1 x1 :y1 y1 :x2 x2 :y2 y2 :style {:stroke "rgb(0,0,0)" :stroke-width 0.03}}])
(defn draw-dots [size]
(map (fn [[x y]] [:circle {:cx (+ 1 x) :cy (+ 1 y) :r 0.10 :fill "black"}])
(case size
19 [[3 3] [15 15] [3 15] [15 3] [9 3] [3 9] [9 9] [9 15] [15 9]]
13 [[3 3] [9 3] [3 9] [9 9] [6 6]]
9 [[2 2] [6 2] [2 6] [6 6] [4 4]]
[])))
(defn draw-stones [{:keys [stones turn]}]
(let [lm (:location (get (get-moves) (dec (get-move))))
am @(re-frame/subscribe [:analyze-mode])
moves @(re-frame/subscribe [:moves])
move @(re-frame/subscribe [:move])]
(map (fn [[[y x] t]]
(case t
:black [:g [:circle.n {:cx (inc x) :cy (inc y) :r r :fill "black"}]
(when (= lm [y x])
[:circle {:cx (inc x) :cy (inc y) :r (/ r 2) :stroke "white" :stroke-width "0.08" :fill "none"}])]
:white [:g [:circle.n {:cx (inc x) :cy (inc y) :r r :fill "white"}]
(when (= lm [y x])
[:circle {:cx (inc x) :cy (inc y) :r (/ r 2) :stroke "black" :stroke-width "0.08" :fill "none"}])]
:free [:g {:on-click #(cond
am (re-frame/dispatch [:add-custom-move {:player turn :location [y x]}])
(= (:location (get moves move)) [y x]) (do (re-frame/dispatch [:inc-move 1])
(re-frame/dispatch [:set-attempt-to nil]))
:else (re-frame/dispatch [:set-attempt-to [y x]]))}
(case (= turn :white)
true [:circle.w {:cx (inc x) :cy (inc y) :r r}]
false [:circle.b {:cx (inc x) :cy (inc y) :r r}]
[:circle {:cx (inc x) :cy (inc y) :r r :fill "rgba(0,0,0,0)"}])]))
stones)))
(defn draw-board-base [{:keys [stones] :as board} size]
(into [:g [:rect {:x 0 :y 0 :width (inc size) :height (inc size) :fill "#dfbd6d"
:rx "0.20" :ry "0.20"}]]
(concat (draw-dots size)
(mapv (fn [[x1 y1 x2 y2]] (draw-line (inc x1) (inc y1) (inc x2) (inc y2)))
(concat (map vector (range size) (repeat 0) (range size) (repeat (dec size)))
(map vector (repeat 0) (range size) (repeat (dec size)) (range size))))
(draw-stones board)
(map (fn [[x l]] [:text.coordinate {:x x :y 0.4} (str l)])
(map (fn [a b] [(- a 0.2) b]) (range 1 (inc size)) "ABCDEFGHJKLMNOPQRST"))
(map (fn [[x l]] [:text.coordinate {:x x :y (+ size 0.9)} (str l)])
(map (fn [a b] [(- a 0.2) b]) (range 1 (inc size)) "ABCDEFGHJKLMNOPQRST"))
(map (fn [[y l]] [:text.coordinate {:x 0.04 :y y} (str l)])
(map (fn [a b] [(+ a 0.2) b]) (range 1 (inc size)) (range size 0 -1)))
(map (fn [[y l]] [:text.coordinate {:x (+ size 0.5) :y y} (str l)])
(map (fn [a b] [(+ a 0.2) b]) (range 1 (inc size)) (range size 0 -1))))))
(defn board-svg []
(let [size @(re-frame/subscribe [:board-size])]
[:svg {:width "97%"
:height "97%"
:view-box (str "0 0 " (+ 1 size) " " (+ 1 size))}
(let [board @(re-frame/subscribe [:board])
moves (get-moves)
move (get-move)]
[draw-board-base (u/apply-moves board (subvec moves 0 move))
size])]))
(defn sgf-file-input []
[:div.element
[:label.boxed
[:input
{:type "file"
:accept ".sgf"
:on-change
(fn [e]
(let [file (first (array-seq (.. e -target -files)))
file-reader (js/FileReader.)]
(set! (.-onload file-reader)
(fn [e']
(let [game (-> e' .-target .-result parser/parse-game)
nb (u/create-board (:board-size game)
(:handicap game)
(:turn game)
(:komi game))]
(re-frame/dispatch
[:change-of-file
nb
(vec (:moves game))
{:result (:result game)
:player-black (:player-black game)
:player-white (:player-white game)}
(:handicap-n game)
(.-name file)
(:board-size game)]))))
(.readAsText file-reader file)))}]
[:span
[:span.icon
[:i.fas.fa-upload]]
@(re-frame/subscribe [:filename])]]])
(defn info-table []
(let [info @(re-frame/subscribe [:info])]
[:table
[:thead
[:tr
[:th [:abbr "Move"]]
[:th [:abbr "Komi"]]
[:th [:abbr "Handicap"]]
[:th [:abbr "Black"]]
[:th [:abbr "White"]]
[:th [:abbr "Result"]]]]
[:tfoot
[:tr
[:th [:abbr (get-move)]]
[:th [:abbr (:komi @(re-frame/subscribe [:board]))]]
[:th [:abbr @(re-frame/subscribe [:handicap])]]
[:th [:abbr (:player-black info)]]
[:th [:abbr (:player-white info)]]
[:th [:abbr (:result info)]]]]]))
(defn moves-range []
[:div.element {:style {:margin-top "1em"}}
[:center
[:input {:type "range"
:min 0
:max (count (get-moves))
:value (get-move)
:style {:width "85%"}
:on-change #(re-frame/dispatch [:change-move (js/parseInt (.-target.value %))])}]
[:span
(count (get-moves))]
[:div
[:a.icon {:on-click #(do (re-frame/dispatch [:dec-move 10])
(re-frame/dispatch [:set-attempt-to nil]))}
[:span.button-icon [:i.fa.fa-angle-double-left]]]
[:a.icon {:on-click #(do (re-frame/dispatch [:dec-move 1])
(re-frame/dispatch [:set-attempt-to nil]))}
[:span.button-icon [:i.fa.fa-angle-left]]]
[:a.icon {:on-click #(do (re-frame/dispatch [:inc-move 1])
(re-frame/dispatch [:set-attempt-to nil]))}
[:span.button-icon [:i.fa.fa-angle-right]]]
[:a.icon {:on-click #(do (re-frame/dispatch [:inc-move 10])
(re-frame/dispatch [:set-attempt-to nil]))}
[:span.button-icon [:i.fa.fa-angle-double-right]]]]]])
(defn analyze-mode-checkbox []
[:div.element
[:a.boxed {:on-click #(re-frame/dispatch [:analyze-mode-change])}
[:span.icon (if @(re-frame/subscribe [:analyze-mode])
[:i.fa.fa-lightbulb]
[:i.fa.fa-code-branch])]
[:span
(if @(re-frame/subscribe [:analyze-mode])
"Change to guess mode"
"Change to analyze mode")]]])
(defn svg-progress-bar [min value color]
[:div.element
[:svg {:width "100%"
:height "1.1em"}
[:g
[:rect {:width "98%" :height "100%" :fill "rgb(200,200,200)" :rx "10" :ry "10"}]
[:rect {:width (str (- value 2) "%") :height "100%" :fill color :rx "10" :ry "10"}]]]])
(defn how-close-bar []
[:div.element
(let [moves @(re-frame/subscribe [:moves])
attempt @(re-frame/subscribe [:attempt])
nl (:location (get moves @(re-frame/subscribe [:move])))
faraway (->> @(re-frame/subscribe [:board])
(:stones)
(keys)
(map #(distance % nl))
(reduce max))
n (if (or (empty? moves) (nil? attempt))
100
(/ (* 100 (- faraway (distance attempt nl))) faraway))]
(cond
(> n 80) [svg-progress-bar 0 n "#3BC84A"]
:else [svg-progress-bar 0 n "#F3475B"]))])
(events/listen js/window "keydown"
(fn [e]
(let [key-code (.-keyCode e)]
(when (or (= key-code 37) (= key-code 39))
(do (if (= 39 key-code)
(re-frame/dispatch-sync [:inc-move 1])
(re-frame/dispatch-sync [:dec-move 1]))
(re-frame/dispatch [:set-attempt-to nil]))))))
(defn main-panel []
[:div {:style {:width "100%"}}
[:div.left
[:center.element
[:h1 "GOzar"]
[info-table]]
[analyze-mode-checkbox]
(if-not @(re-frame/subscribe [:analyze-mode])
(if (= :pass (:location (get (get-moves) (get-move))))
[:div.element
[:a.boxed.pass
{:on-click #(re-frame/dispatch [:inc-move 1])}
[:span "Pass"]]]
[how-close-bar])
(let [turn (u/enemy (:player (get @(re-frame/subscribe [:custom-moves])
(dec @(re-frame/subscribe [:custom-move])))))]
[:div.element
[:a.boxed.pass
{:on-click #(re-frame/dispatch [:add-custom-move {:player turn :location :pass}])}
[:span "Pass"]]]))
[moves-range]
[:div.element
[sgf-file-input]]
[:p.element
"Report any bug, erros or recommendations via "
[:a {:href "-user/gozar#readme"}
"github"]
". The most likely cause of failures is the SGF's "
"parser which doesn't handle files with multiple branches."]]
[:div.right
[board-svg]]])
| null | https://raw.githubusercontent.com/Average-user/gozar/102a184bfb41f44a5ee1bc2aa03a63d72290592e/src/cljs/gozar/views.cljs | clojure | (ns gozar.views
(:require [reagent.core :as r]
[gozar.util :as u]
[goog.events :as events]
[gozar.sgfparser :as parser]
[re-frame.core :as re-frame]))
(def r 0.46)
(defn distance [[a b] [c d]]
(let [abs (fn [x] (if (< x 0) (* -1 x) x))]
(+ (abs (- a c)) (abs (- b d)))))
(defn get-move []
(if @(re-frame/subscribe [:analyze-mode])
@(re-frame/subscribe [:custom-move])
@(re-frame/subscribe [:move])))
(defn get-moves
  "Move list for the active mode: custom moves in analyze mode,
  otherwise the recorded game moves."
  []
  (let [analyzing? @(re-frame/subscribe [:analyze-mode])
        sub-key    (if analyzing? :custom-moves :moves)]
    @(re-frame/subscribe [sub-key])))
(defn draw-line
  "Hiccup for a thin black SVG line from (x1,y1) to (x2,y2)."
  [x1 y1 x2 y2]
  (let [line-style {:stroke "rgb(0,0,0)" :stroke-width 0.03}]
    [:line {:x1 x1 :y1 y1 :x2 x2 :y2 y2 :style line-style}]))
;; Star points (hoshi) for the standard board sizes 19, 13 and 9; any
;; other size gets none. Coordinates are 0-based grid points, shifted by
;; one to match the SVG view-box margin.
(defn draw-dots [size]
  (map (fn [[x y]] [:circle {:cx (+ 1 x) :cy (+ 1 y) :r 0.10 :fill "black"}])
       (case size
         19 [[3 3] [15 15] [3 15] [15 3] [9 3] [3 9] [9 9] [9 15] [15 9]]
         13 [[3 3] [9 3] [3 9] [9 9] [6 6]]
         9 [[2 2] [6 2] [2 6] [6 6] [4 4]]
         [])))
;; Hiccup for every intersection of the board. Occupied points render a
;; stone (the last played move gets a small contrasting marker ring);
;; free points render a click target whose handler depends on the mode:
;;  - analyze mode: record a custom move for the side to move;
;;  - guess mode, correct point: advance the replay and clear the attempt;
;;  - guess mode, wrong point: remember the attempt for how-close-bar.
(defn draw-stones [{:keys [stones turn]}]
  (let [lm (:location (get (get-moves) (dec (get-move)))) ; last played location
        am @(re-frame/subscribe [:analyze-mode])
        moves @(re-frame/subscribe [:moves])
        move @(re-frame/subscribe [:move])]
    (map (fn [[[y x] t]]
           (case t
             :black [:g [:circle.n {:cx (inc x) :cy (inc y) :r r :fill "black"}]
                     (when (= lm [y x])
                       [:circle {:cx (inc x) :cy (inc y) :r (/ r 2) :stroke "white" :stroke-width "0.08" :fill "none"}])]
             :white [:g [:circle.n {:cx (inc x) :cy (inc y) :r r :fill "white"}]
                     (when (= lm [y x])
                       [:circle {:cx (inc x) :cy (inc y) :r (/ r 2) :stroke "black" :stroke-width "0.08" :fill "none"}])]
             :free [:g {:on-click #(cond
                                     am (re-frame/dispatch [:add-custom-move {:player turn :location [y x]}])
                                     (= (:location (get moves move)) [y x]) (do (re-frame/dispatch [:inc-move 1])
                                                                               (re-frame/dispatch [:set-attempt-to nil]))
                                     :else (re-frame/dispatch [:set-attempt-to [y x]]))}
                    ;; Hover ghost stone in the mover's colour; the final
                    ;; case arm looks unreachable since (= turn :white) is
                    ;; always boolean -- NOTE(review): confirm before removing.
                    (case (= turn :white)
                      true [:circle.w {:cx (inc x) :cy (inc y) :r r}]
                      false [:circle.b {:cx (inc x) :cy (inc y) :r r}]
                      [:circle {:cx (inc x) :cy (inc y) :r r :fill "rgba(0,0,0,0)"}])]))
         stones)))
;; Full board hiccup: wooden background rectangle, grid lines (vertical
;; then horizontal), star points, stones, and coordinate labels on all
;; four edges. The letter run "ABCDEFGHJKLMNOPQRST" skips "I", as is
;; conventional for Go coordinates; row numbers count down from `size`.
(defn draw-board-base [{:keys [stones] :as board} size]
  (into [:g [:rect {:x 0 :y 0 :width (inc size) :height (inc size) :fill "#dfbd6d"
                    :rx "0.20" :ry "0.20"}]]
        (concat (draw-dots size)
                (mapv (fn [[x1 y1 x2 y2]] (draw-line (inc x1) (inc y1) (inc x2) (inc y2)))
                      (concat (map vector (range size) (repeat 0) (range size) (repeat (dec size)))
                              (map vector (repeat 0) (range size) (repeat (dec size)) (range size))))
                (draw-stones board)
                (map (fn [[x l]] [:text.coordinate {:x x :y 0.4} (str l)])
                     (map (fn [a b] [(- a 0.2) b]) (range 1 (inc size)) "ABCDEFGHJKLMNOPQRST"))
                (map (fn [[x l]] [:text.coordinate {:x x :y (+ size 0.9)} (str l)])
                     (map (fn [a b] [(- a 0.2) b]) (range 1 (inc size)) "ABCDEFGHJKLMNOPQRST"))
                (map (fn [[y l]] [:text.coordinate {:x 0.04 :y y} (str l)])
                     (map (fn [a b] [(+ a 0.2) b]) (range 1 (inc size)) (range size 0 -1)))
                (map (fn [[y l]] [:text.coordinate {:x (+ size 0.5) :y y} (str l)])
                     (map (fn [a b] [(+ a 0.2) b]) (range 1 (inc size)) (range size 0 -1))))))
;; The board as a scalable SVG. The view-box is (size+1) units square,
;; leaving a half-unit margin around the grid. The displayed position is
;; the base board with the first `move` recorded moves applied.
(defn board-svg []
  (let [size @(re-frame/subscribe [:board-size])]
    [:svg {:width "97%"
           :height "97%"
           :view-box (str "0 0 " (+ 1 size) " " (+ 1 size))}
     (let [board @(re-frame/subscribe [:board])
           moves (get-moves)
           move (get-move)]
       [draw-board-base (u/apply-moves board (subvec moves 0 move))
        size])]))
;; File picker styled as a button. Reads the chosen .sgf file with a
;; FileReader, parses it, builds the initial board (size, handicap,
;; turn, komi) and dispatches :change-of-file with the parsed game,
;; its metadata, the handicap count, the file name and the board size.
(defn sgf-file-input []
  [:div.element
   [:label.boxed
    [:input
     {:type "file"
      :accept ".sgf"
      :on-change
      (fn [e]
        (let [file (first (array-seq (.. e -target -files)))
              file-reader (js/FileReader.)]
          ;; onload fires after readAsText below completes.
          (set! (.-onload file-reader)
                (fn [e']
                  (let [game (-> e' .-target .-result parser/parse-game)
                        nb (u/create-board (:board-size game)
                                           (:handicap game)
                                           (:turn game)
                                           (:komi game))]
                    (re-frame/dispatch
                     [:change-of-file
                      nb
                      (vec (:moves game))
                      {:result (:result game)
                       :player-black (:player-black game)
                       :player-white (:player-white game)}
                      (:handicap-n game)
                      (.-name file)
                      (:board-size game)]))))
          (.readAsText file-reader file)))}]
    [:span
     [:span.icon
      [:i.fas.fa-upload]]
     @(re-frame/subscribe [:filename])]]])
;; Summary table for the loaded game: current move number, komi,
;; handicap, both player names and the recorded result.
(defn info-table []
  (let [info @(re-frame/subscribe [:info])]
    [:table
     [:thead
      [:tr
       [:th [:abbr "Move"]]
       [:th [:abbr "Komi"]]
       [:th [:abbr "Handicap"]]
       [:th [:abbr "Black"]]
       [:th [:abbr "White"]]
       [:th [:abbr "Result"]]]]
     [:tfoot
      [:tr
       [:th [:abbr (get-move)]]
       [:th [:abbr (:komi @(re-frame/subscribe [:board]))]]
       [:th [:abbr @(re-frame/subscribe [:handicap])]]
       [:th [:abbr (:player-black info)]]
       [:th [:abbr (:player-white info)]]
       [:th [:abbr (:result info)]]]]]))
(defn moves-range
  "Move-navigation widget: a range slider bound to the current move
  index plus step buttons (±1 and ±10). Every button step also clears
  the pending guess attempt so stale feedback is not shown. The four
  previously duplicated click handlers are factored into one helper."
  []
  (let [jump (fn [event n]
               ;; Click handler that steps the move counter by `n` via
               ;; `event` (:inc-move / :dec-move) and resets the attempt.
               (fn []
                 (re-frame/dispatch [event n])
                 (re-frame/dispatch [:set-attempt-to nil])))]
    [:div.element {:style {:margin-top "1em"}}
     [:center
      [:input {:type "range"
               :min 0
               :max (count (get-moves))
               :value (get-move)
               :style {:width "85%"}
               :on-change #(re-frame/dispatch [:change-move (js/parseInt (.-target.value %))])}]
      [:span
       (count (get-moves))]
      [:div
       [:a.icon {:on-click (jump :dec-move 10)}
        [:span.button-icon [:i.fa.fa-angle-double-left]]]
       [:a.icon {:on-click (jump :dec-move 1)}
        [:span.button-icon [:i.fa.fa-angle-left]]]
       [:a.icon {:on-click (jump :inc-move 1)}
        [:span.button-icon [:i.fa.fa-angle-right]]]
       [:a.icon {:on-click (jump :inc-move 10)}
        [:span.button-icon [:i.fa.fa-angle-double-right]]]]]]))
(defn analyze-mode-checkbox
  "Button that toggles between guess mode and analyze mode; its icon
  and label reflect the mode the click would switch to."
  []
  (let [analyzing? @(re-frame/subscribe [:analyze-mode])]
    [:div.element
     [:a.boxed {:on-click #(re-frame/dispatch [:analyze-mode-change])}
      [:span.icon (if analyzing?
                    [:i.fa.fa-lightbulb]
                    [:i.fa.fa-code-branch])]
      [:span
       (if analyzing?
         "Change to guess mode"
         "Change to analyze mode")]]]))
(defn svg-progress-bar
  "Horizontal progress bar as inline SVG. `value` is a percentage
  (0-100); `color` fills the left portion over a grey track. The first
  positional argument is unused (every caller passes 0) and is kept
  only for caller compatibility; it was previously named `min`, which
  shadowed clojure.core/min."
  [_min value color]
  [:div.element
   [:svg {:width "100%"
          :height "1.1em"}
    [:g
     [:rect {:width "98%" :height "100%" :fill "rgb(200,200,200)" :rx "10" :ry "10"}]
     ;; Width is value-2 to stay inside the 98%-wide rounded track.
     [:rect {:width (str (- value 2) "%") :height "100%" :fill color :rx "10" :ry "10"}]]]])
(defn how-close-bar
  "Guess-mode feedback bar: shows, as a percentage, how close the last
  attempt was to the actual next move (green above 80, red otherwise).
  With no moves loaded, no attempt made, or no occupied point away from
  the target, the bar is full. Fixes two crashes in the original:
  (reduce max) on an empty stones seq threw, and faraway = 0 divided
  by zero."
  []
  [:div.element
   (let [moves @(re-frame/subscribe [:moves])
         attempt @(re-frame/subscribe [:attempt])
         nl (:location (get moves @(re-frame/subscribe [:move])))
         ;; Farthest occupied point from the target move; the attempt is
         ;; scored relative to it. Seeded with 0 so an empty board does
         ;; not make `reduce max` throw an arity error.
         faraway (->> @(re-frame/subscribe [:board])
                      (:stones)
                      (keys)
                      (map #(distance % nl))
                      (reduce max 0))
         n (if (or (empty? moves) (nil? attempt) (zero? faraway))
             100
             (/ (* 100 (- faraway (distance attempt nl))) faraway))]
     (if (> n 80)
       [svg-progress-bar 0 n "#3BC84A"]
       [svg-progress-bar 0 n "#F3475B"]))])
;; Global keyboard navigation: the left/right arrow keys step the replay
;; backward/forward by one move and clear any pending guess attempt.
(events/listen js/window "keydown"
  (fn [evt]
    (let [kc (.-keyCode evt)]
      ;; 37 = left arrow, 39 = right arrow; every other key is ignored.
      (when (contains? #{37 39} kc)
        (re-frame/dispatch-sync (if (= 39 kc) [:inc-move 1] [:dec-move 1]))
        (re-frame/dispatch [:set-attempt-to nil])))))
(defn main-panel
  "Root view: game info table, mode toggle, pass/closeness widget, move
  navigation and SGF file input on the left column; the board SVG on the
  right. Fixes the user-facing typo \"bug, erros\" -> \"bugs, errors\"."
  []
  [:div {:style {:width "100%"}}
   [:div.left
    [:center.element
     [:h1 "GOzar"]
     [info-table]]
    [analyze-mode-checkbox]
    ;; Guess mode: show a Pass button when the next recorded move is a
    ;; pass, otherwise show how close the last attempt was. Analyze
    ;; mode: always offer a Pass button for the side to move.
    (if-not @(re-frame/subscribe [:analyze-mode])
      (if (= :pass (:location (get (get-moves) (get-move))))
        [:div.element
         [:a.boxed.pass
          {:on-click #(re-frame/dispatch [:inc-move 1])}
          [:span "Pass"]]]
        [how-close-bar])
      (let [turn (u/enemy (:player (get @(re-frame/subscribe [:custom-moves])
                                        (dec @(re-frame/subscribe [:custom-move])))))]
        [:div.element
         [:a.boxed.pass
          {:on-click #(re-frame/dispatch [:add-custom-move {:player turn :location :pass}])}
          [:span "Pass"]]]))
    [moves-range]
    [:div.element
     [sgf-file-input]]
    [:p.element
     "Report any bugs, errors or recommendations via "
     [:a {:href "-user/gozar#readme"}
      "github"]
     ". The most likely cause of failures is the SGF's "
     "parser which doesn't handle files with multiple branches."]]
   [:div.right
    [board-svg]]])
| |
bb6b4edf903269871a086e7e4bc89bdd4ee48ac8cb5c51b8594b1ba9f484c4e7 | thelema/ocaml-community | odoc_info.mli | (***********************************************************************)
(* *)
(* OCamldoc *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 2001 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
(** Interface to the information collected in source files. *)
(** The differents kinds of element references. *)
type ref_kind = Odoc_types.ref_kind =
RK_module
| RK_module_type
| RK_class
| RK_class_type
| RK_value
| RK_type
| RK_exception
| RK_attribute
| RK_method
| RK_section of text
| RK_recfield
| RK_const
and text_element = Odoc_types.text_element =
| Raw of string (** Raw text. *)
| Code of string (** The string is source code. *)
| CodePre of string (** The string is pre-formatted source code. *)
| Verbatim of string (** String 'as is'. *)
| Bold of text (** Text in bold style. *)
| Italic of text (** Text in italic. *)
| Emphasize of text (** Emphasized text. *)
| Center of text (** Centered text. *)
| Left of text (** Left alignment. *)
| Right of text (** Right alignment. *)
| List of text list (** A list. *)
| Enum of text list (** An enumerated list. *)
| Newline (** To force a line break. *)
| Block of text (** Like html's block quote. *)
| Title of int * string option * text
(** Style number, optional label, and text. *)
| Latex of string (** A string for latex. *)
| Link of string * text (** A reference string and the link text. *)
| Ref of string * ref_kind option * text option
(** A reference to an element. Complete name and kind.
An optional text can be given to display this text instead
of the element name.*)
| Superscript of text (** Superscripts. *)
| Subscript of text (** Subscripts. *)
| Module_list of string list
(** The table of the given modules with their abstract. *)
| Index_list (** The links to the various indexes (values, types, ...) *)
| Custom of string * text (** to extend \{foo syntax *)
| Target of string * string (** (target, code) : to specify code specific to a target format *)
(** A text is a list of [text_element]. The order matters. *)
and text = text_element list
(** The different forms of references in \@see tags. *)
type see_ref = Odoc_types.see_ref =
See_url of string
| See_file of string
| See_doc of string
(** Raised when parsing string to build a {!Odoc_info.text}
structure. [(line, char, string)] *)
exception Text_syntax of int * int * string
(** The information in a \@see tag. *)
type see = see_ref * text
(** Parameter name and description. *)
type param = (string * text)
(** Raised exception name and description. *)
type raised_exception = (string * text)
(** Information in a special comment.
    @before 3.12.0 \@before information was not present.
*)
type info = Odoc_types.info = {
i_desc : text option; (** The description text. *)
i_authors : string list; (** The list of authors in \@author tags. *)
i_version : string option; (** The string in the \@version tag. *)
    i_sees : see list ; (** The list of \@see tags. *)
    i_since : string option ; (** The string in the \@since tag. *)
i_before : (string * text) list ; (** the version number and text in \@before tag *)
i_deprecated : text option; (** The of the \@deprecated tag. *)
i_params : param list; (** The list of parameter descriptions. *)
i_raised_exceptions : raised_exception list; (** The list of raised exceptions. *)
i_return_value : text option; (** The description text of the return value. *)
    i_custom : (string * text) list ; (** A text associated to a custom \@-tag. *)
}
(** Location of elements in implementation and interface files. *)
type location = Odoc_types.location = {
loc_impl : Location.t option ; (** implementation location *)
loc_inter : Location.t option ; (** interface location *)
}
(** A dummy location. *)
val dummy_loc : location
(** Representation of element names. *)
module Name :
  sig
    type t = string

    (** Access to the simple name. *)
    val simple : t -> t

    (** [concat t1 t2] returns the concatenation of [t1] and [t2]. *)
    val concat : t -> t -> t

    (** Return the depth of the name, i.e. the number of levels to the
        root.  Example: [depth "Toto.Tutu.name"] = [3]. *)
    val depth : t -> int

    (** Take two names [n1] and [n2] = [n3.n4] and return [n4] if
        [n3 = n1], or else [n2]. *)
    val get_relative : t -> t -> t

    (** Return the name of the 'father' (like [dirname] for a file name). *)
    val father : t -> t
  end
(** Representation and manipulation of method / function / class / module parameters.*)
module Parameter :
sig
(** {3 Types} *)
(** Representation of a simple parameter name *)
type simple_name = Odoc_parameter.simple_name =
{
sn_name : string ;
sn_type : Types.type_expr ;
mutable sn_text : text option ;
}
(** Representation of parameter names. We need it to represent parameter names in tuples.
The value [Tuple ([], t)] stands for an anonymous parameter.*)
type param_info = Odoc_parameter.param_info =
Simple_name of simple_name
| Tuple of param_info list * Types.type_expr
(** A parameter is just a param_info.*)
type parameter = param_info
(** {3 Functions} *)
(** Acces to the name as a string. For tuples, parenthesis and commas are added. *)
val complete_name : parameter -> string
(** Access to the complete type. *)
val typ : parameter -> Types.type_expr
(** Access to the list of names; only one for a simple parameter, or
    a list for a tuple. *)
val names : parameter -> string list
(** Access to the description of a specific name.
@raise Not_found if no description is associated to the given name. *)
val desc_by_name : parameter -> string -> text option
(** Access to the type of a specific name.
@raise Not_found if no type is associated to the given name. *)
val type_by_name : parameter -> string -> Types.type_expr
end
(** Representation and manipulation of exceptions. *)
module Exception :
  sig
    (** Used when the exception is a rebind of another exception,
        i.e. when we have [exception Ex = Target_ex]. *)
    type exception_alias = Odoc_exception.exception_alias =
      {
        ea_name : Name.t ; (** The complete name of the target exception. *)
        mutable ea_ex : t_exception option ; (** The target exception, if we found it. *)
      }

    (** Representation of an exception declaration. *)
    and t_exception = Odoc_exception.t_exception =
      {
        ex_name : Name.t ; (** Complete name of the exception. *)
        mutable ex_info : info option ; (** Information found in the optional associated comment. *)
        ex_args : Types.type_expr list ; (** The types of the parameters. *)
        ex_alias : exception_alias option ; (** [None] when the exception is not a rebind. *)
        mutable ex_loc : location ; (** Implementation and interface locations. *)
        mutable ex_code : string option ; (** Source code of the declaration, when available. *)
      }
  end
(** Representation and manipulation of types.*)
module Type :
sig
type private_flag = Odoc_type.private_flag =
Private | Public
(** Description of a variant type constructor. *)
type variant_constructor = Odoc_type.variant_constructor =
{
vc_name : string ; (** Name of the constructor. *)
vc_args : Types.type_expr list ; (** Arguments of the constructor. *)
vc_ret : Types.type_expr option ;
mutable vc_text : text option ; (** Optional description in the associated comment. *)
}
(** Description of a record type field. *)
type record_field = Odoc_type.record_field =
{
rf_name : string ; (** Name of the field. *)
rf_mutable : bool ; (** [true] if mutable. *)
rf_type : Types.type_expr ; (** Type of the field. *)
mutable rf_text : text option ; (** Optional description in the associated comment.*)
}
(** The various kinds of a type. *)
type type_kind = Odoc_type.type_kind =
Type_abstract (** Type is abstract, for example [type t]. *)
| Type_variant of variant_constructor list
(** constructors *)
| Type_record of record_field list
(** fields *)
(** Representation of a type. *)
type t_type = Odoc_type.t_type =
{
ty_name : Name.t ; (** Complete name of the type. *)
mutable ty_info : info option ; (** Information found in the optional associated comment. *)
ty_parameters : (Types.type_expr * bool * bool) list ;
(** type parameters: (type, covariant, contravariant) *)
ty_kind : type_kind; (** Type kind. *)
ty_private : private_flag; (** Private or public type. *)
ty_manifest : Types.type_expr option; (** Type manifest. *)
mutable ty_loc : location ;
mutable ty_code : string option;
}
end
(** Representation and manipulation of values, class attributes and class methods. *)
module Value :
sig
(** Representation of a value. *)
type t_value = Odoc_value.t_value =
{
val_name : Name.t ; (** Complete name of the value. *)
mutable val_info : info option ; (** Information found in the optional associated comment. *)
val_type : Types.type_expr ; (** Type of the value. *)
val_recursive : bool ; (** [true] if the value is recursive. *)
mutable val_parameters : Odoc_parameter.parameter list ; (** The parameters, if any. *)
mutable val_code : string option ; (** The code of the value, if we had the only the implementation file. *)
mutable val_loc : location ;
}
(** Representation of a class attribute. *)
type t_attribute = Odoc_value.t_attribute =
{
att_value : t_value ; (** an attribute has almost all the same information as a value *)
att_mutable : bool ; (** [true] if the attribute is mutable. *)
att_virtual : bool ; (** [true] if the attribute is virtual. *)
}
(** Representation of a class method. *)
type t_method = Odoc_value.t_method =
{
met_value : t_value ; (** a method has almost all the same information as a value *)
met_private : bool ; (** [true] if the method is private.*)
met_virtual : bool ; (** [true] if the method is virtual. *)
}
(** Return [true] if the value is a function, i.e. it has a functional type. *)
val is_function : t_value -> bool
(** Access to the description associated to the given parameter name.*)
val value_parameter_text_by_name : t_value -> string -> text option
end
(** Representation and manipulation of classes and class types.*)
module Class :
sig
(** {3 Types} *)
(** To keep the order of elements in a class. *)
type class_element = Odoc_class.class_element =
Class_attribute of Value.t_attribute
| Class_method of Value.t_method
| Class_comment of text
(** Used when we can reference a t_class or a t_class_type. *)
type cct = Odoc_class.cct =
Cl of t_class
| Cltype of t_class_type * Types.type_expr list (** Class type and type parameters. *)
and inherited_class = Odoc_class.inherited_class =
{
ic_name : Name.t ; (** Complete name of the inherited class. *)
mutable ic_class : cct option ; (** The associated t_class or t_class_type. *)
ic_text : text option ; (** The inheritance description, if any. *)
}
and class_apply = Odoc_class.class_apply =
{
capp_name : Name.t ; (** The complete name of the applied class. *)
mutable capp_class : t_class option; (** The associated t_class if we found it. *)
capp_params : Types.type_expr list; (** The type of expressions the class is applied to. *)
capp_params_code : string list ; (** The code of these exprssions. *)
}
and class_constr = Odoc_class.class_constr =
{
cco_name : Name.t ; (** The complete name of the applied class. *)
mutable cco_class : cct option;
(** The associated class or class type if we found it. *)
cco_type_parameters : Types.type_expr list; (** The type parameters of the class, if needed. *)
}
and class_kind = Odoc_class.class_kind =
Class_structure of inherited_class list * class_element list
(** An explicit class structure, used in implementation and interface. *)
| Class_apply of class_apply
(** Application/alias of a class, used in implementation only. *)
| Class_constr of class_constr
(** A class used to give the type of the defined class,
instead of a structure, used in interface only.
For example, it will be used with the name [M1.M2....bar]
when the class foo is defined like this :
[class foo : int -> bar] *)
| Class_constraint of class_kind * class_type_kind
(** A class definition with a constraint. *)
(** Representation of a class. *)
and t_class = Odoc_class.t_class =
{
cl_name : Name.t ; (** Complete name of the class. *)
mutable cl_info : info option ; (** Information found in the optional associated comment. *)
cl_type : Types.class_type ; (** Type of the class. *)
cl_type_parameters : Types.type_expr list ; (** Type parameters. *)
cl_virtual : bool ; (** [true] when the class is virtual. *)
mutable cl_kind : class_kind ; (** The way the class is defined. *)
mutable cl_parameters : Parameter.parameter list ; (** The parameters of the class. *)
mutable cl_loc : location ;
}
and class_type_alias = Odoc_class.class_type_alias =
{
cta_name : Name.t ; (** Complete name of the target class type. *)
mutable cta_class : cct option ; (** The target t_class or t_class_type, if we found it.*)
cta_type_parameters : Types.type_expr list ; (** The type parameters. A VOIR : mettre des string ? *)
}
and class_type_kind = Odoc_class.class_type_kind =
Class_signature of inherited_class list * class_element list
| Class_type of class_type_alias (** A class type eventually applied to type args. *)
(** Representation of a class type. *)
and t_class_type = Odoc_class.t_class_type =
{
clt_name : Name.t ; (** Complete name of the type. *)
mutable clt_info : info option ; (** Information found in the optional associated comment. *)
clt_type : Types.class_type ;
clt_type_parameters : Types.type_expr list ; (** Type parameters. *)
clt_virtual : bool ; (** [true] if the class type is virtual *)
mutable clt_kind : class_type_kind ; (** The way the class type is defined. *)
mutable clt_loc : location ;
}
(** {3 Functions} *)
(** Access to the elements of a class. *)
val class_elements : ?trans:bool -> t_class -> class_element list
(** Access to the list of class attributes. *)
val class_attributes : ?trans:bool -> t_class -> Value.t_attribute list
(** Access to the description associated to the given class parameter name. *)
val class_parameter_text_by_name : t_class -> string -> text option
(** Access to the methods of a class. *)
val class_methods : ?trans:bool -> t_class -> Value.t_method list
(** Access to the comments of a class. *)
val class_comments : ?trans:bool -> t_class -> text list
(** Access to the elements of a class type. *)
val class_type_elements : ?trans:bool -> t_class_type -> class_element list
(** Access to the list of class type attributes. *)
val class_type_attributes : ?trans:bool -> t_class_type -> Value.t_attribute list
(** Access to the description associated to the given class type parameter name. *)
val class_type_parameter_text_by_name : t_class_type -> string -> text option
(** Access to the methods of a class type. *)
val class_type_methods : ?trans:bool -> t_class_type -> Value.t_method list
(** Access to the comments of a class type. *)
val class_type_comments : ?trans:bool -> t_class_type -> text list
end
(** Representation and manipulation of modules and module types. *)
module Module :
sig
(** {3 Types} *)
(** To keep the order of elements in a module. *)
type module_element = Odoc_module.module_element =
Element_module of t_module
| Element_module_type of t_module_type
| Element_included_module of included_module
| Element_class of Class.t_class
| Element_class_type of Class.t_class_type
| Element_value of Value.t_value
| Element_exception of Exception.t_exception
| Element_type of Type.t_type
| Element_module_comment of text
(** Used where we can reference t_module or t_module_type. *)
and mmt = Odoc_module.mmt =
| Mod of t_module
| Modtype of t_module_type
and included_module = Odoc_module.included_module =
{
im_name : Name.t ; (** Complete name of the included module. *)
mutable im_module : mmt option ; (** The included module or module type, if we found it. *)
mutable im_info : Odoc_types.info option ; (** comment associated to the includ directive *)
}
and module_alias = Odoc_module.module_alias =
{
ma_name : Name.t ; (** Complete name of the target module. *)
mutable ma_module : mmt option ; (** The real module or module type if we could associate it. *)
}
and module_parameter = Odoc_module.module_parameter = {
mp_name : string ; (** the name *)
mp_type : Types.module_type ; (** the type *)
mp_type_code : string ; (** the original code *)
mp_kind : module_type_kind ; (** the way the parameter was built *)
}
(** Different kinds of a module. *)
and module_kind = Odoc_module.module_kind =
| Module_struct of module_element list (** A complete module structure. *)
| Module_alias of module_alias (** Complete name and corresponding module if we found it *)
| Module_functor of module_parameter * module_kind
(** A functor, with its parameter and the rest of its definition *)
| Module_apply of module_kind * module_kind
(** A module defined by application of a functor. *)
| Module_with of module_type_kind * string
(** A module whose type is a with ... constraint.
Should appear in interface files only. *)
| Module_constraint of module_kind * module_type_kind
(** A module constraint by a module type. *)
| Module_typeof of string (** by now only the code of the module expression *)
| Module_unpack of string * module_type_alias (** code of the expression and module type alias *)
(** Representation of a module. *)
and t_module = Odoc_module.t_module =
{
m_name : Name.t ; (** Complete name of the module. *)
mutable m_type : Types.module_type ; (** The type of the module. *)
mutable m_info : info option ; (** Information found in the optional associated comment. *)
m_is_interface : bool ; (** [true] for modules read from interface files *)
m_file : string ; (** The file the module is defined in. *)
mutable m_kind : module_kind ; (** The way the module is defined. *)
mutable m_loc : location ;
mutable m_top_deps : Name.t list ; (** The toplevels module names this module depends on. *)
mutable m_code : string option ; (** The whole code of the module *)
mutable m_code_intf : string option ; (** The whole code of the interface of the module *)
m_text_only : bool ; (** [true] if the module comes from a text file *)
}
and module_type_alias = Odoc_module.module_type_alias =
{
mta_name : Name.t ; (** Complete name of the target module type. *)
mutable mta_module : t_module_type option ; (** The real module type if we could associate it. *)
}
(** Different kinds of module type. *)
and module_type_kind = Odoc_module.module_type_kind =
| Module_type_struct of module_element list (** A complete module signature. *)
| Module_type_functor of module_parameter * module_type_kind
(** A functor, with its parameter and the rest of its definition *)
| Module_type_alias of module_type_alias
(** Complete alias name and corresponding module type if we found it. *)
| Module_type_with of module_type_kind * string
(** The module type kind and the code of the with constraint. *)
| Module_type_typeof of string
(** by now only the code of the module expression *)
(** Representation of a module type. *)
and t_module_type = Odoc_module.t_module_type =
{
mt_name : Name.t ; (** Complete name of the module type. *)
mutable mt_info : info option ; (** Information found in the optional associated comment. *)
mutable mt_type : Types.module_type option ; (** [None] means that the module type is abstract. *)
mt_is_interface : bool ; (** [true] for modules read from interface files. *)
mt_file : string ; (** The file the module type is defined in. *)
mutable mt_kind : module_type_kind option ;
(** The way the module is defined. [None] means that module type is abstract.
It is always [None] when the module type was extracted from the implementation file.
That means module types are only analysed in interface files. *)
mutable mt_loc : location ;
}
(** {3 Functions for modules} *)
(** Access to the elements of a module. *)
val module_elements : ?trans:bool -> t_module -> module_element list
(** Access to the submodules of a module. *)
val module_modules : ?trans:bool -> t_module -> t_module list
(** Access to the module types of a module. *)
val module_module_types : ?trans:bool -> t_module -> t_module_type list
(** Access to the included modules of a module. *)
val module_included_modules : ?trans:bool-> t_module -> included_module list
(** Access to the exceptions of a module. *)
val module_exceptions : ?trans:bool-> t_module -> Exception.t_exception list
(** Access to the types of a module. *)
val module_types : ?trans:bool-> t_module -> Type.t_type list
(** Access to the values of a module. *)
val module_values : ?trans:bool -> t_module -> Value.t_value list
(** Access to functional values of a module. *)
val module_functions : ?trans:bool-> t_module -> Value.t_value list
(** Access to non-functional values of a module. *)
val module_simple_values : ?trans:bool-> t_module -> Value.t_value list
(** Access to the classes of a module. *)
val module_classes : ?trans:bool-> t_module -> Class.t_class list
(** Access to the class types of a module. *)
val module_class_types : ?trans:bool-> t_module -> Class.t_class_type list
(** The list of classes defined in this module and all its submodules and functors. *)
val module_all_classes : ?trans:bool-> t_module -> Class.t_class list
(** [true] if the module is functor. *)
val module_is_functor : t_module -> bool
(** The list of couples (module parameter, optional description). *)
val module_parameters : ?trans:bool-> t_module -> (module_parameter * text option) list
(** The list of module comments. *)
val module_comments : ?trans:bool-> t_module -> text list
(** {3 Functions for module types} *)
(** Access to the elements of a module type. *)
val module_type_elements : ?trans:bool-> t_module_type -> module_element list
(** Access to the submodules of a module type. *)
val module_type_modules : ?trans:bool-> t_module_type -> t_module list
(** Access to the module types of a module type. *)
val module_type_module_types : ?trans:bool-> t_module_type -> t_module_type list
(** Access to the included modules of a module type. *)
val module_type_included_modules : ?trans:bool-> t_module_type -> included_module list
(** Access to the exceptions of a module type. *)
val module_type_exceptions : ?trans:bool-> t_module_type -> Exception.t_exception list
(** Access to the types of a module type. *)
val module_type_types : ?trans:bool-> t_module_type -> Type.t_type list
(** Access to the values of a module type. *)
val module_type_values : ?trans:bool-> t_module_type -> Value.t_value list
(** Access to functional values of a module type. *)
val module_type_functions : ?trans:bool-> t_module_type -> Value.t_value list
(** Access to non-functional values of a module type. *)
val module_type_simple_values : ?trans:bool-> t_module_type -> Value.t_value list
(** Access to the classes of a module type. *)
val module_type_classes : ?trans:bool-> t_module_type -> Class.t_class list
(** Access to the class types of a module type. *)
val module_type_class_types : ?trans:bool-> t_module_type -> Class.t_class_type list
(** The list of classes defined in this module type and all its submodules and functors. *)
val module_type_all_classes : ?trans:bool-> t_module_type -> Class.t_class list
(** [true] if the module type is functor. *)
val module_type_is_functor : t_module_type -> bool
(** The list of couples (module parameter, optional description). *)
val module_type_parameters : ?trans:bool-> t_module_type -> (module_parameter * text option) list
(** The list of module comments. *)
val module_type_comments : ?trans:bool-> t_module_type -> text list
end
(** {3 Getting strings from values} *)
(** This function is used to reset the names of type variables.
It must be called when printing the whole type of a function,
but not when printing the type of its parameters. Same for
classes (call it) and methods and attributes (don't call it).*)
val reset_type_names : unit -> unit
(** [string_of_variance t (covariant, invariant)] returns ["+"] if
the given information means "covariant", ["-"] if the it means
"contravariant", orelse [""], and always [""] if the given
type is not an abstract type with no manifest (i.e. no need
for the variance to be printed.*)
val string_of_variance : Type.t_type -> (bool * bool) -> string
(** This function returns a string representing a Types.type_expr. *)
val string_of_type_expr : Types.type_expr -> string
(** @return a string to display the parameters of the given class,
in the same form as the compiler. *)
val string_of_class_params : Class.t_class -> string
(** This function returns a string to represent the given list of types,
with a given separator. *)
val string_of_type_list : ?par: bool -> string -> Types.type_expr list -> string
(** This function returns a string to represent the list of type parameters
for the given type. *)
val string_of_type_param_list : Type.t_type -> string
(** This function returns a string to represent the given list of
type parameters of a class or class type,
with a given separator. *)
val string_of_class_type_param_list : Types.type_expr list -> string
(** This function returns a string representing a [Types.module_type].
@param complete indicates if we must print complete signatures
or just [sig end]. Default if [false].
@param code if [complete = false] and the type contains something else
than identificators and functors, then the given code is used.
*)
val string_of_module_type : ?code: string -> ?complete: bool -> Types.module_type -> string
(** This function returns a string representing a [Types.class_type].
@param complete indicates if we must print complete signatures
or just [object end]. Default if [false].
*)
val string_of_class_type : ?complete: bool -> Types.class_type -> string
(** Get a string from a text. *)
val string_of_text : text -> string
(** Get a string from an info structure. *)
val string_of_info : info -> string
(** @return a string to describe the given type. *)
val string_of_type : Type.t_type -> string
(** @return a string to describe the given exception. *)
val string_of_exception : Exception.t_exception -> string
(** @return a string to describe the given value. *)
val string_of_value : Value.t_value -> string
(** @return a string to describe the given attribute. *)
val string_of_attribute : Value.t_attribute -> string
(** @return a string to describe the given method. *)
val string_of_method : Value.t_method -> string
(** {3 Miscelaneous functions} *)

(** Return the first sentence (until the first dot followed by a blank
    or the first blank line) of a text.
    Don't stop in the middle of [Code], [CodePre], [Verbatim], [List], [Enum],
    [Latex], [Link], [Ref], [Subscript] or [Superscript]. *)
val first_sentence_of_text : text -> text
(** Return the first sentence (until the first dot followed by a blank
    or the first blank line) of a text, and the remaining text after.
    Don't stop in the middle of [Code], [CodePre], [Verbatim], [List], [Enum],
    [Latex], [Link], [Ref], [Subscript] or [Superscript].*)
val first_sentence_and_rest_of_text : text -> text * text
(** Return the given [text] without any title or list. *)
val text_no_title_no_list : text -> text
(** [concat sep l] concats the given list of text [l], each separated with
    the text [sep]. *)
val text_concat : Odoc_types.text -> Odoc_types.text list -> Odoc_types.text
(** Return the list of titles in a [text].
A title is a title level, an optional label and a text.*)
val get_titles_in_text : text -> (int * string option * text) list
(** Take a sorted list of elements, a function to get the name
    of an element and return the list of list of elements,
    where each list group elements beginning by the same letter.
    Since the original list is sorted, elements whose name does not
    begin with a letter should be in the first returned list.*)
val create_index_lists : 'a list -> ('a -> string) -> 'a list list
(** Take a type and remove the option top constructor. This is
useful when printing labels, we we then remove the top option contructor
for optional labels.*)
val remove_option : Types.type_expr -> Types.type_expr
(** Return [true] if the given label is optional.*)
val is_optional : string -> bool
(** Return the label name for the given label,
i.e. removes the beginning '?' if present.*)
val label_name : string -> string
(** Return the given name where the module name or
    part of it was removed, according to the list of modules
    which must be hidden (cf {!Odoc_args.hidden_modules})*)
val use_hidden_modules : Name.t -> Name.t
(** Print the given string if the verbose mode is activated. *)
val verbose : string -> unit
(** Print a warning message to stderr.
If warnings must be treated as errors, then the
error counter is incremented. *)
val warning : string -> unit
(** A flag to indicate whether ocamldoc warnings must be printed or not. *)
val print_warnings : bool ref
(** Increment this counter when an error is encountered.
    The ocamldoc tool will print the number of errors
    encountered and exit with code 1 if this number is greater
    than 0. *)
val errors : int ref
(** Apply a function to an optional value. *)
val apply_opt : ('a -> 'b) -> 'a option -> 'b option
(** Apply a function to a first value if it is
    not different from a second value. If the two values
    are different, return the second one.*)
val apply_if_equal : ('a -> 'a) -> 'a -> 'a -> 'a
(** [text_of_string s] returns the text structure from the
given string.
@raise Text_syntax if a syntax error is encountered. *)
val text_of_string : string -> text
(** [text_string_of_text text] returns the string representing
the given [text]. This string can then be parsed again
by {!Odoc_info.text_of_string}.*)
val text_string_of_text : text -> string
(** [info_of_string s] parses the given string
like a regular ocamldoc comment and return an
{!Odoc_info.info} structure.
@return an empty structure if there was a syntax error. TODO: change this
*)
val info_of_string : string -> info
(** [info_string_of_info info] returns the string representing
the given [info]. This string can then be parsed again
by {!Odoc_info.info_of_string}.*)
val info_string_of_info : info -> string
(** [info_of_comment_file file] parses the given file
and return an {!Odoc_info.info} structure. The content of the
file must have the same syntax as the content of a special comment.
The given module list is used for cross reference.
@raise Failure is the file could not be opened or there is a
syntax error.
*)
val info_of_comment_file : Module.t_module list -> string -> info
(** [remove_ending_newline s] returns [s] without the optional ending newline. *)
val remove_ending_newline : string -> string
(** Research in elements *)
module Search :
sig
type result_element = Odoc_search.result_element =
Res_module of Module.t_module
| Res_module_type of Module.t_module_type
| Res_class of Class.t_class
| Res_class_type of Class.t_class_type
| Res_value of Value.t_value
| Res_type of Type.t_type
| Res_exception of Exception.t_exception
| Res_attribute of Value.t_attribute
| Res_method of Value.t_method
| Res_section of string * text
| Res_recfield of Type.t_type * Type.record_field
| Res_const of Type.t_type * Type.variant_constructor
(** The type representing a research result.*)
type search_result = result_element list
(** Research of the elements whose name matches the given regular expression.*)
val search_by_name : Module.t_module list -> Str.regexp -> search_result
(** A function to search all the values in a list of modules. *)
val values : Module.t_module list -> Value.t_value list
(** A function to search all the exceptions in a list of modules. *)
val exceptions : Module.t_module list -> Exception.t_exception list
(** A function to search all the types in a list of modules. *)
val types : Module.t_module list -> Type.t_type list
(** A function to search all the class attributes in a list of modules. *)
val attributes : Module.t_module list -> Value.t_attribute list
(** A function to search all the class methods in a list of modules. *)
val methods : Module.t_module list -> Value.t_method list
(** A function to search all the classes in a list of modules. *)
val classes : Module.t_module list -> Class.t_class list
(** A function to search all the class types in a list of modules. *)
val class_types : Module.t_module list -> Class.t_class_type list
(** A function to search all the modules in a list of modules. *)
val modules : Module.t_module list -> Module.t_module list
(** A function to search all the module types in a list of modules. *)
val module_types : Module.t_module list -> Module.t_module_type list
end
(** Scanning of collected information *)
module Scan :
sig
class scanner :
object
(** Scan of 'leaf elements'. *)
method scan_value : Value.t_value -> unit
method scan_type_pre : Type.t_type -> bool
method scan_type_const : Type.t_type -> Type.variant_constructor -> unit
method scan_type_recfield : Type.t_type -> Type.record_field -> unit
method scan_type : Type.t_type -> unit
method scan_exception : Exception.t_exception -> unit
method scan_attribute : Value.t_attribute -> unit
method scan_method : Value.t_method -> unit
method scan_included_module : Module.included_module -> unit
(** Scan of a class. *)
(** Scan of a comment inside a class. *)
method scan_class_comment : text -> unit
(** Override this method to perform controls on the class comment
and params. This method is called before scanning the class elements.
@return true if the class elements must be scanned.*)
method scan_class_pre : Class.t_class -> bool
(** This method scan the elements of the given class. *)
method scan_class_elements : Class.t_class -> unit
(** Scan of a class. Should not be overridden. It calls [scan_class_pre]
and if [scan_class_pre] returns [true], then it calls scan_class_elements.*)
method scan_class : Class.t_class -> unit
(** Scan of a class type. *)
(** Scan of a comment inside a class type. *)
method scan_class_type_comment : text -> unit
(** Override this method to perform controls on the class type comment
and form. This method is called before scanning the class type elements.
@return true if the class type elements must be scanned.*)
method scan_class_type_pre : Class.t_class_type -> bool
(** This method scan the elements of the given class type. *)
method scan_class_type_elements : Class.t_class_type -> unit
(** Scan of a class type. Should not be overridden. It calls [scan_class_type_pre]
and if [scan_class_type_pre] returns [true], then it calls scan_class_type_elements.*)
method scan_class_type : Class.t_class_type -> unit
(** Scan of modules. *)
(** Scan of a comment inside a module. *)
method scan_module_comment : text -> unit
(** Override this method to perform controls on the module comment
and form. This method is called before scanning the module elements.
@return true if the module elements must be scanned.*)
method scan_module_pre : Module.t_module -> bool
(** This method scan the elements of the given module. *)
method scan_module_elements : Module.t_module -> unit
(** Scan of a module. Should not be overridden. It calls [scan_module_pre]
and if [scan_module_pre] returns [true], then it calls scan_module_elements.*)
method scan_module : Module.t_module -> unit
(** Scan of module types. *)
(** Scan of a comment inside a module type. *)
method scan_module_type_comment : text -> unit
(** Override this method to perform controls on the module type comment
and form. This method is called before scanning the module type elements.
@return true if the module type elements must be scanned. *)
method scan_module_type_pre : Module.t_module_type -> bool
(** This method scan the elements of the given module type. *)
method scan_module_type_elements : Module.t_module_type -> unit
(** Scan of a module type. Should not be overridden. It calls [scan_module_type_pre]
and if [scan_module_type_pre] returns [true], then it calls scan_module_type_elements.*)
method scan_module_type : Module.t_module_type -> unit
(** Main scanning method. *)
(** Scan a list of modules. *)
method scan_module_list : Module.t_module list -> unit
end
end
(** Computation of dependencies. *)
module Dep :
sig
(** Modify the modules depencies of the given list of modules,
to get the minimum transitivity kernel. *)
val kernel_deps_of_modules : Module.t_module list -> unit
(** Return the list of dependencies between the given types,
in the form of a list [(type name, names of types it depends on)].
@param kernel indicates if we must keep only the transitivity kernel
of the dependencies. Default is [false].
*)
val deps_of_types : ?kernel: bool -> Type.t_type list -> (Type.t_type * (Name.t list)) list
end
(** {2 Some global variables} *)
module Global :
sig
val errors : int ref
val warn_error : bool ref
(** The file used by the generators outputting only one file. *)
val out_file : string ref
(** Verbose mode or not. *)
val verbose : bool ref
(** The directory where files have to be generated. *)
val target_dir : string ref
(** The optional title to use in the generated documentation. *)
val title : string option ref
(** The optional file whose content can be used as intro text. *)
val intro_file : string option ref
(** The flag which indicates if we must generate a table of contents. *)
val with_toc : bool ref
(** The flag which indicates if we must generate an index. *)
val with_index : bool ref
(** The flag which indicates if we must generate a header.*)
val with_header : bool ref
(** The flag which indicates if we must generate a trailer.*)
val with_trailer : bool ref
end
(** Analysis of the given source files.
@param init is the list of modules already known from a previous analysis.
@return the list of analysed top modules. *)
val analyse_files :
?merge_options:Odoc_types.merge_option list ->
?include_dirs:string list ->
?labels:bool ->
?sort_modules:bool ->
?no_stop:bool ->
?init: Odoc_module.t_module list ->
Odoc_global.source_file list ->
Module.t_module list
(** Dump of a list of modules into a file.
@raise Failure if an error occurs.*)
val dump_modules : string -> Odoc_module.t_module list -> unit
(** Load of a list of modules from a file.
@raise Failure if an error occurs.*)
val load_modules : string -> Odoc_module.t_module list
| null | https://raw.githubusercontent.com/thelema/ocaml-community/ed0a2424bbf13d1b33292725e089f0d7ba94b540/ocamldoc/odoc_info.mli | ocaml | *********************************************************************
OCamldoc
*********************************************************************
* Interface to the information collected in source files.
* The differents kinds of element references.
* Raw text.
* The string is source code.
* The string is pre-formatted source code.
* String 'as is'.
* Text in bold style.
* Text in italic.
* Emphasized text.
* Centered text.
* Left alignment.
* Right alignment.
* A list.
* An enumerated list.
* To force a line break.
* Like html's block quote.
* Style number, optional label, and text.
* A string for latex.
* A reference string and the link text.
* A reference to an element. Complete name and kind.
An optional text can be given to display this text instead
of the element name.
* Subscripts.
* The table of the given modules with their abstract.
* The links to the various indexes (values, types, ...)
* to extend \{foo syntax
* (target, code) : to specify code specific to a target format
* A text is a list of [text_element]. The order matters.
* Raised when parsing string to build a {!Odoc_info.text}
structure. [(line, char, string)]
* Parameter name and description.
* Raised exception name and description.
* The description text.
* The list of authors in \@author tags.
* The string in the \@version tag.
* the version number and text in \@before tag
* The of the \@deprecated tag.
* The list of parameter descriptions.
* The list of raised exceptions.
* The description text of the return value.
* Location of elements in implementation and interface files.
* implementation location
* interface location
* A dummy location.
* Representation of element names.
* Access to the simple name.
* [concat t1 t2] returns the concatenation of [t1] and [t2].
* Return the name of the 'father' (like [dirname] for a file name).
* Representation and manipulation of method / function / class / module parameters.
* {3 Types}
* Representation of a simple parameter name
* Representation of parameter names. We need it to represent parameter names in tuples.
The value [Tuple ([], t)] stands for an anonymous parameter.
* A parameter is just a param_info.
* Acces to the name as a string. For tuples, parenthesis and commas are added.
* Access to the complete type.
* Access to the description of a specific name.
@raise Not_found if no description is associated to the given name.
* Access to the type of a specific name.
@raise Not_found if no type is associated to the given name.
* Representation and manipulation of exceptions.
* Used when the exception is a rebind of another exception,
when we have [exception Ex = Target_ex].
* The complete name of the target exception.
* The target exception, if we found it.
* Information found in the optional associated comment.
* The types of the parameters.
* [None] when the exception is not a rebind.
* Representation and manipulation of types.
* Description of a variant type constructor.
* Name of the constructor.
* Arguments of the constructor.
* Optional description in the associated comment.
* Description of a record type field.
* Name of the field.
* [true] if mutable.
* Type of the field.
* Optional description in the associated comment.
* The various kinds of a type.
* Type is abstract, for example [type t].
* constructors
* fields
* Representation of a type.
* Complete name of the type.
* Information found in the optional associated comment.
* type parameters: (type, covariant, contravariant)
* Type kind.
* Private or public type.
* Type manifest.
* Representation and manipulation of values, class attributes and class methods.
* Representation of a value.
* Complete name of the value.
* Information found in the optional associated comment.
* Type of the value.
* [true] if the value is recursive.
* The parameters, if any.
* The code of the value, if we had the only the implementation file.
* Representation of a class attribute.
* an attribute has almost all the same information as a value
* [true] if the attribute is mutable.
* [true] if the attribute is virtual.
* Representation of a class method.
* a method has almost all the same information as a value
* [true] if the method is private.
* [true] if the method is virtual.
* Return [true] if the value is a function, i.e. it has a functional type.
* Access to the description associated to the given parameter name.
* Representation and manipulation of classes and class types.
* {3 Types}
* To keep the order of elements in a class.
* Used when we can reference a t_class or a t_class_type.
* Class type and type parameters.
* Complete name of the inherited class.
* The associated t_class or t_class_type.
* The inheritance description, if any.
* The complete name of the applied class.
* The associated t_class if we found it.
* The type of expressions the class is applied to.
* The code of these exprssions.
* The complete name of the applied class.
* The associated class or class type if we found it.
* The type parameters of the class, if needed.
* An explicit class structure, used in implementation and interface.
* Application/alias of a class, used in implementation only.
* A class used to give the type of the defined class,
instead of a structure, used in interface only.
For example, it will be used with the name [M1.M2....bar]
when the class foo is defined like this :
[class foo : int -> bar]
* A class definition with a constraint.
* Representation of a class.
* Complete name of the class.
* Information found in the optional associated comment.
* Type of the class.
* Type parameters.
* [true] when the class is virtual.
* The way the class is defined.
* The parameters of the class.
* Complete name of the target class type.
* The target t_class or t_class_type, if we found it.
* The type parameters. A VOIR : mettre des string ?
* A class type eventually applied to type args.
* Representation of a class type.
* Complete name of the type.
* Information found in the optional associated comment.
* Type parameters.
* [true] if the class type is virtual
* The way the class type is defined.
* Access to the elements of a class.
* Access to the list of class attributes.
* Access to the description associated to the given class parameter name.
* Access to the methods of a class.
* Access to the comments of a class.
* Access to the elements of a class type.
* Access to the list of class type attributes.
* Access to the description associated to the given class type parameter name.
* Access to the methods of a class type.
* Access to the comments of a class type.
* Representation and manipulation of modules and module types.
* {3 Types}
* To keep the order of elements in a module.
* Used where we can reference t_module or t_module_type.
* Complete name of the included module.
* The included module or module type, if we found it.
* comment associated to the includ directive
* Complete name of the target module.
* The real module or module type if we could associate it.
* the name
* the type
* the original code
* the way the parameter was built
* Different kinds of a module.
* A complete module structure.
* Complete name and corresponding module if we found it
* A functor, with its parameter and the rest of its definition
* A module defined by application of a functor.
* A module whose type is a with ... constraint.
Should appear in interface files only.
* A module constraint by a module type.
* by now only the code of the module expression
* code of the expression and module type alias
* Representation of a module.
* Complete name of the module.
* The type of the module.
* Information found in the optional associated comment.
* [true] for modules read from interface files
* The file the module is defined in.
* The way the module is defined.
* The toplevels module names this module depends on.
* The whole code of the module
* The whole code of the interface of the module
* [true] if the module comes from a text file
* Complete name of the target module type.
* The real module type if we could associate it.
* Different kinds of module type.
* A complete module signature.
* A functor, with its parameter and the rest of its definition
* Complete alias name and corresponding module type if we found it.
* The module type kind and the code of the with constraint.
* by now only the code of the module expression
* Representation of a module type.
* Complete name of the module type.
* Information found in the optional associated comment.
* [None] means that the module type is abstract.
* [true] for modules read from interface files.
* The file the module type is defined in.
* The way the module is defined. [None] means that module type is abstract.
It is always [None] when the module type was extracted from the implementation file.
That means module types are only analysed in interface files.
* Access to the elements of a module.
* Access to the submodules of a module.
* Access to the module types of a module.
* Access to the included modules of a module.
* Access to the exceptions of a module.
* Access to the types of a module.
* Access to the values of a module.
* Access to functional values of a module.
* Access to non-functional values of a module.
* Access to the classes of a module.
* Access to the class types of a module.
* The list of classes defined in this module and all its submodules and functors.
* [true] if the module is functor.
* The list of couples (module parameter, optional description).
* The list of module comments.
* Access to the elements of a module type.
* Access to the submodules of a module type.
* Access to the module types of a module type.
* Access to the included modules of a module type.
* Access to the exceptions of a module type.
* Access to the types of a module type.
* Access to the values of a module type.
* Access to functional values of a module type.
* Access to non-functional values of a module type.
* Access to the classes of a module type.
* Access to the class types of a module type.
* The list of classes defined in this module type and all its submodules and functors.
* [true] if the module type is functor.
* The list of couples (module parameter, optional description).
* The list of module comments.
* This function is used to reset the names of type variables.
It must be called when printing the whole type of a function,
but not when printing the type of its parameters. Same for
classes (call it) and methods and attributes (don't call it).
* [string_of_variance t (covariant, invariant)] returns ["+"] if
the given information means "covariant", ["-"] if the it means
"contravariant", orelse [""], and always [""] if the given
type is not an abstract type with no manifest (i.e. no need
for the variance to be printed.
* This function returns a string representing a Types.type_expr.
* @return a string to display the parameters of the given class,
in the same form as the compiler.
* This function returns a string to represent the given list of types,
with a given separator.
* This function returns a string to represent the list of type parameters
for the given type.
* This function returns a string to represent the given list of
type parameters of a class or class type,
with a given separator.
* This function returns a string representing a [Types.module_type].
@param complete indicates if we must print complete signatures
or just [sig end]. Default if [false].
@param code if [complete = false] and the type contains something else
than identificators and functors, then the given code is used.
* This function returns a string representing a [Types.class_type].
@param complete indicates if we must print complete signatures
or just [object end]. Default if [false].
* Get a string from a text.
* Get a string from an info structure.
* @return a string to describe the given type.
* @return a string to describe the given exception.
* @return a string to describe the given value.
* @return a string to describe the given attribute.
* @return a string to describe the given method.
* Return the given [text] without any title or list.
* Return the list of titles in a [text].
A title is a title level, an optional label and a text.
* Take a type and remove the option top constructor. This is
useful when printing labels, we we then remove the top option contructor
for optional labels.
* Return [true] if the given label is optional.
* Return the label name for the given label,
i.e. removes the beginning '?' if present.
* Print the given string if the verbose mode is activated.
* Print a warning message to stderr.
If warnings must be treated as errors, then the
error counter is incremented.
* A flag to indicate whether ocamldoc warnings must be printed or not.
* Apply a function to an optional value.
* [text_of_string s] returns the text structure from the
given string.
@raise Text_syntax if a syntax error is encountered.
* [text_string_of_text text] returns the string representing
the given [text]. This string can then be parsed again
by {!Odoc_info.text_of_string}.
* [info_of_string s] parses the given string
like a regular ocamldoc comment and return an
{!Odoc_info.info} structure.
@return an empty structure if there was a syntax error. TODO: change this
* [info_string_of_info info] returns the string representing
the given [info]. This string can then be parsed again
by {!Odoc_info.info_of_string}.
* [info_of_comment_file file] parses the given file
and return an {!Odoc_info.info} structure. The content of the
file must have the same syntax as the content of a special comment.
The given module list is used for cross reference.
@raise Failure is the file could not be opened or there is a
syntax error.
* [remove_ending_newline s] returns [s] without the optional ending newline.
* Research in elements
* The type representing a research result.
* Research of the elements whose name matches the given regular expression.
* A function to search all the values in a list of modules.
* A function to search all the exceptions in a list of modules.
* A function to search all the types in a list of modules.
* A function to search all the class attributes in a list of modules.
* A function to search all the class methods in a list of modules.
* A function to search all the classes in a list of modules.
* A function to search all the class types in a list of modules.
* A function to search all the modules in a list of modules.
* A function to search all the module types in a list of modules.
* Scanning of collected information
* Scan of 'leaf elements'.
* Scan of a class.
* Scan of a comment inside a class.
* Override this method to perform controls on the class comment
and params. This method is called before scanning the class elements.
@return true if the class elements must be scanned.
* This method scan the elements of the given class.
* Scan of a class. Should not be overridden. It calls [scan_class_pre]
and if [scan_class_pre] returns [true], then it calls scan_class_elements.
* Scan of a class type.
* Scan of a comment inside a class type.
* Override this method to perform controls on the class type comment
and form. This method is called before scanning the class type elements.
@return true if the class type elements must be scanned.
* This method scan the elements of the given class type.
* Scan of a class type. Should not be overridden. It calls [scan_class_type_pre]
and if [scan_class_type_pre] returns [true], then it calls scan_class_type_elements.
* Scan of modules.
* Scan of a comment inside a module.
* Override this method to perform controls on the module comment
and form. This method is called before scanning the module elements.
@return true if the module elements must be scanned.
* This method scan the elements of the given module.
* Scan of a module. Should not be overridden. It calls [scan_module_pre]
and if [scan_module_pre] returns [true], then it calls scan_module_elements.
* Scan of module types.
* Scan of a comment inside a module type.
* Override this method to perform controls on the module type comment
and form. This method is called before scanning the module type elements.
@return true if the module type elements must be scanned.
* This method scan the elements of the given module type.
* Scan of a module type. Should not be overridden. It calls [scan_module_type_pre]
and if [scan_module_type_pre] returns [true], then it calls scan_module_type_elements.
* Main scanning method.
* Scan a list of modules.
* Computation of dependencies.
* Modify the modules depencies of the given list of modules,
to get the minimum transitivity kernel.
* Return the list of dependencies between the given types,
in the form of a list [(type name, names of types it depends on)].
@param kernel indicates if we must keep only the transitivity kernel
of the dependencies. Default is [false].
* The directory where files have to be generated.
* The optional title to use in the generated documentation.
* The optional file whose content can be used as intro text.
* The flag which indicates if we must generate a table of contents.
* The flag which indicates if we must generate an index.
* The flag which indicates if we must generate a header.
* The flag which indicates if we must generate a trailer.
* Analysis of the given source files.
@param init is the list of modules already known from a previous analysis.
@return the list of analysed top modules.
* Dump of a list of modules into a file.
@raise Failure if an error occurs.
* Load of a list of modules from a file.
@raise Failure if an error occurs. | , projet Cristal , INRIA Rocquencourt
Copyright 2001 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
type ref_kind = Odoc_types.ref_kind =
RK_module
| RK_module_type
| RK_class
| RK_class_type
| RK_value
| RK_type
| RK_exception
| RK_attribute
| RK_method
| RK_section of text
| RK_recfield
| RK_const
and text_element = Odoc_types.text_element =
| Title of int * string option * text
| Ref of string * ref_kind option * text option
* .
| Module_list of string list
and text = text_element list
* The different forms of references in \@see tags .
type see_ref = Odoc_types.see_ref =
See_url of string
| See_file of string
| See_doc of string
exception Text_syntax of int * int * string
* The information in a \@see tag .
type see = see_ref * text
type param = (string * text)
type raised_exception = (string * text)
* Information in a special comment
@before 3.12.0 \@before information was not present .
@before 3.12.0 \@before information was not present.
*)
type info = Odoc_types.info = {
* The list of \@see tags .
* The string in the \@since tag .
* A text associated to a custom @-tag .
}
type location = Odoc_types.location = {
}
val dummy_loc : location
module Name :
sig
type t = string
val simple : t -> t
val concat : t -> t -> t
* Return the depth of the name , i.e. the numer of levels to the root .
Example : [ depth " Toto.Tutu.name " ] = [ 3 ] .
Example : [depth "Toto.Tutu.name"] = [3]. *)
val depth : t -> int
* Take two names n1 and n2 = n3.n4 and return n4 if n3 = n1 or else n2 .
val get_relative : t -> t -> t
val father : t -> t
end
module Parameter :
sig
type simple_name = Odoc_parameter.simple_name =
{
sn_name : string ;
sn_type : Types.type_expr ;
mutable sn_text : text option ;
}
type param_info = Odoc_parameter.param_info =
Simple_name of simple_name
| Tuple of param_info list * Types.type_expr
type parameter = param_info
* { 3 Functions }
val complete_name : parameter -> string
val typ : parameter -> Types.type_expr
* Access to the list of names ; only one for a simple parameter , or
a list for a tuple .
a list for a tuple. *)
val names : parameter -> string list
val desc_by_name : parameter -> string -> text option
val type_by_name : parameter -> string -> Types.type_expr
end
module Exception :
sig
type exception_alias = Odoc_exception.exception_alias =
{
}
and t_exception = Odoc_exception.t_exception =
{
ex_name : Name.t ;
mutable ex_loc : location ;
mutable ex_code : string option ;
}
end
module Type :
sig
type private_flag = Odoc_type.private_flag =
Private | Public
type variant_constructor = Odoc_type.variant_constructor =
{
vc_ret : Types.type_expr option ;
}
type record_field = Odoc_type.record_field =
{
}
type type_kind = Odoc_type.type_kind =
| Type_variant of variant_constructor list
| Type_record of record_field list
type t_type = Odoc_type.t_type =
{
ty_parameters : (Types.type_expr * bool * bool) list ;
mutable ty_loc : location ;
mutable ty_code : string option;
}
end
module Value :
sig
type t_value = Odoc_value.t_value =
{
mutable val_loc : location ;
}
type t_attribute = Odoc_value.t_attribute =
{
}
type t_method = Odoc_value.t_method =
{
}
val is_function : t_value -> bool
val value_parameter_text_by_name : t_value -> string -> text option
end
module Class :
sig
type class_element = Odoc_class.class_element =
Class_attribute of Value.t_attribute
| Class_method of Value.t_method
| Class_comment of text
type cct = Odoc_class.cct =
Cl of t_class
and inherited_class = Odoc_class.inherited_class =
{
}
and class_apply = Odoc_class.class_apply =
{
}
and class_constr = Odoc_class.class_constr =
{
mutable cco_class : cct option;
}
and class_kind = Odoc_class.class_kind =
Class_structure of inherited_class list * class_element list
| Class_apply of class_apply
| Class_constr of class_constr
| Class_constraint of class_kind * class_type_kind
and t_class = Odoc_class.t_class =
{
mutable cl_loc : location ;
}
and class_type_alias = Odoc_class.class_type_alias =
{
}
and class_type_kind = Odoc_class.class_type_kind =
Class_signature of inherited_class list * class_element list
and t_class_type = Odoc_class.t_class_type =
{
clt_type : Types.class_type ;
mutable clt_loc : location ;
}
* { 3 Functions }
val class_elements : ?trans:bool -> t_class -> class_element list
val class_attributes : ?trans:bool -> t_class -> Value.t_attribute list
val class_parameter_text_by_name : t_class -> string -> text option
val class_methods : ?trans:bool -> t_class -> Value.t_method list
val class_comments : ?trans:bool -> t_class -> text list
val class_type_elements : ?trans:bool -> t_class_type -> class_element list
val class_type_attributes : ?trans:bool -> t_class_type -> Value.t_attribute list
val class_type_parameter_text_by_name : t_class_type -> string -> text option
val class_type_methods : ?trans:bool -> t_class_type -> Value.t_method list
val class_type_comments : ?trans:bool -> t_class_type -> text list
end
module Module :
sig
type module_element = Odoc_module.module_element =
Element_module of t_module
| Element_module_type of t_module_type
| Element_included_module of included_module
| Element_class of Class.t_class
| Element_class_type of Class.t_class_type
| Element_value of Value.t_value
| Element_exception of Exception.t_exception
| Element_type of Type.t_type
| Element_module_comment of text
and mmt = Odoc_module.mmt =
| Mod of t_module
| Modtype of t_module_type
and included_module = Odoc_module.included_module =
{
}
and module_alias = Odoc_module.module_alias =
{
}
and module_parameter = Odoc_module.module_parameter = {
}
and module_kind = Odoc_module.module_kind =
| Module_functor of module_parameter * module_kind
| Module_apply of module_kind * module_kind
| Module_with of module_type_kind * string
| Module_constraint of module_kind * module_type_kind
and t_module = Odoc_module.t_module =
{
mutable m_loc : location ;
}
and module_type_alias = Odoc_module.module_type_alias =
{
}
and module_type_kind = Odoc_module.module_type_kind =
| Module_type_functor of module_parameter * module_type_kind
| Module_type_alias of module_type_alias
| Module_type_with of module_type_kind * string
| Module_type_typeof of string
and t_module_type = Odoc_module.t_module_type =
{
mutable mt_kind : module_type_kind option ;
mutable mt_loc : location ;
}
* { 3 Functions for modules }
val module_elements : ?trans:bool -> t_module -> module_element list
val module_modules : ?trans:bool -> t_module -> t_module list
val module_module_types : ?trans:bool -> t_module -> t_module_type list
val module_included_modules : ?trans:bool-> t_module -> included_module list
val module_exceptions : ?trans:bool-> t_module -> Exception.t_exception list
val module_types : ?trans:bool-> t_module -> Type.t_type list
val module_values : ?trans:bool -> t_module -> Value.t_value list
val module_functions : ?trans:bool-> t_module -> Value.t_value list
val module_simple_values : ?trans:bool-> t_module -> Value.t_value list
val module_classes : ?trans:bool-> t_module -> Class.t_class list
val module_class_types : ?trans:bool-> t_module -> Class.t_class_type list
val module_all_classes : ?trans:bool-> t_module -> Class.t_class list
val module_is_functor : t_module -> bool
val module_parameters : ?trans:bool-> t_module -> (module_parameter * text option) list
val module_comments : ?trans:bool-> t_module -> text list
* { 3 Functions for module types }
val module_type_elements : ?trans:bool-> t_module_type -> module_element list
val module_type_modules : ?trans:bool-> t_module_type -> t_module list
val module_type_module_types : ?trans:bool-> t_module_type -> t_module_type list
val module_type_included_modules : ?trans:bool-> t_module_type -> included_module list
val module_type_exceptions : ?trans:bool-> t_module_type -> Exception.t_exception list
val module_type_types : ?trans:bool-> t_module_type -> Type.t_type list
val module_type_values : ?trans:bool-> t_module_type -> Value.t_value list
val module_type_functions : ?trans:bool-> t_module_type -> Value.t_value list
val module_type_simple_values : ?trans:bool-> t_module_type -> Value.t_value list
val module_type_classes : ?trans:bool-> t_module_type -> Class.t_class list
val module_type_class_types : ?trans:bool-> t_module_type -> Class.t_class_type list
val module_type_all_classes : ?trans:bool-> t_module_type -> Class.t_class list
val module_type_is_functor : t_module_type -> bool
val module_type_parameters : ?trans:bool-> t_module_type -> (module_parameter * text option) list
val module_type_comments : ?trans:bool-> t_module_type -> text list
end
* { 3 Getting strings from values }
val reset_type_names : unit -> unit
val string_of_variance : Type.t_type -> (bool * bool) -> string
val string_of_type_expr : Types.type_expr -> string
val string_of_class_params : Class.t_class -> string
val string_of_type_list : ?par: bool -> string -> Types.type_expr list -> string
val string_of_type_param_list : Type.t_type -> string
val string_of_class_type_param_list : Types.type_expr list -> string
val string_of_module_type : ?code: string -> ?complete: bool -> Types.module_type -> string
val string_of_class_type : ?complete: bool -> Types.class_type -> string
val string_of_text : text -> string
val string_of_info : info -> string
val string_of_type : Type.t_type -> string
val string_of_exception : Exception.t_exception -> string
val string_of_value : Value.t_value -> string
val string_of_attribute : Value.t_attribute -> string
val string_of_method : Value.t_method -> string
* { 3 Miscelaneous functions }
* Return the first sentence ( until the first dot followed by a blank
or the first blank line ) of a text .
Do n't stop in the middle of [ Code ] , [ CodePre ] , [ Verbatim ] , [ List ] , [ ] ,
[ Latex ] , [ Link ] , [ Ref ] , [ Subscript ] or [ Superscript ] .
or the first blank line) of a text.
Don't stop in the middle of [Code], [CodePre], [Verbatim], [List], [Enum],
[Latex], [Link], [Ref], [Subscript] or [Superscript]. *)
val first_sentence_of_text : text -> text
* Return the first sentence ( until the first dot followed by a blank
or the first blank line ) of a text , and the remaining text after .
Do n't stop in the middle of [ Code ] , [ CodePre ] , [ Verbatim ] , [ List ] , [ ] ,
[ Latex ] , [ Link ] , [ Ref ] , [ Subscript ] or [ Superscript ] .
or the first blank line) of a text, and the remaining text after.
Don't stop in the middle of [Code], [CodePre], [Verbatim], [List], [Enum],
[Latex], [Link], [Ref], [Subscript] or [Superscript].*)
val first_sentence_and_rest_of_text : text -> text * text
val text_no_title_no_list : text -> text
* [ concat sep l ] the given list of text [ l ] , each separated with
the text [ sep ] .
the text [sep]. *)
val text_concat : Odoc_types.text -> Odoc_types.text list -> Odoc_types.text
val get_titles_in_text : text -> (int * string option * text) list
* Take a sorted list of elements , a function to get the name
of an element and return the list of list of elements ,
where each list group elements beginning by the same letter .
Since the original list is sorted , elements whose name does not
begin with a letter should be in the first returned list .
of an element and return the list of list of elements,
where each list group elements beginning by the same letter.
Since the original list is sorted, elements whose name does not
begin with a letter should be in the first returned list.*)
val create_index_lists : 'a list -> ('a -> string) -> 'a list list
val remove_option : Types.type_expr -> Types.type_expr
val is_optional : string -> bool
val label_name : string -> string
* Return the given name where the module name or
part of it was removed , according to the list of modules
which must be hidden ( cf { ! )
part of it was removed, according to the list of modules
which must be hidden (cf {!Odoc_args.hidden_modules})*)
val use_hidden_modules : Name.t -> Name.t
val verbose : string -> unit
val warning : string -> unit
val print_warnings : bool ref
* Increment this counter when an error is encountered .
The ocamldoc tool will print the number of errors
encountered exit with code 1 if this number is greater
than 0 .
The ocamldoc tool will print the number of errors
encountered exit with code 1 if this number is greater
than 0. *)
val errors : int ref
val apply_opt : ('a -> 'b) -> 'a option -> 'b option
* Apply a function to a first value if it is
not different from a second value . If the two values
are different , return the second one .
not different from a second value. If the two values
are different, return the second one.*)
val apply_if_equal : ('a -> 'a) -> 'a -> 'a -> 'a
val text_of_string : string -> text
val text_string_of_text : text -> string
val info_of_string : string -> info
val info_string_of_info : info -> string
val info_of_comment_file : Module.t_module list -> string -> info
val remove_ending_newline : string -> string
module Search :
sig
type result_element = Odoc_search.result_element =
Res_module of Module.t_module
| Res_module_type of Module.t_module_type
| Res_class of Class.t_class
| Res_class_type of Class.t_class_type
| Res_value of Value.t_value
| Res_type of Type.t_type
| Res_exception of Exception.t_exception
| Res_attribute of Value.t_attribute
| Res_method of Value.t_method
| Res_section of string * text
| Res_recfield of Type.t_type * Type.record_field
| Res_const of Type.t_type * Type.variant_constructor
type search_result = result_element list
val search_by_name : Module.t_module list -> Str.regexp -> search_result
val values : Module.t_module list -> Value.t_value list
val exceptions : Module.t_module list -> Exception.t_exception list
val types : Module.t_module list -> Type.t_type list
val attributes : Module.t_module list -> Value.t_attribute list
val methods : Module.t_module list -> Value.t_method list
val classes : Module.t_module list -> Class.t_class list
val class_types : Module.t_module list -> Class.t_class_type list
val modules : Module.t_module list -> Module.t_module list
val module_types : Module.t_module list -> Module.t_module_type list
end
module Scan :
sig
class scanner :
object
method scan_value : Value.t_value -> unit
method scan_type_pre : Type.t_type -> bool
method scan_type_const : Type.t_type -> Type.variant_constructor -> unit
method scan_type_recfield : Type.t_type -> Type.record_field -> unit
method scan_type : Type.t_type -> unit
method scan_exception : Exception.t_exception -> unit
method scan_attribute : Value.t_attribute -> unit
method scan_method : Value.t_method -> unit
method scan_included_module : Module.included_module -> unit
method scan_class_comment : text -> unit
method scan_class_pre : Class.t_class -> bool
method scan_class_elements : Class.t_class -> unit
method scan_class : Class.t_class -> unit
method scan_class_type_comment : text -> unit
method scan_class_type_pre : Class.t_class_type -> bool
method scan_class_type_elements : Class.t_class_type -> unit
method scan_class_type : Class.t_class_type -> unit
method scan_module_comment : text -> unit
method scan_module_pre : Module.t_module -> bool
method scan_module_elements : Module.t_module -> unit
method scan_module : Module.t_module -> unit
method scan_module_type_comment : text -> unit
method scan_module_type_pre : Module.t_module_type -> bool
method scan_module_type_elements : Module.t_module_type -> unit
method scan_module_type : Module.t_module_type -> unit
method scan_module_list : Module.t_module list -> unit
end
end
module Dep :
sig
val kernel_deps_of_modules : Module.t_module list -> unit
val deps_of_types : ?kernel: bool -> Type.t_type list -> (Type.t_type * (Name.t list)) list
end
* { 2 Some global variables }
module Global :
sig
val errors : int ref
val warn_error : bool ref
* The file used by the generators outputting only one file .
val out_file : string ref
* Verbose mode or not .
val verbose : bool ref
val target_dir : string ref
val title : string option ref
val intro_file : string option ref
val with_toc : bool ref
val with_index : bool ref
val with_header : bool ref
val with_trailer : bool ref
end
val analyse_files :
?merge_options:Odoc_types.merge_option list ->
?include_dirs:string list ->
?labels:bool ->
?sort_modules:bool ->
?no_stop:bool ->
?init: Odoc_module.t_module list ->
Odoc_global.source_file list ->
Module.t_module list
val dump_modules : string -> Odoc_module.t_module list -> unit
val load_modules : string -> Odoc_module.t_module list
|
f9cf78388de1ab5c2f7766b3978cf75ad35a04860051b4df8d8916bd982b9cbb | hodur-org/hodur-example-app | graphql.clj | (ns hodur-example-app.graphql
(:require [camel-snake-kebab.core :refer [->kebab-case-keyword]]
[camel-snake-kebab.extras :refer [transform-keys]]
[cheshire.core :as cheshire]
[com.walmartlabs.lacinia :as lacinia]))
(def ^:private cors-headers
{"Access-Control-Allow-Origin" "*"
"Access-Control-Allow-Headers" "*"
"Access-Control-Allow-Methods" "DELETE,GET,HEAD,OPTIONS,PATCH,POST,PUT"})
(defn ^:private respond [status body]
{:status status
:headers (merge {"Content-Type" "application/json"} cors-headers)
:body (cheshire/generate-string body)})
(defn ^:private respond-ok [body]
(respond 200 body))
(defn ^:private respond-user-error [error]
(respond 400 {:error error}))
(defn ^:private respond-server-error [ex]
(respond 500 {:error {:stack-trace (Throwable->map ex)}}))
(defn ^:private is-graphql-request? [headers]
(= "application/graphql" (get headers :content-type)))
(defn ^:private is-json-request? [headers]
(= "application/json" (get headers :content-type)))
(defn ^:private valid-headers? [headers]
(or
(is-graphql-request? headers)
(is-json-request? headers)))
(defn ^:private valid-request?
"Returns true if proper content type is set and there is query to execute."
[headers body]
(and (valid-headers? headers) (not (nil? body))))
(defn ^:private run-json-query
"Runs the query which was passed as application/json content type (from Graphqurl for example).
JSON body of this request is in format: { query: \"\", variables: \"\" }"
[compiled-schema body context]
(let [parsed-body (cheshire/parse-string body true)
query (:query parsed-body)
variables (:variables parsed-body)]
(lacinia/execute compiled-schema query variables context)))
(defn ^:private run-graphql-query
[compiled-schema query vars context]
(lacinia/execute compiled-schema query vars context))
(defn ^:private get-response [{:keys [headers body]}
compiled-schema context]
(let [transformed-headers (transform-keys ->kebab-case-keyword headers)]
(if (valid-request? transformed-headers body)
;; adding headers to context to be used for feature flag override
(let [context' (merge context {:request {:headers headers}})]
(if (is-graphql-request? transformed-headers)
;; we are supporting both "raw" GraphQL queries inside of request body
and request body
(respond-ok (run-graphql-query compiled-schema body nil context'))
(respond-ok (run-json-query compiled-schema body context'))))
;; request headers and body is included in response for debugging purposes
;; we can remove it later if needed
(respond-user-error {:message "Invalid request." :headers transformed-headers :body body}))))
(defn ^:private respond-to-query [{:keys [headers body] :as payload}
compiled-schema context]
(try
(let [body' (some-> body slurp)]
(get-response (assoc payload :body body')
compiled-schema context))
(catch Exception e
(respond-server-error e))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn interceptor [{:keys [request-method headers body] :as payload}
compiled-schema context]
(if (= request-method :options)
(respond-ok "")
(respond-to-query payload compiled-schema context)))
| null | https://raw.githubusercontent.com/hodur-org/hodur-example-app/e327080de31ecbd32319794eec84b38f0bd5ec41/src/hodur_example_app/graphql.clj | clojure | adding headers to context to be used for feature flag override
we are supporting both "raw" GraphQL queries inside of request body
request headers and body is included in response for debugging purposes
we can remove it later if needed
| (ns hodur-example-app.graphql
(:require [camel-snake-kebab.core :refer [->kebab-case-keyword]]
[camel-snake-kebab.extras :refer [transform-keys]]
[cheshire.core :as cheshire]
[com.walmartlabs.lacinia :as lacinia]))
(def ^:private cors-headers
{"Access-Control-Allow-Origin" "*"
"Access-Control-Allow-Headers" "*"
"Access-Control-Allow-Methods" "DELETE,GET,HEAD,OPTIONS,PATCH,POST,PUT"})
(defn ^:private respond [status body]
{:status status
:headers (merge {"Content-Type" "application/json"} cors-headers)
:body (cheshire/generate-string body)})
(defn ^:private respond-ok [body]
(respond 200 body))
(defn ^:private respond-user-error [error]
(respond 400 {:error error}))
(defn ^:private respond-server-error [ex]
(respond 500 {:error {:stack-trace (Throwable->map ex)}}))
(defn ^:private is-graphql-request? [headers]
(= "application/graphql" (get headers :content-type)))
(defn ^:private is-json-request? [headers]
(= "application/json" (get headers :content-type)))
(defn ^:private valid-headers? [headers]
(or
(is-graphql-request? headers)
(is-json-request? headers)))
(defn ^:private valid-request?
"Returns true if proper content type is set and there is query to execute."
[headers body]
(and (valid-headers? headers) (not (nil? body))))
(defn ^:private run-json-query
"Runs the query which was passed as application/json content type (from Graphqurl for example).
JSON body of this request is in format: { query: \"\", variables: \"\" }"
[compiled-schema body context]
(let [parsed-body (cheshire/parse-string body true)
query (:query parsed-body)
variables (:variables parsed-body)]
(lacinia/execute compiled-schema query variables context)))
(defn ^:private run-graphql-query
[compiled-schema query vars context]
(lacinia/execute compiled-schema query vars context))
(defn ^:private get-response [{:keys [headers body]}
compiled-schema context]
(let [transformed-headers (transform-keys ->kebab-case-keyword headers)]
(if (valid-request? transformed-headers body)
(let [context' (merge context {:request {:headers headers}})]
(if (is-graphql-request? transformed-headers)
and request body
(respond-ok (run-graphql-query compiled-schema body nil context'))
(respond-ok (run-json-query compiled-schema body context'))))
(respond-user-error {:message "Invalid request." :headers transformed-headers :body body}))))
(defn ^:private respond-to-query [{:keys [headers body] :as payload}
compiled-schema context]
(try
(let [body' (some-> body slurp)]
(get-response (assoc payload :body body')
compiled-schema context))
(catch Exception e
(respond-server-error e))))
(defn interceptor [{:keys [request-method headers body] :as payload}
compiled-schema context]
(if (= request-method :options)
(respond-ok "")
(respond-to-query payload compiled-schema context)))
|
520fd37a4c7168f899c76fdc2c1f55ef5fa10c57df49fc78cac799f765c1747a | Liutos/Project-Euler | pro8.lisp | (defun digstr-prod (digstr)
(apply #'* (map 'list #'(lambda (c)
(- (char-code c) (char-code #\0)))
digstr)))
(defun pro8 (digit-string)
(let ((max 0))
(dotimes (i 996)
(let ((num (digstr-prod (subseq digit-string i (+ 5 i)))))
(if (> num max)
(setf max num))))
max))
(with-open-file (s "pro8.txt")
(pro8 (read-line s nil 'eof))) | null | https://raw.githubusercontent.com/Liutos/Project-Euler/dd59940099ae37f971df1d74c4b7c78131fd5470/lisp/pro8.lisp | lisp | (defun digstr-prod (digstr)
(apply #'* (map 'list #'(lambda (c)
(- (char-code c) (char-code #\0)))
digstr)))
(defun pro8 (digit-string)
(let ((max 0))
(dotimes (i 996)
(let ((num (digstr-prod (subseq digit-string i (+ 5 i)))))
(if (> num max)
(setf max num))))
max))
(with-open-file (s "pro8.txt")
(pro8 (read-line s nil 'eof))) | |
c2e15a5c822677f44dc8c29cce6afc64bf5ce282e434eef8294a8390ee04d134 | bennn/dissertation | main.rkt | #lang typed/racket
(require require-typed-check
"data-adaptor.rkt")
(require/typed/check "const.rkt"
[WORLD (-> World)])
(require/typed/check "motion.rkt"
[reset! (-> Void)]
[world->world (World . -> . World)])
(require/typed/check "handlers.rkt"
[handle-key (World String . -> . World)]
[game-over? (World . -> . Boolean)])
(: replay : World (Listof Any) -> Void)
(define (replay w0 hist)
(reset!)
(let loop ((w : World w0)
(h : (Listof Any) hist))
(if (empty? h)
w
(let ()
(loop
(match (car h)
[`(on-key ,(? string? ke))
(handle-key w ke)]
[`(on-tick)
(world->world w)]
[`(stop-when)
(game-over? w)
w])
(cdr h)))))
(void))
(define DATA (with-input-from-file "../base/snake-hist.rktd" read))
(define LOOPS 200)
(: main (-> Any Void))
(define (main hist)
(define w0 (WORLD))
(cond [(list? hist)
(for ([_i (in-range LOOPS)])
(replay w0 hist))]
[else
(error "bad input")]))
(time (main DATA))
| null | https://raw.githubusercontent.com/bennn/dissertation/779bfe6f8fee19092849b7e2cfc476df33e9357b/dissertation/scrbl/jfp-2019/benchmarks/snake/typed/main.rkt | racket | #lang typed/racket
(require require-typed-check
"data-adaptor.rkt")
(require/typed/check "const.rkt"
[WORLD (-> World)])
(require/typed/check "motion.rkt"
[reset! (-> Void)]
[world->world (World . -> . World)])
(require/typed/check "handlers.rkt"
[handle-key (World String . -> . World)]
[game-over? (World . -> . Boolean)])
(: replay : World (Listof Any) -> Void)
(define (replay w0 hist)
(reset!)
(let loop ((w : World w0)
(h : (Listof Any) hist))
(if (empty? h)
w
(let ()
(loop
(match (car h)
[`(on-key ,(? string? ke))
(handle-key w ke)]
[`(on-tick)
(world->world w)]
[`(stop-when)
(game-over? w)
w])
(cdr h)))))
(void))
(define DATA (with-input-from-file "../base/snake-hist.rktd" read))
(define LOOPS 200)
(: main (-> Any Void))
(define (main hist)
(define w0 (WORLD))
(cond [(list? hist)
(for ([_i (in-range LOOPS)])
(replay w0 hist))]
[else
(error "bad input")]))
(time (main DATA))
| |
b978d98364e3e7e743008a9fd4464daf2cd9ce7f9f2ac253a939cb97d0189a04 | ocamllabs/ocaml-modular-implicits | outcometree.mli | (***********************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 2001 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
(* Module [Outcometree]: results displayed by the toplevel *)
These types represent messages that the toplevel displays as normal
results or errors . The real displaying is customisable using the hooks :
[ Toploop.print_out_value ]
[ Toploop.print_out_type ]
[ Toploop.print_out_sig_item ]
[ Toploop.print_out_phrase ]
results or errors. The real displaying is customisable using the hooks:
[Toploop.print_out_value]
[Toploop.print_out_type]
[Toploop.print_out_sig_item]
[Toploop.print_out_phrase] *)
type out_ident =
| Oide_apply of out_ident * out_ident * Asttypes.implicit_flag
| Oide_dot of out_ident * string
| Oide_ident of string
type out_value =
| Oval_array of out_value list
| Oval_char of char
| Oval_constr of out_ident * out_value list
| Oval_ellipsis
| Oval_float of float
| Oval_int of int
| Oval_int32 of int32
| Oval_int64 of int64
| Oval_nativeint of nativeint
| Oval_list of out_value list
| Oval_printer of (Format.formatter -> unit)
| Oval_record of (out_ident * out_value) list
| Oval_string of string
| Oval_stuff of string
| Oval_tuple of out_value list
| Oval_variant of string * out_value option
type out_type =
| Otyp_abstract
| Otyp_open
| Otyp_alias of out_type * string
| Otyp_arrow of string * out_type * out_type
| Otyp_implicit_arrow of string * out_type * out_type
| Otyp_class of bool * out_ident * out_type list
| Otyp_constr of out_ident * out_type list
| Otyp_manifest of out_type * out_type
| Otyp_object of (string * out_type) list * bool option
| Otyp_record of (string * bool * out_type) list
| Otyp_stuff of string
| Otyp_sum of (string * out_type list * out_type option) list
| Otyp_tuple of out_type list
| Otyp_var of bool * string
| Otyp_variant of
bool * out_variant * bool * (string list) option
| Otyp_poly of string list * out_type
| Otyp_module of string * string list * out_type list
and out_variant =
| Ovar_fields of (string * bool * out_type list) list
| Ovar_name of out_ident * out_type list
type out_class_type =
| Octy_constr of out_ident * out_type list
| Octy_arrow of string * out_type * out_class_type
| Octy_signature of out_type option * out_class_sig_item list
and out_class_sig_item =
| Ocsg_constraint of out_type * out_type
| Ocsg_method of string * bool * bool * out_type
| Ocsg_value of string * bool * bool * out_type
type out_module_type =
| Omty_abstract
| Omty_functor of out_module_parameter * out_module_type
| Omty_ident of out_ident
| Omty_signature of out_sig_item list
| Omty_alias of out_ident
and out_module_parameter =
| Ompar_generative
| Ompar_applicative of string * out_module_type
| Ompar_implicit of string * out_module_type
and out_sig_item =
| Osig_class of
bool * string * (string * (bool * bool)) list * out_class_type *
out_rec_status
| Osig_class_type of
bool * string * (string * (bool * bool)) list * out_class_type *
out_rec_status
| Osig_typext of out_extension_constructor * out_ext_status
| Osig_modtype of string * out_module_type
| Osig_module of string * out_module_type * out_rec_status * Asttypes.implicit_flag
| Osig_type of out_type_decl * out_rec_status
| Osig_value of string * out_type * string list
and out_type_decl =
{ otype_name: string;
otype_params: (string * (bool * bool)) list;
otype_type: out_type;
otype_private: Asttypes.private_flag;
otype_cstrs: (out_type * out_type) list }
and out_extension_constructor =
{ oext_name: string;
oext_type_name: string;
oext_type_params: string list;
oext_args: out_type list;
oext_ret_type: out_type option;
oext_private: Asttypes.private_flag }
and out_type_extension =
{ otyext_name: string;
otyext_params: string list;
otyext_constructors: (string * out_type list * out_type option) list;
otyext_private: Asttypes.private_flag }
and out_rec_status =
| Orec_not
| Orec_first
| Orec_next
and out_ext_status =
| Oext_first
| Oext_next
| Oext_exception
type out_phrase =
| Ophr_eval of out_value * out_type
| Ophr_signature of (out_sig_item * out_value option) list
| Ophr_exception of (exn * out_value)
| null | https://raw.githubusercontent.com/ocamllabs/ocaml-modular-implicits/92e45da5c8a4c2db8b2cd5be28a5bec2ac2181f1/typing/outcometree.mli | ocaml | *********************************************************************
OCaml
*********************************************************************
Module [Outcometree]: results displayed by the toplevel | , projet Cristal , INRIA Rocquencourt
Copyright 2001 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
These types represent messages that the toplevel displays as normal
results or errors . The real displaying is customisable using the hooks :
[ Toploop.print_out_value ]
[ Toploop.print_out_type ]
[ Toploop.print_out_sig_item ]
[ Toploop.print_out_phrase ]
results or errors. The real displaying is customisable using the hooks:
[Toploop.print_out_value]
[Toploop.print_out_type]
[Toploop.print_out_sig_item]
[Toploop.print_out_phrase] *)
type out_ident =
| Oide_apply of out_ident * out_ident * Asttypes.implicit_flag
| Oide_dot of out_ident * string
| Oide_ident of string
type out_value =
| Oval_array of out_value list
| Oval_char of char
| Oval_constr of out_ident * out_value list
| Oval_ellipsis
| Oval_float of float
| Oval_int of int
| Oval_int32 of int32
| Oval_int64 of int64
| Oval_nativeint of nativeint
| Oval_list of out_value list
| Oval_printer of (Format.formatter -> unit)
| Oval_record of (out_ident * out_value) list
| Oval_string of string
| Oval_stuff of string
| Oval_tuple of out_value list
| Oval_variant of string * out_value option
type out_type =
| Otyp_abstract
| Otyp_open
| Otyp_alias of out_type * string
| Otyp_arrow of string * out_type * out_type
| Otyp_implicit_arrow of string * out_type * out_type
| Otyp_class of bool * out_ident * out_type list
| Otyp_constr of out_ident * out_type list
| Otyp_manifest of out_type * out_type
| Otyp_object of (string * out_type) list * bool option
| Otyp_record of (string * bool * out_type) list
| Otyp_stuff of string
| Otyp_sum of (string * out_type list * out_type option) list
| Otyp_tuple of out_type list
| Otyp_var of bool * string
| Otyp_variant of
bool * out_variant * bool * (string list) option
| Otyp_poly of string list * out_type
| Otyp_module of string * string list * out_type list
and out_variant =
| Ovar_fields of (string * bool * out_type list) list
| Ovar_name of out_ident * out_type list
type out_class_type =
| Octy_constr of out_ident * out_type list
| Octy_arrow of string * out_type * out_class_type
| Octy_signature of out_type option * out_class_sig_item list
and out_class_sig_item =
| Ocsg_constraint of out_type * out_type
| Ocsg_method of string * bool * bool * out_type
| Ocsg_value of string * bool * bool * out_type
type out_module_type =
| Omty_abstract
| Omty_functor of out_module_parameter * out_module_type
| Omty_ident of out_ident
| Omty_signature of out_sig_item list
| Omty_alias of out_ident
and out_module_parameter =
| Ompar_generative
| Ompar_applicative of string * out_module_type
| Ompar_implicit of string * out_module_type
and out_sig_item =
| Osig_class of
bool * string * (string * (bool * bool)) list * out_class_type *
out_rec_status
| Osig_class_type of
bool * string * (string * (bool * bool)) list * out_class_type *
out_rec_status
| Osig_typext of out_extension_constructor * out_ext_status
| Osig_modtype of string * out_module_type
| Osig_module of string * out_module_type * out_rec_status * Asttypes.implicit_flag
| Osig_type of out_type_decl * out_rec_status
| Osig_value of string * out_type * string list
and out_type_decl =
{ otype_name: string;
otype_params: (string * (bool * bool)) list;
otype_type: out_type;
otype_private: Asttypes.private_flag;
otype_cstrs: (out_type * out_type) list }
and out_extension_constructor =
{ oext_name: string;
oext_type_name: string;
oext_type_params: string list;
oext_args: out_type list;
oext_ret_type: out_type option;
oext_private: Asttypes.private_flag }
and out_type_extension =
{ otyext_name: string;
otyext_params: string list;
otyext_constructors: (string * out_type list * out_type option) list;
otyext_private: Asttypes.private_flag }
and out_rec_status =
| Orec_not
| Orec_first
| Orec_next
and out_ext_status =
| Oext_first
| Oext_next
| Oext_exception
type out_phrase =
| Ophr_eval of out_value * out_type
| Ophr_signature of (out_sig_item * out_value option) list
| Ophr_exception of (exn * out_value)
|
311217f3e492ea530c146bd7c6d295463199f2de3495bf81baf21169c22119c6 | input-output-hk/marlowe-cardano | Types.hs | -----------------------------------------------------------------------------
--
-- Module : $Headers
License : Apache 2.0
--
-- Stability : Experimental
Portability : Portable
--
| types for the test service client .
--
-----------------------------------------------------------------------------
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
module Spec.Marlowe.Service.Types
( -- * Types
Request(..)
, Response(..)
, Seed(..)
, Size(..)
) where
import Control.Applicative ((<|>))
import Data.Aeson (FromJSON(..), ToJSON(..))
import Plutus.V1.Ledger.Api (POSIXTime(..))
import qualified Data.Aeson as A (Value(Object, String), object, withObject, (.:), (.:?), (.=))
import qualified Data.Aeson.Types as A (Parser)
import qualified Language.Marlowe.Core.V1.Semantics as Marlowe
import qualified Language.Marlowe.Core.V1.Semantics.Types as Marlowe
newtype Size = Size Int deriving (Eq, Show, ToJSON, FromJSON)
newtype Seed = Seed Int deriving (Eq, Show, ToJSON, FromJSON)
data Request =
TestRoundtripSerialization
{
typeSerialized :: String
, valueSerialized :: A.Value
}
| GenerateRandomValue
{
typeSerialized :: String
, size :: Maybe Size
, seed :: Maybe Seed
}
| ComputeTransaction
{
transactionInput :: Marlowe.TransactionInput
, contract :: Marlowe.Contract
, state :: Marlowe.State
}
| PlayTrace
{
transactionInputs :: [Marlowe.TransactionInput]
, contract :: Marlowe.Contract
, initialTime :: POSIXTime
}
| EvalValue
{
environment :: Marlowe.Environment
, state :: Marlowe.State
, value :: Marlowe.Value Marlowe.Observation
}
deriving (Eq, Show)
instance FromJSON Request where
parseJSON =
A.withObject "Request"
$ \o ->
(o A..: "request" :: A.Parser String)
>>= \case
"test-roundtrip-serialization" -> TestRoundtripSerialization <$> o A..: "typeId" <*> o A..: "json"
"generate-random-value" -> GenerateRandomValue <$> o A..: "typeId" <*> o A..:? "size" <*> o A..:? "seed"
"compute-transaction" -> ComputeTransaction <$> o A..: "transactionInput" <*> o A..: "coreContract" <*> o A..: "state"
"playtrace" -> PlayTrace <$> o A..: "transactionInputs" <*> o A..: "coreContract" <*> (POSIXTime <$> o A..: "initialTime")
"eval-value" -> EvalValue <$> o A..: "environment" <*> o A..: "state" <*> o A..: "value"
request -> fail $ "Request not understood: " <> show request <> "."
instance ToJSON Request where
toJSON TestRoundtripSerialization{..} =
A.object
[
"request" A..= ("test-roundtrip-serialization" :: String)
, "typeId" A..= typeSerialized
, "json" A..= valueSerialized
]
toJSON GenerateRandomValue{..} =
A.object
[
"request" A..= ("generate-random-value" :: String)
, "typeId" A..= typeSerialized
, "size" A..= size
, "seed" A..= seed
]
toJSON ComputeTransaction{..} =
A.object
[
"request" A..= ("compute-transaction" :: String)
, "transactionInput" A..= transactionInput
, "coreContract" A..= contract
, "state" A..= state
]
toJSON PlayTrace{..} =
A.object
[
"request" A..= ("playtrace" :: String)
, "transactionInputs" A..= transactionInputs
, "coreContract" A..= contract
, "initialTime" A..= getPOSIXTime initialTime
]
toJSON EvalValue{..} =
A.object
[
"request" A..= ("eval-value" :: String)
, "environment" A..= environment
, "state" A..= state
, "value" A..= value
]
data Response =
InvalidRequest
{
errorInvalid :: String
}
| UnknownRequest
| RequestResponse
{
valueResponse :: A.Value
}
| RequestNotImplemented
| RequestTimeOut
| ResponseFailure
{
failureResponse :: String
}
deriving (Eq, Ord, Read, Show)
instance FromJSON Response where
parseJSON (A.String "UnknownRequest") = return UnknownRequest
parseJSON (A.String "RequestNotImplemented") = return RequestNotImplemented
parseJSON (A.Object v) = (InvalidRequest <$> v A..: "invalid-request") <|> (RequestResponse <$> v A..: "request-response")
parseJSON _ = fail "Response must be either a string or an A.object"
instance ToJSON Response where
toJSON UnknownRequest = "UnknownRequest"
toJSON RequestNotImplemented = "RequestNotImplemented"
toJSON RequestTimeOut = "RequestTimeOut"
toJSON (InvalidRequest err) = A.object . pure $ "invalid-request" A..= err
toJSON (RequestResponse res) = A.object . pure $ "request-response" A..= res
toJSON (ResponseFailure err) = A.object . pure $ "invalid-request" A..= err
| null | https://raw.githubusercontent.com/input-output-hk/marlowe-cardano/16d1e16b0aac74e63f090fca4fc1a40104b7b573/marlowe-test/src/Spec/Marlowe/Service/Types.hs | haskell | ---------------------------------------------------------------------------
Module : $Headers
Stability : Experimental
---------------------------------------------------------------------------
# LANGUAGE OverloadedStrings #
* Types | License : Apache 2.0
Portability : Portable
| types for the test service client .
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE RecordWildCards #
module Spec.Marlowe.Service.Types
Request(..)
, Response(..)
, Seed(..)
, Size(..)
) where
import Control.Applicative ((<|>))
import Data.Aeson (FromJSON(..), ToJSON(..))
import Plutus.V1.Ledger.Api (POSIXTime(..))
import qualified Data.Aeson as A (Value(Object, String), object, withObject, (.:), (.:?), (.=))
import qualified Data.Aeson.Types as A (Parser)
import qualified Language.Marlowe.Core.V1.Semantics as Marlowe
import qualified Language.Marlowe.Core.V1.Semantics.Types as Marlowe
newtype Size = Size Int deriving (Eq, Show, ToJSON, FromJSON)
newtype Seed = Seed Int deriving (Eq, Show, ToJSON, FromJSON)
data Request =
TestRoundtripSerialization
{
typeSerialized :: String
, valueSerialized :: A.Value
}
| GenerateRandomValue
{
typeSerialized :: String
, size :: Maybe Size
, seed :: Maybe Seed
}
| ComputeTransaction
{
transactionInput :: Marlowe.TransactionInput
, contract :: Marlowe.Contract
, state :: Marlowe.State
}
| PlayTrace
{
transactionInputs :: [Marlowe.TransactionInput]
, contract :: Marlowe.Contract
, initialTime :: POSIXTime
}
| EvalValue
{
environment :: Marlowe.Environment
, state :: Marlowe.State
, value :: Marlowe.Value Marlowe.Observation
}
deriving (Eq, Show)
instance FromJSON Request where
parseJSON =
A.withObject "Request"
$ \o ->
(o A..: "request" :: A.Parser String)
>>= \case
"test-roundtrip-serialization" -> TestRoundtripSerialization <$> o A..: "typeId" <*> o A..: "json"
"generate-random-value" -> GenerateRandomValue <$> o A..: "typeId" <*> o A..:? "size" <*> o A..:? "seed"
"compute-transaction" -> ComputeTransaction <$> o A..: "transactionInput" <*> o A..: "coreContract" <*> o A..: "state"
"playtrace" -> PlayTrace <$> o A..: "transactionInputs" <*> o A..: "coreContract" <*> (POSIXTime <$> o A..: "initialTime")
"eval-value" -> EvalValue <$> o A..: "environment" <*> o A..: "state" <*> o A..: "value"
request -> fail $ "Request not understood: " <> show request <> "."
instance ToJSON Request where
toJSON TestRoundtripSerialization{..} =
A.object
[
"request" A..= ("test-roundtrip-serialization" :: String)
, "typeId" A..= typeSerialized
, "json" A..= valueSerialized
]
toJSON GenerateRandomValue{..} =
A.object
[
"request" A..= ("generate-random-value" :: String)
, "typeId" A..= typeSerialized
, "size" A..= size
, "seed" A..= seed
]
toJSON ComputeTransaction{..} =
A.object
[
"request" A..= ("compute-transaction" :: String)
, "transactionInput" A..= transactionInput
, "coreContract" A..= contract
, "state" A..= state
]
toJSON PlayTrace{..} =
A.object
[
"request" A..= ("playtrace" :: String)
, "transactionInputs" A..= transactionInputs
, "coreContract" A..= contract
, "initialTime" A..= getPOSIXTime initialTime
]
toJSON EvalValue{..} =
A.object
[
"request" A..= ("eval-value" :: String)
, "environment" A..= environment
, "state" A..= state
, "value" A..= value
]
data Response =
InvalidRequest
{
errorInvalid :: String
}
| UnknownRequest
| RequestResponse
{
valueResponse :: A.Value
}
| RequestNotImplemented
| RequestTimeOut
| ResponseFailure
{
failureResponse :: String
}
deriving (Eq, Ord, Read, Show)
instance FromJSON Response where
parseJSON (A.String "UnknownRequest") = return UnknownRequest
parseJSON (A.String "RequestNotImplemented") = return RequestNotImplemented
parseJSON (A.Object v) = (InvalidRequest <$> v A..: "invalid-request") <|> (RequestResponse <$> v A..: "request-response")
parseJSON _ = fail "Response must be either a string or an A.object"
instance ToJSON Response where
toJSON UnknownRequest = "UnknownRequest"
toJSON RequestNotImplemented = "RequestNotImplemented"
toJSON RequestTimeOut = "RequestTimeOut"
toJSON (InvalidRequest err) = A.object . pure $ "invalid-request" A..= err
toJSON (RequestResponse res) = A.object . pure $ "request-response" A..= res
toJSON (ResponseFailure err) = A.object . pure $ "invalid-request" A..= err
|
7adf841f64a81122daff554b79b2d5d74acf4b9a1e851b984b99584942a618cb | OCamlPro/freeton_wallet | commandAccountInfo.ml | (**************************************************************************)
(* *)
Copyright ( c ) 2021 OCamlPro SAS
(* *)
(* All rights reserved. *)
(* This file is distributed under the terms of the GNU Lesser General *)
Public License version 2.1 , with the special exception on linking
(* described in the LICENSE.md file in the root directory. *)
(* *)
(* *)
(**************************************************************************)
open Ezcmd.V2
open EZCMD.TYPES
open Types
let get_key_info key ~json ~secrets =
if json then
let json = EzEncoding.construct ~compact:false Encoding.key key in
Printf.printf "%s\n%!" json
else begin
Printf.printf "Name: %s\n" key.key_name ;
Option.iter (fun acc ->
Printf.printf " %s\n%!" (ADDRESS.to_string acc.acc_address);
Option.iter (fun s ->
Printf.printf " Contract: %s\n" s
) acc.acc_contract
) key.key_account;
Option.iter (fun pair ->
Printf.printf " Public: %s\n" (PUBKEY.to_json_string pair.public) ;
if secrets then
Option.iter (fun s ->
Printf.printf " Secret: %s\n" s ;
) pair.secret
) key.key_pair ;
if secrets then
Option.iter (fun s ->
Printf.printf " Passphrase:\n %s\n" s) key.key_passphrase;
Printf.printf "%!";
end
let get_account_info accounts ~json ~secrets =
let config = Config.config () in
let net = Config.current_network config in
match accounts with
| [] -> List.iter (fun key ->
match key.key_account with
| None -> ()
| Some _ ->
get_key_info key ~json ~secrets
) net.net_keys
| names ->
List.iter (fun name ->
match Misc.find_key net name with
| None ->
Error.raise "No key %S in network %S" name net.net_name
| Some key ->
get_key_info key ~json ~secrets
) names
let action accounts ~json ~secrets =
get_account_info accounts ~json ~secrets
let cmd =
let accounts = ref [] in
let secrets = ref false in
let json = ref false in
EZCMD.sub
"account info"
(fun () -> action
!accounts ~json:!json ~secrets:!secrets
)
~args:
[ [],
Arg.Anons (fun args -> accounts := args),
EZCMD.info "Name of account" ;
[ "json" ], Arg.Set json,
EZCMD.info "Print in json format";
[ "S"; "secrets" ], Arg.Set secrets,
EZCMD.info "Print passphrase and secret key (default with --json)";
]
~man:[
`S "DESCRIPTION";
`Blocks [
`P "This command displays information on given accounts, either locally or from the blockchain";
`P "Examples:";
`Pre {|ft account info MY-ACCOUNT|};
];
]
~doc:
"Get account info (local or from blockchain)."
| null | https://raw.githubusercontent.com/OCamlPro/freeton_wallet/b97877379e51d96cb3544141d386d502348cfca9/src/freeton_wallet_lib/commandAccountInfo.ml | ocaml | ************************************************************************
All rights reserved.
This file is distributed under the terms of the GNU Lesser General
described in the LICENSE.md file in the root directory.
************************************************************************ | Copyright ( c ) 2021 OCamlPro SAS
Public License version 2.1 , with the special exception on linking
open Ezcmd.V2
open EZCMD.TYPES
open Types
let get_key_info key ~json ~secrets =
if json then
let json = EzEncoding.construct ~compact:false Encoding.key key in
Printf.printf "%s\n%!" json
else begin
Printf.printf "Name: %s\n" key.key_name ;
Option.iter (fun acc ->
Printf.printf " %s\n%!" (ADDRESS.to_string acc.acc_address);
Option.iter (fun s ->
Printf.printf " Contract: %s\n" s
) acc.acc_contract
) key.key_account;
Option.iter (fun pair ->
Printf.printf " Public: %s\n" (PUBKEY.to_json_string pair.public) ;
if secrets then
Option.iter (fun s ->
Printf.printf " Secret: %s\n" s ;
) pair.secret
) key.key_pair ;
if secrets then
Option.iter (fun s ->
Printf.printf " Passphrase:\n %s\n" s) key.key_passphrase;
Printf.printf "%!";
end
let get_account_info accounts ~json ~secrets =
let config = Config.config () in
let net = Config.current_network config in
match accounts with
| [] -> List.iter (fun key ->
match key.key_account with
| None -> ()
| Some _ ->
get_key_info key ~json ~secrets
) net.net_keys
| names ->
List.iter (fun name ->
match Misc.find_key net name with
| None ->
Error.raise "No key %S in network %S" name net.net_name
| Some key ->
get_key_info key ~json ~secrets
) names
let action accounts ~json ~secrets =
get_account_info accounts ~json ~secrets
let cmd =
let accounts = ref [] in
let secrets = ref false in
let json = ref false in
EZCMD.sub
"account info"
(fun () -> action
!accounts ~json:!json ~secrets:!secrets
)
~args:
[ [],
Arg.Anons (fun args -> accounts := args),
EZCMD.info "Name of account" ;
[ "json" ], Arg.Set json,
EZCMD.info "Print in json format";
[ "S"; "secrets" ], Arg.Set secrets,
EZCMD.info "Print passphrase and secret key (default with --json)";
]
~man:[
`S "DESCRIPTION";
`Blocks [
`P "This command displays information on given accounts, either locally or from the blockchain";
`P "Examples:";
`Pre {|ft account info MY-ACCOUNT|};
];
]
~doc:
"Get account info (local or from blockchain)."
|
bbcccb4407de1a0e740e4dd5046176026d2a2308032701af02b228df15da1faf | Gbury/archsat | semantics.ml | This file is free software , part of Archsat . See file " LICENSE " for more details .
Semantic extensions
(* ************************************************************************ *)
let section = Section.make ~parent:Type.section "addons"
type ext = {
builtins : In.language -> Type.builtin_symbols;
}
let default = fun _ _ _ _ -> None
let mk_ext
?(tptp=default)
?(smtlib=default)
?(zf=default)
() =
{ builtins = (function
| In.Dimacs | In.ICNF -> default
| In.Smtlib -> smtlib
| In.Tptp -> tptp
| In.Zf -> zf
);
}
(* Addons *)
(* ************************************************************************ *)
(* Instantiate the extension functor *)
module Addon = Extension.Make(struct
type t = ext
let section = section
let neutral = mk_ext ()
let merge ~high ~low = {
builtins = (fun input_format env ast id args ->
match high.builtins input_format env ast id args with
| Some x -> Some x
| None -> low.builtins input_format env ast id args
);
}
end)
(* Convenience function to get the builtin function for type-checking *)
let type_env input =
let f = (Addon.get_res ()).builtins input in
(fun env ast id args ->
Util.enter_prof section;
let res = f env ast id args in
Util.exit_prof section;
res)
| null | https://raw.githubusercontent.com/Gbury/archsat/322fbefa4a58023ddafb3fa1a51f8199c25cde3d/src/input/semantics.ml | ocaml | ************************************************************************
Addons
************************************************************************
Instantiate the extension functor
Convenience function to get the builtin function for type-checking | This file is free software , part of Archsat . See file " LICENSE " for more details .
Semantic extensions
let section = Section.make ~parent:Type.section "addons"
type ext = {
builtins : In.language -> Type.builtin_symbols;
}
let default = fun _ _ _ _ -> None
let mk_ext
?(tptp=default)
?(smtlib=default)
?(zf=default)
() =
{ builtins = (function
| In.Dimacs | In.ICNF -> default
| In.Smtlib -> smtlib
| In.Tptp -> tptp
| In.Zf -> zf
);
}
module Addon = Extension.Make(struct
type t = ext
let section = section
let neutral = mk_ext ()
let merge ~high ~low = {
builtins = (fun input_format env ast id args ->
match high.builtins input_format env ast id args with
| Some x -> Some x
| None -> low.builtins input_format env ast id args
);
}
end)
let type_env input =
let f = (Addon.get_res ()).builtins input in
(fun env ast id args ->
Util.enter_prof section;
let res = f env ast id args in
Util.exit_prof section;
res)
|
e3f0d1f306eef2b022be52d626c01150fcb48bc2d1b8559a77bfb7637a9bab81 | snmsts/cl-langserver | start-slynk.lisp | ;;; This file is intended to be loaded by an implementation to
;;; get a running slynk server
e.g. sbcl --load start-slynk.lisp
;;;
Default port is 4005
;;; For additional slynk-side configurations see
6.2 section of the Slime user manual .
(load (make-pathname :name "slynk-loader" :type "lisp"
:defaults *load-truename*))
(ls-loader:init
:delete nil ; delete any existing SLYNK packages
:reload nil) ; reload SLYNK, even if the SLYNK package already exists
(ls-base:create-server :port 4005
;; if non-nil the connection won't be closed
;; after connecting
:dont-close t)
| null | https://raw.githubusercontent.com/snmsts/cl-langserver/3b1246a5d0bd58459e7a64708f820bf718cf7175/src/helitage/start-slynk.lisp | lisp | This file is intended to be loaded by an implementation to
get a running slynk server
For additional slynk-side configurations see
delete any existing SLYNK packages
reload SLYNK, even if the SLYNK package already exists
if non-nil the connection won't be closed
after connecting | e.g. sbcl --load start-slynk.lisp
Default port is 4005
6.2 section of the Slime user manual .
(load (make-pathname :name "slynk-loader" :type "lisp"
:defaults *load-truename*))
(ls-loader:init
(ls-base:create-server :port 4005
:dont-close t)
|
04d2e689321e6a808fb153a73fe528615c59ef64f4dda0a92b4b4cf8e20658ca | g1eny0ung/github-colorful-contributions-graph | core.cljs | (ns ^:figwheel-hooks github-colorful-contributions.core
(:require [goog.dom :as gdom]
[reagent.dom :as rdom]
[github-colorful-contributions.components.header :refer [header]]
[github-colorful-contributions.components.panel :refer [panel]]
[github-colorful-contributions.components.footer :refer [footer]]))
(defn get-app-element []
(gdom/getElement "app"))
(defn github-colorful-contributions []
[:div.container
[header]
[panel]
[footer]])
(defn mount [el]
(rdom/render [github-colorful-contributions] el))
(defn mount-app-element []
(when-let [el (get-app-element)]
(mount el)))
;; conditionally start your application based on the presence of an "app" element
;; this is particularly helpful for testing this ns without launching the app
(mount-app-element)
;; specify reload hook with ^:after-load metadata
(defn ^:after-load on-reload []
(mount-app-element)
;; optionally touch your app-state to force rerendering depending on
;; your application
;; (swap! app-state update-in [:__figwheel_counter] inc)
)
| null | https://raw.githubusercontent.com/g1eny0ung/github-colorful-contributions-graph/e7cf654ea43c71109e0fa28d1a625371cfbdd9fc/src/github_colorful_contributions/core.cljs | clojure | conditionally start your application based on the presence of an "app" element
this is particularly helpful for testing this ns without launching the app
specify reload hook with ^:after-load metadata
optionally touch your app-state to force rerendering depending on
your application
(swap! app-state update-in [:__figwheel_counter] inc) | (ns ^:figwheel-hooks github-colorful-contributions.core
(:require [goog.dom :as gdom]
[reagent.dom :as rdom]
[github-colorful-contributions.components.header :refer [header]]
[github-colorful-contributions.components.panel :refer [panel]]
[github-colorful-contributions.components.footer :refer [footer]]))
(defn get-app-element []
(gdom/getElement "app"))
(defn github-colorful-contributions []
[:div.container
[header]
[panel]
[footer]])
(defn mount [el]
(rdom/render [github-colorful-contributions] el))
(defn mount-app-element []
(when-let [el (get-app-element)]
(mount el)))
(mount-app-element)
(defn ^:after-load on-reload []
(mount-app-element)
)
|
093c22f56ea6ff43697eb3928aa88ad2805c23cb7a75b5f54f0b704092b0da3c | pirapira/coq2rust | workerPool.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
type worker_id = string
type 'a cpanel = {
called by manager to exit instead of Thread.exit
cancelled : unit -> bool; (* manager checks for a request of termination *)
extra : 'a; (* extra stuff to pass to the manager *)
}
module type PoolModel = sig
(* this shall come from a Spawn.* model *)
type process
val spawn : int -> worker_id * process * CThread.thread_ic * out_channel
(* this defines the main loop of the manager *)
type extra
val manager :
extra cpanel -> worker_id * process * CThread.thread_ic * out_channel -> unit
end
module Make(Model : PoolModel) = struct
type worker = {
name : worker_id;
cancel : bool ref;
manager : Thread.t;
process : Model.process;
}
type pre_pool = {
workers : worker list ref;
count : int ref;
extra_arg : Model.extra;
}
type pool = { lock : Mutex.t; pool : pre_pool }
let magic_no = 17
let master_handshake worker_id ic oc =
try
Marshal.to_channel oc magic_no []; flush oc;
let n = (CThread.thread_friendly_input_value ic : int) in
if n <> magic_no then begin
Printf.eprintf "Handshake with %s failed: protocol mismatch\n" worker_id;
exit 1;
end
with e when Errors.noncritical e ->
Printf.eprintf "Handshake with %s failed: %s\n"
worker_id (Printexc.to_string e);
exit 1
let worker_handshake slave_ic slave_oc =
try
let v = (CThread.thread_friendly_input_value slave_ic : int) in
if v <> magic_no then begin
prerr_endline "Handshake failed: protocol mismatch\n";
exit 1;
end;
Marshal.to_channel slave_oc v []; flush slave_oc;
with e when Errors.noncritical e ->
prerr_endline ("Handshake failed: " ^ Printexc.to_string e);
exit 1
let locking { lock; pool = p } f =
try
Mutex.lock lock;
let x = f p in
Mutex.unlock lock;
x
with e -> Mutex.unlock lock; raise e
let rec create_worker extra pool id =
let cancel = ref false in
let name, process, ic, oc as worker = Model.spawn id in
master_handshake name ic oc;
let exit () = cancel := true; cleanup pool; Thread.exit () in
let cancelled () = !cancel in
let cpanel = { exit; cancelled; extra } in
let manager = Thread.create (Model.manager cpanel) worker in
{ name; cancel; manager; process }
and cleanup x = locking x begin fun { workers; count; extra_arg } ->
workers := List.map (function
| { cancel } as w when !cancel = false -> w
| _ -> let n = !count in incr count; create_worker extra_arg x n)
!workers
end
let n_workers x = locking x begin fun { workers } ->
List.length !workers
end
let is_empty x = locking x begin fun { workers } -> !workers = [] end
let create extra_arg ~size = let x = {
lock = Mutex.create ();
pool = {
extra_arg;
workers = ref [];
count = ref size;
}} in
locking x begin fun { workers } ->
workers := CList.init size (create_worker extra_arg x)
end;
x
let cancel n x = locking x begin fun { workers } ->
List.iter (fun { name; cancel } -> if n = name then cancel := true) !workers
end
let cancel_all x = locking x begin fun { workers } ->
List.iter (fun { cancel } -> cancel := true) !workers
end
let destroy x = locking x begin fun { workers } ->
List.iter (fun { cancel } -> cancel := true) !workers;
workers := []
end
end
| null | https://raw.githubusercontent.com/pirapira/coq2rust/22e8aaefc723bfb324ca2001b2b8e51fcc923543/stm/workerPool.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
manager checks for a request of termination
extra stuff to pass to the manager
this shall come from a Spawn.* model
this defines the main loop of the manager | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
type worker_id = string
type 'a cpanel = {
called by manager to exit instead of Thread.exit
}
module type PoolModel = sig
type process
val spawn : int -> worker_id * process * CThread.thread_ic * out_channel
type extra
val manager :
extra cpanel -> worker_id * process * CThread.thread_ic * out_channel -> unit
end
module Make(Model : PoolModel) = struct
type worker = {
name : worker_id;
cancel : bool ref;
manager : Thread.t;
process : Model.process;
}
type pre_pool = {
workers : worker list ref;
count : int ref;
extra_arg : Model.extra;
}
type pool = { lock : Mutex.t; pool : pre_pool }
let magic_no = 17
let master_handshake worker_id ic oc =
try
Marshal.to_channel oc magic_no []; flush oc;
let n = (CThread.thread_friendly_input_value ic : int) in
if n <> magic_no then begin
Printf.eprintf "Handshake with %s failed: protocol mismatch\n" worker_id;
exit 1;
end
with e when Errors.noncritical e ->
Printf.eprintf "Handshake with %s failed: %s\n"
worker_id (Printexc.to_string e);
exit 1
let worker_handshake slave_ic slave_oc =
try
let v = (CThread.thread_friendly_input_value slave_ic : int) in
if v <> magic_no then begin
prerr_endline "Handshake failed: protocol mismatch\n";
exit 1;
end;
Marshal.to_channel slave_oc v []; flush slave_oc;
with e when Errors.noncritical e ->
prerr_endline ("Handshake failed: " ^ Printexc.to_string e);
exit 1
let locking { lock; pool = p } f =
try
Mutex.lock lock;
let x = f p in
Mutex.unlock lock;
x
with e -> Mutex.unlock lock; raise e
let rec create_worker extra pool id =
let cancel = ref false in
let name, process, ic, oc as worker = Model.spawn id in
master_handshake name ic oc;
let exit () = cancel := true; cleanup pool; Thread.exit () in
let cancelled () = !cancel in
let cpanel = { exit; cancelled; extra } in
let manager = Thread.create (Model.manager cpanel) worker in
{ name; cancel; manager; process }
and cleanup x = locking x begin fun { workers; count; extra_arg } ->
workers := List.map (function
| { cancel } as w when !cancel = false -> w
| _ -> let n = !count in incr count; create_worker extra_arg x n)
!workers
end
let n_workers x = locking x begin fun { workers } ->
List.length !workers
end
let is_empty x = locking x begin fun { workers } -> !workers = [] end
let create extra_arg ~size = let x = {
lock = Mutex.create ();
pool = {
extra_arg;
workers = ref [];
count = ref size;
}} in
locking x begin fun { workers } ->
workers := CList.init size (create_worker extra_arg x)
end;
x
let cancel n x = locking x begin fun { workers } ->
List.iter (fun { name; cancel } -> if n = name then cancel := true) !workers
end
let cancel_all x = locking x begin fun { workers } ->
List.iter (fun { cancel } -> cancel := true) !workers
end
let destroy x = locking x begin fun { workers } ->
List.iter (fun { cancel } -> cancel := true) !workers;
workers := []
end
end
|
170ad26dc3e27bc58f4a17ae91d2e3fd5f1e0ed08ad245a36de191ae68750c13 | janestreet/universe | ml_bundle.ml | open! Core
open! Async
type t =
{ ml : string
; mli : string option
; module_name : string
}
[@@deriving sexp, compare] [@@sexp.allow_extra_fields]
type tmp_t =
{ mutable tmp_ml : string option
; mutable tmp_mli : string option
; tmp_module_name : string
}
let valid_module_name s =
not (String.is_empty s) &&
match s.[0] with
| 'A'..'Z' ->
String.for_all s ~f:(function
| 'a'..'z' | 'A'..'Z' | '_' | '0'..'9' | '\'' -> true
| _ -> false
)
| _ -> false
;;
let module_name ~full_path ~path_no_ext =
let basename = Filename.basename path_no_ext in
let unchecked_module_name = String.capitalize basename in
if valid_module_name unchecked_module_name then
unchecked_module_name
else
invalid_argf "%s is not a valid ocaml filename" full_path ()
;;
let enrich_bundle ({ ml; mli; module_name = _ } as bundle) =
match mli with
| Some _ -> return bundle
| None ->
let mli = Filename.chop_extension ml ^ ".mli" in
Sys.file_exists mli >>| function
| `Yes -> { bundle with mli = Some mli }
| `No -> bundle
| `Unknown -> raise_s [%sexp "File_in_unknown_state", (mli : string), [%here]]
;;
let ml_with_mli_reorder filenames =
let tbl = String.Table.create () in
let init_bundle acc str =
let path_no_ext, ext_opt = Filename.split_extension str in
let ext =
match ext_opt with
| None -> `none
| Some "ml" -> `ml
| Some "mli" -> `mli
| Some ext -> invalid_argf "Expected .ml or .mli files, got : %s" ext ()
in
giving this error after the one about extensions
let module_name = module_name ~full_path:str ~path_no_ext in
let acc, data =
match Hashtbl.find tbl module_name with
| None ->
let data = { tmp_ml = None; tmp_mli = None; tmp_module_name = module_name } in
Hashtbl.add_exn tbl ~key:module_name ~data;
data :: acc, data
| Some data -> acc, data
in
begin match ext, data with
| (`ml | `none), { tmp_ml = Some old_ml; _ } ->
invalid_argf "Several implementations provided for %s: %s and %s"
module_name str old_ml ()
| `mli, { tmp_mli = Some old_mli; _ } ->
invalid_argf "Several interfaces provided for %s: %s and %s"
module_name str old_mli ()
| `none , { tmp_ml = None; _ } ->
data.tmp_ml <- Some (str ^ ".ml")
| `ml, { tmp_ml = None; _ } ->
data.tmp_ml <- Some str
| `mli, { tmp_mli = None; _ } ->
data.tmp_mli <- Some str
end;
acc
in
let rev_paths = List.fold_left filenames ~init:[] ~f:init_bundle in
List.rev_map rev_paths ~f:(
fun { tmp_ml; tmp_mli = mli; tmp_module_name = module_name } ->
let ml =
match tmp_ml with
| None ->
(* same behaviour as before *)
Filename.chop_extension (Option.value_exn mli) ^ ".ml"
| Some ml -> ml
in
{ ml; mli; module_name }
)
;;
(* Build enriched bundles from raw filenames, capturing any exception raised
   while grouping or probing the filesystem as an [Or_error]. *)
let from_filenames filenames =
  Deferred.Or_error.try_with ~extract_exn:true (fun () ->
    ml_with_mli_reorder filenames
    |> Deferred.List.map ~f:enrich_bundle)
;;
(* Expose the bundle's fields as a polymorphic-variant-tagged triple. *)
let to_pathnames { ml; mli; module_name } =
  `ml ml, `mli mli, `module_name module_name
;;
(* Module name associated with this bundle. *)
let module_name t = t.module_name
;;
| null | https://raw.githubusercontent.com/janestreet/universe/b6cb56fdae83f5d55f9c809f1c2a2b50ea213126/ocaml_plugin/src/ml_bundle.ml | ocaml | same behaviour as before | open! Core
open! Async
type t =
{ ml : string
; mli : string option
; module_name : string
}
[@@deriving sexp, compare] [@@sexp.allow_extra_fields]
type tmp_t =
{ mutable tmp_ml : string option
; mutable tmp_mli : string option
; tmp_module_name : string
}
(* A valid OCaml module name is non-empty, starts with an uppercase ASCII
   letter, and contains only ASCII letters, digits, '_' and '\''. *)
let valid_module_name s =
  not (String.is_empty s) &&
  match s.[0] with
  | 'A'..'Z' ->
    String.for_all s ~f:(function
      | 'a'..'z' | 'A'..'Z' | '_' | '0'..'9' | '\'' -> true
      | _ -> false
    )
  | _ -> false
;;
(* Derive the module name from a path: the capitalized basename without its
   extension.  Raises [Invalid_argument] if the result is not a valid OCaml
   module name. *)
let module_name ~full_path ~path_no_ext =
  let basename = Filename.basename path_no_ext in
  let unchecked_module_name = String.capitalize basename in
  if valid_module_name unchecked_module_name then
    unchecked_module_name
  else
    invalid_argf "%s is not a valid ocaml filename" full_path ()
;;
(* If the bundle lacks an interface, look for a sibling ".mli" next to the
   implementation and attach it when it exists on disk. *)
let enrich_bundle ({ ml; mli; module_name = _ } as bundle) =
  match mli with
  | Some _ -> return bundle
  | None ->
    let candidate = Filename.chop_extension ml ^ ".mli" in
    Sys.file_exists candidate
    >>| (function
      | `Yes -> { bundle with mli = Some candidate }
      | `No -> bundle
      | `Unknown ->
        raise_s [%sexp "File_in_unknown_state", (candidate : string), [%here]])
;;
(* Group a flat list of .ml/.mli paths into per-module bundles, preserving the
   order in which modules first appear.  A path with no extension is treated as
   an implementation and gets ".ml" appended.  Raises [Invalid_argument] on an
   unsupported extension, on an invalid module name, or when a module is given
   two implementations or two interfaces. *)
let ml_with_mli_reorder filenames =
  let tbl = String.Table.create () in
  let init_bundle acc str =
    let path_no_ext, ext_opt = Filename.split_extension str in
    let ext =
      match ext_opt with
      | None -> `none
      | Some "ml" -> `ml
      | Some "mli" -> `mli
      | Some ext -> invalid_argf "Expected .ml or .mli files, got : %s" ext ()
    in
    (* giving this error after the one about extensions *)
    let module_name = module_name ~full_path:str ~path_no_ext in
    let acc, data =
      match Hashtbl.find tbl module_name with
      | None ->
        let data = { tmp_ml = None; tmp_mli = None; tmp_module_name = module_name } in
        Hashtbl.add_exn tbl ~key:module_name ~data;
        data :: acc, data
      | Some data -> acc, data
    in
    begin match ext, data with
    | (`ml | `none), { tmp_ml = Some old_ml; _ } ->
      invalid_argf "Several implementations provided for %s: %s and %s"
        module_name str old_ml ()
    | `mli, { tmp_mli = Some old_mli; _ } ->
      invalid_argf "Several interfaces provided for %s: %s and %s"
        module_name str old_mli ()
    | `none , { tmp_ml = None; _ } ->
      (* extensionless path: assume it names an implementation file *)
      data.tmp_ml <- Some (str ^ ".ml")
    | `ml, { tmp_ml = None; _ } ->
      data.tmp_ml <- Some str
    | `mli, { tmp_mli = None; _ } ->
      data.tmp_mli <- Some str
    end;
    acc
  in
  let rev_paths = List.fold_left filenames ~init:[] ~f:init_bundle in
  List.rev_map rev_paths ~f:(
    fun { tmp_ml; tmp_mli = mli; tmp_module_name = module_name } ->
      let ml =
        match tmp_ml with
        | None ->
          (* interface only: derive the implementation path from the .mli *)
          Filename.chop_extension (Option.value_exn mli) ^ ".ml"
        | Some ml -> ml
      in
      { ml; mli; module_name }
  )
;;
(* Build enriched bundles from raw filenames; any exception raised while
   grouping or probing the filesystem is captured as an [Or_error]. *)
let from_filenames filenames =
  Deferred.Or_error.try_with ~extract_exn:true (fun () ->
    let pairs = ml_with_mli_reorder filenames in
    Deferred.List.map pairs ~f:enrich_bundle
  )
;;
(* Expose the bundle's fields as a polymorphic-variant-tagged triple. *)
let to_pathnames { ml; mli; module_name } =
  `ml ml, `mli mli, `module_name module_name
;;
(* Module name associated with this bundle. *)
let module_name t = t.module_name
;;
|
9e58d93514979a390a6ef48e29afb4c13150bbfa1997f5201622156d9ae7cc10 | RedPRL/asai | LspEio.mli | open Eio
module RPC := Jsonrpc
(** An abstract type representing all of the various resources required to run the LSP server. *)
type io
(** Initialize the abstract io type. *)
val init : Stdenv.t -> io
(** Receive a JSON RPC packet on stdin. *)
val recv : io -> RPC.Packet.t option
(** Send a JSON RPC packet on stdout. *)
val send : io -> RPC.Packet.t -> unit
| null | https://raw.githubusercontent.com/RedPRL/asai/949cf80c2971474931f3a890c51bcb1a0ad687c0/src/lsp/LspEio.mli | ocaml | * Initialize the abstract io type.
* Send a JSON RPC packet on stdout. | open Eio
module RPC := Jsonrpc
(** An abstract type representing all of the various resources required to run the LSP server. *)
type io
val init : Stdenv.t -> io
(** Receive a JSON RPC packet on stdin. *)
val recv : io -> RPC.Packet.t option
val send : io -> RPC.Packet.t -> unit
|
978563a2966e98c3a3d4ab2194f9d95203cd695640c2db0c7bf421f4052c11f8 | nuprl/gradual-typing-performance | structs-tree-contains.rkt | #lang typed/racket/base
(provide tree-contains?)
;; -----------------------------------------------------------------------------
(require "data-node-adapted.rkt"
"data-label-adapted.rkt"
"data-suffix-tree-adapted.rkt"
benchmark-util)
(require/typed/check "structs-node-follow-k.rkt"
[node-follow/k (-> Node
Label
(-> Node Boolean)
(-> Node Index Boolean)
(-> Node Label Index Boolean)
(-> Node Index Label Index Boolean)
Boolean)])
;; =============================================================================
;; tree-contains?: tree label -> boolean
;; Returns true if the tree contains the given label.
(: tree-contains? (-> Tree Label Boolean))
;; Follow `label` from the tree's root via node-follow/k; the first two
;; continuations answer #t and the last two #f.
;; NOTE(review): which continuations mean "match" vs. "mismatch" is inferred
;; from their arities — confirm against structs-node-follow-k.rkt.
(define (tree-contains? tree label)
  (node-follow/k (suffix-tree-root tree)
                 label
                 (lambda args #t)
                 (lambda args #t)
                 (lambda args #f)
                 (lambda args #f)))
| null | https://raw.githubusercontent.com/nuprl/gradual-typing-performance/35442b3221299a9cadba6810573007736b0d65d4/experimental/micro/suffixtree/typed/structs-tree-contains.rkt | racket | -----------------------------------------------------------------------------
=============================================================================
tree-contains?: tree label -> boolean
Returns true if the tree contains the given label. | #lang typed/racket/base
(provide tree-contains?)
(require "data-node-adapted.rkt"
"data-label-adapted.rkt"
"data-suffix-tree-adapted.rkt"
benchmark-util)
(require/typed/check "structs-node-follow-k.rkt"
[node-follow/k (-> Node
Label
(-> Node Boolean)
(-> Node Index Boolean)
(-> Node Label Index Boolean)
(-> Node Index Label Index Boolean)
Boolean)])
(: tree-contains? (-> Tree Label Boolean))
;; Returns #t iff the tree contains the given label: follow `label` from the
;; root; the first two continuations answer #t and the last two #f.
;; NOTE(review): continuation semantics inferred from arity — confirm against
;; structs-node-follow-k.rkt.
(define (tree-contains? tree label)
  (node-follow/k (suffix-tree-root tree)
                 label
                 (lambda args #t)
                 (lambda args #t)
                 (lambda args #f)
                 (lambda args #f)))
|
9a1f08a16b3ccaef95a5600d145d09a8dcbf683859161d62c1f5e2cb0d52f9bf | discus-lang/ddc | TransformModX.hs |
-- | Helper for transforming the bindings in a module
module DDC.Core.Transform.TransformModX
( transformModX
, transformModLet
)
where
import DDC.Core.Module
import DDC.Core.Exp.Annot
import Control.Arrow
-- | Apply a transform to the expression of every top-level let binding
--   in a module.  Specialisation of 'transformModLet' that ignores the
--   binder.
transformModX :: (Exp a n -> Exp a n)
              -> Module a n
              -> Module a n
transformModX = transformModLet . const
-- | Apply a transform to every top-level let binding in a module,
--   passing each binder alongside its bound expression.
transformModLet :: (Bind n -> Exp a n -> Exp a n)
                -> Module a n
                -> Module a n
transformModLet f mm
 = mm { moduleBody = xLetsAnnot lets' xx }
 where
        (lets, xx) = splitXLetsAnnot (moduleBody mm)
        lets'      = [ (applyF l, a) | (l, a) <- lets ]

        -- Rewrite the expression(s) of a single let group;
        -- non-binding groups pass through unchanged.
        applyF (LRec bxs) = LRec [ (b, f b x) | (b, x) <- bxs ]
        applyF (LLet b x) = LLet b (f b x)
        applyF l          = l
| Apply transform to each expression let binding in module
| Apply transform to each expression let binding in module, with bind too |
module DDC.Core.Transform.TransformModX
( transformModX
, transformModLet
)
where
import DDC.Core.Module
import DDC.Core.Exp.Annot
import Control.Arrow
-- | Apply a transform to each expression let binding in a module
--   (specialisation of 'transformModLet' that ignores the binder).
transformModX :: (Exp a n -> Exp a n)
        -> Module a n
        -> Module a n
transformModX f mm
 = transformModLet (const f) mm
-- | Apply a transform to each expression let binding in a module,
--   passing the binder alongside the bound expression.
transformModLet :: (Bind n -> Exp a n -> Exp a n)
        -> Module a n
        -> Module a n
transformModLet f mm
 = let body = moduleBody mm
       (lets,xx) = splitXLetsAnnot body
       lets' = map (first go) lets
       body' = xLetsAnnot lets' xx
   in mm { moduleBody = body' }
 where
  -- Rewrite the expression(s) of one let group;
  -- non-binding groups pass through unchanged.
  go (LRec binds)
   = LRec [ (b, f b x)
          | (b, x) <- binds]
  go (LLet b x)
   = LLet b (f b x)
  go l
   = l
|
83b2a6a944b696cd1e49e4653d5a426f2fb836ed686c17eaff1164baaf800e94 | biocad/openapi3 | CommonTestTypes.hs | # LANGUAGE DeriveGeneric #
{-# LANGUAGE QuasiQuotes #-}
# LANGUAGE ScopedTypeVariables #
module Data.OpenApi.CommonTestTypes where
import Prelude ()
import Prelude.Compat
import Data.Aeson (ToJSON (..), ToJSONKey (..), Value)
import Data.Aeson.QQ.Simple
import Data.Aeson.Types (toJSONKeyText)
import Data.Char
import Data.Map (Map)
import Data.Proxy
import Data.Set (Set)
import qualified Data.Text as Text
import Data.Word
import GHC.Generics
import Data.OpenApi
-- ========================================================================
-- Unit type
-- ========================================================================
-- Nullary constructor: generic derivation encodes it as a one-value
-- string enum.
data Unit = Unit deriving (Generic)
instance ToParamSchema Unit
instance ToSchema Unit
-- Expected schema for 'Unit'.
unitSchemaJSON :: Value
unitSchemaJSON = [aesonQQ|
{
  "type": "string",
  "enum": ["Unit"]
}
|]
-- ========================================================================
-- Color (enum)
-- ========================================================================
-- Plain enumeration: generic derivation encodes the constructor names
-- as a string enum.
data Color
  = Red
  | Green
  | Blue
  deriving (Generic)
instance ToParamSchema Color
instance ToSchema Color
-- Expected schema for 'Color'.
colorSchemaJSON :: Value
colorSchemaJSON = [aesonQQ|
{
  "type": "string",
  "enum": ["Red", "Green", "Blue"]
}
|]
-- ========================================================================
-- Shade (paramSchemaToNamedSchema)
-- ========================================================================
data Shade = Dim | Bright deriving (Generic)
instance ToParamSchema Shade
instance ToSchema Shade where declareNamedSchema = pure . paramSchemaToNamedSchema defaultSchemaOptions
shadeSchemaJSON :: Value
shadeSchemaJSON = [aesonQQ|
{
"type": "string",
"enum": ["Dim", "Bright"]
}
|]
-- ========================================================================
-- Paint (record with bounded enum property)
-- ========================================================================
newtype Paint = Paint { color :: Color }
deriving (Generic)
instance ToSchema Paint
paintSchemaJSON :: Value
paintSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"color":
{
"$ref": "#/components/schemas/Color"
}
},
"required": ["color"]
}
|]
paintInlinedSchemaJSON :: Value
paintInlinedSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"color":
{
"type": "string",
"enum": ["Red", "Green", "Blue"]
}
},
"required": ["color"]
}
|]
-- ========================================================================
-- Status (constructorTagModifier)
-- ========================================================================
data Status
= StatusOk
| StatusError
deriving (Generic)
instance ToParamSchema Status where
toParamSchema = genericToParamSchema defaultSchemaOptions
{ constructorTagModifier = map toLower . drop (length "Status") }
instance ToSchema Status where
declareNamedSchema = genericDeclareNamedSchema defaultSchemaOptions
{ constructorTagModifier = map toLower . drop (length "Status") }
statusSchemaJSON :: Value
statusSchemaJSON = [aesonQQ|
{
"type": "string",
"enum": ["ok", "error"]
}
|]
-- ========================================================================
-- Email (newtype with unwrapUnaryRecords set to True)
-- ========================================================================
newtype Email = Email { getEmail :: String }
deriving (Generic)
instance ToParamSchema Email
instance ToSchema Email where
declareNamedSchema = genericDeclareNamedSchema defaultSchemaOptions
{ unwrapUnaryRecords = True }
emailSchemaJSON :: Value
emailSchemaJSON = [aesonQQ|
{
"type": "string"
}
|]
-- ========================================================================
-- UserId (non-record newtype)
-- ========================================================================
newtype UserId = UserId Integer
deriving (Eq, Ord, Generic)
instance ToParamSchema UserId
instance ToSchema UserId
userIdSchemaJSON :: Value
userIdSchemaJSON = [aesonQQ|
{
"type": "integer"
}
|]
-- ========================================================================
-- UserGroup (set newtype)
-- ========================================================================
newtype UserGroup = UserGroup (Set UserId)
deriving (Generic)
instance ToSchema UserGroup
userGroupSchemaJSON :: Value
userGroupSchemaJSON = [aesonQQ|
{
"type": "array",
"items": { "$ref": "#/components/schemas/UserId" },
"uniqueItems": true
}
|]
-- ========================================================================
-- Person (simple record with optional fields)
-- ========================================================================
data Person = Person
{ name :: String
, phone :: Integer
, email :: Maybe String
} deriving (Generic)
instance ToSchema Person
personSchemaJSON :: Value
personSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"name": { "type": "string" },
"phone": { "type": "integer" },
"email": { "type": "string" }
},
"required": ["name", "phone"]
}
|]
-- ========================================================================
-- Player (record newtype)
-- ========================================================================
newtype Player = Player
{ position :: Point
} deriving (Generic)
instance ToSchema Player
playerSchemaJSON :: Value
playerSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"position":
{
"$ref": "#/components/schemas/Point"
}
},
"required": ["position"]
}
|]
newtype Players = Players [Inlined Player]
deriving (Generic)
instance ToSchema Players
playersSchemaJSON :: Value
playersSchemaJSON = [aesonQQ|
{
"type": "array",
"items":
{
"type": "object",
"properties":
{
"position":
{
"$ref": "#/components/schemas/Point"
}
},
"required": ["position"]
}
}
|]
-- ========================================================================
-- Character (sum type with ref and record in alternative)
-- ========================================================================
data Character
= PC Player
| NPC { npcName :: String, npcPosition :: Point }
deriving (Generic)
instance ToSchema Character
characterSchemaJSON :: Value
characterSchemaJSON = [aesonQQ|
{
"oneOf": [
{
"required": [
"tag",
"contents"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"PC"
]
},
"contents": {
"$ref": "#/components/schemas/Player"
}
}
},
{
"required": [
"npcName",
"npcPosition",
"tag"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"NPC"
]
},
"npcPosition": {
"$ref": "#/components/schemas/Point"
},
"npcName": {
"type": "string"
}
}
}
]
}
|]
characterInlinedSchemaJSON :: Value
characterInlinedSchemaJSON = [aesonQQ|
{
"oneOf": [
{
"required": [
"tag",
"contents"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"PC"
]
},
"contents": {
"required": [
"position"
],
"type": "object",
"properties": {
"position": {
"required": [
"x",
"y"
],
"type": "object",
"properties": {
"x": {
"format": "double",
"type": "number"
},
"y": {
"format": "double",
"type": "number"
}
}
}
}
}
}
},
{
"required": [
"npcName",
"npcPosition",
"tag"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"NPC"
]
},
"npcPosition": {
"required": [
"x",
"y"
],
"type": "object",
"properties": {
"x": {
"format": "double",
"type": "number"
},
"y": {
"format": "double",
"type": "number"
}
}
},
"npcName": {
"type": "string"
}
}
}
]
}
|]
characterInlinedPlayerSchemaJSON :: Value
characterInlinedPlayerSchemaJSON = [aesonQQ|
{
"oneOf": [
{
"required": [
"tag",
"contents"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"PC"
]
},
"contents": {
"required": [
"position"
],
"type": "object",
"properties": {
"position": {
"$ref": "#/components/schemas/Point"
}
}
}
}
},
{
"required": [
"npcName",
"npcPosition",
"tag"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"NPC"
]
},
"npcPosition": {
"$ref": "#/components/schemas/Point"
},
"npcName": {
"type": "string"
}
}
}
]
}
|]
-- ========================================================================
-- ISPair (non-record product data type)
-- ========================================================================
data ISPair = ISPair Integer String
deriving (Generic)
instance ToSchema ISPair
ispairSchemaJSON :: Value
ispairSchemaJSON = [aesonQQ|
{
"type": "array",
"items":
[
{ "type": "integer" },
{ "type": "string" }
],
"minItems": 2,
"maxItems": 2
}
|]
-- ========================================================================
-- Point (record data type with custom fieldLabelModifier)
-- ========================================================================
data Point = Point
{ pointX :: Double
, pointY :: Double
} deriving (Generic)
instance ToSchema Point where
declareNamedSchema = genericDeclareNamedSchema defaultSchemaOptions
{ fieldLabelModifier = map toLower . drop (length "point") }
pointSchemaJSON :: Value
pointSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"x": { "type": "number", "format": "double" },
"y": { "type": "number", "format": "double" }
},
"required": ["x", "y"]
}
|]
-- ========================================================================
-- Point5 (record data type with multiple fields)
-- ========================================================================
-- | Five-dimensional point.  The schema drops the "point5" prefix from
--   field names, yielding properties x, y, z, u, v (see point5SchemaJSON
--   and point5Properties below, which both require "v").
data Point5 = Point5
  { point5X :: Double
  , point5Y :: Double
  , point5Z :: Double
  , point5U :: Double
  , point5V :: Double  -- gotta have 5 dimensional!
  } deriving (Generic)

instance ToSchema Point5 where
  declareNamedSchema = genericDeclareNamedSchema defaultSchemaOptions
    { fieldLabelModifier = map toLower . drop (length "point5") }
point5SchemaJSON :: Value
point5SchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"x": { "type": "number", "format": "double" },
"y": { "type": "number", "format": "double" },
"z": { "type": "number", "format": "double" },
"u": { "type": "number", "format": "double" },
"v": { "type": "number", "format": "double" }
},
"required": ["x", "y", "z", "u", "v"]
}
|]
point5Properties :: [String]
point5Properties = ["x", "y", "z", "u", "v"]
-- ========================================================================
-- MyRoseTree (custom datatypeNameModifier)
-- ========================================================================
data MyRoseTree = MyRoseTree
{ root :: String
, trees :: [MyRoseTree]
} deriving (Generic)
instance ToSchema MyRoseTree where
declareNamedSchema = genericDeclareNamedSchema defaultSchemaOptions
{ datatypeNameModifier = drop (length "My") }
myRoseTreeSchemaJSON :: Value
myRoseTreeSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"root": { "type": "string" },
"trees":
{
"type": "array",
"items":
{
"$ref": "#/components/schemas/RoseTree"
}
}
},
"required": ["root", "trees"]
}
|]
data MyRoseTree' = MyRoseTree'
{ root' :: String
, trees' :: [MyRoseTree']
} deriving (Generic)
instance ToSchema MyRoseTree' where
declareNamedSchema = genericDeclareNamedSchema defaultSchemaOptions
{ datatypeNameModifier = map toLower }
myRoseTreeSchemaJSON' :: Value
myRoseTreeSchemaJSON' = [aesonQQ|
{
"type": "object",
"properties":
{
"root'": { "type": "string" },
"trees'":
{
"type": "array",
"items":
{
"$ref": "#/components/schemas/myrosetree'"
}
}
},
"required": ["root'", "trees'"]
}
|]
-- ========================================================================
-- Inlined (newtype for inlining schemas)
-- ========================================================================
newtype Inlined a = Inlined { getInlined :: a }
instance ToSchema a => ToSchema (Inlined a) where
declareNamedSchema _ = unname <$> declareNamedSchema (Proxy :: Proxy a)
where
unname (NamedSchema _ s) = NamedSchema Nothing s
-- ========================================================================
-- Light (sum type with unwrapUnaryRecords)
-- ========================================================================
data Light
= NoLight
| LightFreq Double
| LightColor Color
| LightWaveLength { waveLength :: Double }
deriving (Generic)
instance ToSchema Light where
declareNamedSchema = genericDeclareNamedSchema defaultSchemaOptions
{ unwrapUnaryRecords = True }
lightSchemaJSON :: Value
lightSchemaJSON = [aesonQQ|
{
"oneOf": [
{
"required": [
"tag"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"NoLight"
]
}
}
},
{
"required": [
"tag",
"contents"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"LightFreq"
]
},
"contents": {
"format": "double",
"type": "number"
}
}
},
{
"required": [
"tag",
"contents"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"LightColor"
]
},
"contents": {
"$ref": "#/components/schemas/Color"
}
}
},
{
"required": [
"waveLength",
"tag"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"LightWaveLength"
]
},
"waveLength": {
"format": "double",
"type": "number"
}
}
}
]
}
|]
lightInlinedSchemaJSON :: Value
lightInlinedSchemaJSON = [aesonQQ|
{
"oneOf": [
{
"required": [
"tag"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"NoLight"
]
}
}
},
{
"required": [
"tag",
"contents"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"LightFreq"
]
},
"contents": {
"format": "double",
"type": "number"
}
}
},
{
"required": [
"tag",
"contents"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"LightColor"
]
},
"contents": {
"type": "string",
"enum": [
"Red",
"Green",
"Blue"
]
}
}
},
{
"required": [
"waveLength",
"tag"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"LightWaveLength"
]
},
"waveLength": {
"format": "double",
"type": "number"
}
}
}
]
}
|]
-- ========================================================================
-- ResourceId (series of newtypes)
-- ========================================================================
newtype Id = Id String deriving (Generic)
instance ToSchema Id
newtype ResourceId = ResourceId Id deriving (Generic)
instance ToSchema ResourceId
-- ========================================================================
-- ButtonImages (bounded enum key mapping)
-- ========================================================================
data ButtonState = Neutral | Focus | Active | Hover | Disabled
deriving (Show, Bounded, Enum, Generic)
instance ToJSON ButtonState
instance ToSchema ButtonState
instance ToJSONKey ButtonState where toJSONKey = toJSONKeyText (Text.pack . show)
type ImageUrl = Text.Text
newtype ButtonImages = ButtonImages { getButtonImages :: Map ButtonState ImageUrl }
deriving (Generic)
instance ToJSON ButtonImages where
toJSON = toJSON . getButtonImages
instance ToSchema ButtonImages where
declareNamedSchema = genericDeclareNamedSchemaNewtype defaultSchemaOptions
declareSchemaBoundedEnumKeyMapping
buttonImagesSchemaJSON :: Value
buttonImagesSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"Neutral": { "type": "string" },
"Focus": { "type": "string" },
"Active": { "type": "string" },
"Hover": { "type": "string" },
"Disabled": { "type": "string" }
}
}
|]
-- ========================================================================
-- SingleMaybeField (single field data with optional field)
-- ========================================================================
data SingleMaybeField = SingleMaybeField { singleMaybeField :: Maybe String }
deriving (Show, Generic)
instance ToJSON SingleMaybeField
instance ToSchema SingleMaybeField
singleMaybeFieldSchemaJSON :: Value
singleMaybeFieldSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"singleMaybeField": { "type": "string" }
}
}
|]
-- ========================================================================
-- Natural Language (single field data with recursive fields)
-- ========================================================================
data Predicate
= PredicateNoun Noun
| PredicateOmitted Omitted
deriving (Eq, Ord, Read, Show, Generic)
instance ToJSON Predicate
instance ToSchema Predicate
data Noun
= Noun
{ nounSurf :: LangWord
, nounModify :: [Modifier]
}
deriving (Eq, Ord, Read, Show, Generic)
instance ToJSON Noun
instance ToSchema Noun
data LangWord
= LangWord
{ langWordSurf :: String
, langWordBase :: String
}
deriving (Eq, Ord, Read, Show, Generic)
instance ToJSON LangWord
instance ToSchema LangWord
data Modifier
= ModifierNoun Noun
| ModifierOmitted Omitted
deriving (Eq, Ord, Read, Show, Generic)
instance ToJSON Modifier
instance ToSchema Modifier
newtype Omitted
= Omitted
{ omittedModify :: [Modifier]
}
deriving (Eq, Ord, Read, Show, Generic)
instance ToJSON Omitted
instance ToSchema Omitted
predicateSchemaDeclareJSON :: Value
predicateSchemaDeclareJSON = [aesonQQ|
[
{
"Predicate": {
"oneOf": [
{
"properties": {
"contents": { "$ref": "#/components/schemas/Noun" },
"tag": { "enum": ["PredicateNoun"], "type": "string" }
},
"required": ["tag", "contents"],
"type": "object"
},
{
"properties": {
"contents": { "$ref": "#/components/schemas/Omitted" },
"tag": { "enum": ["PredicateOmitted"], "type": "string" }
},
"required": ["tag", "contents"],
"type": "object"
}
]
},
"Noun": {
"properties": {
"nounModify": {
"items": { "$ref": "#/components/schemas/Modifier" },
"type": "array"
},
"nounSurf": { "$ref": "#/components/schemas/LangWord" }
},
"required": ["nounSurf", "nounModify"],
"type": "object"
},
"LangWord": {
"properties": {
"langWordBase": { "type": "string" },
"langWordSurf": { "type": "string" }
},
"required": ["langWordSurf", "langWordBase"],
"type": "object"
},
"Modifier": {
"oneOf": [
{
"properties": {
"contents": { "$ref": "#/components/schemas/Noun" },
"tag": { "enum": ["ModifierNoun"], "type": "string" }
},
"required": ["tag", "contents"],
"type": "object"
},
{
"properties": {
"contents": { "$ref": "#/components/schemas/Omitted" },
"tag": { "enum": ["ModifierOmitted"], "type": "string" }
},
"required": ["tag", "contents"],
"type": "object"
}
]
},
"Omitted": {
"properties": {
"omittedModify": {
"items": { "$ref": "#/components/schemas/Modifier" },
"type": "array"
}
},
"required": ["omittedModify"],
"type": "object"
}
},
{ "$ref": "#/components/schemas/Predicate" }
]
|]
-- ========================================================================
-- TimeOfDay
-- ========================================================================
data TimeOfDay
= Int
| Pico
deriving (Generic)
instance ToSchema TimeOfDay
instance ToParamSchema TimeOfDay
timeOfDaySchemaJSON :: Value
timeOfDaySchemaJSON = [aesonQQ|
{
"example": "12:33:15",
"type": "string",
"format": "hh:MM:ss"
}
|]
timeOfDayParamSchemaJSON :: Value
timeOfDayParamSchemaJSON = [aesonQQ|
{
"type": "string",
"format": "hh:MM:ss"
}
|]
-- ========================================================================
-- UnsignedInts
-- ========================================================================
data UnsignedInts = UnsignedInts
{ unsignedIntsUint32 :: Word32
, unsignedIntsUint64 :: Word64
} deriving (Generic)
instance ToSchema UnsignedInts where
declareNamedSchema = genericDeclareNamedSchema defaultSchemaOptions
{ fieldLabelModifier = map toLower . drop (length "unsignedInts") }
unsignedIntsSchemaJSON :: Value
unsignedIntsSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"uint32": { "type": "integer", "format": "int32", "minimum": 0, "maximum": 4294967295 },
"uint64": { "type": "integer", "format": "int64", "minimum": 0, "maximum": 18446744073709551615 }
},
"required": ["uint32", "uint64"]
}
|]
| null | https://raw.githubusercontent.com/biocad/openapi3/acc5ea71a3c58e0f8b25861f6b6feab2cf0cb87e/test/Data/OpenApi/CommonTestTypes.hs | haskell | # LANGUAGE QuasiQuotes #
========================================================================
Unit type
========================================================================
========================================================================
========================================================================
========================================================================
Shade (paramSchemaToNamedSchema)
========================================================================
========================================================================
Paint (record with bounded enum property)
========================================================================
========================================================================
========================================================================
========================================================================
========================================================================
========================================================================
UserId (non-record newtype)
========================================================================
========================================================================
========================================================================
========================================================================
Person (simple record with optional fields)
========================================================================
========================================================================
Player (record newtype)
========================================================================
========================================================================
Character (sum type with ref and record in alternative)
========================================================================
========================================================================
========================================================================
========================================================================
========================================================================
========================================================================
========================================================================
========================================================================
MyRoseTree (custom datatypeNameModifier)
========================================================================
========================================================================
========================================================================
========================================================================
========================================================================
========================================================================
========================================================================
========================================================================
ButtonImages (bounded enum key mapping)
========================================================================
========================================================================
SingleMaybeField (single field data with optional field)
========================================================================
========================================================================
Natural Language (single field data with recursive fields)
========================================================================
========================================================================
========================================================================
========================================================================
======================================================================== | # LANGUAGE DeriveGeneric #
# LANGUAGE ScopedTypeVariables #
module Data.OpenApi.CommonTestTypes where
import Prelude ()
import Prelude.Compat
import Data.Aeson (ToJSON (..), ToJSONKey (..), Value)
import Data.Aeson.QQ.Simple
import Data.Aeson.Types (toJSONKeyText)
import Data.Char
import Data.Map (Map)
import Data.Proxy
import Data.Set (Set)
import qualified Data.Text as Text
import Data.Word
import GHC.Generics
import Data.OpenApi
-- | Smoke-test type: a single nullary constructor.
-- Generic 'ToSchema'/'ToParamSchema' derivation renders such a type as a
-- one-value string enum (see the 'unitSchemaJSON' fixture below).
data Unit = Unit deriving (Generic)
instance ToParamSchema Unit
instance ToSchema Unit
unitSchemaJSON :: Value
unitSchemaJSON = [aesonQQ|
{
"type": "string",
"enum": ["Unit"]
}
|]
Color ( enum )
-- | Field-less enumeration with three constructors.
-- Derived generically: encoded as the string enum ["Red","Green","Blue"]
-- (see 'colorSchemaJSON').
data Color
= Red
| Green
| Blue
deriving (Generic)
instance ToParamSchema Color
instance ToSchema Color
colorSchemaJSON :: Value
colorSchemaJSON = [aesonQQ|
{
"type": "string",
"enum": ["Red", "Green", "Blue"]
}
|]
-- | Enumeration whose 'ToSchema' instance is built from its param schema via
-- 'paramSchemaToNamedSchema' rather than the generic ToSchema path; both
-- routes should agree on the fixture 'shadeSchemaJSON'.
data Shade = Dim | Bright deriving (Generic)
instance ToParamSchema Shade
instance ToSchema Shade where declareNamedSchema = pure . paramSchemaToNamedSchema defaultSchemaOptions
shadeSchemaJSON :: Value
shadeSchemaJSON = [aesonQQ|
{
"type": "string",
"enum": ["Dim", "Bright"]
}
|]
-- | Single-field record wrapping 'Color'.
-- The generic schema references Color by name ("$ref"); compare
-- 'paintSchemaJSON' (referenced) with 'paintInlinedSchemaJSON' (inlined).
newtype Paint = Paint { color :: Color }
deriving (Generic)
instance ToSchema Paint
paintSchemaJSON :: Value
paintSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"color":
{
"$ref": "#/components/schemas/Color"
}
},
"required": ["color"]
}
|]
paintInlinedSchemaJSON :: Value
paintInlinedSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"color":
{
"type": "string",
"enum": ["Red", "Green", "Blue"]
}
},
"required": ["color"]
}
|]
Status ( )
-- | Enumeration with a custom 'constructorTagModifier': the "Status" prefix
-- is stripped and the remainder lower-cased, so the schema enum becomes
-- ["ok","error"] (see 'statusSchemaJSON').
data Status
= StatusOk
| StatusError
deriving (Generic)
instance ToParamSchema Status where
toParamSchema = genericToParamSchema defaultSchemaOptions
{ constructorTagModifier = map toLower . drop (length "Status") }
instance ToSchema Status where
declareNamedSchema = genericDeclareNamedSchema defaultSchemaOptions
{ constructorTagModifier = map toLower . drop (length "Status") }
statusSchemaJSON :: Value
statusSchemaJSON = [aesonQQ|
{
"type": "string",
"enum": ["ok", "error"]
}
|]
Email ( newtype with set to True )
-- | Unary record with 'unwrapUnaryRecords' = True: the record wrapper is
-- erased in the schema, leaving the plain "string" schema of the wrapped
-- field (see 'emailSchemaJSON').
newtype Email = Email { getEmail :: String }
deriving (Generic)
instance ToParamSchema Email
instance ToSchema Email where
declareNamedSchema = genericDeclareNamedSchema defaultSchemaOptions
{ unwrapUnaryRecords = True }
emailSchemaJSON :: Value
emailSchemaJSON = [aesonQQ|
{
"type": "string"
}
|]
newtype UserId = UserId Integer
deriving (Eq, Ord, Generic)
instance ToParamSchema UserId
instance ToSchema UserId
userIdSchemaJSON :: Value
userIdSchemaJSON = [aesonQQ|
{
"type": "integer"
}
|]
UserGroup ( set newtype )
newtype UserGroup = UserGroup (Set UserId)
deriving (Generic)
instance ToSchema UserGroup
userGroupSchemaJSON :: Value
userGroupSchemaJSON = [aesonQQ|
{
"type": "array",
"items": { "$ref": "#/components/schemas/UserId" },
"uniqueItems": true
}
|]
data Person = Person
{ name :: String
, phone :: Integer
, email :: Maybe String
} deriving (Generic)
instance ToSchema Person
personSchemaJSON :: Value
personSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"name": { "type": "string" },
"phone": { "type": "integer" },
"email": { "type": "string" }
},
"required": ["name", "phone"]
}
|]
newtype Player = Player
{ position :: Point
} deriving (Generic)
instance ToSchema Player
playerSchemaJSON :: Value
playerSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"position":
{
"$ref": "#/components/schemas/Point"
}
},
"required": ["position"]
}
|]
newtype Players = Players [Inlined Player]
deriving (Generic)
instance ToSchema Players
playersSchemaJSON :: Value
playersSchemaJSON = [aesonQQ|
{
"type": "array",
"items":
{
"type": "object",
"properties":
{
"position":
{
"$ref": "#/components/schemas/Point"
}
},
"required": ["position"]
}
}
|]
data Character
= PC Player
| NPC { npcName :: String, npcPosition :: Point }
deriving (Generic)
instance ToSchema Character
characterSchemaJSON :: Value
characterSchemaJSON = [aesonQQ|
{
"oneOf": [
{
"required": [
"tag",
"contents"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"PC"
]
},
"contents": {
"$ref": "#/components/schemas/Player"
}
}
},
{
"required": [
"npcName",
"npcPosition",
"tag"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"NPC"
]
},
"npcPosition": {
"$ref": "#/components/schemas/Point"
},
"npcName": {
"type": "string"
}
}
}
]
}
|]
characterInlinedSchemaJSON :: Value
characterInlinedSchemaJSON = [aesonQQ|
{
"oneOf": [
{
"required": [
"tag",
"contents"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"PC"
]
},
"contents": {
"required": [
"position"
],
"type": "object",
"properties": {
"position": {
"required": [
"x",
"y"
],
"type": "object",
"properties": {
"x": {
"format": "double",
"type": "number"
},
"y": {
"format": "double",
"type": "number"
}
}
}
}
}
}
},
{
"required": [
"npcName",
"npcPosition",
"tag"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"NPC"
]
},
"npcPosition": {
"required": [
"x",
"y"
],
"type": "object",
"properties": {
"x": {
"format": "double",
"type": "number"
},
"y": {
"format": "double",
"type": "number"
}
}
},
"npcName": {
"type": "string"
}
}
}
]
}
|]
characterInlinedPlayerSchemaJSON :: Value
characterInlinedPlayerSchemaJSON = [aesonQQ|
{
"oneOf": [
{
"required": [
"tag",
"contents"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"PC"
]
},
"contents": {
"required": [
"position"
],
"type": "object",
"properties": {
"position": {
"$ref": "#/components/schemas/Point"
}
}
}
}
},
{
"required": [
"npcName",
"npcPosition",
"tag"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"NPC"
]
},
"npcPosition": {
"$ref": "#/components/schemas/Point"
},
"npcName": {
"type": "string"
}
}
}
]
}
|]
ISPair ( non - record product data type )
data ISPair = ISPair Integer String
deriving (Generic)
instance ToSchema ISPair
ispairSchemaJSON :: Value
ispairSchemaJSON = [aesonQQ|
{
"type": "array",
"items":
[
{ "type": "integer" },
{ "type": "string" }
],
"minItems": 2,
"maxItems": 2
}
|]
Point ( record data type with custom fieldLabelModifier )
data Point = Point
{ pointX :: Double
, pointY :: Double
} deriving (Generic)
instance ToSchema Point where
declareNamedSchema = genericDeclareNamedSchema defaultSchemaOptions
{ fieldLabelModifier = map toLower . drop (length "point") }
pointSchemaJSON :: Value
pointSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"x": { "type": "number", "format": "double" },
"y": { "type": "number", "format": "double" }
},
"required": ["x", "y"]
}
|]
Point ( record data type with multiple fields )
data Point5 = Point5
{ point5X :: Double
, point5Y :: Double
, point5Z :: Double
, point5U :: Double
5 dimensional !
} deriving (Generic)
instance ToSchema Point5 where
declareNamedSchema = genericDeclareNamedSchema defaultSchemaOptions
{ fieldLabelModifier = map toLower . drop (length "point5") }
point5SchemaJSON :: Value
point5SchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"x": { "type": "number", "format": "double" },
"y": { "type": "number", "format": "double" },
"z": { "type": "number", "format": "double" },
"u": { "type": "number", "format": "double" },
"v": { "type": "number", "format": "double" }
},
"required": ["x", "y", "z", "u", "v"]
}
|]
point5Properties :: [String]
point5Properties = ["x", "y", "z", "u", "v"]
data MyRoseTree = MyRoseTree
{ root :: String
, trees :: [MyRoseTree]
} deriving (Generic)
instance ToSchema MyRoseTree where
declareNamedSchema = genericDeclareNamedSchema defaultSchemaOptions
{ datatypeNameModifier = drop (length "My") }
myRoseTreeSchemaJSON :: Value
myRoseTreeSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"root": { "type": "string" },
"trees":
{
"type": "array",
"items":
{
"$ref": "#/components/schemas/RoseTree"
}
}
},
"required": ["root", "trees"]
}
|]
data MyRoseTree' = MyRoseTree'
{ root' :: String
, trees' :: [MyRoseTree']
} deriving (Generic)
instance ToSchema MyRoseTree' where
declareNamedSchema = genericDeclareNamedSchema defaultSchemaOptions
{ datatypeNameModifier = map toLower }
myRoseTreeSchemaJSON' :: Value
myRoseTreeSchemaJSON' = [aesonQQ|
{
"type": "object",
"properties":
{
"root'": { "type": "string" },
"trees'":
{
"type": "array",
"items":
{
"$ref": "#/components/schemas/myrosetree'"
}
}
},
"required": ["root'", "trees'"]
}
|]
Inlined ( newtype for inlining schemas )
-- | Wrapper that strips a schema's name, so the wrapped type's schema is
-- emitted inline at the use site instead of as a named "$ref" component.
newtype Inlined a = Inlined { getInlined :: a }
instance ToSchema a => ToSchema (Inlined a) where
-- Delegate to the wrapped type's schema, then drop its name
-- (the 'Proxy :: Proxy a' relies on ScopedTypeVariables).
declareNamedSchema _ = unname <$> declareNamedSchema (Proxy :: Proxy a)
where
unname (NamedSchema _ s) = NamedSchema Nothing s
Light ( sum type with )
data Light
= NoLight
| LightFreq Double
| LightColor Color
| LightWaveLength { waveLength :: Double }
deriving (Generic)
instance ToSchema Light where
declareNamedSchema = genericDeclareNamedSchema defaultSchemaOptions
{ unwrapUnaryRecords = True }
lightSchemaJSON :: Value
lightSchemaJSON = [aesonQQ|
{
"oneOf": [
{
"required": [
"tag"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"NoLight"
]
}
}
},
{
"required": [
"tag",
"contents"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"LightFreq"
]
},
"contents": {
"format": "double",
"type": "number"
}
}
},
{
"required": [
"tag",
"contents"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"LightColor"
]
},
"contents": {
"$ref": "#/components/schemas/Color"
}
}
},
{
"required": [
"waveLength",
"tag"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"LightWaveLength"
]
},
"waveLength": {
"format": "double",
"type": "number"
}
}
}
]
}
|]
lightInlinedSchemaJSON :: Value
lightInlinedSchemaJSON = [aesonQQ|
{
"oneOf": [
{
"required": [
"tag"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"NoLight"
]
}
}
},
{
"required": [
"tag",
"contents"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"LightFreq"
]
},
"contents": {
"format": "double",
"type": "number"
}
}
},
{
"required": [
"tag",
"contents"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"LightColor"
]
},
"contents": {
"type": "string",
"enum": [
"Red",
"Green",
"Blue"
]
}
}
},
{
"required": [
"waveLength",
"tag"
],
"type": "object",
"properties": {
"tag": {
"type": "string",
"enum": [
"LightWaveLength"
]
},
"waveLength": {
"format": "double",
"type": "number"
}
}
}
]
}
|]
ResourceId ( series of newtypes )
newtype Id = Id String deriving (Generic)
instance ToSchema Id
newtype ResourceId = ResourceId Id deriving (Generic)
instance ToSchema ResourceId
data ButtonState = Neutral | Focus | Active | Hover | Disabled
deriving (Show, Bounded, Enum, Generic)
instance ToJSON ButtonState
instance ToSchema ButtonState
instance ToJSONKey ButtonState where toJSONKey = toJSONKeyText (Text.pack . show)
type ImageUrl = Text.Text
newtype ButtonImages = ButtonImages { getButtonImages :: Map ButtonState ImageUrl }
deriving (Generic)
instance ToJSON ButtonImages where
toJSON = toJSON . getButtonImages
instance ToSchema ButtonImages where
declareNamedSchema = genericDeclareNamedSchemaNewtype defaultSchemaOptions
declareSchemaBoundedEnumKeyMapping
buttonImagesSchemaJSON :: Value
buttonImagesSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"Neutral": { "type": "string" },
"Focus": { "type": "string" },
"Active": { "type": "string" },
"Hover": { "type": "string" },
"Disabled": { "type": "string" }
}
}
|]
data SingleMaybeField = SingleMaybeField { singleMaybeField :: Maybe String }
deriving (Show, Generic)
instance ToJSON SingleMaybeField
instance ToSchema SingleMaybeField
singleMaybeFieldSchemaJSON :: Value
singleMaybeFieldSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"singleMaybeField": { "type": "string" }
}
}
|]
data Predicate
= PredicateNoun Noun
| PredicateOmitted Omitted
deriving (Eq, Ord, Read, Show, Generic)
instance ToJSON Predicate
instance ToSchema Predicate
data Noun
= Noun
{ nounSurf :: LangWord
, nounModify :: [Modifier]
}
deriving (Eq, Ord, Read, Show, Generic)
instance ToJSON Noun
instance ToSchema Noun
data LangWord
= LangWord
{ langWordSurf :: String
, langWordBase :: String
}
deriving (Eq, Ord, Read, Show, Generic)
instance ToJSON LangWord
instance ToSchema LangWord
data Modifier
= ModifierNoun Noun
| ModifierOmitted Omitted
deriving (Eq, Ord, Read, Show, Generic)
instance ToJSON Modifier
instance ToSchema Modifier
newtype Omitted
= Omitted
{ omittedModify :: [Modifier]
}
deriving (Eq, Ord, Read, Show, Generic)
instance ToJSON Omitted
instance ToSchema Omitted
predicateSchemaDeclareJSON :: Value
predicateSchemaDeclareJSON = [aesonQQ|
[
{
"Predicate": {
"oneOf": [
{
"properties": {
"contents": { "$ref": "#/components/schemas/Noun" },
"tag": { "enum": ["PredicateNoun"], "type": "string" }
},
"required": ["tag", "contents"],
"type": "object"
},
{
"properties": {
"contents": { "$ref": "#/components/schemas/Omitted" },
"tag": { "enum": ["PredicateOmitted"], "type": "string" }
},
"required": ["tag", "contents"],
"type": "object"
}
]
},
"Noun": {
"properties": {
"nounModify": {
"items": { "$ref": "#/components/schemas/Modifier" },
"type": "array"
},
"nounSurf": { "$ref": "#/components/schemas/LangWord" }
},
"required": ["nounSurf", "nounModify"],
"type": "object"
},
"LangWord": {
"properties": {
"langWordBase": { "type": "string" },
"langWordSurf": { "type": "string" }
},
"required": ["langWordSurf", "langWordBase"],
"type": "object"
},
"Modifier": {
"oneOf": [
{
"properties": {
"contents": { "$ref": "#/components/schemas/Noun" },
"tag": { "enum": ["ModifierNoun"], "type": "string" }
},
"required": ["tag", "contents"],
"type": "object"
},
{
"properties": {
"contents": { "$ref": "#/components/schemas/Omitted" },
"tag": { "enum": ["ModifierOmitted"], "type": "string" }
},
"required": ["tag", "contents"],
"type": "object"
}
]
},
"Omitted": {
"properties": {
"omittedModify": {
"items": { "$ref": "#/components/schemas/Modifier" },
"type": "array"
}
},
"required": ["omittedModify"],
"type": "object"
}
},
{ "$ref": "#/components/schemas/Predicate" }
]
|]
-- | Stand-in for a time-of-day type; the constructor names mirror the field
-- types of the real one.
-- NOTE(review): the derived instances here would produce a constructor enum,
-- while the 'timeOfDaySchemaJSON' fixture below describes an "hh:MM:ss"
-- string with an example — presumably custom instances are in play in the
-- real suite; confirm against the test that uses this type.
data TimeOfDay
= Int
| Pico
deriving (Generic)
instance ToSchema TimeOfDay
instance ToParamSchema TimeOfDay
timeOfDaySchemaJSON :: Value
timeOfDaySchemaJSON = [aesonQQ|
{
"example": "12:33:15",
"type": "string",
"format": "hh:MM:ss"
}
|]
timeOfDayParamSchemaJSON :: Value
timeOfDayParamSchemaJSON = [aesonQQ|
{
"type": "string",
"format": "hh:MM:ss"
}
|]
UnsignedInts
-- | Record of unsigned integers; the custom 'fieldLabelModifier' shortens the
-- labels to "uint32"/"uint64". The expected schema fixture
-- 'unsignedIntsSchemaJSON' carries the min/max bounds of each word type.
data UnsignedInts = UnsignedInts
{ unsignedIntsUint32 :: Word32
, unsignedIntsUint64 :: Word64
} deriving (Generic)
instance ToSchema UnsignedInts where
declareNamedSchema = genericDeclareNamedSchema defaultSchemaOptions
{ fieldLabelModifier = map toLower . drop (length "unsignedInts") }
unsignedIntsSchemaJSON :: Value
unsignedIntsSchemaJSON = [aesonQQ|
{
"type": "object",
"properties":
{
"uint32": { "type": "integer", "format": "int32", "minimum": 0, "maximum": 4294967295 },
"uint64": { "type": "integer", "format": "int64", "minimum": 0, "maximum": 18446744073709551615 }
},
"required": ["uint32", "uint64"]
}
|]
|
894c6bb0b9dd5dfb3b281b756b2368e608763a499ae000c7fbe59ed7fa9c19bc | slegrand45/examples_ocsigen | main.ml | open Lwt.Infix
open Types
open Js_of_ocaml
(* Entry point: mount the application view under the #main element and
   wire the reactive model signal into it. *)
let main _ =
  (* Fail fast if the expected container element is missing from the page. *)
  let container =
    Js.Opt.get
      (Dom_html.document##getElementById (Js.string "main"))
      (fun () -> assert false)
  in
  let model_signal = React.S.create Model.empty_game in
  let root = Js_of_ocaml_tyxml.Tyxml_js.To_dom.of_div (View.view model_signal) in
  Dom.appendChild container root;
  Lwt.return ()
let _ = Js_of_ocaml_lwt.Lwt_js_events.onload () >>= main
| null | https://raw.githubusercontent.com/slegrand45/examples_ocsigen/e2f5efe57caf7a644795ac6b14f6d6e04168e4be/jsoo/tic-tac-toe/main.ml | ocaml | open Lwt.Infix
open Types
open Js_of_ocaml
let main _ =
let doc = Dom_html.document in
let parent =
Js.Opt.get (doc##getElementById(Js.string "main"))
(fun () -> assert false)
in
let m = Model.empty_game in
let rp = React.S.create m in
Dom.appendChild parent (Js_of_ocaml_tyxml.Tyxml_js.To_dom.of_div (View.view rp)) ;
Lwt.return ()
let _ = Js_of_ocaml_lwt.Lwt_js_events.onload () >>= main
| |
f3ecdcf76a9e9206f2dbdc2d49028bc02317c7cdb77670217fcc1e490a05c269 | smallmelon/sdzmmo | log.erl | %%%-----------------------------------
%%% @Module : log
@Author : xhg
%%% @Email :
@Created : 2010.07.23
%%% @Description: 公共函数
%%%-----------------------------------
-module(log).
-include("record.hrl").
-export([
log_stren/7,
log_quality_up/6,
log_quality_out/6,
log_hole/4,
log_compose/6,
log_inlay/5,
log_backout/3,
log_wash/3,
log_consume/4
]).
装备强化日志
%% Equipment strengthen log: inserts one row per strengthen attempt.
%% Stren_fail is the failure counter supplied by the caller; StoneId/RuneId
%% identify the protection stone and rune used, Cost the currency spent and
%% Status the outcome flag (semantics of these values are defined by the
%% caller — this function only records them).
log_stren(PlayerStatus, GoodsInfo, Stren_fail, StoneId, RuneId, Cost, Status) ->
Sql = io_lib:format(<<"insert into `log_stren` set time=UNIX_TIMESTAMP(), player_id=~p, nickname='~s', gid=~p, goods_id=~p, subtype=~p, level=~p, stren=~p, stren_his=~p, stren_fail=~p, stone_id=~p, rune_id=~p, cost=~p, status=~p ">>,
[PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, GoodsInfo#goods.id, GoodsInfo#goods.goods_id, GoodsInfo#goods.subtype, GoodsInfo#goods.level, GoodsInfo#goods.stren, GoodsInfo#goods.stren_his, Stren_fail, StoneId, RuneId, Cost, Status ]),
db_sql:execute(Sql),
ok.
%% Equipment quality-upgrade log.
%% NOTE(review): this inserts into the `log_stren` table, not a dedicated
%% quality table — looks copy/pasted from log_stren/7; confirm the intended
%% target table against the DB schema.
log_quality_up(PlayerStatus, GoodsInfo, Quality_fail, StoneId, Cost, Status) ->
Sql = io_lib:format(<<"insert into `log_stren` set time=UNIX_TIMESTAMP(), player_id=~p, nickname='~s', gid=~p, goods_id=~p, subtype=~p, level=~p, quality=~p, quality_his=~p, quality_fail=~p, stone_id=~p, cost=~p, status=~p ">>,
[PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, GoodsInfo#goods.id, GoodsInfo#goods.goods_id, GoodsInfo#goods.subtype, GoodsInfo#goods.level, GoodsInfo#goods.quality, GoodsInfo#goods.quality_his, Quality_fail, StoneId, Cost, Status ]),
db_sql:execute(Sql),
ok.
%% Equipment quality-stone removal log: records removing StoneNum stones of
%% type StoneId from a goods item.
%% NOTE(review): also writes to `log_stren` rather than a removal-specific
%% table — confirm this is intentional.
log_quality_out(PlayerStatus, GoodsInfo, StoneId, StoneNum, Cost, Status) ->
Sql = io_lib:format(<<"insert into `log_stren` set time=UNIX_TIMESTAMP(), player_id=~p, nickname='~s', gid=~p, goods_id=~p, subtype=~p, level=~p, quality=~p, stone_id=~p, stone_num=~p, cost=~p, status=~p ">>,
[PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, GoodsInfo#goods.id, GoodsInfo#goods.goods_id, GoodsInfo#goods.subtype, GoodsInfo#goods.level, GoodsInfo#goods.quality, StoneId, StoneNum, Cost, Status ]),
db_sql:execute(Sql),
ok.
%% Equipment hole-punching log: records one punch attempt on a goods item,
%% including the item's current hole count, the cost and the result flag.
log_hole(PlayerStatus, GoodsInfo, Cost, Status) ->
    Query =
        io_lib:format(
            <<"insert into `log_hole` set time=UNIX_TIMESTAMP(), player_id=~p, nickname='~s', gid=~p, goods_id=~p, subtype=~p, level=~p, hole=~p, cost=~p, status=~p ">>,
            [PlayerStatus#player_status.id,
             PlayerStatus#player_status.nickname,
             GoodsInfo#goods.id,
             GoodsInfo#goods.goods_id,
             GoodsInfo#goods.subtype,
             GoodsInfo#goods.level,
             GoodsInfo#goods.hole,
             Cost,
             Status]),
    db_sql:execute(Query),
    ok.
%% Gem composition log: records one gem-merge attempt driven by a compose rule.
%% PlayerStatus : #player_status{} of the acting player
%% Rule         : #ets_goods_compose{} rule used (source goods id/count, result id)
%% Subtype      : goods subtype of the composed gem
%% RuneId/Cost/Status : protection rune used, currency spent, outcome flag
log_compose(PlayerStatus, Rule, Subtype, RuneId, Cost, Status) ->
    %% BUGFIX: the statement has 10 placeholders but only 9 arguments were
    %% supplied, so io_lib:format/2 crashed with badarg. No goods *instance*
    %% exists at composition time, so the rule's source goods id is logged
    %% for both the `gid` and `goods_id` columns.
    %% TODO(review): confirm the intended value for the `gid` column.
    Sql = io_lib:format(<<"insert into `log_compose` set time=UNIX_TIMESTAMP(), player_id=~p, nickname='~s', gid=~p, goods_id=~p, subtype=~p, stone_num=~p, new_id=~p, rune_id=~p, cost=~p, status=~p ">>,
        [PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, Rule#ets_goods_compose.goods_id, Rule#ets_goods_compose.goods_id, Subtype, Rule#ets_goods_compose.goods_num, Rule#ets_goods_compose.new_id, RuneId, Cost, Status]),
    db_sql:execute(Sql),
    ok.
%% Gem inlay log: records inlaying stone StoneId into a goods item.
log_inlay(PlayerStatus, GoodsInfo, StoneId, Cost, Status) ->
Sql = io_lib:format(<<"insert into `log_inlay` set time=UNIX_TIMESTAMP(), player_id=~p, nickname='~s', gid=~p, goods_id=~p, subtype=~p, level=~p, stone_id=~p, cost=~p, status=~p ">>,
[PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, GoodsInfo#goods.id, GoodsInfo#goods.goods_id, GoodsInfo#goods.subtype, GoodsInfo#goods.level, StoneId, Cost, Status ]),
db_sql:execute(Sql),
ok.
%% Gem removal log: records removing all inlaid gems from a goods item.
%% The number of gems currently inlaid is computed via
%% goods_util:get_inlay_num/1 at removal time and stored in the `inlay` column.
log_backout(PlayerStatus, GoodsInfo, Cost) ->
InlayNum = goods_util:get_inlay_num(GoodsInfo),
Sql = io_lib:format(<<"insert into `log_backout` set time=UNIX_TIMESTAMP(), player_id=~p, nickname='~s', gid=~p, goods_id=~p, subtype=~p, level=~p, inlay=~p, cost=~p ">>,
[PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, GoodsInfo#goods.id, GoodsInfo#goods.goods_id, GoodsInfo#goods.subtype, GoodsInfo#goods.level, InlayNum, Cost ]),
db_sql:execute(Sql),
ok.
%% Equipment wash (re-roll) log: records the item's color after washing and
%% the currency spent.
log_wash(PlayerStatus, GoodsInfo, Cost) ->
Sql = io_lib:format(<<"insert into `log_wash` set time=UNIX_TIMESTAMP(), player_id=~p, nickname='~s', gid=~p, goods_id=~p, subtype=~p, level=~p, color=~p, cost=~p ">>,
[PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, GoodsInfo#goods.id, GoodsInfo#goods.goods_id, GoodsInfo#goods.subtype, GoodsInfo#goods.level, GoodsInfo#goods.color, Cost ]),
db_sql:execute(Sql),
ok.
%% Consumption log: records how much of each currency (coin/silver/gold/bcoin)
%% a player spent on a goods item, plus the remaining balances afterwards.
%% Type is mapped to a consume-type code via goods_util:get_consume_type/1.
log_consume(Type, GoodsInfo, PlayerStatus, NewPlayerStatus) ->
    ConsumeType = goods_util:get_consume_type(Type),
    %% Amount spent = balance before - balance after.
    Cost_coin = PlayerStatus#player_status.coin - NewPlayerStatus#player_status.coin,
    Cost_silver = PlayerStatus#player_status.silver - NewPlayerStatus#player_status.silver,
    Cost_gold = PlayerStatus#player_status.gold - NewPlayerStatus#player_status.gold,
    Cost_bcoin = PlayerStatus#player_status.bcoin - NewPlayerStatus#player_status.bcoin,
    Remain_coin = NewPlayerStatus#player_status.coin,
    Remain_silver = NewPlayerStatus#player_status.silver,
    Remain_gold = NewPlayerStatus#player_status.gold,
    Remain_bcoin = NewPlayerStatus#player_status.bcoin,
    %% BUGFIX: the statement has 13 placeholders but 16 arguments were passed
    %% (extra subtype/level/color), which makes io_lib:format/2 fail with
    %% badarg at runtime. The argument list now matches the placeholders 1:1.
    Sql = io_lib:format(<<"insert into `log_consume` set time=UNIX_TIMESTAMP(), consume_type=~p, player_id=~p, nickname='~s', gid=~p, goods_id=~p, cost_coin=~p, cost_silver=~p, cost_gold=~p, cost_bcoin=~p, remain_coin=~p, remain_silver=~p, remain_gold=~p, remain_bcoin=~p ">>,
        [ConsumeType, PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, GoodsInfo#goods.id, GoodsInfo#goods.goods_id, Cost_coin, Cost_silver, Cost_gold, Cost_bcoin, Remain_coin, Remain_silver, Remain_gold, Remain_bcoin]),
    db_sql:execute(Sql),
    ok.
| null | https://raw.githubusercontent.com/smallmelon/sdzmmo/254ff430481de474527c0e96202c63fb0d2c29d2/src/lib/log.erl | erlang | -----------------------------------
@Module : log
@Email :
@Description: 公共函数
-----------------------------------
装备品质升级日志
装备品质石拆除日志
装备打孔日志
宝石合成日志
宝石镶嵌日志
宝石拆除日志
装备洗炼日志
消费日志
| @Author : xhg
@Created : 2010.07.23
-module(log).
-include("record.hrl").
-export([
log_stren/7,
log_quality_up/6,
log_quality_out/6,
log_hole/4,
log_compose/6,
log_inlay/5,
log_backout/3,
log_wash/3,
log_consume/4
]).
装备强化日志
log_stren(PlayerStatus, GoodsInfo, Stren_fail, StoneId, RuneId, Cost, Status) ->
Sql = io_lib:format(<<"insert into `log_stren` set time=UNIX_TIMESTAMP(), player_id=~p, nickname='~s', gid=~p, goods_id=~p, subtype=~p, level=~p, stren=~p, stren_his=~p, stren_fail=~p, stone_id=~p, rune_id=~p, cost=~p, status=~p ">>,
[PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, GoodsInfo#goods.id, GoodsInfo#goods.goods_id, GoodsInfo#goods.subtype, GoodsInfo#goods.level, GoodsInfo#goods.stren, GoodsInfo#goods.stren_his, Stren_fail, StoneId, RuneId, Cost, Status ]),
db_sql:execute(Sql),
ok.
log_quality_up(PlayerStatus, GoodsInfo, Quality_fail, StoneId, Cost, Status) ->
Sql = io_lib:format(<<"insert into `log_stren` set time=UNIX_TIMESTAMP(), player_id=~p, nickname='~s', gid=~p, goods_id=~p, subtype=~p, level=~p, quality=~p, quality_his=~p, quality_fail=~p, stone_id=~p, cost=~p, status=~p ">>,
[PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, GoodsInfo#goods.id, GoodsInfo#goods.goods_id, GoodsInfo#goods.subtype, GoodsInfo#goods.level, GoodsInfo#goods.quality, GoodsInfo#goods.quality_his, Quality_fail, StoneId, Cost, Status ]),
db_sql:execute(Sql),
ok.
log_quality_out(PlayerStatus, GoodsInfo, StoneId, StoneNum, Cost, Status) ->
Sql = io_lib:format(<<"insert into `log_stren` set time=UNIX_TIMESTAMP(), player_id=~p, nickname='~s', gid=~p, goods_id=~p, subtype=~p, level=~p, quality=~p, stone_id=~p, stone_num=~p, cost=~p, status=~p ">>,
[PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, GoodsInfo#goods.id, GoodsInfo#goods.goods_id, GoodsInfo#goods.subtype, GoodsInfo#goods.level, GoodsInfo#goods.quality, StoneId, StoneNum, Cost, Status ]),
db_sql:execute(Sql),
ok.
log_hole(PlayerStatus, GoodsInfo, Cost, Status) ->
Sql = io_lib:format(<<"insert into `log_hole` set time=UNIX_TIMESTAMP(), player_id=~p, nickname='~s', gid=~p, goods_id=~p, subtype=~p, level=~p, hole=~p, cost=~p, status=~p ">>,
[PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, GoodsInfo#goods.id, GoodsInfo#goods.goods_id, GoodsInfo#goods.subtype, GoodsInfo#goods.level, GoodsInfo#goods.hole, Cost, Status ]),
db_sql:execute(Sql),
ok.
%% Gem composition log (see header copy of this module).
log_compose(PlayerStatus, Rule, Subtype, RuneId, Cost, Status) ->
    %% BUGFIX: 10 placeholders vs 9 arguments caused a badarg in
    %% io_lib:format/2; the rule's source goods id is logged for both the
    %% `gid` and `goods_id` columns (no goods instance exists here).
    %% TODO(review): confirm the intended value for the `gid` column.
    Sql = io_lib:format(<<"insert into `log_compose` set time=UNIX_TIMESTAMP(), player_id=~p, nickname='~s', gid=~p, goods_id=~p, subtype=~p, stone_num=~p, new_id=~p, rune_id=~p, cost=~p, status=~p ">>,
        [PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, Rule#ets_goods_compose.goods_id, Rule#ets_goods_compose.goods_id, Subtype, Rule#ets_goods_compose.goods_num, Rule#ets_goods_compose.new_id, RuneId, Cost, Status]),
    db_sql:execute(Sql),
    ok.
log_inlay(PlayerStatus, GoodsInfo, StoneId, Cost, Status) ->
Sql = io_lib:format(<<"insert into `log_inlay` set time=UNIX_TIMESTAMP(), player_id=~p, nickname='~s', gid=~p, goods_id=~p, subtype=~p, level=~p, stone_id=~p, cost=~p, status=~p ">>,
[PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, GoodsInfo#goods.id, GoodsInfo#goods.goods_id, GoodsInfo#goods.subtype, GoodsInfo#goods.level, StoneId, Cost, Status ]),
db_sql:execute(Sql),
ok.
log_backout(PlayerStatus, GoodsInfo, Cost) ->
InlayNum = goods_util:get_inlay_num(GoodsInfo),
Sql = io_lib:format(<<"insert into `log_backout` set time=UNIX_TIMESTAMP(), player_id=~p, nickname='~s', gid=~p, goods_id=~p, subtype=~p, level=~p, inlay=~p, cost=~p ">>,
[PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, GoodsInfo#goods.id, GoodsInfo#goods.goods_id, GoodsInfo#goods.subtype, GoodsInfo#goods.level, InlayNum, Cost ]),
db_sql:execute(Sql),
ok.
log_wash(PlayerStatus, GoodsInfo, Cost) ->
Sql = io_lib:format(<<"insert into `log_wash` set time=UNIX_TIMESTAMP(), player_id=~p, nickname='~s', gid=~p, goods_id=~p, subtype=~p, level=~p, color=~p, cost=~p ">>,
[PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, GoodsInfo#goods.id, GoodsInfo#goods.goods_id, GoodsInfo#goods.subtype, GoodsInfo#goods.level, GoodsInfo#goods.color, Cost ]),
db_sql:execute(Sql),
ok.
%% Consumption log (see header copy of this module).
log_consume(Type, GoodsInfo, PlayerStatus, NewPlayerStatus) ->
    ConsumeType = goods_util:get_consume_type(Type),
    %% Amount spent = balance before - balance after.
    Cost_coin = PlayerStatus#player_status.coin - NewPlayerStatus#player_status.coin,
    Cost_silver = PlayerStatus#player_status.silver - NewPlayerStatus#player_status.silver,
    Cost_gold = PlayerStatus#player_status.gold - NewPlayerStatus#player_status.gold,
    Cost_bcoin = PlayerStatus#player_status.bcoin - NewPlayerStatus#player_status.bcoin,
    Remain_coin = NewPlayerStatus#player_status.coin,
    Remain_silver = NewPlayerStatus#player_status.silver,
    Remain_gold = NewPlayerStatus#player_status.gold,
    Remain_bcoin = NewPlayerStatus#player_status.bcoin,
    %% BUGFIX: 13 placeholders vs 16 arguments (extra subtype/level/color)
    %% caused a badarg in io_lib:format/2; argument list now matches 1:1.
    Sql = io_lib:format(<<"insert into `log_consume` set time=UNIX_TIMESTAMP(), consume_type=~p, player_id=~p, nickname='~s', gid=~p, goods_id=~p, cost_coin=~p, cost_silver=~p, cost_gold=~p, cost_bcoin=~p, remain_coin=~p, remain_silver=~p, remain_gold=~p, remain_bcoin=~p ">>,
        [ConsumeType, PlayerStatus#player_status.id, PlayerStatus#player_status.nickname, GoodsInfo#goods.id, GoodsInfo#goods.goods_id, Cost_coin, Cost_silver, Cost_gold, Cost_bcoin, Remain_coin, Remain_silver, Remain_gold, Remain_bcoin]),
    db_sql:execute(Sql),
    ok.
|
b6a4d9da9f5406ae7203aa7e6a10d76df6dd12d2d52d3ff31f2bc7b71b66fd61 | timjs/tophat-haskell | Log.hs | # LANGUAGE TemplateHaskell #
module Polysemy.Log
( -- * Effect
Log (..),
Severity (..),
-- * Actions
log,
-- * Interpretations
logToOutput,
logToIO,
)
where
import Polysemy
import Polysemy.Output
---- Effect --------------------------------------------------------------------
-- | Log levels, from most to least severe.
data Severity
= Error
| Warning
| Info
-- | Two-character prefix used when rendering a log line.
instance Display Severity where
display = \case
Error -> "!!"
Warning -> "**"
Info -> "=="
-- | Polysemy effect: emit one log entry with a severity and a payload of
-- type @i@ (rendered via its 'Display' instance by the interpreters below).
data Log i m a where
Log :: Severity -> i -> Log i m ()
-- Generates the 'log' smart constructor for the effect above.
makeSem ''Log
---- Interpretations -----------------------------------------------------------
-- | Reinterpret 'Log' as 'Output' of rendered text lines: the severity
-- prefix and the payload joined by a space ('<|' is the project prelude's
-- application operator, analogous to '$').
logToOutput :: (Display i) => Sem (Log i ': r) a -> Sem (Output Text ': r) a
logToOutput = reinterpret \case
Log s i -> output <| unwords [display s, display i]
-- | Run 'Log' all the way to IO by printing each rendered line.
-- NOTE(review): '>>' here appears to be the custom prelude's left-to-right
-- function composition (not the Monad '>>') — confirm against the prelude.
logToIO :: forall i a r. (Member (Embed IO) r, Display i) => Sem (Log i ': r) a -> Sem r a
logToIO = logToOutput >> runOutputSem putTextLn
| null | https://raw.githubusercontent.com/timjs/tophat-haskell/665d620ded5b61c454b6c50b8590f0d3f21c055f/src/Polysemy/Log.hs | haskell | * Effect
* Actions
* Interpretations
-- Effect --------------------------------------------------------------------
-- Interpretations ----------------------------------------------------------- | # LANGUAGE TemplateHaskell #
module Polysemy.Log
Log (..),
Severity (..),
log,
logToOutput,
logToIO,
)
where
import Polysemy
import Polysemy.Output
data Severity
= Error
| Warning
| Info
instance Display Severity where
display = \case
Error -> "!!"
Warning -> "**"
Info -> "=="
data Log i m a where
Log :: Severity -> i -> Log i m ()
makeSem ''Log
logToOutput :: (Display i) => Sem (Log i ': r) a -> Sem (Output Text ': r) a
logToOutput = reinterpret \case
Log s i -> output <| unwords [display s, display i]
logToIO :: forall i a r. (Member (Embed IO) r, Display i) => Sem (Log i ': r) a -> Sem r a
logToIO = logToOutput >> runOutputSem putTextLn
|
d6dac196ef10c12654e84484d50efe3a159df101dcdbea0956c4267c356161d8 | beamparticle/beamparticle | beamparticle_highperf_http_handler.erl | %%%-------------------------------------------------------------------
@author neerajsharma
( C ) 2017 , < >
%%% @doc
%%%
%%% @end
%%% %CopyrightBegin%
%%%
Copyright < > 2017 .
All Rights Reserved .
%%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% -2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
%%% %CopyrightEnd%
%%%-------------------------------------------------------------------
-module(beamparticle_highperf_http_handler).
-behaviour(ranch_protocol).
-include("beamparticle_constants.hrl").
%% API
-export([start_link/4]).
-export([init/4]).
-define(SERVER, ?MODULE).
%%%===================================================================
%%% API
%%%===================================================================
%%%===================================================================
%%% ranch_protocol callbacks
%%%===================================================================
start_link(Ref, Socket, Transport, Opts) ->
Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
{ok, Pid}.
init(Ref, Socket, Transport, Opts) ->
ok = ranch:accept_ack(Ref),
{ok, {InetIp, InetPort}} = Transport:peername(Socket),
IpBinary = list_to_binary(inet:ntoa(InetIp)),
PortBinary = integer_to_binary(InetPort),
loop(Socket, Transport, Opts, IpBinary, PortBinary, []).
loop(Socket, Transport, Opts, IpBinary, PortBinary, PartialDataList) ->
MaxBodyBytes = proplists:get_value(max_read_length, Opts, 5000),
MaxReadTimeoutMsec = proplists:get_value(max_read_timeout_msec, Opts, 5000),
case Transport:recv(Socket, 0, MaxReadTimeoutMsec) of
{ok, Data} when byte_size(Data) > 0 ->
Parsed = parse_http_request(Data, PartialDataList),
lager : debug("highperf transport = ~p , Data = ~p , PartialDataList = ~p " , [ , Data , PartialDataList ] ) ,
case Parsed of
{HttpMethod, <<"/post/", RestPath/binary>> = _RequestPath,
_HttpVersion, RequestHeaders, RequestBody} when
HttpMethod == <<"POST">> orelse
HttpMethod == <<"post">> ->
LowerRequestHeaders = string:lowercase(RequestHeaders),
ContentLength = request_content_length(LowerRequestHeaders),
R = process_http_post(RequestBody, RestPath,
Data, PartialDataList, MaxBodyBytes, LowerRequestHeaders, ContentLength),
case R of
{ok, {loop, StatusCode, Content}} ->
send_http_response(Socket, Transport, StatusCode, Content, keep_alive),
loop(Socket, Transport, Opts, IpBinary, PortBinary, []);
{ok, {close, StatusCode, Content}} ->
send_http_response(Socket, Transport, StatusCode, Content, close),
ok = Transport:close(Socket);
{ok, NewPartialDataList} ->
loop(Socket, Transport, Opts, IpBinary, PortBinary,
NewPartialDataList);
{error, close} ->
ok = Transport:close(Socket)
end;
{HttpMethod, <<"/api/", RestPath/binary>> = _RequestPath,
_HttpVersion, RequestHeaders, RequestBody} when
HttpMethod == <<"POST">> orelse
HttpMethod == <<"post">> orelse
HttpMethod == <<"GET">> orelse
HttpMethod == <<"get">> ->
IsGet = case HttpMethod of
<<"GET">> -> true;
<<"get">> -> true;
_ -> false
end,
LowerRequestHeaders = string:lowercase(RequestHeaders),
{BodyData, ContentLength} = case IsGet of
true ->
{_FunctionName, QsParamsBin} = extract_function_and_params(RestPath),
QsParamParts = string:split(QsParamsBin, <<"&">>, all),
BodyAsTupleList =
lists:foldl(fun(<<>>, AccIn) ->
AccIn;
(E, AccIn) ->
case string:split(E, <<"=">>) of
[A] ->
[{A, <<"1">>} | AccIn];
[A, B] ->
[{A, B} | AccIn]
end
end, [], QsParamParts),
GetBody = jiffy:encode(maps:from_list(BodyAsTupleList)),
{GetBody, byte_size(GetBody)};
false ->
PostContentLength = request_content_length(LowerRequestHeaders),
{RequestBody, PostContentLength}
end,
R = process_http_post(BodyData, RestPath,
Data, PartialDataList, MaxBodyBytes, LowerRequestHeaders, ContentLength),
case R of
{ok, {loop, StatusCode, Content}} ->
send_http_response(Socket, Transport, StatusCode, Content, keep_alive),
loop(Socket, Transport, Opts, IpBinary, PortBinary, []);
{ok, {close, StatusCode, Content}} ->
send_http_response(Socket, Transport, StatusCode, Content, close),
ok = Transport:close(Socket);
{ok, NewPartialDataList} ->
loop(Socket, Transport, Opts, IpBinary, PortBinary,
NewPartialDataList);
{error, close} ->
ok = Transport:close(Socket)
end;
{<<"GET">>, <<"/health">>, _HttpVersion, RequestHeaders, _RequestBody} ->
{ok, DateTimeBinary} =
beamparticle_date_server:datetime(binary),
Content = < < " { \"msg\ " : \"I am alive\ " , \"datetime\ " : \ " " ,
%% DateTimeBinary/binary,
%% "\", \"ip\": \"",
IpBinary / binary ,
" \ " , \"port\ " : " , PortBinary / binary ,
%% "}">>,
Content = [<<"{\"msg\": \"I am alive\", \"datetime\": \"">>,
DateTimeBinary,
<<"\", \"ip\": \"">>,
IpBinary,
<<"\", \"port\": ">>,
PortBinary,
<<"}">>],
LowerRequestHeaders = string:lowercase(RequestHeaders),
case string:find(LowerRequestHeaders, <<"connection: keep-alive">>) of
nomatch ->
send_http_response(Socket, Transport, Content, close),
ok = Transport:close(Socket);
_ ->
send_http_response(Socket, Transport, Content, keep_alive),
loop(Socket, Transport, Opts, IpBinary, PortBinary, [])
end;
{_HttpMethod, _RequestPath, _HttpVersion, incomplete} ->
NewPartialDataList = [Data | PartialDataList],
TotalBytes = lists:foldl(fun(V, AccIn) ->
byte_size(V) + AccIn
end, 0, NewPartialDataList),
case TotalBytes > MaxBodyBytes of
false ->
loop(Socket, Transport, Opts, IpBinary, PortBinary,
[Data | PartialDataList]);
true ->
ok = Transport:close(Socket)
end;
{error, incomplete} ->
loop(Socket, Transport, Opts, IpBinary, PortBinary,
[Data | PartialDataList]);
%%{error, bad_http_first_line}
_ ->
ok = Transport:close(Socket)
end;
_ ->
%% apart from read timeout or errors, well reach here when client
%% disconnected
ok = Transport:close(Socket)
end.
parse_http_request(Data, PartialDataList) ->
NewData = case PartialDataList of
[] -> Data;
_ -> iolist_to_binary(lists:reverse([Data | PartialDataList]))
end,
use lists : flatten/1 after string : split/2 when is [ binary ( ) ] ,
but at present is formed as binary ( )
case string:split(NewData, <<"\r\n">>) of
[FirstLine, Rest] ->
TrimmedFirstLine = string:trim(FirstLine),
case string:split(TrimmedFirstLine, <<" ">>, all) of
[HttpMethod, RequestPath, HttpVersion] ->
case Rest of
<<"\r\n", RequestBody/binary>> ->
RequestHeaders = <<>>,
{HttpMethod, RequestPath, HttpVersion, RequestHeaders, RequestBody};
_ ->
case string:split(Rest, <<"\r\n\r\n">>) of
[RequestHeaders, RequestBody] ->
{HttpMethod, RequestPath, HttpVersion, RequestHeaders, RequestBody};
_ ->
{HttpMethod, RequestPath, HttpVersion, incomplete}
end
end;
_E ->
%%lager:debug("bad_http_first_line, E = ~p", [E]),
{error, bad_http_first_line}
end;
_ ->
{error, incomplete}
end.
send_http_response(Socket, Transport, Content, CxnType) ->
send_http_response(Socket, Transport, 200, Content, CxnType).
send_http_response(Socket, Transport, StatusCode, Content, keep_alive) ->
ContentLength = case is_binary(Content) of
true -> byte_size(Content);
false ->
lists:foldl(fun(X, AccIn) ->
byte_size(X) + AccIn
end, 0, Content)
end,
StatusCodeBin = http_status_code_to_binary(StatusCode),
Response = [<<"HTTP/1.1 ">>, StatusCodeBin, <<"\r\ncontent-length: ">>,
integer_to_binary(ContentLength),
<<"\r\ncontent-type: application/json\r\n">>,
<<"connection: keep-alive\r\n">>,
<<"\r\n">>,
Content],
Transport:send(Socket, Response);
send_http_response(Socket, Transport, StatusCode, Content, _) ->
ContentLength = case is_binary(Content) of
true -> byte_size(Content);
false ->
lists:foldl(fun(X, AccIn) ->
byte_size(X) + AccIn
end, 0, Content)
end,
StatusCodeBin = http_status_code_to_binary(StatusCode),
Response = [<<"HTTP/1.1 ">>, StatusCodeBin, <<"\r\ncontent-length: ">>,
integer_to_binary(ContentLength),
<<"\r\ncontent-type: application/json\r\n">>,
<<"\r\n">>,
Content],
Transport:send(Socket, Response).
minimal_loop(Socket , Transport , Opts ) - >
MaxBodyBytes = proplists : get_value(max_read_length , Opts , 5000 ) ,
case Transport : recv(Socket , 0 , MaxBodyBytes ) of
{ ok , Data } when byte_size(Data ) > 4 - >
%% Content = <<"I am alive">>,
= byte_size(Content ) ,
Response = [ < < " HTTP/1.1 200 OK\r\ncontent - length : " > > ,
%% integer_to_binary(ContentLength),
%% <<"\r\n">>,
%% <<"connection: keep-alive\r\n">>,
%% <<"\r\n">>,
%% Content],
%% Transport:send(Socket, Response),
ok = Transport : ) ;
%% %% loop(Socket, Transport);
%% _ ->
%% ok = Transport:close(Socket)
%% end.
request_content_length(LowerRequestHeaders) ->
case string:find(LowerRequestHeaders, <<"content-length:">>) of
<<"content-length:", RestContentLength/binary>> ->
case string:split(RestContentLength, <<"\r\n">>) of
[ContentLengthBin | _] ->
binary_to_integer(string:trim(ContentLengthBin));
_ ->
-1
end;
_ ->
0
end.
data_bytes(Data) ->
lists:foldl(fun(V, AccIn) -> byte_size(V) + AccIn end, 0, Data).
extract_function_and_params(RestPath) ->
[FunctionName | InputQsParams] = string:split(RestPath, <<"?">>),
QsParamsBin = case InputQsParams of
[] -> <<>>;
[InputQsParamBin] -> http_uri:decode(InputQsParamBin)
end,
{FunctionName, QsParamsBin}.
is_keepalive(LowerRequestHeaders) ->
case string:find(LowerRequestHeaders, <<"connection: keep-alive">>) of
nomatch -> false;
_ -> true
end.
process_http_post(RequestBody, RestPath, Data, PartialDataList, MaxBodyBytes, LowerRequestHeaders, ContentLength) ->
case ContentLength > byte_size(RequestBody) of
true ->
NewPartialDataList = [Data | PartialDataList],
TotalBytes = data_bytes(NewPartialDataList),
case TotalBytes > MaxBodyBytes of
false ->
loop(Socket , Transport , Opts , IpBinary , PortBinary ,
%% [Data | PartialDataList]);
{ok, [Data | PartialDataList]};
true ->
%%ok = Transport:close(Socket)
{error, close}
end;
false ->
{FunctionName, QsParamsBin} = extract_function_and_params(RestPath),
QsParamParts = string:split(QsParamsBin, <<"&">>, all),
erlang:erase(?LOG_ENV_KEY),
IsAllowed = beamparticle_config:is_function_allowed(FunctionName, highperf_http_rest),
{StatusCode, Content2} = case IsAllowed of
true ->
case lists:filter(fun(E) -> E =:= <<"env=2">> end, QsParamParts) of
[] ->
erlang:put(?CALL_ENV_KEY, prod);
_ ->
erlang:put(?CALL_ENV_KEY, stage)
end,
Arguments = [RequestBody,
<<"{\"qs\": \"",
QsParamsBin/binary,
"\"}">>],
Content = beamparticle_dynamic:get_raw_result(
FunctionName, Arguments),
%% as part of dynamic call configurations could be set,
%% so lets erase that before the next reuse
beamparticle_dynamic:erase_config(),
{200, Content};
false ->
{404, <<>>}
end,
case is_keepalive(LowerRequestHeaders) of
false ->
send_http_response(Socket , Transport , Content , close ) ,
ok = Transport : ) ;
{ok, {close, StatusCode, Content2}};
true ->
send_http_response(Socket , Transport , Content , keep_alive ) ,
loop(Socket , Transport , Opts , IpBinary , PortBinary , [ ] )
{ok, {loop, StatusCode, Content2}}
end
end.
http_status_code_to_binary(200) -> <<"200 OK">>;
http_status_code_to_binary(404) -> <<"404 Not Found">>;
http_status_code_to_binary(V) -> iolist_to_binary([integer_to_binary(V), <<" Something">>]).
| null | https://raw.githubusercontent.com/beamparticle/beamparticle/65dcea1569d06b331b08cd9f8018ece4b176b690/src/beamparticle_highperf_http_handler.erl | erlang | -------------------------------------------------------------------
@doc
@end
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
-------------------------------------------------------------------
API
===================================================================
API
===================================================================
===================================================================
ranch_protocol callbacks
===================================================================
DateTimeBinary/binary,
"\", \"ip\": \"",
"}">>,
{error, bad_http_first_line}
apart from read timeout or errors, well reach here when client
disconnected
lager:debug("bad_http_first_line, E = ~p", [E]),
Content = <<"I am alive">>,
integer_to_binary(ContentLength),
<<"\r\n">>,
<<"connection: keep-alive\r\n">>,
<<"\r\n">>,
Content],
Transport:send(Socket, Response),
%% loop(Socket, Transport);
_ ->
ok = Transport:close(Socket)
end.
[Data | PartialDataList]);
ok = Transport:close(Socket)
as part of dynamic call configurations could be set,
so lets erase that before the next reuse | @author neerajsharma
( C ) 2017 , < >
Copyright < > 2017 .
All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(beamparticle_highperf_http_handler).
-behaviour(ranch_protocol).
-include("beamparticle_constants.hrl").
-export([start_link/4]).
-export([init/4]).
-define(SERVER, ?MODULE).
start_link(Ref, Socket, Transport, Opts) ->
Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
{ok, Pid}.
init(Ref, Socket, Transport, Opts) ->
ok = ranch:accept_ack(Ref),
{ok, {InetIp, InetPort}} = Transport:peername(Socket),
IpBinary = list_to_binary(inet:ntoa(InetIp)),
PortBinary = integer_to_binary(InetPort),
loop(Socket, Transport, Opts, IpBinary, PortBinary, []).
loop(Socket, Transport, Opts, IpBinary, PortBinary, PartialDataList) ->
MaxBodyBytes = proplists:get_value(max_read_length, Opts, 5000),
MaxReadTimeoutMsec = proplists:get_value(max_read_timeout_msec, Opts, 5000),
case Transport:recv(Socket, 0, MaxReadTimeoutMsec) of
{ok, Data} when byte_size(Data) > 0 ->
Parsed = parse_http_request(Data, PartialDataList),
lager : debug("highperf transport = ~p , Data = ~p , PartialDataList = ~p " , [ , Data , PartialDataList ] ) ,
case Parsed of
{HttpMethod, <<"/post/", RestPath/binary>> = _RequestPath,
_HttpVersion, RequestHeaders, RequestBody} when
HttpMethod == <<"POST">> orelse
HttpMethod == <<"post">> ->
LowerRequestHeaders = string:lowercase(RequestHeaders),
ContentLength = request_content_length(LowerRequestHeaders),
R = process_http_post(RequestBody, RestPath,
Data, PartialDataList, MaxBodyBytes, LowerRequestHeaders, ContentLength),
case R of
{ok, {loop, StatusCode, Content}} ->
send_http_response(Socket, Transport, StatusCode, Content, keep_alive),
loop(Socket, Transport, Opts, IpBinary, PortBinary, []);
{ok, {close, StatusCode, Content}} ->
send_http_response(Socket, Transport, StatusCode, Content, close),
ok = Transport:close(Socket);
{ok, NewPartialDataList} ->
loop(Socket, Transport, Opts, IpBinary, PortBinary,
NewPartialDataList);
{error, close} ->
ok = Transport:close(Socket)
end;
{HttpMethod, <<"/api/", RestPath/binary>> = _RequestPath,
_HttpVersion, RequestHeaders, RequestBody} when
HttpMethod == <<"POST">> orelse
HttpMethod == <<"post">> orelse
HttpMethod == <<"GET">> orelse
HttpMethod == <<"get">> ->
IsGet = case HttpMethod of
<<"GET">> -> true;
<<"get">> -> true;
_ -> false
end,
LowerRequestHeaders = string:lowercase(RequestHeaders),
{BodyData, ContentLength} = case IsGet of
true ->
{_FunctionName, QsParamsBin} = extract_function_and_params(RestPath),
QsParamParts = string:split(QsParamsBin, <<"&">>, all),
BodyAsTupleList =
lists:foldl(fun(<<>>, AccIn) ->
AccIn;
(E, AccIn) ->
case string:split(E, <<"=">>) of
[A] ->
[{A, <<"1">>} | AccIn];
[A, B] ->
[{A, B} | AccIn]
end
end, [], QsParamParts),
GetBody = jiffy:encode(maps:from_list(BodyAsTupleList)),
{GetBody, byte_size(GetBody)};
false ->
PostContentLength = request_content_length(LowerRequestHeaders),
{RequestBody, PostContentLength}
end,
R = process_http_post(BodyData, RestPath,
Data, PartialDataList, MaxBodyBytes, LowerRequestHeaders, ContentLength),
case R of
{ok, {loop, StatusCode, Content}} ->
send_http_response(Socket, Transport, StatusCode, Content, keep_alive),
loop(Socket, Transport, Opts, IpBinary, PortBinary, []);
{ok, {close, StatusCode, Content}} ->
send_http_response(Socket, Transport, StatusCode, Content, close),
ok = Transport:close(Socket);
{ok, NewPartialDataList} ->
loop(Socket, Transport, Opts, IpBinary, PortBinary,
NewPartialDataList);
{error, close} ->
ok = Transport:close(Socket)
end;
{<<"GET">>, <<"/health">>, _HttpVersion, RequestHeaders, _RequestBody} ->
{ok, DateTimeBinary} =
beamparticle_date_server:datetime(binary),
Content = < < " { \"msg\ " : \"I am alive\ " , \"datetime\ " : \ " " ,
IpBinary / binary ,
" \ " , \"port\ " : " , PortBinary / binary ,
Content = [<<"{\"msg\": \"I am alive\", \"datetime\": \"">>,
DateTimeBinary,
<<"\", \"ip\": \"">>,
IpBinary,
<<"\", \"port\": ">>,
PortBinary,
<<"}">>],
LowerRequestHeaders = string:lowercase(RequestHeaders),
case string:find(LowerRequestHeaders, <<"connection: keep-alive">>) of
nomatch ->
send_http_response(Socket, Transport, Content, close),
ok = Transport:close(Socket);
_ ->
send_http_response(Socket, Transport, Content, keep_alive),
loop(Socket, Transport, Opts, IpBinary, PortBinary, [])
end;
{_HttpMethod, _RequestPath, _HttpVersion, incomplete} ->
NewPartialDataList = [Data | PartialDataList],
TotalBytes = lists:foldl(fun(V, AccIn) ->
byte_size(V) + AccIn
end, 0, NewPartialDataList),
case TotalBytes > MaxBodyBytes of
false ->
loop(Socket, Transport, Opts, IpBinary, PortBinary,
[Data | PartialDataList]);
true ->
ok = Transport:close(Socket)
end;
{error, incomplete} ->
loop(Socket, Transport, Opts, IpBinary, PortBinary,
[Data | PartialDataList]);
_ ->
ok = Transport:close(Socket)
end;
_ ->
ok = Transport:close(Socket)
end.
parse_http_request(Data, PartialDataList) ->
NewData = case PartialDataList of
[] -> Data;
_ -> iolist_to_binary(lists:reverse([Data | PartialDataList]))
end,
use lists : flatten/1 after string : split/2 when is [ binary ( ) ] ,
but at present is formed as binary ( )
case string:split(NewData, <<"\r\n">>) of
[FirstLine, Rest] ->
TrimmedFirstLine = string:trim(FirstLine),
case string:split(TrimmedFirstLine, <<" ">>, all) of
[HttpMethod, RequestPath, HttpVersion] ->
case Rest of
<<"\r\n", RequestBody/binary>> ->
RequestHeaders = <<>>,
{HttpMethod, RequestPath, HttpVersion, RequestHeaders, RequestBody};
_ ->
case string:split(Rest, <<"\r\n\r\n">>) of
[RequestHeaders, RequestBody] ->
{HttpMethod, RequestPath, HttpVersion, RequestHeaders, RequestBody};
_ ->
{HttpMethod, RequestPath, HttpVersion, incomplete}
end
end;
_E ->
{error, bad_http_first_line}
end;
_ ->
{error, incomplete}
end.
send_http_response(Socket, Transport, Content, CxnType) ->
send_http_response(Socket, Transport, 200, Content, CxnType).
send_http_response(Socket, Transport, StatusCode, Content, keep_alive) ->
ContentLength = case is_binary(Content) of
true -> byte_size(Content);
false ->
lists:foldl(fun(X, AccIn) ->
byte_size(X) + AccIn
end, 0, Content)
end,
StatusCodeBin = http_status_code_to_binary(StatusCode),
Response = [<<"HTTP/1.1 ">>, StatusCodeBin, <<"\r\ncontent-length: ">>,
integer_to_binary(ContentLength),
<<"\r\ncontent-type: application/json\r\n">>,
<<"connection: keep-alive\r\n">>,
<<"\r\n">>,
Content],
Transport:send(Socket, Response);
send_http_response(Socket, Transport, StatusCode, Content, _) ->
ContentLength = case is_binary(Content) of
true -> byte_size(Content);
false ->
lists:foldl(fun(X, AccIn) ->
byte_size(X) + AccIn
end, 0, Content)
end,
StatusCodeBin = http_status_code_to_binary(StatusCode),
Response = [<<"HTTP/1.1 ">>, StatusCodeBin, <<"\r\ncontent-length: ">>,
integer_to_binary(ContentLength),
<<"\r\ncontent-type: application/json\r\n">>,
<<"\r\n">>,
Content],
Transport:send(Socket, Response).
minimal_loop(Socket , Transport , Opts ) - >
MaxBodyBytes = proplists : get_value(max_read_length , Opts , 5000 ) ,
case Transport : recv(Socket , 0 , MaxBodyBytes ) of
{ ok , Data } when byte_size(Data ) > 4 - >
= byte_size(Content ) ,
Response = [ < < " HTTP/1.1 200 OK\r\ncontent - length : " > > ,
ok = Transport : ) ;
request_content_length(LowerRequestHeaders) ->
case string:find(LowerRequestHeaders, <<"content-length:">>) of
<<"content-length:", RestContentLength/binary>> ->
case string:split(RestContentLength, <<"\r\n">>) of
[ContentLengthBin | _] ->
binary_to_integer(string:trim(ContentLengthBin));
_ ->
-1
end;
_ ->
0
end.
data_bytes(Data) ->
lists:foldl(fun(V, AccIn) -> byte_size(V) + AccIn end, 0, Data).
extract_function_and_params(RestPath) ->
[FunctionName | InputQsParams] = string:split(RestPath, <<"?">>),
QsParamsBin = case InputQsParams of
[] -> <<>>;
[InputQsParamBin] -> http_uri:decode(InputQsParamBin)
end,
{FunctionName, QsParamsBin}.
is_keepalive(LowerRequestHeaders) ->
case string:find(LowerRequestHeaders, <<"connection: keep-alive">>) of
nomatch -> false;
_ -> true
end.
process_http_post(RequestBody, RestPath, Data, PartialDataList, MaxBodyBytes, LowerRequestHeaders, ContentLength) ->
case ContentLength > byte_size(RequestBody) of
true ->
NewPartialDataList = [Data | PartialDataList],
TotalBytes = data_bytes(NewPartialDataList),
case TotalBytes > MaxBodyBytes of
false ->
loop(Socket , Transport , Opts , IpBinary , PortBinary ,
{ok, [Data | PartialDataList]};
true ->
{error, close}
end;
false ->
{FunctionName, QsParamsBin} = extract_function_and_params(RestPath),
QsParamParts = string:split(QsParamsBin, <<"&">>, all),
erlang:erase(?LOG_ENV_KEY),
IsAllowed = beamparticle_config:is_function_allowed(FunctionName, highperf_http_rest),
{StatusCode, Content2} = case IsAllowed of
true ->
case lists:filter(fun(E) -> E =:= <<"env=2">> end, QsParamParts) of
[] ->
erlang:put(?CALL_ENV_KEY, prod);
_ ->
erlang:put(?CALL_ENV_KEY, stage)
end,
Arguments = [RequestBody,
<<"{\"qs\": \"",
QsParamsBin/binary,
"\"}">>],
Content = beamparticle_dynamic:get_raw_result(
FunctionName, Arguments),
beamparticle_dynamic:erase_config(),
{200, Content};
false ->
{404, <<>>}
end,
case is_keepalive(LowerRequestHeaders) of
false ->
send_http_response(Socket , Transport , Content , close ) ,
ok = Transport : ) ;
{ok, {close, StatusCode, Content2}};
true ->
send_http_response(Socket , Transport , Content , keep_alive ) ,
loop(Socket , Transport , Opts , IpBinary , PortBinary , [ ] )
{ok, {loop, StatusCode, Content2}}
end
end.
http_status_code_to_binary(200) -> <<"200 OK">>;
http_status_code_to_binary(404) -> <<"404 Not Found">>;
http_status_code_to_binary(V) -> iolist_to_binary([integer_to_binary(V), <<" Something">>]).
|
c6a99af199c8733744ad6446fa03bc171a3bc70616e291254606be2842f3839a | imrehg/ypsilon | hseparator.scm | #!nobacktrace
Ypsilon Scheme System
Copyright ( c ) 2004 - 2009 Y.FUJITA / LittleWing Company Limited .
See license.txt for terms and conditions of use .
(library (ypsilon gtk hseparator)
(export gtk_hseparator_get_type
gtk_hseparator_new)
(import (rnrs) (ypsilon ffi))
(define lib-name
(cond (on-linux "libgtk-x11-2.0.so.0")
(on-sunos "libgtk-x11-2.0.so.0")
(on-freebsd "libgtk-x11-2.0.so.0")
(on-openbsd "libgtk-x11-2.0.so.0")
(on-darwin "Gtk.framework/Gtk")
(on-windows "libgtk-win32-2.0-0.dll")
(else
(assertion-violation #f "can not locate GTK library, unknown operating system"))))
(define lib (load-shared-object lib-name))
(define-syntax define-function
(syntax-rules ()
((_ ret name args)
(define name (c-function lib lib-name ret name args)))))
(define-syntax define-function/va_list
(syntax-rules ()
((_ ret name args)
(define name (lambda x (assertion-violation 'name "va_list argument not supported"))))))
GType gtk_hseparator_get_type ( void )
(define-function unsigned-long gtk_hseparator_get_type ())
GtkWidget * gtk_hseparator_new ( void )
(define-function void* gtk_hseparator_new ())
) ;[end]
| null | https://raw.githubusercontent.com/imrehg/ypsilon/e57a06ef5c66c1a88905b2be2fa791fa29848514/sitelib/ypsilon/gtk/hseparator.scm | scheme | [end] | #!nobacktrace
Ypsilon Scheme System
Copyright ( c ) 2004 - 2009 Y.FUJITA / LittleWing Company Limited .
See license.txt for terms and conditions of use .
(library (ypsilon gtk hseparator)
(export gtk_hseparator_get_type
gtk_hseparator_new)
(import (rnrs) (ypsilon ffi))
(define lib-name
(cond (on-linux "libgtk-x11-2.0.so.0")
(on-sunos "libgtk-x11-2.0.so.0")
(on-freebsd "libgtk-x11-2.0.so.0")
(on-openbsd "libgtk-x11-2.0.so.0")
(on-darwin "Gtk.framework/Gtk")
(on-windows "libgtk-win32-2.0-0.dll")
(else
(assertion-violation #f "can not locate GTK library, unknown operating system"))))
(define lib (load-shared-object lib-name))
(define-syntax define-function
(syntax-rules ()
((_ ret name args)
(define name (c-function lib lib-name ret name args)))))
(define-syntax define-function/va_list
(syntax-rules ()
((_ ret name args)
(define name (lambda x (assertion-violation 'name "va_list argument not supported"))))))
GType gtk_hseparator_get_type ( void )
(define-function unsigned-long gtk_hseparator_get_type ())
GtkWidget * gtk_hseparator_new ( void )
(define-function void* gtk_hseparator_new ())
|
cb8f84c61a3063f099dc100f9441cb11d8c1db75a6afb2b86a2ac3eab0b4f9c5 | input-output-hk/Alonzo-testnet | plutus-helloworld-bytestring.hs |
import Prelude
import System.Environment
import Cardano.Api
import Cardano.Api.Shelley
import Data.Aeson (encode)
import qualified Data.ByteString.Short as SBS
import qualified Plutus.V1.Ledger.Api as Plutus
import Plutus.V1.Ledger.Contexts
import PlutusTx.Prelude as P (BuiltinByteString)
import Cardano.PlutusExample.HelloWorldByteStringParametric (hello, helloWorldSBS, helloWorldSerialised)
main :: IO ()
main = do
args <- getArgs
let nargs = length args
let scriptname = if nargs > 1 then args!!1 else "result.plutus"
putStrLn $ "Writing output to: " ++ scriptname
writePlutusScript hello scriptname helloWorldSerialised helloWorldSBS
writePlutusScript :: P.BuiltinByteString -> FilePath -> PlutusScript PlutusScriptV1 -> SBS.ShortByteString -> IO ()
writePlutusScript datum filename scriptSerial scriptSBS =
do
case Plutus.defaultCostModelParams of
Just m ->
let
pData = Plutus.toData datum
(logout, e) = Plutus.evaluateScriptCounting Plutus.Verbose m scriptSBS
[ pData
, Plutus.toData ()
, Plutus.toData dummyContext ]
in do print ("Log output" :: String) >> print logout
case e of
Left evalErr -> print ("Eval Error" :: String) >> print evalErr
Right exbudget -> print ("Ex Budget" :: String) >> print exbudget
print $ "Datum value: " <> encode (scriptDataToJson ScriptDataJsonDetailedSchema $ fromPlutusData pData)
Nothing -> error "defaultCostModelParams failed"
result <- writeFileTextEnvelope filename Nothing scriptSerial
case result of
Left err -> print $ displayError err
Right () -> return ()
dummyContext :: ScriptContext
dummyContext = ScriptContext dummyTxInfo (Spending dummyOutRef)
where
dummyOutRef :: TxOutRef
dummyOutRef = TxOutRef (Plutus.TxId "") 0
dummyTxInfo :: TxInfo
dummyTxInfo = TxInfo
{ txInfoInputs = []
, txInfoOutputs = []
, txInfoFee = mempty
, txInfoMint = mempty
, txInfoDCert = []
, txInfoWdrl = []
, txInfoValidRange = Plutus.always
, txInfoSignatories = []
, txInfoData = []
, txInfoId = Plutus.TxId ""
}
| null | https://raw.githubusercontent.com/input-output-hk/Alonzo-testnet/ddfe8f077570c6a7530123dfa29feb171061f3db/resources/plutus-sources/plutus-helloworld/app/plutus-helloworld-bytestring.hs | haskell |
import Prelude
import System.Environment
import Cardano.Api
import Cardano.Api.Shelley
import Data.Aeson (encode)
import qualified Data.ByteString.Short as SBS
import qualified Plutus.V1.Ledger.Api as Plutus
import Plutus.V1.Ledger.Contexts
import PlutusTx.Prelude as P (BuiltinByteString)
import Cardano.PlutusExample.HelloWorldByteStringParametric (hello, helloWorldSBS, helloWorldSerialised)
main :: IO ()
main = do
args <- getArgs
let nargs = length args
let scriptname = if nargs > 1 then args!!1 else "result.plutus"
putStrLn $ "Writing output to: " ++ scriptname
writePlutusScript hello scriptname helloWorldSerialised helloWorldSBS
writePlutusScript :: P.BuiltinByteString -> FilePath -> PlutusScript PlutusScriptV1 -> SBS.ShortByteString -> IO ()
writePlutusScript datum filename scriptSerial scriptSBS =
do
case Plutus.defaultCostModelParams of
Just m ->
let
pData = Plutus.toData datum
(logout, e) = Plutus.evaluateScriptCounting Plutus.Verbose m scriptSBS
[ pData
, Plutus.toData ()
, Plutus.toData dummyContext ]
in do print ("Log output" :: String) >> print logout
case e of
Left evalErr -> print ("Eval Error" :: String) >> print evalErr
Right exbudget -> print ("Ex Budget" :: String) >> print exbudget
print $ "Datum value: " <> encode (scriptDataToJson ScriptDataJsonDetailedSchema $ fromPlutusData pData)
Nothing -> error "defaultCostModelParams failed"
result <- writeFileTextEnvelope filename Nothing scriptSerial
case result of
Left err -> print $ displayError err
Right () -> return ()
dummyContext :: ScriptContext
dummyContext = ScriptContext dummyTxInfo (Spending dummyOutRef)
where
dummyOutRef :: TxOutRef
dummyOutRef = TxOutRef (Plutus.TxId "") 0
dummyTxInfo :: TxInfo
dummyTxInfo = TxInfo
{ txInfoInputs = []
, txInfoOutputs = []
, txInfoFee = mempty
, txInfoMint = mempty
, txInfoDCert = []
, txInfoWdrl = []
, txInfoValidRange = Plutus.always
, txInfoSignatories = []
, txInfoData = []
, txInfoId = Plutus.TxId ""
}
| |
0eb6175a78863abf4a7da43cbd5e7f039e934c37c542ff50c11af4a610c4b502 | ku-fpg/haskino | HelloLawrence.hs | {-# OPTIONS_GHC -fplugin=System.Hardware.Haskino.ShallowDeepPlugin #-}
-------------------------------------------------------------------------------
-- |
Module : System . Hardware . Haskino . SamplePrograms . . ScheduledLCDE
Copyright : ( c ) University of Kansas
-- License : BSD3
-- Stability : experimental
--
-- Creates a scheduled task on the Arduino which alternates writing 'Rock',
' Chalk ' and ' ' to the LCD screen every second and a half .
Note : This example requires a Mega2560 board , as the Uno boards do not have
enough RAM .
-------------------------------------------------------------------------------
module Main where
module System . Hardware . Haskino . SamplePrograms . Rewrite . HelloLawrence where
import System.Hardware.Haskino
import System.Hardware.Haskino.Utils
import System.Hardware.Haskino.SamplePrograms.Rewrite.LCD
import Data.Boolean
import Data.Word
hitachi :: LCDController
hitachi = Hitachi44780 { lcdRS = 8
, lcdEN = 9
, lcdD4 = 4
, lcdD5 = 5
, lcdD6 = 6
, lcdD7 = 7
, lcdBL = Just 10
, lcdRows = 2
, lcdCols = 16
, dotMode5x10 = false
}
Task which will execute on Arduino , write an ' Rock ' to the display , delay a
second , write a ' Chalk ' to the display , delay a second , write a ' Jayhawk '
-- to the display and repeat
theProgram :: Arduino ()
theProgram = do
lcd <- lcdRegister hitachi
lcdBacklightOn lcd
helloLawrence lcd
helloLawrence :: LCD -> Arduino ()
helloLawrence lcd = do
helloLawrence'
where
helloLawrence' :: Arduino ()
helloLawrence' = do
lcdHome lcd
lcdWrite lcd $ litString "Rock "
delayMillis 1500
lcdHome lcd
lcdWrite lcd $ litString "Chalk "
delayMillis 1500
lcdHome lcd
lcdWrite lcd $ litString "Jayhawk"
delayMillis 1500
helloLawrence'
-- Execute this function to run program with firmware interpreter
lcdExample :: IO ()
lcdExample = withArduino False "/dev/cu.usbmodem1421" theProgram
main :: IO ()
main = compileProgram theProgram "helloLawrence.ino"
| null | https://raw.githubusercontent.com/ku-fpg/haskino/9a0709c92c2da9b9371e292b00fd076e5539eb18/legacy/Rewrite/HelloLawrence.hs | haskell | # OPTIONS_GHC -fplugin=System.Hardware.Haskino.ShallowDeepPlugin #
-----------------------------------------------------------------------------
|
License : BSD3
Stability : experimental
Creates a scheduled task on the Arduino which alternates writing 'Rock',
-----------------------------------------------------------------------------
to the display and repeat
Execute this function to run program with firmware interpreter | Module : System . Hardware . Haskino . SamplePrograms . . ScheduledLCDE
Copyright : ( c ) University of Kansas
' Chalk ' and ' ' to the LCD screen every second and a half .
Note : This example requires a Mega2560 board , as the Uno boards do not have
enough RAM .
module Main where
module System . Hardware . Haskino . SamplePrograms . Rewrite . HelloLawrence where
import System.Hardware.Haskino
import System.Hardware.Haskino.Utils
import System.Hardware.Haskino.SamplePrograms.Rewrite.LCD
import Data.Boolean
import Data.Word
hitachi :: LCDController
hitachi = Hitachi44780 { lcdRS = 8
, lcdEN = 9
, lcdD4 = 4
, lcdD5 = 5
, lcdD6 = 6
, lcdD7 = 7
, lcdBL = Just 10
, lcdRows = 2
, lcdCols = 16
, dotMode5x10 = false
}
Task which will execute on Arduino , write an ' Rock ' to the display , delay a
second , write a ' Chalk ' to the display , delay a second , write a ' Jayhawk '
theProgram :: Arduino ()
theProgram = do
lcd <- lcdRegister hitachi
lcdBacklightOn lcd
helloLawrence lcd
helloLawrence :: LCD -> Arduino ()
helloLawrence lcd = do
helloLawrence'
where
helloLawrence' :: Arduino ()
helloLawrence' = do
lcdHome lcd
lcdWrite lcd $ litString "Rock "
delayMillis 1500
lcdHome lcd
lcdWrite lcd $ litString "Chalk "
delayMillis 1500
lcdHome lcd
lcdWrite lcd $ litString "Jayhawk"
delayMillis 1500
helloLawrence'
lcdExample :: IO ()
lcdExample = withArduino False "/dev/cu.usbmodem1421" theProgram
main :: IO ()
main = compileProgram theProgram "helloLawrence.ino"
|
e9821019bf956061139c6667e438fde0dc8fc920b48761f118dbb2cf543a4a0d | bet365/soap | soap_cowboy_protocol.erl | %%
%% %CopyrightBegin%
%%
Copyright Hillside Technology Ltd. 2016 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%% SOAP handler for Cowboy. Takes care of the SOAP specific stuff such as
%%% decoding and encoding the xml, some error handling on the level of the soap
%%% protocol, etc.
%%%
%%% This module implements the fucntionality that is shared between cowboy
version 1.0 and 2.0 . There are separate modules for the 2 versions that
%%% act as entry point and that implement some parts that are version specific.
-module(soap_cowboy_protocol).
-export([upgrade/6]).
-record(state, {
env :: cowboy_middleware:env(),
handler = undefined :: undefined | module()
}).
-type cowboy_req() :: cowboy_req:req().
-type cowboy_env() :: cowboy_middleware:env().
%%% ============================================================================
%%% Exported functions
%%% ============================================================================
%% This is the callback of the cowboy_sub_protocol behaviour.
%%
%% This is shared between Cowboy versions, 'Version_module' contanins the
%% specifics for the version. The version specific modules are the entry
%% point, they refer to this module, and this module refers back to them
%% for some specifics.
-spec upgrade(Cowboy_req::cowboy_req(), Env::cowboy_env(),
Soap_handler::module(), {Implementation_handler::module(), Options::any()},
Version::atom(),
Version_module::module()) -> {ok, cowboy_req(), cowboy_env()}.
upgrade(Cowboy_req, Env, _, {Handler, Options}, Version, Version_module) ->
Cowboy_state = #state{env = Env, handler = Handler},
case soap_server_handler:new_req(Handler, Version, Options, Cowboy_req) of
{continue, Soap_req} ->
check_conformance(Soap_req, Cowboy_req, Cowboy_state, Version_module);
{ok, _StatusCode, _Headers, _Body, _Server_req} = Error ->
make_response(Error, Cowboy_state, Version_module)
end.
%%% ============================================================================
Internal functions
%%% ============================================================================
check_conformance(Soap_req, Cowboy_req, Cowboy_state, Version_module) ->
%% collect some information about the protocol, so that
%% conformance can be checked.
Soap_req2 = Version_module:enrich_req(Cowboy_req, Soap_req),
case soap_server_handler:check_http_conformance(Soap_req2) of
{continue, Soap_req3} ->
handle_xml(Soap_req3, Cowboy_state, Version_module);
{ok, _StatusCode, _Headers, _Body, _Server_req} = Error ->
make_response(Error, Cowboy_state, Version_module)
end.
handle_xml(Soap_req, Cowboy_state, Version_module) ->
Cowboy_req = soap_req:server_req(Soap_req),
{ok, Message, Cowboy_req2} = cowboy_req:body(Cowboy_req),
Soap_req2 = soap_req:set_server_req(Soap_req, Cowboy_req2),
Soap_req3 = soap_req:set_http_body(Soap_req2, Message),
Content_type = soap_req:content_type(Soap_req3),
%% get the soap message (Xml) from the request body
{Xml, Soap_req4} =
case maybe_content_type(Content_type) of
"multipart/related" ->
soap with attachments , the message is in the first part
try
[{Mime_headers, Body} | Attachments] =
mime_decode(Message, Content_type),
{Body,
soap_req:set_mime_headers(
soap_req:set_req_attachments(Soap_req3, Attachments),
Mime_headers)}
catch
_Class:_Type ->
{Message, Soap_req3}
end;
_ ->
{Message, Soap_req3}
end,
Handler_resp = soap_server_handler:handle_message(Xml, Soap_req4),
make_response(Handler_resp, Cowboy_state, Version_module).
maybe_content_type(undefined) ->
undefined;
maybe_content_type(Content_type) ->
string:to_lower(lists:sublist(Content_type, 17)).
mime_decode(Message, Content_type_header) ->
Mime_parameters = lists:nthtail(17, Content_type_header),
Parsed_parameters = soap_mime:parse_mime_parameters(Mime_parameters),
Boundary = proplists:get_value("boundary", Parsed_parameters),
soap_mime:decode(Message, list_to_binary(Boundary)).
make_response({ok, StatusCode, Headers, Body, Cowboy_req},
#state{env = Env, handler = Handler}, Version_module) ->
Cowboy_req2 = set_headers(Headers, Cowboy_req),
Cowboy_req3 = cowboy_req:set_resp_body(Body, Cowboy_req2),
Version_module:respond(Cowboy_req3, Env, Handler, StatusCode).
set_headers(Headers, Cowboy_req) ->
lists:foldl(fun({Name, Value}, R) ->
cowboy_req:set_resp_header(to_binary(Name), Value, R)
end,
Cowboy_req, Headers).
to_binary(N) when is_binary(N) ->
N;
to_binary(N) when is_list(N) ->
erlang:list_to_binary(N).
| null | https://raw.githubusercontent.com/bet365/soap/856b5c418d8d40a6b5bcbbe3fd390c6a0b8d4f18/src/soap_cowboy_protocol.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
SOAP handler for Cowboy. Takes care of the SOAP specific stuff such as
decoding and encoding the xml, some error handling on the level of the soap
protocol, etc.
This module implements the fucntionality that is shared between cowboy
act as entry point and that implement some parts that are version specific.
============================================================================
Exported functions
============================================================================
This is the callback of the cowboy_sub_protocol behaviour.
This is shared between Cowboy versions, 'Version_module' contanins the
specifics for the version. The version specific modules are the entry
point, they refer to this module, and this module refers back to them
for some specifics.
============================================================================
============================================================================
collect some information about the protocol, so that
conformance can be checked.
get the soap message (Xml) from the request body | Copyright Hillside Technology Ltd. 2016 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
version 1.0 and 2.0 . There are separate modules for the 2 versions that
-module(soap_cowboy_protocol).
-export([upgrade/6]).
-record(state, {
env :: cowboy_middleware:env(),
handler = undefined :: undefined | module()
}).
-type cowboy_req() :: cowboy_req:req().
-type cowboy_env() :: cowboy_middleware:env().
-spec upgrade(Cowboy_req::cowboy_req(), Env::cowboy_env(),
Soap_handler::module(), {Implementation_handler::module(), Options::any()},
Version::atom(),
Version_module::module()) -> {ok, cowboy_req(), cowboy_env()}.
upgrade(Cowboy_req, Env, _, {Handler, Options}, Version, Version_module) ->
Cowboy_state = #state{env = Env, handler = Handler},
case soap_server_handler:new_req(Handler, Version, Options, Cowboy_req) of
{continue, Soap_req} ->
check_conformance(Soap_req, Cowboy_req, Cowboy_state, Version_module);
{ok, _StatusCode, _Headers, _Body, _Server_req} = Error ->
make_response(Error, Cowboy_state, Version_module)
end.
Internal functions
check_conformance(Soap_req, Cowboy_req, Cowboy_state, Version_module) ->
Soap_req2 = Version_module:enrich_req(Cowboy_req, Soap_req),
case soap_server_handler:check_http_conformance(Soap_req2) of
{continue, Soap_req3} ->
handle_xml(Soap_req3, Cowboy_state, Version_module);
{ok, _StatusCode, _Headers, _Body, _Server_req} = Error ->
make_response(Error, Cowboy_state, Version_module)
end.
handle_xml(Soap_req, Cowboy_state, Version_module) ->
Cowboy_req = soap_req:server_req(Soap_req),
{ok, Message, Cowboy_req2} = cowboy_req:body(Cowboy_req),
Soap_req2 = soap_req:set_server_req(Soap_req, Cowboy_req2),
Soap_req3 = soap_req:set_http_body(Soap_req2, Message),
Content_type = soap_req:content_type(Soap_req3),
{Xml, Soap_req4} =
case maybe_content_type(Content_type) of
"multipart/related" ->
soap with attachments , the message is in the first part
try
[{Mime_headers, Body} | Attachments] =
mime_decode(Message, Content_type),
{Body,
soap_req:set_mime_headers(
soap_req:set_req_attachments(Soap_req3, Attachments),
Mime_headers)}
catch
_Class:_Type ->
{Message, Soap_req3}
end;
_ ->
{Message, Soap_req3}
end,
Handler_resp = soap_server_handler:handle_message(Xml, Soap_req4),
make_response(Handler_resp, Cowboy_state, Version_module).
maybe_content_type(undefined) ->
undefined;
maybe_content_type(Content_type) ->
string:to_lower(lists:sublist(Content_type, 17)).
mime_decode(Message, Content_type_header) ->
Mime_parameters = lists:nthtail(17, Content_type_header),
Parsed_parameters = soap_mime:parse_mime_parameters(Mime_parameters),
Boundary = proplists:get_value("boundary", Parsed_parameters),
soap_mime:decode(Message, list_to_binary(Boundary)).
make_response({ok, StatusCode, Headers, Body, Cowboy_req},
#state{env = Env, handler = Handler}, Version_module) ->
Cowboy_req2 = set_headers(Headers, Cowboy_req),
Cowboy_req3 = cowboy_req:set_resp_body(Body, Cowboy_req2),
Version_module:respond(Cowboy_req3, Env, Handler, StatusCode).
set_headers(Headers, Cowboy_req) ->
lists:foldl(fun({Name, Value}, R) ->
cowboy_req:set_resp_header(to_binary(Name), Value, R)
end,
Cowboy_req, Headers).
to_binary(N) when is_binary(N) ->
N;
to_binary(N) when is_list(N) ->
erlang:list_to_binary(N).
|
bbc4f3435de299c4209fdf6c063e44a859e9e105c8dd8898030a5c6d808a33bd | informatimago/lisp | thread.lisp | -*- mode : lisp;coding : utf-8 -*-
;;;;**************************************************************************
FILE : thread.lisp
;;;;LANGUAGE: Common-Lisp
;;;;SYSTEM: Common-Lisp
USER - INTERFACE :
;;;;DESCRIPTION
;;;;
;;;; Exports threads manipulation commands.
;;;;
< PJB > < >
MODIFICATIONS
2015 - 02 - 23 < PJB > Added this header .
;;;;LEGAL
AGPL3
;;;;
Copyright 2015 - 2016
;;;;
;;;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;;;; (at your option) any later version.
;;;;
;;;; This program is distributed in the hope that it will be useful,
;;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details .
;;;;
You should have received a copy of the GNU Affero General Public License
;;;; along with this program. If not, see </>.
;;;;**************************************************************************
(eval-when (:compile-toplevel :load-toplevel :execute)
(setf *readtable* (copy-readtable nil)))
(defpackage "COM.INFORMATIMAGO.TOOLS.THREAD"
(:use "COMMON-LISP"
"BORDEAUX-THREADS")
(:export "LIST-THREADS" "KILL-THREAD" "KILL-THREADS"
"PERIODICALLY" "DO-PERIODICALLY" "DONE"))
(in-package "COM.INFORMATIMAGO.TOOLS.THREAD")
(defun periodically (period thunk &key (name "Peridic Task") initially finally)
(bt:make-thread (lambda ()
(when initially (funcall initially))
(catch :exit-periodically
(loop (sleep period) (funcall thunk)))
(when finally (funcall finally)))
:name name
:initial-bindings (list (cons '*standard-output* *standard-output*)
(cons '*standard-input* *standard-input*)
(cons '*error-output* *error-output*)
(cons '*trace-output* *trace-output*)
(cons '*terminal-io* *terminal-io*))))
(defmacro do-periodically ((period &key (name "Periodic Task") initially finally)
&body body)
`(periodically ,period (flet ((done () (throw :exit-periodically nil)))
(lambda () ,@body))
:name ,name
:initially (lambda () ,initially)
:finally (lambda () ,finally)))
(defun list-threads (&key (threads (bt:all-threads)) (stream *standard-output*))
(loop
:named menu
:for i :from 1
:for thread :in threads
:do (format stream "~&~2D) ~A~%" i thread))
(values))
(defun kill-threads (&optional (*query-io* *query-io*))
(loop :while (kill-thread)))
(defun kill-thread (&optional thread (*query-io* *query-io*))
(if thread
(progn
(bt:destroy-thread thread)
t)
(loop
:named select
:do (let ((threads (bt:all-threads)))
(list-threads :threads threads :stream *query-io*)
(format *query-io* "~&Number of thread to kill (or 0 to abort): ")
(let ((choice (let ((*read-eval* nil)) (read *query-io*))))
(cond
((not (integerp choice)))
((zerop choice)
(format *query-io* "~&Aborted.~%")
(return-from select nil))
((<= 1 choice (length threads))
(bt:destroy-thread (nth (1- choice) threads))
(return-from select t)))
(format *query-io* "~&Invalid answer, try again.~%"))))))
( loop : repeat 3 : do ( bt : make - thread ( lambda ( ) ( sleep ) ) ) )
;; (kill-thread)
;;;; THE END ;;;;
| null | https://raw.githubusercontent.com/informatimago/lisp/571af24c06ba466e01b4c9483f8bb7690bc46d03/tools/thread.lisp | lisp | coding : utf-8 -*-
**************************************************************************
LANGUAGE: Common-Lisp
SYSTEM: Common-Lisp
DESCRIPTION
Exports threads manipulation commands.
LEGAL
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
along with this program. If not, see </>.
**************************************************************************
(kill-thread)
THE END ;;;; | FILE : thread.lisp
USER - INTERFACE :
< PJB > < >
MODIFICATIONS
2015 - 02 - 23 < PJB > Added this header .
AGPL3
Copyright 2015 - 2016
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
(eval-when (:compile-toplevel :load-toplevel :execute)
(setf *readtable* (copy-readtable nil)))
(defpackage "COM.INFORMATIMAGO.TOOLS.THREAD"
(:use "COMMON-LISP"
"BORDEAUX-THREADS")
(:export "LIST-THREADS" "KILL-THREAD" "KILL-THREADS"
"PERIODICALLY" "DO-PERIODICALLY" "DONE"))
(in-package "COM.INFORMATIMAGO.TOOLS.THREAD")
(defun periodically (period thunk &key (name "Peridic Task") initially finally)
(bt:make-thread (lambda ()
(when initially (funcall initially))
(catch :exit-periodically
(loop (sleep period) (funcall thunk)))
(when finally (funcall finally)))
:name name
:initial-bindings (list (cons '*standard-output* *standard-output*)
(cons '*standard-input* *standard-input*)
(cons '*error-output* *error-output*)
(cons '*trace-output* *trace-output*)
(cons '*terminal-io* *terminal-io*))))
(defmacro do-periodically ((period &key (name "Periodic Task") initially finally)
&body body)
`(periodically ,period (flet ((done () (throw :exit-periodically nil)))
(lambda () ,@body))
:name ,name
:initially (lambda () ,initially)
:finally (lambda () ,finally)))
(defun list-threads (&key (threads (bt:all-threads)) (stream *standard-output*))
(loop
:named menu
:for i :from 1
:for thread :in threads
:do (format stream "~&~2D) ~A~%" i thread))
(values))
(defun kill-threads (&optional (*query-io* *query-io*))
(loop :while (kill-thread)))
(defun kill-thread (&optional thread (*query-io* *query-io*))
(if thread
(progn
(bt:destroy-thread thread)
t)
(loop
:named select
:do (let ((threads (bt:all-threads)))
(list-threads :threads threads :stream *query-io*)
(format *query-io* "~&Number of thread to kill (or 0 to abort): ")
(let ((choice (let ((*read-eval* nil)) (read *query-io*))))
(cond
((not (integerp choice)))
((zerop choice)
(format *query-io* "~&Aborted.~%")
(return-from select nil))
((<= 1 choice (length threads))
(bt:destroy-thread (nth (1- choice) threads))
(return-from select t)))
(format *query-io* "~&Invalid answer, try again.~%"))))))
( loop : repeat 3 : do ( bt : make - thread ( lambda ( ) ( sleep ) ) ) )
|
70e25e26b987a75540b77a7fc944bd9dd6cbb00e96baa746e2e1ddab6bf22044 | modular-macros/ocaml-macros | t310-alloc-2.ml | open Lib;;
let v = Array.make 200000 2 in
let t = ref 0 in
Array.iter (fun x -> t := !t + x) v;
if !t <> 400000 then raise Not_found
;;
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7
9 BRANCH 746
11 RESTART
12 GRAB 1
14 ACC0
15 BRANCHIFNOT 28
17 ACC1
18 PUSHACC1
19 GETFIELD1
20 PUSHOFFSETCLOSURE0
21 APPLY2
22 PUSHACC1
23 GETFIELD0
24 MAKEBLOCK2 0
26 RETURN 2
28 ACC1
29 RETURN 2
31 RESTART
32 GRAB 3
34 CONST0
35 PUSHACC4
36 LEINT
37 BRANCHIFNOT 42
39
40 RETURN 4
42 ACC3
43 PUSHACC3
44 PUSHACC3
45 PUSHACC3
46 C_CALL4 caml_input
48 PUSHCONST0
49 PUSHACC1
50 EQ
51 BRANCHIFNOT 58
53 End_of_file
55 MAKEBLOCK1 0
57 RAISE
58 ACC0
59 PUSHACC5
60 SUBINT
61 PUSHACC1
62 PUSHACC5
63 ADDINT
64 PUSHACC4
65 PUSHACC4
66 PUSHOFFSETCLOSURE0
67 APPTERM 4 , 9
70 ACC0
71 C_CALL1 caml_input_scan_line
73 PUSHCONST0
74 PUSHACC1
75 EQ
76 BRANCHIFNOT 83
78 End_of_file
80 MAKEBLOCK1 0
82 RAISE
83
84 PUSHACC1
85
86 BRANCHIFNOT 107
88 ACC0
89 OFFSETINT -1
91 C_CALL1 create_string
93 PUSHACC1
94 OFFSETINT -1
96 PUSHCONST0
97 PUSHACC2
98 PUSHACC5
99 C_CALL4 caml_input
101 ACC2
102 C_CALL1 caml_input_char
104 ACC0
105 RETURN 3
107 ACC0
108 NEGINT
109 C_CALL1 create_string
111 PUSHACC1
112 NEGINT
113 PUSHCONST0
114 PUSHACC2
115 PUSHACC5
116 C_CALL4 caml_input
118
119 PUSHTRAP 130
121 ACC6
122 PUSHOFFSETCLOSURE0
123 APPLY1
124 PUSHACC5
125 PUSHENVACC1
126 APPLY2
127 POPTRAP
128 RETURN 3
130 PUSHGETGLOBAL End_of_file
132 PUSHACC1
133 GETFIELD0
134 EQ
135 BRANCHIFNOT 140
137 ACC1
138 RETURN 4
140 ACC0
141 RAISE
142 ACC0
143 C_CALL1 caml_flush
145 RETURN 1
147 RESTART
148 GRAB 1
150 ACC1
151 PUSHACC1
152 C_CALL2 caml_output_char
154 RETURN 2
156 RESTART
157 GRAB 1
159 ACC1
160 PUSHACC1
161 C_CALL2 caml_output_char
163 RETURN 2
165 RESTART
166 GRAB 1
168 ACC1
169 PUSHACC1
170 C_CALL2 caml_output_int
172 RETURN 2
174 RESTART
175 GRAB 1
177 ACC1
178 PUSHACC1
179 C_CALL2 caml_seek_out
181 RETURN 2
183 ACC0
184 C_CALL1 caml_pos_out
186 RETURN 1
188 ACC0
189 C_CALL1 caml_channel_size
191 RETURN 1
193 RESTART
194 GRAB 1
196 ACC1
197 PUSHACC1
198 C_CALL2 caml_set_binary_mode
200 RETURN 2
202 ACC0
203 C_CALL1 caml_input_char
205 RETURN 1
207 ACC0
208 C_CALL1 caml_input_char
210 RETURN 1
212 ACC0
213 C_CALL1 caml_input_int
215 RETURN 1
217 ACC0
218 C_CALL1 input_value
220 RETURN 1
222 RESTART
223 GRAB 1
225 ACC1
226 PUSHACC1
227 C_CALL2 caml_seek_in
229 RETURN 2
231 ACC0
232 C_CALL1 caml_pos_in
234 RETURN 1
236 ACC0
237 C_CALL1 caml_channel_size
239 RETURN 1
241 ACC0
242 C_CALL1 caml_close_channel
244 RETURN 1
246 RESTART
247 GRAB 1
249 ACC1
250 PUSHACC1
251 C_CALL2 caml_set_binary_mode
253 RETURN 2
255 CONST0
256 PUSHENVACC1
257 APPLY1
258 ACC0
259 C_CALL1 sys_exit
261 RETURN 1
263 CONST0
264 PUSHENVACC1
265 GETFIELD0
266 APPTERM1 2
268 CONST0
269 PUSHENVACC1
270 APPLY1
271 CONST0
272 PUSHENVACC2
273 APPTERM1 2
275 ENVACC1
276 GETFIELD0
277 PUSHACC0
278 PUSHACC2
279 CLOSURE 2 , 268
282 PUSHENVACC1
283 SETFIELD0
284 RETURN 2
286 ENVACC1
287 C_CALL1 caml_flush
289 ENVACC2
290 C_CALL1 caml_flush
292 RETURN 1
294 CONST0
295 PUSHENVACC1
296 APPLY1
297 C_CALL1 float_of_string
299 RETURN 1
301 CONST0
302 PUSHENVACC1
303 APPLY1
304 C_CALL1 int_of_string
306 RETURN 1
308 ENVACC2
309 C_CALL1 caml_flush
311 ENVACC1
312 PUSHENVACC3
313 APPTERM1 2
315 CONSTINT 13
317 PUSHENVACC1
318 C_CALL2 caml_output_char
320 ENVACC1
321 C_CALL1 caml_flush
323 RETURN 1
325 ACC0
326 PUSHENVACC1
327 PUSHENVACC2
328 APPLY2
329 CONSTINT 13
331 PUSHENVACC1
332 C_CALL2 caml_output_char
334 ENVACC1
335 C_CALL1 caml_flush
337 RETURN 1
339 ACC0
340 PUSHENVACC1
341 APPLY1
342 PUSHENVACC2
343 PUSHENVACC3
344 APPTERM2 3
346 ACC0
347 PUSHENVACC1
348 APPLY1
349 PUSHENVACC2
350 PUSHENVACC3
351 APPTERM2 3
353 ACC0
354 PUSHENVACC1
355 PUSHENVACC2
356 APPTERM2 3
358 ACC0
359 PUSHENVACC1
360 C_CALL2 caml_output_char
362 RETURN 1
364 CONSTINT 13
366 PUSHENVACC1
367 C_CALL2 caml_output_char
369 ENVACC1
370 C_CALL1 caml_flush
372 RETURN 1
374 ACC0
375 PUSHENVACC1
376 PUSHENVACC2
377 APPLY2
378 CONSTINT 13
380 PUSHENVACC1
381 C_CALL2 caml_output_char
383 RETURN 1
385 ACC0
386 PUSHENVACC1
387 APPLY1
388 PUSHENVACC2
389 PUSHENVACC3
390 APPTERM2 3
392 ACC0
393 PUSHENVACC1
394 APPLY1
395 PUSHENVACC2
396 PUSHENVACC3
397 APPTERM2 3
399 ACC0
400 PUSHENVACC1
401 PUSHENVACC2
402 APPTERM2 3
404 ACC0
405 PUSHENVACC1
406 C_CALL2 caml_output_char
408 RETURN 1
410 RESTART
411 GRAB 3
413 CONST0
414 PUSHACC3
415 LTINT
416 BRANCHIF 427
418 ACC1
419 C_CALL1 ml_string_length
421 PUSHACC4
422 PUSHACC4
423 ADDINT
424 GTINT
425 BRANCHIFNOT 432
427 GETGLOBAL " really_input "
429 PUSHENVACC1
430 APPTERM1 5
432 ACC3
433 PUSHACC3
434 PUSHACC3
435 PUSHACC3
436 PUSHENVACC2
437 APPTERM 4 , 8
440 RESTART
441 GRAB 3
443 CONST0
444 PUSHACC3
445 LTINT
446 BRANCHIF 457
448 ACC1
449 C_CALL1 ml_string_length
451 PUSHACC4
452 PUSHACC4
453 ADDINT
454
455 " input "
459 PUSHENVACC1
460 APPTERM1 5
462 ACC3
463 PUSHACC3
464 PUSHACC3
465 PUSHACC3
466 C_CALL4 caml_input
468 RETURN 4
470 ACC0
471 PUSHCONST0
472 PUSHGETGLOBAL < 0>(0 , < 0>(6 , 0 ) )
474 PUSHENVACC1
475 APPTERM3 4
477 ACC0
478 PUSHCONST0
479 PUSHGETGLOBAL < 0>(0 , < 0>(7 , 0 ) )
481 PUSHENVACC1
482 APPTERM3 4
484 RESTART
485 GRAB 2
487 ACC1
488 PUSHACC1
489 PUSHACC4
490 C_CALL3 sys_open
492 C_CALL1 caml_open_descriptor
494 RETURN 3
496 ACC0
497 C_CALL1 caml_flush
499 ACC0
500 C_CALL1 caml_close_channel
502 RETURN 1
504 RESTART
505 GRAB 1
507 CONST0
508 PUSHACC2
509 PUSHACC2
510 C_CALL3 output_value
512 RETURN 2
514 RESTART
515 GRAB 3
517 CONST0
518 PUSHACC3
519 LTINT
520 BRANCHIF 531
522 ACC1
523 C_CALL1 ml_string_length
525 PUSHACC4
526 PUSHACC4
527 ADDINT
528
529 BRANCHIFNOT 536
531 GETGLOBAL " output "
533 PUSHENVACC1
534 APPTERM1 5
536 ACC3
537 PUSHACC3
538 PUSHACC3
539 PUSHACC3
540 C_CALL4 caml_output
542 RETURN 4
544 RESTART
545 GRAB 1
547 ACC1
548 C_CALL1 ml_string_length
550 PUSHCONST0
551 PUSHACC3
552 PUSHACC3
553 C_CALL4 caml_output
555 RETURN 2
557 ACC0
558 PUSHCONSTINT 438
560 PUSHGETGLOBAL < 0>(1 , < 0>(3 , < 0>(4 , < 0>(6 , 0 ) ) ) )
562 PUSHENVACC1
563 APPTERM3 4
565 ACC0
566 PUSHCONSTINT 438
568 PUSHGETGLOBAL < 0>(1 , < 0>(3 , < 0>(4 , < 0>(7 , 0 ) ) ) )
570 PUSHENVACC1
571 APPTERM3 4
573 RESTART
574 GRAB 2
576 ACC1
577 PUSHACC1
578 PUSHACC4
579 C_CALL3 sys_open
581 C_CALL1 caml_open_descriptor
583 RETURN 3
585 ACC0
586 PUSHGETGLOBAL " % .12 g "
588 C_CALL2 format_float
590 RETURN 1
592 ACC0
593 PUSHGETGLOBAL " % d "
595 C_CALL2 format_int
597 RETURN 1
599 " false "
601 PUSHACC1
602 C_CALL2 string_equal
604 BRANCHIFNOT 609
606 CONST0
607 RETURN 1
609 " true "
611 PUSHACC1
612 C_CALL2 string_equal
614 BRANCHIFNOT 619
616 CONST1
617 RETURN 1
619 " bool_of_string "
621 PUSHENVACC1
622 APPTERM1 2
624 ACC0
625 BRANCHIFNOT 631
627 " true "
629 RETURN 1
631 " false "
633 RETURN 1
635
636 PUSHACC1
637 LTINT
638 BRANCHIF 646
640
642 PUSHACC1
643 GTINT
644 BRANCHIFNOT 651
646 " char_of_int "
648 PUSHENVACC1
649 APPTERM1 2
651 ACC0
652 RETURN 1
654 RESTART
655 GRAB 1
657 ACC0
658 C_CALL1 ml_string_length
660 PUSHACC2
661 C_CALL1 ml_string_length
663 PUSHACC0
664 PUSHACC2
665 ADDINT
666 C_CALL1 create_string
668 PUSHACC2
669 PUSHCONST0
670 PUSHACC2
671 PUSHCONST0
672 PUSHACC7
673 C_CALL5 blit_string
675 ACC1
676 PUSHACC3
677 PUSHACC2
678 PUSHCONST0
679 PUSHACC 8
681 C_CALL5 blit_string
683 ACC0
684 RETURN 5
686 -1
688 PUSHACC1
689 XORINT
690 RETURN 1
692
693 PUSHACC1
694 GEINT
695 BRANCHIFNOT 700
697 ACC0
698 RETURN 1
700 ACC0
701 NEGINT
702 RETURN 1
704 RESTART
705 GRAB 1
707 ACC1
708 PUSHACC1
709 C_CALL2 greaterequal
711 BRANCHIFNOT 716
713 ACC0
714 RETURN 2
716 ACC1
717 RETURN 2
719 RESTART
720 GRAB 1
722 ACC1
723 PUSHACC1
724 C_CALL2 lessequal
726 BRANCHIFNOT 731
728 ACC0
729 RETURN 2
731 ACC1
732 RETURN 2
734 ACC0
735
737 MAKEBLOCK2 0
739 RAISE
740 ACC0
741 PUSHGETGLOBAL Failure
743 MAKEBLOCK2 0
745 RAISE
746 CLOSURE 0 , 740
749 PUSH
750 CLOSURE 0 , 734
753 PUSHGETGLOBAL " Pervasives . Exit "
755 MAKEBLOCK1 0
757 PUSHGETGLOBAL " Pervasives . Assert_failure "
759 MAKEBLOCK1 0
761 PUSH
762 CLOSURE 0 , 720
765 PUSH
766 CLOSURE 0 , 705
769 PUSH
770 CLOSURE 0 , 692
773 PUSH
774 CLOSURE 0 , 686
777 PUSHCONST0
778 PUSHCONSTINT 31
780 PUSHCONST1
781 LSLINT
782 EQ
783 BRANCHIFNOT 789
785 CONSTINT 30
787 BRANCH 791
789 CONSTINT 62
791 PUSHCONST1
792 LSLINT
793 PUSHACC0
794 OFFSETINT -1
796 PUSH
797 CLOSURE 0 , 655
800 PUSHACC 9
802 CLOSURE 1 , 635
805 PUSH
806 CLOSURE 0 , 624
809 PUSHACC 11
811 CLOSURE 1 , 599
814 PUSH
815 CLOSURE 0 , 592
818 PUSH
819 CLOSURE 0 , 585
822 PUSH
823 CLOSUREREC 0 , 12
827
828 C_CALL1 caml_open_descriptor
830 PUSHCONST1
831 C_CALL1 caml_open_descriptor
833 PUSHCONST2
834 C_CALL1 caml_open_descriptor
836 PUSH
837 CLOSURE 0 , 574
840 PUSHACC0
841 CLOSURE 1 , 565
844 PUSHACC1
845 CLOSURE 1 , 557
848 PUSH
849 CLOSURE 0 , 545
852 PUSHACC 22
854 CLOSURE 1 , 515
857 PUSH
858 CLOSURE 0 , 505
861 PUSH
862 CLOSURE 0 , 496
865 PUSH
866 CLOSURE 0 , 485
869 PUSHACC0
870 CLOSURE 1 , 477
873 PUSHACC1
874 CLOSURE 1 , 470
877 PUSHACC 28
879 CLOSURE 1 , 441
882 PUSH
883 CLOSUREREC 0 , 32
887 ACC0
888 PUSHACC 31
890 CLOSURE 2 , 411
893 PUSHACC 22
895 CLOSUREREC 1 , 70
899 ACC 15
901 CLOSURE 1 , 404
904 PUSHACC 11
906 PUSHACC 17
908 CLOSURE 2 , 399
911 PUSHACC 12
913 PUSHACC 18
915 PUSHACC 23
917 CLOSURE 3 , 392
920 PUSHACC 13
922 PUSHACC 19
924 PUSHACC 23
926 CLOSURE 3 , 385
929 PUSHACC 14
931 PUSHACC 20
933 CLOSURE 2 , 374
936 PUSHACC 20
938 CLOSURE 1 , 364
941 PUSHACC 20
943 CLOSURE 1 , 358
946 PUSHACC 17
948 PUSHACC 22
950 CLOSURE 2 , 353
953 PUSHACC 18
955 PUSHACC 23
957 PUSHACC 29
959 CLOSURE 3 , 346
962 PUSHACC 19
964 PUSHACC 24
966 PUSHACC 29
968 CLOSURE 3 , 339
971 PUSHACC 20
973 PUSHACC 25
975 CLOSURE 2 , 325
978 PUSHACC 25
980 CLOSURE 1 , 315
983 PUSHACC 12
985 PUSHACC 28
987 PUSHACC 30
989 CLOSURE 3 , 308
992 PUSHACC0
993 CLOSURE 1 , 301
996 PUSHACC1
997 CLOSURE 1 , 294
1000 PUSHACC 29
1002 PUSHACC 31
1004 CLOSURE 2 , 286
1007 MAKEBLOCK1 0
1009 PUSHACC0
1010 CLOSURE 1 , 275
1013 PUSHACC1
1014 CLOSURE 1 , 263
1017 PUSHACC0
1018 CLOSURE 1 , 255
1021 PUSHACC1
1022 PUSHACC 22
1024 PUSHACC4
1025 PUSHACC3
1026 PUSH
1027 CLOSURE 0 , 247
1030 PUSH
1031 CLOSURE 0 , 241
1034 PUSH
1035 CLOSURE 0 , 236
1038 PUSH
1039 CLOSURE 0 , 231
1042 PUSH
1043 CLOSURE 0 , 223
1046 PUSH
1047 CLOSURE 0 , 217
1050 PUSH
1051 CLOSURE 0 , 212
1054 PUSH
1055 CLOSURE 0 , 207
1058 PUSHACC 32
1060 PUSHACC 35
1062 PUSHACC 33
1064 PUSH
1065 CLOSURE 0 , 202
1068 PUSHACC 41
1070 PUSHACC 40
1072 PUSHACC 42
1074 PUSH
1075 CLOSURE 0 , 194
1078 PUSHACC 46
1080 PUSH
1081 CLOSURE 0 , 188
1084 PUSH
1085 CLOSURE 0 , 183
1088 PUSH
1089 CLOSURE 0 , 175
1092 PUSHACC 51
1094 PUSH
1095 CLOSURE 0 , 166
1098 PUSH
1099 CLOSURE 0 , 157
1102 PUSHACC 55
1104 PUSHACC 57
1106 PUSH
1107 CLOSURE 0 , 148
1110 PUSH
1111 CLOSURE 0 , 142
1114 PUSHACC 63
1116 PUSHACC 62
1118 PUSHACC 64
1120 PUSHACC 38
1122 PUSHACC 40
1124 PUSHACC 42
1126 PUSHACC 44
1128 PUSHACC 46
1130 PUSHACC 48
1132 PUSHACC 50
1134 PUSHACC 52
1136 PUSHACC 54
1138 PUSHACC 56
1140 PUSHACC 58
1142 PUSHACC 60
1144 PUSHACC 62
1146 PUSHACC 64
1148 PUSHACC 66
1150 PUSHACC 82
1152 PUSHACC 84
1154 PUSHACC 86
1156 PUSHACC 88
1158 PUSHACC 90
1160 PUSHACC 92
1162 PUSHACC 94
1164 PUSHACC 96
1166 PUSHACC 98
1168 PUSHACC 100
1170 PUSHACC 104
1172 PUSHACC 104
1174 PUSHACC 104
1176 PUSHACC 108
1178 PUSHACC 110
1180 PUSHACC 112
1182 PUSHACC 117
1184 PUSHACC 117
1186 PUSHACC 117
1188 PUSHACC 117
1190 MAKEBLOCK 69 , 0
1193 POP 53
1195 SETGLOBAL Pervasives
1197 BRANCH 2177
1199 RESTART
1200 GRAB 1
1202 ACC1
1203 BRANCHIFNOT 1213
1205 ACC1
1206 GETFIELD1
1207 PUSHACC1
1208 OFFSETINT 1
1210 PUSHOFFSETCLOSURE0
1211 APPTERM2 4
1213 ACC0
1214 RETURN 2
1216 RESTART
1217 GRAB 1
1219 ACC0
1220 BRANCHIFNOT 1251
1222 CONST0
1223 PUSHACC2
1224 EQ
1225 BRANCHIFNOT 1231
1227 ACC0
1228 GETFIELD0
1229 RETURN 2
1231 CONST0
1232 PUSHACC2
1233 GTINT
1234 BRANCHIFNOT 1244
1236 ACC1
1237 OFFSETINT -1
1239 PUSHACC1
1240 GETFIELD1
1241 PUSHOFFSETCLOSURE0
1242 APPTERM2 4
1244 GETGLOBAL " List.nth "
1246 PUSHGETGLOBALFIELD Pervasives , 2
1249 APPTERM1 3
1251 GETGLOBAL " nth "
1253 PUSHGETGLOBALFIELD Pervasives , 3
1256 APPTERM1 3
1258 RESTART
1259 GRAB 1
1261 ACC0
1262 BRANCHIFNOT 1274
1264 ACC1
1265 PUSHACC1
1266 GETFIELD0
1267 MAKEBLOCK2 0
1269 PUSHACC1
1270 GETFIELD1
1271 PUSHOFFSETCLOSURE0
1272 APPTERM2 4
1274 ACC1
1275 RETURN 2
1277 ACC0
1278 BRANCHIFNOT 1291
1280 ACC0
1281 GETFIELD1
1282 PUSHOFFSETCLOSURE0
1283 APPLY1
1284 PUSHACC1
1285 GETFIELD0
1286 PUSHGETGLOBALFIELD Pervasives , 16
1289 APPTERM2 3
1291 RETURN 1
1293 RESTART
1294 GRAB 1
1296 ACC1
1297 BRANCHIFNOT 1313
1299 ACC1
1300 GETFIELD0
1301 PUSHACC1
1302 APPLY1
1303 PUSHACC2
1304 GETFIELD1
1305 PUSHACC2
1306 PUSHOFFSETCLOSURE0
1307 APPLY2
1308 PUSHACC1
1309 MAKEBLOCK2 0
1311 POP 1
1313 RETURN 2
1315 RESTART
1316 GRAB 1
1318 ACC1
1319 BRANCHIFNOT 1331
1321 ACC1
1322 GETFIELD0
1323 PUSHACC1
1324 APPLY1
1325 ACC1
1326 GETFIELD1
1327 PUSHACC1
1328 PUSHOFFSETCLOSURE0
1329 APPTERM2 4
1331 RETURN 2
1333 RESTART
1334 GRAB 2
1336 ACC2
1337 BRANCHIFNOT 1350
1339 ACC2
1340 GETFIELD1
1341 PUSHACC3
1342 GETFIELD0
1343 PUSHACC3
1344 PUSHACC3
1345 APPLY2
1346 PUSHACC2
1347 PUSHOFFSETCLOSURE0
1348 APPTERM3 6
1350 ACC1
1351 RETURN 3
1353 RESTART
1354 GRAB 2
1356 ACC1
1357 BRANCHIFNOT 1370
1359 ACC2
1360 PUSHACC2
1361 GETFIELD1
1362 PUSHACC2
1363 PUSHOFFSETCLOSURE0
1364 APPLY3
1365 PUSHACC2
1366 GETFIELD0
1367 PUSHACC2
1368 APPTERM2 5
1370 ACC2
1371 RETURN 3
1373 RESTART
1374 GRAB 2
1376 ACC1
1377 BRANCHIFNOT 1400
1379 ACC2
1380 BRANCHIFNOT 1407
1382 ACC2
1383 GETFIELD0
1384 PUSHACC2
1385 GETFIELD0
1386 PUSHACC2
1387 APPLY2
1388 PUSHACC3
1389 GETFIELD1
1390 PUSHACC3
1391 GETFIELD1
1392 PUSHACC3
1393 PUSHOFFSETCLOSURE0
1394 APPLY3
1395 PUSHACC1
1396 MAKEBLOCK2 0
1398 RETURN 4
1400 ACC2
1401 BRANCHIFNOT 1405
1403 BRANCH 1407
1405 RETURN 3
1407 GETGLOBAL " List.map2 "
1409 PUSHGETGLOBALFIELD Pervasives , 2
1412 APPTERM1 4
1414 RESTART
1415 GRAB 2
1417 ACC1
1418 BRANCHIFNOT 1437
1420 ACC2
1421 BRANCHIFNOT 1444
1423 ACC2
1424 GETFIELD0
1425 PUSHACC2
1426 GETFIELD0
1427 PUSHACC2
1428 APPLY2
1429 ACC2
1430 GETFIELD1
1431 PUSHACC2
1432 GETFIELD1
1433 PUSHACC2
1434 PUSHOFFSETCLOSURE0
1435 APPTERM3 6
1437 ACC2
1438 BRANCHIFNOT 1442
1440 BRANCH 1444
1442 RETURN 3
1444 GETGLOBAL " List.iter2 "
1446 PUSHGETGLOBALFIELD Pervasives , 2
1449 APPTERM1 4
1451 RESTART
1452 GRAB 3
1454 ACC2
1455 BRANCHIFNOT 1476
1457 ACC3
1458 1461 GETFIELD1
1462 PUSHACC3
1463 GETFIELD1
1464 PUSHACC5
1465 GETFIELD0
1466 PUSHACC5
1467 GETFIELD0
1468 PUSHACC5
1469 PUSHACC5
1470 APPLY3
1471 PUSHACC3
1472 PUSHOFFSETCLOSURE0
1473 APPTERM 4 , 8
1476 ACC3
1477 BRANCHIF 1482
1479 ACC1
1480 RETURN 4
1482 GETGLOBAL " List.fold_left2 "
1484 PUSHGETGLOBALFIELD Pervasives , 2
1487 APPTERM1 5
1489 RESTART
1490 GRAB 3
1492 ACC1
1493 BRANCHIFNOT 1516
1495 ACC2
1496 BRANCHIFNOT 1522
1498 PUSH_RETADDR 1509
1500 ACC6
1501 PUSHACC6
1502 GETFIELD1
1503 PUSHACC6
1504 GETFIELD1
1505 PUSHACC6
1506 PUSHOFFSETCLOSURE0
1507 APPLY 4
1509 PUSHACC3
1510 GETFIELD0
1511 PUSHACC3
1512 GETFIELD0
1513 PUSHACC3
1514 APPTERM3 7
1516 ACC2
1517 BRANCHIF 1522
1519 ACC3
1520 RETURN 4
1522 GETGLOBAL " List.fold_right2 "
1524 PUSHGETGLOBALFIELD Pervasives , 2
1527 APPTERM1 5
1529 RESTART
1530 GRAB 1
1532 ACC1
1533 BRANCHIFNOT 1549
1535 ACC1
1536 GETFIELD0
1537 PUSHACC1
1538 APPLY1
1539 BRANCHIFNOT 1547
1541 ACC1
1542 GETFIELD1
1543 PUSHACC1
1544 PUSHOFFSETCLOSURE0
1545 APPTERM2 4
1547 RETURN 2
1549 CONST1
1550 RETURN 2
1552 RESTART
1553 GRAB 1
1555 ACC1
1556 BRANCHIFNOT 1570
1558 ACC1
1559 GETFIELD0
1560 PUSHACC1
1561 APPLY1
1562 BRANCHIF 1570
1564 ACC1
1565 GETFIELD1
1566 PUSHACC1
1567 PUSHOFFSETCLOSURE0
1568 APPTERM2 4
1570 RETURN 2
1572 RESTART
1573 GRAB 2
1575 ACC1
1576 BRANCHIFNOT 1599
1578 ACC2
1579 BRANCHIFNOT 1605
1581 ACC2
1582 GETFIELD0
1583 PUSHACC2
1584 GETFIELD0
1585 PUSHACC2
1586 APPLY2
1587 BRANCHIFNOT 1597
1589 ACC2
1590 GETFIELD1
1591 PUSHACC2
1592 GETFIELD1
1593 PUSHACC2
1594 PUSHOFFSETCLOSURE0
1595 APPTERM3 6
1597 RETURN 3
1599 ACC2
1600 BRANCHIF 1605
1602 CONST1
1603 RETURN 3
1605 " List.for_all2 "
1607 PUSHGETGLOBALFIELD Pervasives , 2
1610 APPTERM1 4
1612 RESTART
1613 GRAB 2
1615 ACC1
1616 BRANCHIFNOT 1639
1618 ACC2
1619 BRANCHIFNOT 1646
1621 ACC2
1622 GETFIELD0
1623 PUSHACC2
1624 GETFIELD0
1625 PUSHACC2
1626 APPLY2
1627 BRANCHIF 1637
1629 ACC2
1630 GETFIELD1
1631 PUSHACC2
1632 GETFIELD1
1633 PUSHACC2
1634 PUSHOFFSETCLOSURE0
1635 APPTERM3 6
1637 RETURN 3
1639 ACC2
1640 BRANCHIFNOT 1644
1642 BRANCH 1646
1644 RETURN 3
1646 " List.exists2 "
1648 PUSHGETGLOBALFIELD Pervasives , 2
1651 APPTERM1 4
1653 RESTART
1654 GRAB 1
1656 ACC1
1657 BRANCHIFNOT 1672
1659 ACC0
1660 PUSHACC2
1661 GETFIELD0
1662 C_CALL2 equal
1664 BRANCHIF 1672
1666 ACC1
1667 GETFIELD1
1668 PUSHACC1
1669 PUSHOFFSETCLOSURE0
1670 APPTERM2 4
1672 RETURN 2
1674 RESTART
1675 GRAB 1
1677 ACC1
1678 BRANCHIFNOT 1692
1680 ACC0
1681 PUSHACC2
1682 GETFIELD0
1683 EQ
1684 BRANCHIF 1692
1686 ACC1
1687 GETFIELD1
1688 PUSHACC1
1689 PUSHOFFSETCLOSURE0
1690 APPTERM2 4
1692 RETURN 2
1694 RESTART
1695 GRAB 1
1697 ACC1
1698 BRANCHIFNOT 1719
1700 ACC1
1701 GETFIELD0
1702 PUSHACC1
1703 PUSHACC1
1704 GETFIELD0
1705 C_CALL2 equal
1707 BRANCHIFNOT 1713
1709 ACC0
1710 GETFIELD1
1711 RETURN 3
1713 ACC2
1714 GETFIELD1
1715 PUSHACC2
1716 PUSHOFFSETCLOSURE0
1717 APPTERM2 5
1719 GETGLOBAL Not_found
1721 MAKEBLOCK1 0
1723 RAISE
1724 RESTART
1725 GRAB 1
1727 ACC1
1728 BRANCHIFNOT 1748
1730 ACC1
1731 GETFIELD0
1732 PUSHACC1
1733 PUSHACC1
1734 GETFIELD0
1735 EQ
1736 BRANCHIFNOT 1742
1738 ACC0
1739 GETFIELD1
1740 RETURN 3
1742 ACC2
1743 GETFIELD1
1744 PUSHACC2
1745 PUSHOFFSETCLOSURE0
1746 APPTERM2 5
1748 GETGLOBAL Not_found
1750 MAKEBLOCK1 0
1752 RAISE
1753 RESTART
1754 GRAB 1
1756 ACC1
1757 BRANCHIFNOT 1773
1759 ACC0
1760 PUSHACC2
1761 GETFIELD0
1762 GETFIELD0
1763 C_CALL2 equal
1765 BRANCHIF 1773
1767 ACC1
1768 GETFIELD1
1769 PUSHACC1
1770 PUSHOFFSETCLOSURE0
1771 APPTERM2 4
1773 RETURN 2
1775 RESTART
1776 GRAB 1
1778 ACC1
1779 BRANCHIFNOT 1794
1781 ACC0
1782 PUSHACC2
1783 GETFIELD0
1784 GETFIELD0
1785 EQ
1786 BRANCHIF 1794
1788 ACC1
1789 GETFIELD1
1790 PUSHACC1
1791 PUSHOFFSETCLOSURE0
1792 APPTERM2 4
1794 RETURN 2
1796 RESTART
1797 GRAB 1
1799 ACC1
1800 BRANCHIFNOT 1825
1802 ACC1
1803 GETFIELD0
1804 PUSHACC2
1805 GETFIELD1
1806 PUSHACC2
1807 PUSHACC2
1808 GETFIELD0
1809 C_CALL2 equal
1811 BRANCHIFNOT 1816
1813 ACC0
1814 RETURN 4
1816 ACC0
1817 PUSHACC3
1818 PUSHOFFSETCLOSURE0
1819 APPLY2
1820 PUSHACC2
1821 MAKEBLOCK2 0
1823 POP 2
1825 RETURN 2
1827 RESTART
1828 GRAB 1
1830 ACC1
1831 BRANCHIFNOT 1855
1833 ACC1
1834 GETFIELD0
1835 PUSHACC2
1836 GETFIELD1
1837 PUSHACC2
1838 PUSHACC2
1839 GETFIELD0
1840 EQ
1841 BRANCHIFNOT 1846
1843 ACC0
1844 RETURN 4
1846 ACC0
1847 PUSHACC3
1848 PUSHOFFSETCLOSURE0
1849 APPLY2
1850 PUSHACC2
1851 MAKEBLOCK2 0
1853 POP 2
1855 RETURN 2
1857 RESTART
1858 GRAB 1
1860 ACC1
1861 BRANCHIFNOT 1879
1863 ACC1
1864 GETFIELD0
1865 PUSHACC0
1866 PUSHACC2
1867 APPLY1
1868 BRANCHIFNOT 1873
1870 ACC0
1871 RETURN 3
1873 ACC2
1874 GETFIELD1
1875 PUSHACC2
1876 PUSHOFFSETCLOSURE0
1877 APPTERM2 5
1879 GETGLOBAL Not_found
1881 MAKEBLOCK1 0
1883 RAISE
1884 RESTART
1885 GRAB 2
1887 ACC2
1888 BRANCHIFNOT 1917
1890 ACC2
1891 GETFIELD0
1892 PUSHACC3
1893 GETFIELD1
1894 PUSHACC1
1895 PUSHENVACC2
1896 APPLY1
1897 BRANCHIFNOT 1908
1899 ACC0
1900 PUSHACC4
1901 PUSHACC4
1902 PUSHACC4
1903 MAKEBLOCK2 0
1905 PUSHOFFSETCLOSURE0
1906 APPTERM3 8
1908 ACC0
1909 PUSHACC4
1910 PUSHACC3
1911 MAKEBLOCK2 0
1913 PUSHACC4
1914 PUSHOFFSETCLOSURE0
1915 APPTERM3 8
1917 ACC1
1918 PUSHENVACC1
1919 APPLY1
1920 PUSHACC1
1921 PUSHENVACC1
1922 APPLY1
1923 MAKEBLOCK2 0
1925 RETURN 3
1927 RESTART
1928 GRAB 1
1930 ACC0
1931 PUSHENVACC1
1932 CLOSUREREC 2 , 1885
1936 ACC2
1937 PUSHCONST0
1938 PUSHCONST0
1939 PUSHACC3
1940 APPTERM3 6
1942 ACC0
1943 BRANCHIFNOT 1967
1945 ACC0
1946 GETFIELD0
1947 PUSHACC1
1948 GETFIELD1
1949 PUSHOFFSETCLOSURE0
1950 APPLY1
1951 PUSHACC0
1952 GETFIELD1
1953 PUSHACC2
1954 GETFIELD1
1955 MAKEBLOCK2 0
1957 PUSHACC1
1958 GETFIELD0
1959 PUSHACC3
1960 GETFIELD0
1961 MAKEBLOCK2 0
1963 MAKEBLOCK2 0
1965 RETURN 3
1967 < 0>(0 , 0 )
1969 RETURN 1
1971 RESTART
1972 GRAB 1
1974 ACC0
1975 BRANCHIFNOT 1996
1977 ACC1
1978 BRANCHIFNOT 2003
1980 ACC1
1981 GETFIELD1
1982 PUSHACC1
1983 GETFIELD1
1984 PUSHOFFSETCLOSURE0
1985 APPLY2
1986 PUSHACC2
1987 GETFIELD0
1988 PUSHACC2
1989 GETFIELD0
1990 MAKEBLOCK2 0
1992 MAKEBLOCK2 0
1994 RETURN 2
1996 ACC1
1997 BRANCHIFNOT 2001
1999 BRANCH 2003
2001 RETURN 2
2003 " List.combine "
2005 PUSHGETGLOBALFIELD Pervasives , 2
2008 APPTERM1 3
2010 RESTART
2011 GRAB 1
2013 ACC1
2014 BRANCHIFNOT 2038
2016 ACC1
2017 GETFIELD0
2018 PUSHACC2
2019 GETFIELD1
2020 PUSHACC1
2021 PUSHENVACC2
2022 APPLY1
2023 BRANCHIFNOT 2033
2025 ACC0
2026 PUSHACC3
2027 PUSHACC3
2028 MAKEBLOCK2 0
2030 PUSHOFFSETCLOSURE0
2031 APPTERM2 6
2033 ACC0
2034 PUSHACC3
2035 PUSHOFFSETCLOSURE0
2036 APPTERM2 6
2038 ACC0
2039 PUSHENVACC1
2040 APPTERM1 3
2042 ACC0
2043 PUSHENVACC1
2044 CLOSUREREC 2 , 2011
2048 CONST0
2049 PUSHACC1
2050 APPTERM1 3
2052 RESTART
2053 GRAB 2
2055 ACC1
2056 BRANCHIFNOT 2077
2058 ACC2
2059 BRANCHIFNOT 2084
2061 ACC2
2062 GETFIELD1
2063 PUSHACC2
2064 GETFIELD1
2065 PUSHACC2
2066 PUSHACC5
2067 GETFIELD0
2068 PUSHACC5
2069 GETFIELD0
2070 PUSHENVACC1
2071 APPLY2
2072 MAKEBLOCK2 0
2074 PUSHOFFSETCLOSURE0
2075 APPTERM3 6
2077 ACC2
2078 BRANCHIFNOT 2082
2080 BRANCH 2084
2082 RETURN 3
2084 GETGLOBAL " List.rev_map2 "
2086 PUSHGETGLOBALFIELD Pervasives , 2
2089 APPTERM1 4
2091 RESTART
2092 GRAB 2
2094 ACC0
2095 CLOSUREREC 1 , 2053
2099 ACC3
2100 PUSHACC3
2101 PUSHCONST0
2102 PUSHACC3
2103 APPTERM3 7
2105 RESTART
2106 GRAB 1
2108 ACC1
2109 BRANCHIFNOT 2123
2111 ACC1
2112 GETFIELD1
2113 PUSHACC1
2114 PUSHACC3
2115 GETFIELD0
2116 PUSHENVACC1
2117 APPLY1
2118 MAKEBLOCK2 0
2120 PUSHOFFSETCLOSURE0
2121 APPTERM2 4
2123 ACC0
2124 RETURN 2
2126 RESTART
2127 GRAB 1
2129 ACC0
2130 CLOSUREREC 1 , 2106
2134 ACC2
2135 PUSHCONST0
2136 PUSHACC2
2137 APPTERM2 5
2139 CONST0
2140 PUSHACC1
2141 PUSHENVACC1
2142 APPTERM2 3
2144 ACC0
2145 BRANCHIFNOT 2151
2147 ACC0
2148 GETFIELD1
2149 RETURN 1
2151 GETGLOBAL " tl "
2153 PUSHGETGLOBALFIELD Pervasives , 3
2156 APPTERM1 2
2158 ACC0
2159 BRANCHIFNOT 2165
2161 ACC0
2162 GETFIELD0
2163 RETURN 1
2165 GETGLOBAL " hd "
2167 PUSHGETGLOBALFIELD Pervasives , 3
2170 APPTERM1 2
2172 ACC0
2173 PUSHCONST0
2174 PUSHENVACC1
2175 APPTERM2 3
2177 CLOSUREREC 0 , 1200
2181 ACC0
2182 CLOSURE 1 , 2172
2185 PUSH
2186 CLOSURE 0 , 2158
2189 PUSH
2190 CLOSURE 0 , 2144
2193 PUSH
2194 CLOSUREREC 0 , 1217
2198 GETGLOBALFIELD Pervasives , 16
2201 PUSH
2202 CLOSUREREC 0 , 1259
2206 ACC0
2207 CLOSURE 1 , 2139
2210 PUSH
2211 CLOSUREREC 0 , 1277
2215 CLOSUREREC 0 , 1294
2219 CLOSURE 0 , 2127
2222 PUSH
2223 CLOSUREREC 0 , 1316
2227 CLOSUREREC 0 , 1334
2231 CLOSUREREC 0 , 1354
2235 CLOSUREREC 0 , 1374
2239 CLOSURE 0 , 2092
2242 PUSH
2243 CLOSUREREC 0 , 1415
2247 CLOSUREREC 0 , 1452
2251 CLOSUREREC 0 , 1490
2255 CLOSUREREC 0 , 1530
2259 CLOSUREREC 0 , 1553
2263 CLOSUREREC 0 , 1573
2267 CLOSUREREC 0 , 1613
2271 CLOSUREREC 0 , 1654
2275 CLOSUREREC 0 , 1675
2279 CLOSUREREC 0 , 1695
2283 CLOSUREREC 0 , 1725
2287 CLOSUREREC 0 , 1754
2291 CLOSUREREC 0 , 1776
2295 CLOSUREREC 0 , 1797
2299 CLOSUREREC 0 , 1828
2303 CLOSUREREC 0 , 1858
2307 ACC 24
2309 CLOSURE 1 , 2042
2312 PUSHACC 25
2314 CLOSUREREC 1 , 1928
2318 CLOSUREREC 0 , 1942
2322 CLOSUREREC 0 , 1972
2326 ACC0
2327 PUSHACC2
2328 PUSHACC7
2329 PUSHACC 9
2331 PUSHACC 11
2333 PUSHACC 13
2335 PUSHACC 15
2337 PUSHACC 17
2339 PUSHACC 10
2341 PUSHACC 12
2343 PUSHACC 13
2345 PUSHACC 15
2347 PUSHACC 23
2349 PUSHACC 25
2351 PUSHACC 27
2353 PUSHACC 29
2355 PUSHACC 31
2357 PUSHACC 33
2359 PUSHACC 35
2361 PUSHACC 37
2363 PUSHACC 40
2365 PUSHACC 42
2367 PUSHACC 41
2369 PUSHACC 45
2371 PUSHACC 47
2373 PUSHACC 50
2375 PUSHACC 52
2377 PUSHACC 51
2379 PUSHACC 55
2381 PUSHACC 56
2383 PUSHACC 59
2385 PUSHACC 61
2387 PUSHACC 60
2389 PUSHACC 64
2391 PUSHACC 66
2393 PUSHACC 68
2395 PUSHACC 70
2397 MAKEBLOCK 37 , 0
2400 POP 36
2402 SETGLOBAL List
2404 BRANCH 3341
2406 RESTART
2407 GRAB 2
2409 ACC2
2410 PUSHACC2
2411 VECTLENGTH
2412 OFFSETINT -1
2414 PUSHCONST0
2415 PUSH
2416 BRANCH 2433
2418 CHECK_SIGNALS
2419 ACC2
2420 PUSHACC2
2421 PUSHACC6
2422 C_CALL2 array_unsafe_get
2424 PUSHACC5
2425 APPLY2
2426 ASSIGN 2
2428 ACC1
2429 OFFSETINT -1
2431 ASSIGN 1
2433 ACC0
2434 PUSHACC2
2435 GEINT
2436 BRANCHIF 2418
2438 CONST0
2439 POP 2
2441 ACC0
2442 RETURN 4
2444 RESTART
2445 GRAB 2
2447 ACC1
2448 PUSHCONST0
2449 PUSHACC4
2450 VECTLENGTH
2451 OFFSETINT -1
2453 PUSH
2454 BRANCH 2471
2456 CHECK_SIGNALS
2457 ACC1
2458 PUSHACC6
2459 C_CALL2 array_unsafe_get
2461 PUSHACC3
2462 PUSHACC5
2463 APPLY2
2464 ASSIGN 2
2466 ACC1
2467 OFFSETINT 1
2469 ASSIGN 1
2471 ACC0
2472 PUSHACC2
2473 LEINT
2474 BRANCHIF 2456
2476 CONST0
2477 POP 2
2479 ACC0
2480 RETURN 4
2482 RESTART
2483 GRAB 1
2485 ACC1
2486 BRANCHIFNOT 2502
2488 ACC1
2489 GETFIELD0
2490 PUSHACC1
2491 PUSHENVACC1
2492 C_CALL3 array_unsafe_set
2494 ACC1
2495 GETFIELD1
2496 PUSHACC1
2497 OFFSETINT 1
2499 PUSHOFFSETCLOSURE0
2500 APPTERM2 4
2502 ENVACC1
2503 RETURN 2
2505 ACC0
2506 BRANCHIFNOT 2531
2508 ACC0
2509 GETFIELD1
2510 PUSHACC1
2511 GETFIELD0
2512 PUSHACC1
2513 PUSHGETGLOBALFIELD List , 0
2516 APPLY1
2517 OFFSETINT 1
2519 C_CALL2 make_vect
2521 PUSHACC0
2522 CLOSUREREC 1 , 2483
2526 ACC2
2527 PUSHCONST1
2528 PUSHACC2
2529 APPTERM2 6
2531 ATOM0
2532 RETURN 1
2534 RESTART
2535 GRAB 1
2537 CONST0
2538 PUSHACC1
2539 LTINT
2540 BRANCHIFNOT 2545
2542 ACC1
2543 RETURN 2
2545 ACC1
2546 PUSHACC1
2547 PUSHENVACC1
2548 C_CALL2 array_unsafe_get
2550 MAKEBLOCK2 0
2552 PUSHACC1
2553 OFFSETINT -1
2555 PUSHOFFSETCLOSURE0
2556 APPTERM2 4
2558 ACC0
2559 CLOSUREREC 1 , 2535
2563 CONST0
2564 PUSHACC2
2565 VECTLENGTH
2566 OFFSETINT -1
2568 PUSHACC2
2569 APPTERM2 4
2571 RESTART
2572 GRAB 1
2574 ACC1
2575 VECTLENGTH
2576 PUSHCONST0
2577 PUSHACC1
2578 EQ
2579 BRANCHIFNOT 2584
2581 ATOM0
2582 RETURN 3
2584 CONST0
2585 PUSHACC3
2586 C_CALL2 array_unsafe_get
2588 PUSHCONST0
2589 PUSHACC3
2590 APPLY2
2591 PUSHACC1
2592 C_CALL2 make_vect
2594 PUSHCONST1
2595 PUSHACC2
2596 OFFSETINT -1
2598 PUSH
2599 BRANCH 2618
2601 CHECK_SIGNALS
2602 ACC1
2603 PUSHACC6
2604 C_CALL2 array_unsafe_get
2606 PUSHACC2
2607 PUSHACC6
2608 APPLY2
2609 PUSHACC2
2610 PUSHACC4
2611 C_CALL3 array_unsafe_set
2613 ACC1
2614 OFFSETINT 1
2616 ASSIGN 1
2618 ACC0
2619 PUSHACC2
2620 LEINT
2621 BRANCHIF 2601
2623 CONST0
2624 POP 2
2626 ACC0
2627 RETURN 4
2629 RESTART
2630 GRAB 1
2632 CONST0
2633 PUSHACC2
2634 VECTLENGTH
2635 OFFSETINT -1
2637 PUSH
2638 BRANCH 2653
2640 CHECK_SIGNALS
2641 ACC1
2642 PUSHACC4
2643 C_CALL2 array_unsafe_get
2645 PUSHACC2
2646 PUSHACC4
2647 APPLY2
2648 ACC1
2649 OFFSETINT 1
2651 ASSIGN 1
2653 ACC0
2654 PUSHACC2
2655 LEINT
2656 BRANCHIF 2640
2658 CONST0
2659 RETURN 4
2661 RESTART
2662 GRAB 1
2664 ACC1
2665 VECTLENGTH
2666 PUSHCONST0
2667 PUSHACC1
2668 EQ
2669 BRANCHIFNOT 2674
2671 ATOM0
2672 RETURN 3
2674 CONST0
2675 PUSHACC3
2676 C_CALL2 array_unsafe_get
2678 PUSHACC2
2679 APPLY1
2680 PUSHACC1
2681 C_CALL2 make_vect
2683 PUSHCONST1
2684 PUSHACC2
2685 OFFSETINT -1
2687 PUSH
2688 BRANCH 2706
2690 CHECK_SIGNALS
2691 ACC1
2692 PUSHACC6
2693 C_CALL2 array_unsafe_get
2695 PUSHACC5
2696 APPLY1
2697 PUSHACC2
2698 PUSHACC4
2699 C_CALL3 array_unsafe_set
2701 ACC1
2702 OFFSETINT 1
2704 ASSIGN 1
2706 ACC0
2707 PUSHACC2
2708 LEINT
2709 BRANCHIF 2690
2711 CONST0
2712 POP 2
2714 ACC0
2715 RETURN 4
2717 RESTART
2718 GRAB 1
2720 CONST0
2721 PUSHACC2
2722 VECTLENGTH
2723 OFFSETINT -1
2725 PUSH
2726 BRANCH 2740
2728 CHECK_SIGNALS
2729 ACC1
2730 PUSHACC4
2731 C_CALL2 array_unsafe_get
2733 PUSHACC3
2734 APPLY1
2735 ACC1
2736 OFFSETINT 1
2738 ASSIGN 1
2740 ACC0
2741 PUSHACC2
2742 LEINT
2743 BRANCHIF 2728
2745 CONST0
2746 RETURN 4
2748 RESTART
2749 GRAB 4
2751 CONST0
2752 PUSHACC5
2753 LTINT
2754 BRANCHIF 2782
2756 CONST0
2757 PUSHACC2
2758 LTINT
2759 BRANCHIF 2782
2761 ACC0
2762 VECTLENGTH
2763 PUSHACC5
2764 PUSHACC3
2765 ADDINT
2766 GTINT
2767 BRANCHIF 2782
2769 CONST0
2770 PUSHACC4
2771 LTINT
2772 BRANCHIF 2782
2774 ACC2
2775 VECTLENGTH
2776 PUSHACC5
2777 PUSHACC5
2778 ADDINT
2779 GTINT
2780 BRANCHIFNOT 2789
2782 GETGLOBAL " Array.blit "
2784 PUSHGETGLOBALFIELD Pervasives , 2
2787 APPTERM1 6
2789 ACC3
2790 PUSHACC2
2791 LTINT
2792 BRANCHIFNOT 2827
2794 ACC4
2795 OFFSETINT -1
2797 PUSHCONST0
2798 PUSH
2799 BRANCH 2819
2801 CHECK_SIGNALS
2802 ACC1
2803 PUSHACC4
2804 ADDINT
2805 PUSHACC3
2806 C_CALL2 array_unsafe_get
2808 PUSHACC2
2809 PUSHACC7
2810 ADDINT
2811 PUSHACC6
2812 C_CALL3 array_unsafe_set
2814 ACC1
2815 OFFSETINT -1
2817 ASSIGN 1
2819 ACC0
2820 PUSHACC2
2821 GEINT
2822 BRANCHIF 2801
2824 CONST0
2825 RETURN 7
2827 CONST0
2828 PUSHACC5
2829 OFFSETINT -1
2831 PUSH
2832 BRANCH 2852
2834 CHECK_SIGNALS
2835 ACC1
2836 PUSHACC4
2837 ADDINT
2838 PUSHACC3
2839 C_CALL2 array_unsafe_get
2841 PUSHACC2
2842 PUSHACC7
2843 ADDINT
2844 PUSHACC6
2845 C_CALL3 array_unsafe_set
2847 ACC1
2848 OFFSETINT 1
2850 ASSIGN 1
2852 ACC0
2853 PUSHACC2
2854 LEINT
2855 BRANCHIF 2834
2857 CONST0
2858 RETURN 7
2860 RESTART
2861 GRAB 3
2863 CONST0
2864 PUSHACC2
2865 LTINT
2866 BRANCHIF 2881
2868 CONST0
2869 PUSHACC3
2870 LTINT
2871 BRANCHIF 2881
2873 ACC0
2874 VECTLENGTH
2875 PUSHACC3
2876 PUSHACC3
2877 ADDINT
2878 GTINT
2879 BRANCHIFNOT 2888
2881 GETGLOBAL " Array.fill "
2883 PUSHGETGLOBALFIELD Pervasives , 2
2886 APPTERM1 5
2888 ACC1
2889 PUSHACC3
2890 PUSHACC3
2891 ADDINT
2892 OFFSETINT -1
2894 PUSH
2895 BRANCH 2908
2897 CHECK_SIGNALS
2898 ACC5
2899 PUSHACC2
2900 PUSHACC4
2901 C_CALL3 array_unsafe_set
2903 ACC1
2904 OFFSETINT 1
2906 ASSIGN 1
2908 ACC0
2909 PUSHACC2
2910 LEINT
2911 BRANCHIF 2897
2913 CONST0
2914 RETURN 6
2916 RESTART
2917 GRAB 2
2919 CONST0
2920 PUSHACC2
2921 LTINT
2922 BRANCHIF 2937
2924 CONST0
2925 PUSHACC3
2926 LTINT
2927 BRANCHIF 2937
2929 ACC0
2930 VECTLENGTH
2931 PUSHACC3
2932 PUSHACC3
2933 ADDINT
2934 GTINT
2935 BRANCHIFNOT 2944
2937 GETGLOBAL " Array.sub "
2939 PUSHGETGLOBALFIELD Pervasives , 2
2942 APPTERM1 4
2944 CONST0
2945 PUSHACC3
2946 EQ
2947 BRANCHIFNOT 2952
2949 ATOM0
2950 RETURN 3
2952 ACC1
2953 PUSHACC1
2954 C_CALL2 array_unsafe_get
2956 PUSHACC3
2957 C_CALL2 make_vect
2959 PUSHCONST1
2960 PUSHACC4
2961 OFFSETINT -1
2963 PUSH
2964 BRANCH 2982
2966 CHECK_SIGNALS
2967 ACC1
2968 PUSHACC5
2969 ADDINT
2970 PUSHACC4
2971 C_CALL2 array_unsafe_get
2973 PUSHACC2
2974 PUSHACC4
2975 C_CALL3 array_unsafe_set
2977 ACC1
2978 OFFSETINT 1
2980 ASSIGN 1
2982 ACC0
2983 PUSHACC2
2984 LEINT
2985 BRANCHIF 2966
2987 CONST0
2988 POP 2
2990 ACC0
2991 RETURN 4
2993 ACC0
2994 BRANCHIFNOT 3017
2996 ACC0
2997 GETFIELD0
2998 PUSHCONST0
2999 PUSHACC1
3000 VECTLENGTH
3001 GTINT
3002 BRANCHIFNOT 3012
3004 ENVACC2
3005 PUSHCONST0
3006 PUSHACC2
3007 C_CALL2 array_unsafe_get
3009 PUSHENVACC1
3010 APPTERM2 4
3012 ACC1
3013 GETFIELD1
3014 PUSHOFFSETCLOSURE0
3015 APPTERM1 3
3017 ATOM0
3018 RETURN 1
3020 ACC0
3021 PUSHENVACC1
3022 CLOSUREREC 2 , 2993
3026 ACC1
3027 PUSHACC1
3028 APPTERM1 3
3030 CONST0
3031 PUSHACC1
3032 VECTLENGTH
3033 OFFSETINT -1
3035 PUSH
3036 BRANCH 3056
3038 CHECK_SIGNALS
3039 ACC1
3040 PUSHACC3
3041 C_CALL2 array_unsafe_get
3043 PUSHENVACC2
3044 GETFIELD0
3045 PUSHENVACC1
3046 C_CALL3 array_unsafe_set
3048 ENVACC2
3049 OFFSETREF 1
3051 ACC1
3052 OFFSETINT 1
3054 ASSIGN 1
3056 ACC0
3057 PUSHACC2
3058 LEINT
3059 BRANCHIF 3038
3061 CONST0
3062 RETURN 3
3064 RESTART
3065 GRAB 1
3067 ACC1
3068 VECTLENGTH
3069 PUSHACC1
3070 ADDINT
3071 RETURN 2
3073 RESTART
3074 GRAB 1
3076 ACC1
3077 PUSHCONST0
3078 PUSH
3079 CLOSURE 0 , 3065
3082 PUSHGETGLOBALFIELD List , 12
3085 APPLY3
3086 PUSHACC1
3087 PUSHACC1
3088 C_CALL2 make_vect
3090 PUSHCONST0
3091 MAKEBLOCK1 0
3093 PUSHACC4
3094 PUSHACC1
3095 PUSHACC3
3096 CLOSURE 2 , 3030
3099 PUSHGETGLOBALFIELD List , 9
3102 APPLY2
3103 ACC1
3104 RETURN 5
3106 RESTART
3107 GRAB 1
3109 ACC0
3110 VECTLENGTH
3111 PUSHACC2
3112 VECTLENGTH
3113 PUSHCONST0
3114 PUSHACC2
3115 EQ
3116 BRANCHIFNOT 3126
3118 CONST0
3119 PUSHACC1
3120 EQ
3121 BRANCHIFNOT 3126
3123 ATOM0
3124 RETURN 4
3126 CONST0
3127 PUSHCONST0
3128 PUSHACC3
3129 GTINT
3130 BRANCHIFNOT 3135
3132 ACC3
3133 BRANCH 3136
3135 ACC4
3136 C_CALL2 array_unsafe_get
3138 PUSHACC1
3139 PUSHACC3
3140 ADDINT
3141 C_CALL2 make_vect
3143 PUSHCONST0
3144 PUSHACC3
3145 OFFSETINT -1
3147 PUSH
3148 BRANCH 3164
3150 CHECK_SIGNALS
3151 ACC1
3152 PUSHACC6
3153 C_CALL2 array_unsafe_get
3155 PUSHACC2
3156 PUSHACC4
3157 C_CALL3 array_unsafe_set
3159 ACC1
3160 OFFSETINT 1
3162 ASSIGN 1
3164 ACC0
3165 PUSHACC2
3166 LEINT
3167 BRANCHIF 3150
3169 CONST0
3170 POP 2
3172 CONST0
3173 PUSHACC2
3174 OFFSETINT -1
3176 PUSH
3177 BRANCH 3195
3179 CHECK_SIGNALS
3180 ACC1
3181 PUSHACC7
3182 C_CALL2 array_unsafe_get
3184 PUSHACC5
3185 PUSHACC3
3186 ADDINT
3187 PUSHACC4
3188 C_CALL3 array_unsafe_set
3190 ACC1
3191 OFFSETINT 1
3193 ASSIGN 1
3195 ACC0
3196 PUSHACC2
3197 LEINT
3198 BRANCHIF 3179
3200 CONST0
3201 POP 2
3203 ACC0
3204 RETURN 5
3206 ACC0
3207 VECTLENGTH
3208 PUSHCONST0
3209 PUSHACC1
3210 EQ
3211 BRANCHIFNOT 3216
3213 ATOM0
3214 RETURN 2
3216 CONST0
3217 PUSHACC2
3218 C_CALL2 array_unsafe_get
3220 PUSHACC1
3221 C_CALL2 make_vect
3223 PUSHCONST1
3224 PUSHACC2
3225 OFFSETINT -1
3227 PUSH
3228 BRANCH 3244
3230 CHECK_SIGNALS
3231 ACC1
3232 PUSHACC5
3233 C_CALL2 array_unsafe_get
3235 PUSHACC2
3236 PUSHACC4
3237 C_CALL3 array_unsafe_set
3239 ACC1
3240 OFFSETINT 1
3242 ASSIGN 1
3244 ACC0
3245 PUSHACC2
3246 LEINT
3247 BRANCHIF 3230
3249 CONST0
3250 POP 2
3252 ACC0
3253 RETURN 3
3255 RESTART
3256 GRAB 2
3258 ATOM0
3259 PUSHACC1
3260 C_CALL2 make_vect
3262 PUSHCONST0
3263 PUSHACC2
3264 OFFSETINT -1
3266 PUSH
3267 BRANCH 3282
3269 CHECK_SIGNALS
3270 ACC5
3271 PUSHACC5
3272 C_CALL2 make_vect
3274 PUSHACC2
3275 PUSHACC4
3276 SETVECTITEM
3277 ACC1
3278 OFFSETINT 1
3280 ASSIGN 1
3282 ACC0
3283 PUSHACC2
3284 LEINT
3285 BRANCHIF 3269
3287 CONST0
3288 POP 2
3290 ACC0
3291 RETURN 4
3293 RESTART
3294 GRAB 1
3296 CONST0
3297 PUSHACC1
3298 EQ
3299 BRANCHIFNOT 3304
3301 ATOM0
3302 RETURN 2
3304 CONST0
3305 PUSHACC2
3306 APPLY1
3307 PUSHACC1
3308 C_CALL2 make_vect
3310 PUSHCONST1
3311 PUSHACC2
3312 OFFSETINT -1
3314 PUSH
3315 BRANCH 3330
3317 CHECK_SIGNALS
3318 ACC1
3319 PUSHACC5
3320 APPLY1
3321 PUSHACC2
3322 PUSHACC4
3323 C_CALL3 array_unsafe_set
3325 ACC1
3326 OFFSETINT 1
3328 ASSIGN 1
3330 ACC0
3331 PUSHACC2
3332 LEINT
3333 BRANCHIF 3317
3335 CONST0
3336 POP 2
3338 ACC0
3339 RETURN 3
3341 CLOSURE 0 , 3294
3344 PUSH
3345 CLOSURE 0 , 3256
3348 PUSH
3349 CLOSURE 0 , 3206
3352 PUSH
3353 CLOSURE 0 , 3107
3356 PUSH
3357 CLOSURE 0 , 3074
3360 PUSHACC0
3361 CLOSURE 1 , 3020
3364 PUSH
3365 CLOSURE 0 , 2917
3368 PUSH
3369 CLOSURE 0 , 2861
3372 PUSH
3373 CLOSURE 0 , 2749
3376 PUSH
3377 CLOSURE 0 , 2718
3380 PUSH
3381 CLOSURE 0 , 2662
3384 PUSH
3385 CLOSURE 0 , 2630
3388 PUSH
3389 CLOSURE 0 , 2572
3392 PUSH
3393 CLOSURE 0 , 2558
3396 PUSH
3397 CLOSURE 0 , 2505
3400 PUSH
3401 CLOSURE 0 , 2445
3404 PUSH
3405 CLOSURE 0 , 2407
3408 PUSHACC0
3409 PUSHACC2
3410 PUSHACC6
3411 PUSHACC 8
3413 PUSHACC 10
3415 PUSHACC 12
3417 PUSHACC 8
3419 PUSHACC 10
3421 PUSHACC 16
3423 PUSHACC 18
3425 PUSHACC 24
3427 PUSHACC 21
3429 PUSHACC 23
3431 PUSHACC 26
3433 PUSHACC 29
3435 PUSHACC 30
3437 PUSHACC 32
3439 MAKEBLOCK 17 , 0
3442 POP 17
3444 SETGLOBAL Array
3446 BRANCH 3456
3448 ACC0
3449 PUSHENVACC1
3450 GETFIELD0
3451 ADDINT
3452 PUSHENVACC1
3453 SETFIELD0
3454 RETURN 1
3456 CONST2
3457 PUSHCONSTINT 200000
3459 C_CALL2 make_vect
3461 PUSHCONST0
3462 MAKEBLOCK1 0
3464 PUSHACC1
3465 PUSHACC1
3466 CLOSURE 1 , 3448
3469 PUSHGETGLOBALFIELD Array , 11
3472 APPLY2
3473 CONSTINT 400000
3475 PUSHACC1
3476 GETFIELD0
3477 NEQ
3478 BRANCHIFNOT 3485
3480 GETGLOBAL Not_found
3482 MAKEBLOCK1 0
3484 RAISE
3485 POP 2
3487 ATOM0
3488 SETGLOBAL T310 - alloc-2
3490 STOP
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 BRANCH 746
11 RESTART
12 GRAB 1
14 ACC0
15 BRANCHIFNOT 28
17 ACC1
18 PUSHACC1
19 GETFIELD1
20 PUSHOFFSETCLOSURE0
21 APPLY2
22 PUSHACC1
23 GETFIELD0
24 MAKEBLOCK2 0
26 RETURN 2
28 ACC1
29 RETURN 2
31 RESTART
32 GRAB 3
34 CONST0
35 PUSHACC4
36 LEINT
37 BRANCHIFNOT 42
39 CONST0
40 RETURN 4
42 ACC3
43 PUSHACC3
44 PUSHACC3
45 PUSHACC3
46 C_CALL4 caml_input
48 PUSHCONST0
49 PUSHACC1
50 EQ
51 BRANCHIFNOT 58
53 GETGLOBAL End_of_file
55 MAKEBLOCK1 0
57 RAISE
58 ACC0
59 PUSHACC5
60 SUBINT
61 PUSHACC1
62 PUSHACC5
63 ADDINT
64 PUSHACC4
65 PUSHACC4
66 PUSHOFFSETCLOSURE0
67 APPTERM 4, 9
70 ACC0
71 C_CALL1 caml_input_scan_line
73 PUSHCONST0
74 PUSHACC1
75 EQ
76 BRANCHIFNOT 83
78 GETGLOBAL End_of_file
80 MAKEBLOCK1 0
82 RAISE
83 CONST0
84 PUSHACC1
85 GTINT
86 BRANCHIFNOT 107
88 ACC0
89 OFFSETINT -1
91 C_CALL1 create_string
93 PUSHACC1
94 OFFSETINT -1
96 PUSHCONST0
97 PUSHACC2
98 PUSHACC5
99 C_CALL4 caml_input
101 ACC2
102 C_CALL1 caml_input_char
104 ACC0
105 RETURN 3
107 ACC0
108 NEGINT
109 C_CALL1 create_string
111 PUSHACC1
112 NEGINT
113 PUSHCONST0
114 PUSHACC2
115 PUSHACC5
116 C_CALL4 caml_input
118 CONST0
119 PUSHTRAP 130
121 ACC6
122 PUSHOFFSETCLOSURE0
123 APPLY1
124 PUSHACC5
125 PUSHENVACC1
126 APPLY2
127 POPTRAP
128 RETURN 3
130 PUSHGETGLOBAL End_of_file
132 PUSHACC1
133 GETFIELD0
134 EQ
135 BRANCHIFNOT 140
137 ACC1
138 RETURN 4
140 ACC0
141 RAISE
142 ACC0
143 C_CALL1 caml_flush
145 RETURN 1
147 RESTART
148 GRAB 1
150 ACC1
151 PUSHACC1
152 C_CALL2 caml_output_char
154 RETURN 2
156 RESTART
157 GRAB 1
159 ACC1
160 PUSHACC1
161 C_CALL2 caml_output_char
163 RETURN 2
165 RESTART
166 GRAB 1
168 ACC1
169 PUSHACC1
170 C_CALL2 caml_output_int
172 RETURN 2
174 RESTART
175 GRAB 1
177 ACC1
178 PUSHACC1
179 C_CALL2 caml_seek_out
181 RETURN 2
183 ACC0
184 C_CALL1 caml_pos_out
186 RETURN 1
188 ACC0
189 C_CALL1 caml_channel_size
191 RETURN 1
193 RESTART
194 GRAB 1
196 ACC1
197 PUSHACC1
198 C_CALL2 caml_set_binary_mode
200 RETURN 2
202 ACC0
203 C_CALL1 caml_input_char
205 RETURN 1
207 ACC0
208 C_CALL1 caml_input_char
210 RETURN 1
212 ACC0
213 C_CALL1 caml_input_int
215 RETURN 1
217 ACC0
218 C_CALL1 input_value
220 RETURN 1
222 RESTART
223 GRAB 1
225 ACC1
226 PUSHACC1
227 C_CALL2 caml_seek_in
229 RETURN 2
231 ACC0
232 C_CALL1 caml_pos_in
234 RETURN 1
236 ACC0
237 C_CALL1 caml_channel_size
239 RETURN 1
241 ACC0
242 C_CALL1 caml_close_channel
244 RETURN 1
246 RESTART
247 GRAB 1
249 ACC1
250 PUSHACC1
251 C_CALL2 caml_set_binary_mode
253 RETURN 2
255 CONST0
256 PUSHENVACC1
257 APPLY1
258 ACC0
259 C_CALL1 sys_exit
261 RETURN 1
263 CONST0
264 PUSHENVACC1
265 GETFIELD0
266 APPTERM1 2
268 CONST0
269 PUSHENVACC1
270 APPLY1
271 CONST0
272 PUSHENVACC2
273 APPTERM1 2
275 ENVACC1
276 GETFIELD0
277 PUSHACC0
278 PUSHACC2
279 CLOSURE 2, 268
282 PUSHENVACC1
283 SETFIELD0
284 RETURN 2
286 ENVACC1
287 C_CALL1 caml_flush
289 ENVACC2
290 C_CALL1 caml_flush
292 RETURN 1
294 CONST0
295 PUSHENVACC1
296 APPLY1
297 C_CALL1 float_of_string
299 RETURN 1
301 CONST0
302 PUSHENVACC1
303 APPLY1
304 C_CALL1 int_of_string
306 RETURN 1
308 ENVACC2
309 C_CALL1 caml_flush
311 ENVACC1
312 PUSHENVACC3
313 APPTERM1 2
315 CONSTINT 13
317 PUSHENVACC1
318 C_CALL2 caml_output_char
320 ENVACC1
321 C_CALL1 caml_flush
323 RETURN 1
325 ACC0
326 PUSHENVACC1
327 PUSHENVACC2
328 APPLY2
329 CONSTINT 13
331 PUSHENVACC1
332 C_CALL2 caml_output_char
334 ENVACC1
335 C_CALL1 caml_flush
337 RETURN 1
339 ACC0
340 PUSHENVACC1
341 APPLY1
342 PUSHENVACC2
343 PUSHENVACC3
344 APPTERM2 3
346 ACC0
347 PUSHENVACC1
348 APPLY1
349 PUSHENVACC2
350 PUSHENVACC3
351 APPTERM2 3
353 ACC0
354 PUSHENVACC1
355 PUSHENVACC2
356 APPTERM2 3
358 ACC0
359 PUSHENVACC1
360 C_CALL2 caml_output_char
362 RETURN 1
364 CONSTINT 13
366 PUSHENVACC1
367 C_CALL2 caml_output_char
369 ENVACC1
370 C_CALL1 caml_flush
372 RETURN 1
374 ACC0
375 PUSHENVACC1
376 PUSHENVACC2
377 APPLY2
378 CONSTINT 13
380 PUSHENVACC1
381 C_CALL2 caml_output_char
383 RETURN 1
385 ACC0
386 PUSHENVACC1
387 APPLY1
388 PUSHENVACC2
389 PUSHENVACC3
390 APPTERM2 3
392 ACC0
393 PUSHENVACC1
394 APPLY1
395 PUSHENVACC2
396 PUSHENVACC3
397 APPTERM2 3
399 ACC0
400 PUSHENVACC1
401 PUSHENVACC2
402 APPTERM2 3
404 ACC0
405 PUSHENVACC1
406 C_CALL2 caml_output_char
408 RETURN 1
410 RESTART
411 GRAB 3
413 CONST0
414 PUSHACC3
415 LTINT
416 BRANCHIF 427
418 ACC1
419 C_CALL1 ml_string_length
421 PUSHACC4
422 PUSHACC4
423 ADDINT
424 GTINT
425 BRANCHIFNOT 432
427 GETGLOBAL "really_input"
429 PUSHENVACC1
430 APPTERM1 5
432 ACC3
433 PUSHACC3
434 PUSHACC3
435 PUSHACC3
436 PUSHENVACC2
437 APPTERM 4, 8
440 RESTART
441 GRAB 3
443 CONST0
444 PUSHACC3
445 LTINT
446 BRANCHIF 457
448 ACC1
449 C_CALL1 ml_string_length
451 PUSHACC4
452 PUSHACC4
453 ADDINT
454 GTINT
455 BRANCHIFNOT 462
457 GETGLOBAL "input"
459 PUSHENVACC1
460 APPTERM1 5
462 ACC3
463 PUSHACC3
464 PUSHACC3
465 PUSHACC3
466 C_CALL4 caml_input
468 RETURN 4
470 ACC0
471 PUSHCONST0
472 PUSHGETGLOBAL <0>(0, <0>(6, 0))
474 PUSHENVACC1
475 APPTERM3 4
477 ACC0
478 PUSHCONST0
479 PUSHGETGLOBAL <0>(0, <0>(7, 0))
481 PUSHENVACC1
482 APPTERM3 4
484 RESTART
485 GRAB 2
487 ACC1
488 PUSHACC1
489 PUSHACC4
490 C_CALL3 sys_open
492 C_CALL1 caml_open_descriptor
494 RETURN 3
496 ACC0
497 C_CALL1 caml_flush
499 ACC0
500 C_CALL1 caml_close_channel
502 RETURN 1
504 RESTART
505 GRAB 1
507 CONST0
508 PUSHACC2
509 PUSHACC2
510 C_CALL3 output_value
512 RETURN 2
514 RESTART
515 GRAB 3
517 CONST0
518 PUSHACC3
519 LTINT
520 BRANCHIF 531
522 ACC1
523 C_CALL1 ml_string_length
525 PUSHACC4
526 PUSHACC4
527 ADDINT
528 GTINT
529 BRANCHIFNOT 536
531 GETGLOBAL "output"
533 PUSHENVACC1
534 APPTERM1 5
536 ACC3
537 PUSHACC3
538 PUSHACC3
539 PUSHACC3
540 C_CALL4 caml_output
542 RETURN 4
544 RESTART
545 GRAB 1
547 ACC1
548 C_CALL1 ml_string_length
550 PUSHCONST0
551 PUSHACC3
552 PUSHACC3
553 C_CALL4 caml_output
555 RETURN 2
557 ACC0
558 PUSHCONSTINT 438
560 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(6, 0))))
562 PUSHENVACC1
563 APPTERM3 4
565 ACC0
566 PUSHCONSTINT 438
568 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(7, 0))))
570 PUSHENVACC1
571 APPTERM3 4
573 RESTART
574 GRAB 2
576 ACC1
577 PUSHACC1
578 PUSHACC4
579 C_CALL3 sys_open
581 C_CALL1 caml_open_descriptor
583 RETURN 3
585 ACC0
586 PUSHGETGLOBAL "%.12g"
588 C_CALL2 format_float
590 RETURN 1
592 ACC0
593 PUSHGETGLOBAL "%d"
595 C_CALL2 format_int
597 RETURN 1
599 GETGLOBAL "false"
601 PUSHACC1
602 C_CALL2 string_equal
604 BRANCHIFNOT 609
606 CONST0
607 RETURN 1
609 GETGLOBAL "true"
611 PUSHACC1
612 C_CALL2 string_equal
614 BRANCHIFNOT 619
616 CONST1
617 RETURN 1
619 GETGLOBAL "bool_of_string"
621 PUSHENVACC1
622 APPTERM1 2
624 ACC0
625 BRANCHIFNOT 631
627 GETGLOBAL "true"
629 RETURN 1
631 GETGLOBAL "false"
633 RETURN 1
635 CONST0
636 PUSHACC1
637 LTINT
638 BRANCHIF 646
640 CONSTINT 255
642 PUSHACC1
643 GTINT
644 BRANCHIFNOT 651
646 GETGLOBAL "char_of_int"
648 PUSHENVACC1
649 APPTERM1 2
651 ACC0
652 RETURN 1
654 RESTART
655 GRAB 1
657 ACC0
658 C_CALL1 ml_string_length
660 PUSHACC2
661 C_CALL1 ml_string_length
663 PUSHACC0
664 PUSHACC2
665 ADDINT
666 C_CALL1 create_string
668 PUSHACC2
669 PUSHCONST0
670 PUSHACC2
671 PUSHCONST0
672 PUSHACC7
673 C_CALL5 blit_string
675 ACC1
676 PUSHACC3
677 PUSHACC2
678 PUSHCONST0
679 PUSHACC 8
681 C_CALL5 blit_string
683 ACC0
684 RETURN 5
686 CONSTINT -1
688 PUSHACC1
689 XORINT
690 RETURN 1
692 CONST0
693 PUSHACC1
694 GEINT
695 BRANCHIFNOT 700
697 ACC0
698 RETURN 1
700 ACC0
701 NEGINT
702 RETURN 1
704 RESTART
705 GRAB 1
707 ACC1
708 PUSHACC1
709 C_CALL2 greaterequal
711 BRANCHIFNOT 716
713 ACC0
714 RETURN 2
716 ACC1
717 RETURN 2
719 RESTART
720 GRAB 1
722 ACC1
723 PUSHACC1
724 C_CALL2 lessequal
726 BRANCHIFNOT 731
728 ACC0
729 RETURN 2
731 ACC1
732 RETURN 2
734 ACC0
735 PUSHGETGLOBAL Invalid_argument
737 MAKEBLOCK2 0
739 RAISE
740 ACC0
741 PUSHGETGLOBAL Failure
743 MAKEBLOCK2 0
745 RAISE
746 CLOSURE 0, 740
749 PUSH
750 CLOSURE 0, 734
753 PUSHGETGLOBAL "Pervasives.Exit"
755 MAKEBLOCK1 0
757 PUSHGETGLOBAL "Pervasives.Assert_failure"
759 MAKEBLOCK1 0
761 PUSH
762 CLOSURE 0, 720
765 PUSH
766 CLOSURE 0, 705
769 PUSH
770 CLOSURE 0, 692
773 PUSH
774 CLOSURE 0, 686
777 PUSHCONST0
778 PUSHCONSTINT 31
780 PUSHCONST1
781 LSLINT
782 EQ
783 BRANCHIFNOT 789
785 CONSTINT 30
787 BRANCH 791
789 CONSTINT 62
791 PUSHCONST1
792 LSLINT
793 PUSHACC0
794 OFFSETINT -1
796 PUSH
797 CLOSURE 0, 655
800 PUSHACC 9
802 CLOSURE 1, 635
805 PUSH
806 CLOSURE 0, 624
809 PUSHACC 11
811 CLOSURE 1, 599
814 PUSH
815 CLOSURE 0, 592
818 PUSH
819 CLOSURE 0, 585
822 PUSH
823 CLOSUREREC 0, 12
827 CONST0
828 C_CALL1 caml_open_descriptor
830 PUSHCONST1
831 C_CALL1 caml_open_descriptor
833 PUSHCONST2
834 C_CALL1 caml_open_descriptor
836 PUSH
837 CLOSURE 0, 574
840 PUSHACC0
841 CLOSURE 1, 565
844 PUSHACC1
845 CLOSURE 1, 557
848 PUSH
849 CLOSURE 0, 545
852 PUSHACC 22
854 CLOSURE 1, 515
857 PUSH
858 CLOSURE 0, 505
861 PUSH
862 CLOSURE 0, 496
865 PUSH
866 CLOSURE 0, 485
869 PUSHACC0
870 CLOSURE 1, 477
873 PUSHACC1
874 CLOSURE 1, 470
877 PUSHACC 28
879 CLOSURE 1, 441
882 PUSH
883 CLOSUREREC 0, 32
887 ACC0
888 PUSHACC 31
890 CLOSURE 2, 411
893 PUSHACC 22
895 CLOSUREREC 1, 70
899 ACC 15
901 CLOSURE 1, 404
904 PUSHACC 11
906 PUSHACC 17
908 CLOSURE 2, 399
911 PUSHACC 12
913 PUSHACC 18
915 PUSHACC 23
917 CLOSURE 3, 392
920 PUSHACC 13
922 PUSHACC 19
924 PUSHACC 23
926 CLOSURE 3, 385
929 PUSHACC 14
931 PUSHACC 20
933 CLOSURE 2, 374
936 PUSHACC 20
938 CLOSURE 1, 364
941 PUSHACC 20
943 CLOSURE 1, 358
946 PUSHACC 17
948 PUSHACC 22
950 CLOSURE 2, 353
953 PUSHACC 18
955 PUSHACC 23
957 PUSHACC 29
959 CLOSURE 3, 346
962 PUSHACC 19
964 PUSHACC 24
966 PUSHACC 29
968 CLOSURE 3, 339
971 PUSHACC 20
973 PUSHACC 25
975 CLOSURE 2, 325
978 PUSHACC 25
980 CLOSURE 1, 315
983 PUSHACC 12
985 PUSHACC 28
987 PUSHACC 30
989 CLOSURE 3, 308
992 PUSHACC0
993 CLOSURE 1, 301
996 PUSHACC1
997 CLOSURE 1, 294
1000 PUSHACC 29
1002 PUSHACC 31
1004 CLOSURE 2, 286
1007 MAKEBLOCK1 0
1009 PUSHACC0
1010 CLOSURE 1, 275
1013 PUSHACC1
1014 CLOSURE 1, 263
1017 PUSHACC0
1018 CLOSURE 1, 255
1021 PUSHACC1
1022 PUSHACC 22
1024 PUSHACC4
1025 PUSHACC3
1026 PUSH
1027 CLOSURE 0, 247
1030 PUSH
1031 CLOSURE 0, 241
1034 PUSH
1035 CLOSURE 0, 236
1038 PUSH
1039 CLOSURE 0, 231
1042 PUSH
1043 CLOSURE 0, 223
1046 PUSH
1047 CLOSURE 0, 217
1050 PUSH
1051 CLOSURE 0, 212
1054 PUSH
1055 CLOSURE 0, 207
1058 PUSHACC 32
1060 PUSHACC 35
1062 PUSHACC 33
1064 PUSH
1065 CLOSURE 0, 202
1068 PUSHACC 41
1070 PUSHACC 40
1072 PUSHACC 42
1074 PUSH
1075 CLOSURE 0, 194
1078 PUSHACC 46
1080 PUSH
1081 CLOSURE 0, 188
1084 PUSH
1085 CLOSURE 0, 183
1088 PUSH
1089 CLOSURE 0, 175
1092 PUSHACC 51
1094 PUSH
1095 CLOSURE 0, 166
1098 PUSH
1099 CLOSURE 0, 157
1102 PUSHACC 55
1104 PUSHACC 57
1106 PUSH
1107 CLOSURE 0, 148
1110 PUSH
1111 CLOSURE 0, 142
1114 PUSHACC 63
1116 PUSHACC 62
1118 PUSHACC 64
1120 PUSHACC 38
1122 PUSHACC 40
1124 PUSHACC 42
1126 PUSHACC 44
1128 PUSHACC 46
1130 PUSHACC 48
1132 PUSHACC 50
1134 PUSHACC 52
1136 PUSHACC 54
1138 PUSHACC 56
1140 PUSHACC 58
1142 PUSHACC 60
1144 PUSHACC 62
1146 PUSHACC 64
1148 PUSHACC 66
1150 PUSHACC 82
1152 PUSHACC 84
1154 PUSHACC 86
1156 PUSHACC 88
1158 PUSHACC 90
1160 PUSHACC 92
1162 PUSHACC 94
1164 PUSHACC 96
1166 PUSHACC 98
1168 PUSHACC 100
1170 PUSHACC 104
1172 PUSHACC 104
1174 PUSHACC 104
1176 PUSHACC 108
1178 PUSHACC 110
1180 PUSHACC 112
1182 PUSHACC 117
1184 PUSHACC 117
1186 PUSHACC 117
1188 PUSHACC 117
1190 MAKEBLOCK 69, 0
1193 POP 53
1195 SETGLOBAL Pervasives
1197 BRANCH 2177
1199 RESTART
1200 GRAB 1
1202 ACC1
1203 BRANCHIFNOT 1213
1205 ACC1
1206 GETFIELD1
1207 PUSHACC1
1208 OFFSETINT 1
1210 PUSHOFFSETCLOSURE0
1211 APPTERM2 4
1213 ACC0
1214 RETURN 2
1216 RESTART
1217 GRAB 1
1219 ACC0
1220 BRANCHIFNOT 1251
1222 CONST0
1223 PUSHACC2
1224 EQ
1225 BRANCHIFNOT 1231
1227 ACC0
1228 GETFIELD0
1229 RETURN 2
1231 CONST0
1232 PUSHACC2
1233 GTINT
1234 BRANCHIFNOT 1244
1236 ACC1
1237 OFFSETINT -1
1239 PUSHACC1
1240 GETFIELD1
1241 PUSHOFFSETCLOSURE0
1242 APPTERM2 4
1244 GETGLOBAL "List.nth"
1246 PUSHGETGLOBALFIELD Pervasives, 2
1249 APPTERM1 3
1251 GETGLOBAL "nth"
1253 PUSHGETGLOBALFIELD Pervasives, 3
1256 APPTERM1 3
1258 RESTART
1259 GRAB 1
1261 ACC0
1262 BRANCHIFNOT 1274
1264 ACC1
1265 PUSHACC1
1266 GETFIELD0
1267 MAKEBLOCK2 0
1269 PUSHACC1
1270 GETFIELD1
1271 PUSHOFFSETCLOSURE0
1272 APPTERM2 4
1274 ACC1
1275 RETURN 2
1277 ACC0
1278 BRANCHIFNOT 1291
1280 ACC0
1281 GETFIELD1
1282 PUSHOFFSETCLOSURE0
1283 APPLY1
1284 PUSHACC1
1285 GETFIELD0
1286 PUSHGETGLOBALFIELD Pervasives, 16
1289 APPTERM2 3
1291 RETURN 1
1293 RESTART
1294 GRAB 1
1296 ACC1
1297 BRANCHIFNOT 1313
1299 ACC1
1300 GETFIELD0
1301 PUSHACC1
1302 APPLY1
1303 PUSHACC2
1304 GETFIELD1
1305 PUSHACC2
1306 PUSHOFFSETCLOSURE0
1307 APPLY2
1308 PUSHACC1
1309 MAKEBLOCK2 0
1311 POP 1
1313 RETURN 2
1315 RESTART
1316 GRAB 1
1318 ACC1
1319 BRANCHIFNOT 1331
1321 ACC1
1322 GETFIELD0
1323 PUSHACC1
1324 APPLY1
1325 ACC1
1326 GETFIELD1
1327 PUSHACC1
1328 PUSHOFFSETCLOSURE0
1329 APPTERM2 4
1331 RETURN 2
1333 RESTART
1334 GRAB 2
1336 ACC2
1337 BRANCHIFNOT 1350
1339 ACC2
1340 GETFIELD1
1341 PUSHACC3
1342 GETFIELD0
1343 PUSHACC3
1344 PUSHACC3
1345 APPLY2
1346 PUSHACC2
1347 PUSHOFFSETCLOSURE0
1348 APPTERM3 6
1350 ACC1
1351 RETURN 3
1353 RESTART
1354 GRAB 2
1356 ACC1
1357 BRANCHIFNOT 1370
1359 ACC2
1360 PUSHACC2
1361 GETFIELD1
1362 PUSHACC2
1363 PUSHOFFSETCLOSURE0
1364 APPLY3
1365 PUSHACC2
1366 GETFIELD0
1367 PUSHACC2
1368 APPTERM2 5
1370 ACC2
1371 RETURN 3
1373 RESTART
1374 GRAB 2
1376 ACC1
1377 BRANCHIFNOT 1400
1379 ACC2
1380 BRANCHIFNOT 1407
1382 ACC2
1383 GETFIELD0
1384 PUSHACC2
1385 GETFIELD0
1386 PUSHACC2
1387 APPLY2
1388 PUSHACC3
1389 GETFIELD1
1390 PUSHACC3
1391 GETFIELD1
1392 PUSHACC3
1393 PUSHOFFSETCLOSURE0
1394 APPLY3
1395 PUSHACC1
1396 MAKEBLOCK2 0
1398 RETURN 4
1400 ACC2
1401 BRANCHIFNOT 1405
1403 BRANCH 1407
1405 RETURN 3
1407 GETGLOBAL "List.map2"
1409 PUSHGETGLOBALFIELD Pervasives, 2
1412 APPTERM1 4
1414 RESTART
1415 GRAB 2
1417 ACC1
1418 BRANCHIFNOT 1437
1420 ACC2
1421 BRANCHIFNOT 1444
1423 ACC2
1424 GETFIELD0
1425 PUSHACC2
1426 GETFIELD0
1427 PUSHACC2
1428 APPLY2
1429 ACC2
1430 GETFIELD1
1431 PUSHACC2
1432 GETFIELD1
1433 PUSHACC2
1434 PUSHOFFSETCLOSURE0
1435 APPTERM3 6
1437 ACC2
1438 BRANCHIFNOT 1442
1440 BRANCH 1444
1442 RETURN 3
1444 GETGLOBAL "List.iter2"
1446 PUSHGETGLOBALFIELD Pervasives, 2
1449 APPTERM1 4
1451 RESTART
1452 GRAB 3
1454 ACC2
1455 BRANCHIFNOT 1476
1457 ACC3
1458 BRANCHIFNOT 1482
1460 ACC3
1461 GETFIELD1
1462 PUSHACC3
1463 GETFIELD1
1464 PUSHACC5
1465 GETFIELD0
1466 PUSHACC5
1467 GETFIELD0
1468 PUSHACC5
1469 PUSHACC5
1470 APPLY3
1471 PUSHACC3
1472 PUSHOFFSETCLOSURE0
1473 APPTERM 4, 8
1476 ACC3
1477 BRANCHIF 1482
1479 ACC1
1480 RETURN 4
1482 GETGLOBAL "List.fold_left2"
1484 PUSHGETGLOBALFIELD Pervasives, 2
1487 APPTERM1 5
1489 RESTART
1490 GRAB 3
1492 ACC1
1493 BRANCHIFNOT 1516
1495 ACC2
1496 BRANCHIFNOT 1522
1498 PUSH_RETADDR 1509
1500 ACC6
1501 PUSHACC6
1502 GETFIELD1
1503 PUSHACC6
1504 GETFIELD1
1505 PUSHACC6
1506 PUSHOFFSETCLOSURE0
1507 APPLY 4
1509 PUSHACC3
1510 GETFIELD0
1511 PUSHACC3
1512 GETFIELD0
1513 PUSHACC3
1514 APPTERM3 7
1516 ACC2
1517 BRANCHIF 1522
1519 ACC3
1520 RETURN 4
1522 GETGLOBAL "List.fold_right2"
1524 PUSHGETGLOBALFIELD Pervasives, 2
1527 APPTERM1 5
1529 RESTART
1530 GRAB 1
1532 ACC1
1533 BRANCHIFNOT 1549
1535 ACC1
1536 GETFIELD0
1537 PUSHACC1
1538 APPLY1
1539 BRANCHIFNOT 1547
1541 ACC1
1542 GETFIELD1
1543 PUSHACC1
1544 PUSHOFFSETCLOSURE0
1545 APPTERM2 4
1547 RETURN 2
1549 CONST1
1550 RETURN 2
1552 RESTART
1553 GRAB 1
1555 ACC1
1556 BRANCHIFNOT 1570
1558 ACC1
1559 GETFIELD0
1560 PUSHACC1
1561 APPLY1
1562 BRANCHIF 1570
1564 ACC1
1565 GETFIELD1
1566 PUSHACC1
1567 PUSHOFFSETCLOSURE0
1568 APPTERM2 4
1570 RETURN 2
1572 RESTART
1573 GRAB 2
1575 ACC1
1576 BRANCHIFNOT 1599
1578 ACC2
1579 BRANCHIFNOT 1605
1581 ACC2
1582 GETFIELD0
1583 PUSHACC2
1584 GETFIELD0
1585 PUSHACC2
1586 APPLY2
1587 BRANCHIFNOT 1597
1589 ACC2
1590 GETFIELD1
1591 PUSHACC2
1592 GETFIELD1
1593 PUSHACC2
1594 PUSHOFFSETCLOSURE0
1595 APPTERM3 6
1597 RETURN 3
1599 ACC2
1600 BRANCHIF 1605
1602 CONST1
1603 RETURN 3
1605 GETGLOBAL "List.for_all2"
1607 PUSHGETGLOBALFIELD Pervasives, 2
1610 APPTERM1 4
1612 RESTART
1613 GRAB 2
1615 ACC1
1616 BRANCHIFNOT 1639
1618 ACC2
1619 BRANCHIFNOT 1646
1621 ACC2
1622 GETFIELD0
1623 PUSHACC2
1624 GETFIELD0
1625 PUSHACC2
1626 APPLY2
1627 BRANCHIF 1637
1629 ACC2
1630 GETFIELD1
1631 PUSHACC2
1632 GETFIELD1
1633 PUSHACC2
1634 PUSHOFFSETCLOSURE0
1635 APPTERM3 6
1637 RETURN 3
1639 ACC2
1640 BRANCHIFNOT 1644
1642 BRANCH 1646
1644 RETURN 3
1646 GETGLOBAL "List.exists2"
1648 PUSHGETGLOBALFIELD Pervasives, 2
1651 APPTERM1 4
1653 RESTART
1654 GRAB 1
1656 ACC1
1657 BRANCHIFNOT 1672
1659 ACC0
1660 PUSHACC2
1661 GETFIELD0
1662 C_CALL2 equal
1664 BRANCHIF 1672
1666 ACC1
1667 GETFIELD1
1668 PUSHACC1
1669 PUSHOFFSETCLOSURE0
1670 APPTERM2 4
1672 RETURN 2
1674 RESTART
1675 GRAB 1
1677 ACC1
1678 BRANCHIFNOT 1692
1680 ACC0
1681 PUSHACC2
1682 GETFIELD0
1683 EQ
1684 BRANCHIF 1692
1686 ACC1
1687 GETFIELD1
1688 PUSHACC1
1689 PUSHOFFSETCLOSURE0
1690 APPTERM2 4
1692 RETURN 2
1694 RESTART
1695 GRAB 1
1697 ACC1
1698 BRANCHIFNOT 1719
1700 ACC1
1701 GETFIELD0
1702 PUSHACC1
1703 PUSHACC1
1704 GETFIELD0
1705 C_CALL2 equal
1707 BRANCHIFNOT 1713
1709 ACC0
1710 GETFIELD1
1711 RETURN 3
1713 ACC2
1714 GETFIELD1
1715 PUSHACC2
1716 PUSHOFFSETCLOSURE0
1717 APPTERM2 5
1719 GETGLOBAL Not_found
1721 MAKEBLOCK1 0
1723 RAISE
1724 RESTART
1725 GRAB 1
1727 ACC1
1728 BRANCHIFNOT 1748
1730 ACC1
1731 GETFIELD0
1732 PUSHACC1
1733 PUSHACC1
1734 GETFIELD0
1735 EQ
1736 BRANCHIFNOT 1742
1738 ACC0
1739 GETFIELD1
1740 RETURN 3
1742 ACC2
1743 GETFIELD1
1744 PUSHACC2
1745 PUSHOFFSETCLOSURE0
1746 APPTERM2 5
1748 GETGLOBAL Not_found
1750 MAKEBLOCK1 0
1752 RAISE
1753 RESTART
1754 GRAB 1
1756 ACC1
1757 BRANCHIFNOT 1773
1759 ACC0
1760 PUSHACC2
1761 GETFIELD0
1762 GETFIELD0
1763 C_CALL2 equal
1765 BRANCHIF 1773
1767 ACC1
1768 GETFIELD1
1769 PUSHACC1
1770 PUSHOFFSETCLOSURE0
1771 APPTERM2 4
1773 RETURN 2
1775 RESTART
1776 GRAB 1
1778 ACC1
1779 BRANCHIFNOT 1794
1781 ACC0
1782 PUSHACC2
1783 GETFIELD0
1784 GETFIELD0
1785 EQ
1786 BRANCHIF 1794
1788 ACC1
1789 GETFIELD1
1790 PUSHACC1
1791 PUSHOFFSETCLOSURE0
1792 APPTERM2 4
1794 RETURN 2
1796 RESTART
1797 GRAB 1
1799 ACC1
1800 BRANCHIFNOT 1825
1802 ACC1
1803 GETFIELD0
1804 PUSHACC2
1805 GETFIELD1
1806 PUSHACC2
1807 PUSHACC2
1808 GETFIELD0
1809 C_CALL2 equal
1811 BRANCHIFNOT 1816
1813 ACC0
1814 RETURN 4
1816 ACC0
1817 PUSHACC3
1818 PUSHOFFSETCLOSURE0
1819 APPLY2
1820 PUSHACC2
1821 MAKEBLOCK2 0
1823 POP 2
1825 RETURN 2
1827 RESTART
1828 GRAB 1
1830 ACC1
1831 BRANCHIFNOT 1855
1833 ACC1
1834 GETFIELD0
1835 PUSHACC2
1836 GETFIELD1
1837 PUSHACC2
1838 PUSHACC2
1839 GETFIELD0
1840 EQ
1841 BRANCHIFNOT 1846
1843 ACC0
1844 RETURN 4
1846 ACC0
1847 PUSHACC3
1848 PUSHOFFSETCLOSURE0
1849 APPLY2
1850 PUSHACC2
1851 MAKEBLOCK2 0
1853 POP 2
1855 RETURN 2
1857 RESTART
1858 GRAB 1
1860 ACC1
1861 BRANCHIFNOT 1879
1863 ACC1
1864 GETFIELD0
1865 PUSHACC0
1866 PUSHACC2
1867 APPLY1
1868 BRANCHIFNOT 1873
1870 ACC0
1871 RETURN 3
1873 ACC2
1874 GETFIELD1
1875 PUSHACC2
1876 PUSHOFFSETCLOSURE0
1877 APPTERM2 5
1879 GETGLOBAL Not_found
1881 MAKEBLOCK1 0
1883 RAISE
1884 RESTART
1885 GRAB 2
1887 ACC2
1888 BRANCHIFNOT 1917
1890 ACC2
1891 GETFIELD0
1892 PUSHACC3
1893 GETFIELD1
1894 PUSHACC1
1895 PUSHENVACC2
1896 APPLY1
1897 BRANCHIFNOT 1908
1899 ACC0
1900 PUSHACC4
1901 PUSHACC4
1902 PUSHACC4
1903 MAKEBLOCK2 0
1905 PUSHOFFSETCLOSURE0
1906 APPTERM3 8
1908 ACC0
1909 PUSHACC4
1910 PUSHACC3
1911 MAKEBLOCK2 0
1913 PUSHACC4
1914 PUSHOFFSETCLOSURE0
1915 APPTERM3 8
1917 ACC1
1918 PUSHENVACC1
1919 APPLY1
1920 PUSHACC1
1921 PUSHENVACC1
1922 APPLY1
1923 MAKEBLOCK2 0
1925 RETURN 3
1927 RESTART
1928 GRAB 1
1930 ACC0
1931 PUSHENVACC1
1932 CLOSUREREC 2, 1885
1936 ACC2
1937 PUSHCONST0
1938 PUSHCONST0
1939 PUSHACC3
1940 APPTERM3 6
1942 ACC0
1943 BRANCHIFNOT 1967
1945 ACC0
1946 GETFIELD0
1947 PUSHACC1
1948 GETFIELD1
1949 PUSHOFFSETCLOSURE0
1950 APPLY1
1951 PUSHACC0
1952 GETFIELD1
1953 PUSHACC2
1954 GETFIELD1
1955 MAKEBLOCK2 0
1957 PUSHACC1
1958 GETFIELD0
1959 PUSHACC3
1960 GETFIELD0
1961 MAKEBLOCK2 0
1963 MAKEBLOCK2 0
1965 RETURN 3
1967 GETGLOBAL <0>(0, 0)
1969 RETURN 1
1971 RESTART
1972 GRAB 1
1974 ACC0
1975 BRANCHIFNOT 1996
1977 ACC1
1978 BRANCHIFNOT 2003
1980 ACC1
1981 GETFIELD1
1982 PUSHACC1
1983 GETFIELD1
1984 PUSHOFFSETCLOSURE0
1985 APPLY2
1986 PUSHACC2
1987 GETFIELD0
1988 PUSHACC2
1989 GETFIELD0
1990 MAKEBLOCK2 0
1992 MAKEBLOCK2 0
1994 RETURN 2
1996 ACC1
1997 BRANCHIFNOT 2001
1999 BRANCH 2003
2001 RETURN 2
2003 GETGLOBAL "List.combine"
2005 PUSHGETGLOBALFIELD Pervasives, 2
2008 APPTERM1 3
2010 RESTART
2011 GRAB 1
2013 ACC1
2014 BRANCHIFNOT 2038
2016 ACC1
2017 GETFIELD0
2018 PUSHACC2
2019 GETFIELD1
2020 PUSHACC1
2021 PUSHENVACC2
2022 APPLY1
2023 BRANCHIFNOT 2033
2025 ACC0
2026 PUSHACC3
2027 PUSHACC3
2028 MAKEBLOCK2 0
2030 PUSHOFFSETCLOSURE0
2031 APPTERM2 6
2033 ACC0
2034 PUSHACC3
2035 PUSHOFFSETCLOSURE0
2036 APPTERM2 6
2038 ACC0
2039 PUSHENVACC1
2040 APPTERM1 3
2042 ACC0
2043 PUSHENVACC1
2044 CLOSUREREC 2, 2011
2048 CONST0
2049 PUSHACC1
2050 APPTERM1 3
2052 RESTART
2053 GRAB 2
2055 ACC1
2056 BRANCHIFNOT 2077
2058 ACC2
2059 BRANCHIFNOT 2084
2061 ACC2
2062 GETFIELD1
2063 PUSHACC2
2064 GETFIELD1
2065 PUSHACC2
2066 PUSHACC5
2067 GETFIELD0
2068 PUSHACC5
2069 GETFIELD0
2070 PUSHENVACC1
2071 APPLY2
2072 MAKEBLOCK2 0
2074 PUSHOFFSETCLOSURE0
2075 APPTERM3 6
2077 ACC2
2078 BRANCHIFNOT 2082
2080 BRANCH 2084
2082 RETURN 3
2084 GETGLOBAL "List.rev_map2"
2086 PUSHGETGLOBALFIELD Pervasives, 2
2089 APPTERM1 4
2091 RESTART
2092 GRAB 2
2094 ACC0
2095 CLOSUREREC 1, 2053
2099 ACC3
2100 PUSHACC3
2101 PUSHCONST0
2102 PUSHACC3
2103 APPTERM3 7
2105 RESTART
2106 GRAB 1
2108 ACC1
2109 BRANCHIFNOT 2123
2111 ACC1
2112 GETFIELD1
2113 PUSHACC1
2114 PUSHACC3
2115 GETFIELD0
2116 PUSHENVACC1
2117 APPLY1
2118 MAKEBLOCK2 0
2120 PUSHOFFSETCLOSURE0
2121 APPTERM2 4
2123 ACC0
2124 RETURN 2
2126 RESTART
2127 GRAB 1
2129 ACC0
2130 CLOSUREREC 1, 2106
2134 ACC2
2135 PUSHCONST0
2136 PUSHACC2
2137 APPTERM2 5
2139 CONST0
2140 PUSHACC1
2141 PUSHENVACC1
2142 APPTERM2 3
2144 ACC0
2145 BRANCHIFNOT 2151
2147 ACC0
2148 GETFIELD1
2149 RETURN 1
2151 GETGLOBAL "tl"
2153 PUSHGETGLOBALFIELD Pervasives, 3
2156 APPTERM1 2
2158 ACC0
2159 BRANCHIFNOT 2165
2161 ACC0
2162 GETFIELD0
2163 RETURN 1
2165 GETGLOBAL "hd"
2167 PUSHGETGLOBALFIELD Pervasives, 3
2170 APPTERM1 2
2172 ACC0
2173 PUSHCONST0
2174 PUSHENVACC1
2175 APPTERM2 3
2177 CLOSUREREC 0, 1200
2181 ACC0
2182 CLOSURE 1, 2172
2185 PUSH
2186 CLOSURE 0, 2158
2189 PUSH
2190 CLOSURE 0, 2144
2193 PUSH
2194 CLOSUREREC 0, 1217
2198 GETGLOBALFIELD Pervasives, 16
2201 PUSH
2202 CLOSUREREC 0, 1259
2206 ACC0
2207 CLOSURE 1, 2139
2210 PUSH
2211 CLOSUREREC 0, 1277
2215 CLOSUREREC 0, 1294
2219 CLOSURE 0, 2127
2222 PUSH
2223 CLOSUREREC 0, 1316
2227 CLOSUREREC 0, 1334
2231 CLOSUREREC 0, 1354
2235 CLOSUREREC 0, 1374
2239 CLOSURE 0, 2092
2242 PUSH
2243 CLOSUREREC 0, 1415
2247 CLOSUREREC 0, 1452
2251 CLOSUREREC 0, 1490
2255 CLOSUREREC 0, 1530
2259 CLOSUREREC 0, 1553
2263 CLOSUREREC 0, 1573
2267 CLOSUREREC 0, 1613
2271 CLOSUREREC 0, 1654
2275 CLOSUREREC 0, 1675
2279 CLOSUREREC 0, 1695
2283 CLOSUREREC 0, 1725
2287 CLOSUREREC 0, 1754
2291 CLOSUREREC 0, 1776
2295 CLOSUREREC 0, 1797
2299 CLOSUREREC 0, 1828
2303 CLOSUREREC 0, 1858
2307 ACC 24
2309 CLOSURE 1, 2042
2312 PUSHACC 25
2314 CLOSUREREC 1, 1928
2318 CLOSUREREC 0, 1942
2322 CLOSUREREC 0, 1972
2326 ACC0
2327 PUSHACC2
2328 PUSHACC7
2329 PUSHACC 9
2331 PUSHACC 11
2333 PUSHACC 13
2335 PUSHACC 15
2337 PUSHACC 17
2339 PUSHACC 10
2341 PUSHACC 12
2343 PUSHACC 13
2345 PUSHACC 15
2347 PUSHACC 23
2349 PUSHACC 25
2351 PUSHACC 27
2353 PUSHACC 29
2355 PUSHACC 31
2357 PUSHACC 33
2359 PUSHACC 35
2361 PUSHACC 37
2363 PUSHACC 40
2365 PUSHACC 42
2367 PUSHACC 41
2369 PUSHACC 45
2371 PUSHACC 47
2373 PUSHACC 50
2375 PUSHACC 52
2377 PUSHACC 51
2379 PUSHACC 55
2381 PUSHACC 56
2383 PUSHACC 59
2385 PUSHACC 61
2387 PUSHACC 60
2389 PUSHACC 64
2391 PUSHACC 66
2393 PUSHACC 68
2395 PUSHACC 70
2397 MAKEBLOCK 37, 0
2400 POP 36
2402 SETGLOBAL List
2404 BRANCH 3341
2406 RESTART
2407 GRAB 2
2409 ACC2
2410 PUSHACC2
2411 VECTLENGTH
2412 OFFSETINT -1
2414 PUSHCONST0
2415 PUSH
2416 BRANCH 2433
2418 CHECK_SIGNALS
2419 ACC2
2420 PUSHACC2
2421 PUSHACC6
2422 C_CALL2 array_unsafe_get
2424 PUSHACC5
2425 APPLY2
2426 ASSIGN 2
2428 ACC1
2429 OFFSETINT -1
2431 ASSIGN 1
2433 ACC0
2434 PUSHACC2
2435 GEINT
2436 BRANCHIF 2418
2438 CONST0
2439 POP 2
2441 ACC0
2442 RETURN 4
2444 RESTART
2445 GRAB 2
2447 ACC1
2448 PUSHCONST0
2449 PUSHACC4
2450 VECTLENGTH
2451 OFFSETINT -1
2453 PUSH
2454 BRANCH 2471
2456 CHECK_SIGNALS
2457 ACC1
2458 PUSHACC6
2459 C_CALL2 array_unsafe_get
2461 PUSHACC3
2462 PUSHACC5
2463 APPLY2
2464 ASSIGN 2
2466 ACC1
2467 OFFSETINT 1
2469 ASSIGN 1
2471 ACC0
2472 PUSHACC2
2473 LEINT
2474 BRANCHIF 2456
2476 CONST0
2477 POP 2
2479 ACC0
2480 RETURN 4
2482 RESTART
2483 GRAB 1
2485 ACC1
2486 BRANCHIFNOT 2502
2488 ACC1
2489 GETFIELD0
2490 PUSHACC1
2491 PUSHENVACC1
2492 C_CALL3 array_unsafe_set
2494 ACC1
2495 GETFIELD1
2496 PUSHACC1
2497 OFFSETINT 1
2499 PUSHOFFSETCLOSURE0
2500 APPTERM2 4
2502 ENVACC1
2503 RETURN 2
2505 ACC0
2506 BRANCHIFNOT 2531
2508 ACC0
2509 GETFIELD1
2510 PUSHACC1
2511 GETFIELD0
2512 PUSHACC1
2513 PUSHGETGLOBALFIELD List, 0
2516 APPLY1
2517 OFFSETINT 1
2519 C_CALL2 make_vect
2521 PUSHACC0
2522 CLOSUREREC 1, 2483
2526 ACC2
2527 PUSHCONST1
2528 PUSHACC2
2529 APPTERM2 6
2531 ATOM0
2532 RETURN 1
2534 RESTART
2535 GRAB 1
2537 CONST0
2538 PUSHACC1
2539 LTINT
2540 BRANCHIFNOT 2545
2542 ACC1
2543 RETURN 2
2545 ACC1
2546 PUSHACC1
2547 PUSHENVACC1
2548 C_CALL2 array_unsafe_get
2550 MAKEBLOCK2 0
2552 PUSHACC1
2553 OFFSETINT -1
2555 PUSHOFFSETCLOSURE0
2556 APPTERM2 4
2558 ACC0
2559 CLOSUREREC 1, 2535
2563 CONST0
2564 PUSHACC2
2565 VECTLENGTH
2566 OFFSETINT -1
2568 PUSHACC2
2569 APPTERM2 4
2571 RESTART
2572 GRAB 1
2574 ACC1
2575 VECTLENGTH
2576 PUSHCONST0
2577 PUSHACC1
2578 EQ
2579 BRANCHIFNOT 2584
2581 ATOM0
2582 RETURN 3
2584 CONST0
2585 PUSHACC3
2586 C_CALL2 array_unsafe_get
2588 PUSHCONST0
2589 PUSHACC3
2590 APPLY2
2591 PUSHACC1
2592 C_CALL2 make_vect
2594 PUSHCONST1
2595 PUSHACC2
2596 OFFSETINT -1
2598 PUSH
2599 BRANCH 2618
2601 CHECK_SIGNALS
2602 ACC1
2603 PUSHACC6
2604 C_CALL2 array_unsafe_get
2606 PUSHACC2
2607 PUSHACC6
2608 APPLY2
2609 PUSHACC2
2610 PUSHACC4
2611 C_CALL3 array_unsafe_set
2613 ACC1
2614 OFFSETINT 1
2616 ASSIGN 1
2618 ACC0
2619 PUSHACC2
2620 LEINT
2621 BRANCHIF 2601
2623 CONST0
2624 POP 2
2626 ACC0
2627 RETURN 4
2629 RESTART
2630 GRAB 1
2632 CONST0
2633 PUSHACC2
2634 VECTLENGTH
2635 OFFSETINT -1
2637 PUSH
2638 BRANCH 2653
2640 CHECK_SIGNALS
2641 ACC1
2642 PUSHACC4
2643 C_CALL2 array_unsafe_get
2645 PUSHACC2
2646 PUSHACC4
2647 APPLY2
2648 ACC1
2649 OFFSETINT 1
2651 ASSIGN 1
2653 ACC0
2654 PUSHACC2
2655 LEINT
2656 BRANCHIF 2640
2658 CONST0
2659 RETURN 4
2661 RESTART
2662 GRAB 1
2664 ACC1
2665 VECTLENGTH
2666 PUSHCONST0
2667 PUSHACC1
2668 EQ
2669 BRANCHIFNOT 2674
2671 ATOM0
2672 RETURN 3
2674 CONST0
2675 PUSHACC3
2676 C_CALL2 array_unsafe_get
2678 PUSHACC2
2679 APPLY1
2680 PUSHACC1
2681 C_CALL2 make_vect
2683 PUSHCONST1
2684 PUSHACC2
2685 OFFSETINT -1
2687 PUSH
2688 BRANCH 2706
2690 CHECK_SIGNALS
2691 ACC1
2692 PUSHACC6
2693 C_CALL2 array_unsafe_get
2695 PUSHACC5
2696 APPLY1
2697 PUSHACC2
2698 PUSHACC4
2699 C_CALL3 array_unsafe_set
2701 ACC1
2702 OFFSETINT 1
2704 ASSIGN 1
2706 ACC0
2707 PUSHACC2
2708 LEINT
2709 BRANCHIF 2690
2711 CONST0
2712 POP 2
2714 ACC0
2715 RETURN 4
2717 RESTART
2718 GRAB 1
2720 CONST0
2721 PUSHACC2
2722 VECTLENGTH
2723 OFFSETINT -1
2725 PUSH
2726 BRANCH 2740
2728 CHECK_SIGNALS
2729 ACC1
2730 PUSHACC4
2731 C_CALL2 array_unsafe_get
2733 PUSHACC3
2734 APPLY1
2735 ACC1
2736 OFFSETINT 1
2738 ASSIGN 1
2740 ACC0
2741 PUSHACC2
2742 LEINT
2743 BRANCHIF 2728
2745 CONST0
2746 RETURN 4
2748 RESTART
2749 GRAB 4
2751 CONST0
2752 PUSHACC5
2753 LTINT
2754 BRANCHIF 2782
2756 CONST0
2757 PUSHACC2
2758 LTINT
2759 BRANCHIF 2782
2761 ACC0
2762 VECTLENGTH
2763 PUSHACC5
2764 PUSHACC3
2765 ADDINT
2766 GTINT
2767 BRANCHIF 2782
2769 CONST0
2770 PUSHACC4
2771 LTINT
2772 BRANCHIF 2782
2774 ACC2
2775 VECTLENGTH
2776 PUSHACC5
2777 PUSHACC5
2778 ADDINT
2779 GTINT
2780 BRANCHIFNOT 2789
2782 GETGLOBAL "Array.blit"
2784 PUSHGETGLOBALFIELD Pervasives, 2
2787 APPTERM1 6
2789 ACC3
2790 PUSHACC2
2791 LTINT
2792 BRANCHIFNOT 2827
2794 ACC4
2795 OFFSETINT -1
2797 PUSHCONST0
2798 PUSH
2799 BRANCH 2819
2801 CHECK_SIGNALS
2802 ACC1
2803 PUSHACC4
2804 ADDINT
2805 PUSHACC3
2806 C_CALL2 array_unsafe_get
2808 PUSHACC2
2809 PUSHACC7
2810 ADDINT
2811 PUSHACC6
2812 C_CALL3 array_unsafe_set
2814 ACC1
2815 OFFSETINT -1
2817 ASSIGN 1
2819 ACC0
2820 PUSHACC2
2821 GEINT
2822 BRANCHIF 2801
2824 CONST0
2825 RETURN 7
2827 CONST0
2828 PUSHACC5
2829 OFFSETINT -1
2831 PUSH
2832 BRANCH 2852
2834 CHECK_SIGNALS
2835 ACC1
2836 PUSHACC4
2837 ADDINT
2838 PUSHACC3
2839 C_CALL2 array_unsafe_get
2841 PUSHACC2
2842 PUSHACC7
2843 ADDINT
2844 PUSHACC6
2845 C_CALL3 array_unsafe_set
2847 ACC1
2848 OFFSETINT 1
2850 ASSIGN 1
2852 ACC0
2853 PUSHACC2
2854 LEINT
2855 BRANCHIF 2834
2857 CONST0
2858 RETURN 7
2860 RESTART
2861 GRAB 3
2863 CONST0
2864 PUSHACC2
2865 LTINT
2866 BRANCHIF 2881
2868 CONST0
2869 PUSHACC3
2870 LTINT
2871 BRANCHIF 2881
2873 ACC0
2874 VECTLENGTH
2875 PUSHACC3
2876 PUSHACC3
2877 ADDINT
2878 GTINT
2879 BRANCHIFNOT 2888
2881 GETGLOBAL "Array.fill"
2883 PUSHGETGLOBALFIELD Pervasives, 2
2886 APPTERM1 5
2888 ACC1
2889 PUSHACC3
2890 PUSHACC3
2891 ADDINT
2892 OFFSETINT -1
2894 PUSH
2895 BRANCH 2908
2897 CHECK_SIGNALS
2898 ACC5
2899 PUSHACC2
2900 PUSHACC4
2901 C_CALL3 array_unsafe_set
2903 ACC1
2904 OFFSETINT 1
2906 ASSIGN 1
2908 ACC0
2909 PUSHACC2
2910 LEINT
2911 BRANCHIF 2897
2913 CONST0
2914 RETURN 6
2916 RESTART
2917 GRAB 2
2919 CONST0
2920 PUSHACC2
2921 LTINT
2922 BRANCHIF 2937
2924 CONST0
2925 PUSHACC3
2926 LTINT
2927 BRANCHIF 2937
2929 ACC0
2930 VECTLENGTH
2931 PUSHACC3
2932 PUSHACC3
2933 ADDINT
2934 GTINT
2935 BRANCHIFNOT 2944
2937 GETGLOBAL "Array.sub"
2939 PUSHGETGLOBALFIELD Pervasives, 2
2942 APPTERM1 4
2944 CONST0
2945 PUSHACC3
2946 EQ
2947 BRANCHIFNOT 2952
2949 ATOM0
2950 RETURN 3
2952 ACC1
2953 PUSHACC1
2954 C_CALL2 array_unsafe_get
2956 PUSHACC3
2957 C_CALL2 make_vect
2959 PUSHCONST1
2960 PUSHACC4
2961 OFFSETINT -1
2963 PUSH
2964 BRANCH 2982
2966 CHECK_SIGNALS
2967 ACC1
2968 PUSHACC5
2969 ADDINT
2970 PUSHACC4
2971 C_CALL2 array_unsafe_get
2973 PUSHACC2
2974 PUSHACC4
2975 C_CALL3 array_unsafe_set
2977 ACC1
2978 OFFSETINT 1
2980 ASSIGN 1
2982 ACC0
2983 PUSHACC2
2984 LEINT
2985 BRANCHIF 2966
2987 CONST0
2988 POP 2
2990 ACC0
2991 RETURN 4
2993 ACC0
2994 BRANCHIFNOT 3017
2996 ACC0
2997 GETFIELD0
2998 PUSHCONST0
2999 PUSHACC1
3000 VECTLENGTH
3001 GTINT
3002 BRANCHIFNOT 3012
3004 ENVACC2
3005 PUSHCONST0
3006 PUSHACC2
3007 C_CALL2 array_unsafe_get
3009 PUSHENVACC1
3010 APPTERM2 4
3012 ACC1
3013 GETFIELD1
3014 PUSHOFFSETCLOSURE0
3015 APPTERM1 3
3017 ATOM0
3018 RETURN 1
3020 ACC0
3021 PUSHENVACC1
3022 CLOSUREREC 2, 2993
3026 ACC1
3027 PUSHACC1
3028 APPTERM1 3
3030 CONST0
3031 PUSHACC1
3032 VECTLENGTH
3033 OFFSETINT -1
3035 PUSH
3036 BRANCH 3056
3038 CHECK_SIGNALS
3039 ACC1
3040 PUSHACC3
3041 C_CALL2 array_unsafe_get
3043 PUSHENVACC2
3044 GETFIELD0
3045 PUSHENVACC1
3046 C_CALL3 array_unsafe_set
3048 ENVACC2
3049 OFFSETREF 1
3051 ACC1
3052 OFFSETINT 1
3054 ASSIGN 1
3056 ACC0
3057 PUSHACC2
3058 LEINT
3059 BRANCHIF 3038
3061 CONST0
3062 RETURN 3
3064 RESTART
3065 GRAB 1
3067 ACC1
3068 VECTLENGTH
3069 PUSHACC1
3070 ADDINT
3071 RETURN 2
3073 RESTART
3074 GRAB 1
3076 ACC1
3077 PUSHCONST0
3078 PUSH
3079 CLOSURE 0, 3065
3082 PUSHGETGLOBALFIELD List, 12
3085 APPLY3
3086 PUSHACC1
3087 PUSHACC1
3088 C_CALL2 make_vect
3090 PUSHCONST0
3091 MAKEBLOCK1 0
3093 PUSHACC4
3094 PUSHACC1
3095 PUSHACC3
3096 CLOSURE 2, 3030
3099 PUSHGETGLOBALFIELD List, 9
3102 APPLY2
3103 ACC1
3104 RETURN 5
3106 RESTART
3107 GRAB 1
3109 ACC0
3110 VECTLENGTH
3111 PUSHACC2
3112 VECTLENGTH
3113 PUSHCONST0
3114 PUSHACC2
3115 EQ
3116 BRANCHIFNOT 3126
3118 CONST0
3119 PUSHACC1
3120 EQ
3121 BRANCHIFNOT 3126
3123 ATOM0
3124 RETURN 4
3126 CONST0
3127 PUSHCONST0
3128 PUSHACC3
3129 GTINT
3130 BRANCHIFNOT 3135
3132 ACC3
3133 BRANCH 3136
3135 ACC4
3136 C_CALL2 array_unsafe_get
3138 PUSHACC1
3139 PUSHACC3
3140 ADDINT
3141 C_CALL2 make_vect
3143 PUSHCONST0
3144 PUSHACC3
3145 OFFSETINT -1
3147 PUSH
3148 BRANCH 3164
3150 CHECK_SIGNALS
3151 ACC1
3152 PUSHACC6
3153 C_CALL2 array_unsafe_get
3155 PUSHACC2
3156 PUSHACC4
3157 C_CALL3 array_unsafe_set
3159 ACC1
3160 OFFSETINT 1
3162 ASSIGN 1
3164 ACC0
3165 PUSHACC2
3166 LEINT
3167 BRANCHIF 3150
3169 CONST0
3170 POP 2
3172 CONST0
3173 PUSHACC2
3174 OFFSETINT -1
3176 PUSH
3177 BRANCH 3195
3179 CHECK_SIGNALS
3180 ACC1
3181 PUSHACC7
3182 C_CALL2 array_unsafe_get
3184 PUSHACC5
3185 PUSHACC3
3186 ADDINT
3187 PUSHACC4
3188 C_CALL3 array_unsafe_set
3190 ACC1
3191 OFFSETINT 1
3193 ASSIGN 1
3195 ACC0
3196 PUSHACC2
3197 LEINT
3198 BRANCHIF 3179
3200 CONST0
3201 POP 2
3203 ACC0
3204 RETURN 5
3206 ACC0
3207 VECTLENGTH
3208 PUSHCONST0
3209 PUSHACC1
3210 EQ
3211 BRANCHIFNOT 3216
3213 ATOM0
3214 RETURN 2
3216 CONST0
3217 PUSHACC2
3218 C_CALL2 array_unsafe_get
3220 PUSHACC1
3221 C_CALL2 make_vect
3223 PUSHCONST1
3224 PUSHACC2
3225 OFFSETINT -1
3227 PUSH
3228 BRANCH 3244
3230 CHECK_SIGNALS
3231 ACC1
3232 PUSHACC5
3233 C_CALL2 array_unsafe_get
3235 PUSHACC2
3236 PUSHACC4
3237 C_CALL3 array_unsafe_set
3239 ACC1
3240 OFFSETINT 1
3242 ASSIGN 1
3244 ACC0
3245 PUSHACC2
3246 LEINT
3247 BRANCHIF 3230
3249 CONST0
3250 POP 2
3252 ACC0
3253 RETURN 3
3255 RESTART
3256 GRAB 2
3258 ATOM0
3259 PUSHACC1
3260 C_CALL2 make_vect
3262 PUSHCONST0
3263 PUSHACC2
3264 OFFSETINT -1
3266 PUSH
3267 BRANCH 3282
3269 CHECK_SIGNALS
3270 ACC5
3271 PUSHACC5
3272 C_CALL2 make_vect
3274 PUSHACC2
3275 PUSHACC4
3276 SETVECTITEM
3277 ACC1
3278 OFFSETINT 1
3280 ASSIGN 1
3282 ACC0
3283 PUSHACC2
3284 LEINT
3285 BRANCHIF 3269
3287 CONST0
3288 POP 2
3290 ACC0
3291 RETURN 4
3293 RESTART
3294 GRAB 1
3296 CONST0
3297 PUSHACC1
3298 EQ
3299 BRANCHIFNOT 3304
3301 ATOM0
3302 RETURN 2
3304 CONST0
3305 PUSHACC2
3306 APPLY1
3307 PUSHACC1
3308 C_CALL2 make_vect
3310 PUSHCONST1
3311 PUSHACC2
3312 OFFSETINT -1
3314 PUSH
3315 BRANCH 3330
3317 CHECK_SIGNALS
3318 ACC1
3319 PUSHACC5
3320 APPLY1
3321 PUSHACC2
3322 PUSHACC4
3323 C_CALL3 array_unsafe_set
3325 ACC1
3326 OFFSETINT 1
3328 ASSIGN 1
3330 ACC0
3331 PUSHACC2
3332 LEINT
3333 BRANCHIF 3317
3335 CONST0
3336 POP 2
3338 ACC0
3339 RETURN 3
3341 CLOSURE 0, 3294
3344 PUSH
3345 CLOSURE 0, 3256
3348 PUSH
3349 CLOSURE 0, 3206
3352 PUSH
3353 CLOSURE 0, 3107
3356 PUSH
3357 CLOSURE 0, 3074
3360 PUSHACC0
3361 CLOSURE 1, 3020
3364 PUSH
3365 CLOSURE 0, 2917
3368 PUSH
3369 CLOSURE 0, 2861
3372 PUSH
3373 CLOSURE 0, 2749
3376 PUSH
3377 CLOSURE 0, 2718
3380 PUSH
3381 CLOSURE 0, 2662
3384 PUSH
3385 CLOSURE 0, 2630
3388 PUSH
3389 CLOSURE 0, 2572
3392 PUSH
3393 CLOSURE 0, 2558
3396 PUSH
3397 CLOSURE 0, 2505
3400 PUSH
3401 CLOSURE 0, 2445
3404 PUSH
3405 CLOSURE 0, 2407
3408 PUSHACC0
3409 PUSHACC2
3410 PUSHACC6
3411 PUSHACC 8
3413 PUSHACC 10
3415 PUSHACC 12
3417 PUSHACC 8
3419 PUSHACC 10
3421 PUSHACC 16
3423 PUSHACC 18
3425 PUSHACC 24
3427 PUSHACC 21
3429 PUSHACC 23
3431 PUSHACC 26
3433 PUSHACC 29
3435 PUSHACC 30
3437 PUSHACC 32
3439 MAKEBLOCK 17, 0
3442 POP 17
3444 SETGLOBAL Array
3446 BRANCH 3456
3448 ACC0
3449 PUSHENVACC1
3450 GETFIELD0
3451 ADDINT
3452 PUSHENVACC1
3453 SETFIELD0
3454 RETURN 1
3456 CONST2
3457 PUSHCONSTINT 200000
3459 C_CALL2 make_vect
3461 PUSHCONST0
3462 MAKEBLOCK1 0
3464 PUSHACC1
3465 PUSHACC1
3466 CLOSURE 1, 3448
3469 PUSHGETGLOBALFIELD Array, 11
3472 APPLY2
3473 CONSTINT 400000
3475 PUSHACC1
3476 GETFIELD0
3477 NEQ
3478 BRANCHIFNOT 3485
3480 GETGLOBAL Not_found
3482 MAKEBLOCK1 0
3484 RAISE
3485 POP 2
3487 ATOM0
3488 SETGLOBAL T310-alloc-2
3490 STOP
**)
| null | https://raw.githubusercontent.com/modular-macros/ocaml-macros/05372c7248b5a7b1aa507b3c581f710380f17fcd/testsuite/tests/tool-ocaml/t310-alloc-2.ml | ocaml | open Lib;;
let v = Array.make 200000 2 in
let t = ref 0 in
Array.iter (fun x -> t := !t + x) v;
if !t <> 400000 then raise Not_found
;;
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7
9 BRANCH 746
11 RESTART
12 GRAB 1
14 ACC0
15 BRANCHIFNOT 28
17 ACC1
18 PUSHACC1
19 GETFIELD1
20 PUSHOFFSETCLOSURE0
21 APPLY2
22 PUSHACC1
23 GETFIELD0
24 MAKEBLOCK2 0
26 RETURN 2
28 ACC1
29 RETURN 2
31 RESTART
32 GRAB 3
34 CONST0
35 PUSHACC4
36 LEINT
37 BRANCHIFNOT 42
39
40 RETURN 4
42 ACC3
43 PUSHACC3
44 PUSHACC3
45 PUSHACC3
46 C_CALL4 caml_input
48 PUSHCONST0
49 PUSHACC1
50 EQ
51 BRANCHIFNOT 58
53 End_of_file
55 MAKEBLOCK1 0
57 RAISE
58 ACC0
59 PUSHACC5
60 SUBINT
61 PUSHACC1
62 PUSHACC5
63 ADDINT
64 PUSHACC4
65 PUSHACC4
66 PUSHOFFSETCLOSURE0
67 APPTERM 4 , 9
70 ACC0
71 C_CALL1 caml_input_scan_line
73 PUSHCONST0
74 PUSHACC1
75 EQ
76 BRANCHIFNOT 83
78 End_of_file
80 MAKEBLOCK1 0
82 RAISE
83
84 PUSHACC1
85
86 BRANCHIFNOT 107
88 ACC0
89 OFFSETINT -1
91 C_CALL1 create_string
93 PUSHACC1
94 OFFSETINT -1
96 PUSHCONST0
97 PUSHACC2
98 PUSHACC5
99 C_CALL4 caml_input
101 ACC2
102 C_CALL1 caml_input_char
104 ACC0
105 RETURN 3
107 ACC0
108 NEGINT
109 C_CALL1 create_string
111 PUSHACC1
112 NEGINT
113 PUSHCONST0
114 PUSHACC2
115 PUSHACC5
116 C_CALL4 caml_input
118
119 PUSHTRAP 130
121 ACC6
122 PUSHOFFSETCLOSURE0
123 APPLY1
124 PUSHACC5
125 PUSHENVACC1
126 APPLY2
127 POPTRAP
128 RETURN 3
130 PUSHGETGLOBAL End_of_file
132 PUSHACC1
133 GETFIELD0
134 EQ
135 BRANCHIFNOT 140
137 ACC1
138 RETURN 4
140 ACC0
141 RAISE
142 ACC0
143 C_CALL1 caml_flush
145 RETURN 1
147 RESTART
148 GRAB 1
150 ACC1
151 PUSHACC1
152 C_CALL2 caml_output_char
154 RETURN 2
156 RESTART
157 GRAB 1
159 ACC1
160 PUSHACC1
161 C_CALL2 caml_output_char
163 RETURN 2
165 RESTART
166 GRAB 1
168 ACC1
169 PUSHACC1
170 C_CALL2 caml_output_int
172 RETURN 2
174 RESTART
175 GRAB 1
177 ACC1
178 PUSHACC1
179 C_CALL2 caml_seek_out
181 RETURN 2
183 ACC0
184 C_CALL1 caml_pos_out
186 RETURN 1
188 ACC0
189 C_CALL1 caml_channel_size
191 RETURN 1
193 RESTART
194 GRAB 1
196 ACC1
197 PUSHACC1
198 C_CALL2 caml_set_binary_mode
200 RETURN 2
202 ACC0
203 C_CALL1 caml_input_char
205 RETURN 1
207 ACC0
208 C_CALL1 caml_input_char
210 RETURN 1
212 ACC0
213 C_CALL1 caml_input_int
215 RETURN 1
217 ACC0
218 C_CALL1 input_value
220 RETURN 1
222 RESTART
223 GRAB 1
225 ACC1
226 PUSHACC1
227 C_CALL2 caml_seek_in
229 RETURN 2
231 ACC0
232 C_CALL1 caml_pos_in
234 RETURN 1
236 ACC0
237 C_CALL1 caml_channel_size
239 RETURN 1
241 ACC0
242 C_CALL1 caml_close_channel
244 RETURN 1
246 RESTART
247 GRAB 1
249 ACC1
250 PUSHACC1
251 C_CALL2 caml_set_binary_mode
253 RETURN 2
255 CONST0
256 PUSHENVACC1
257 APPLY1
258 ACC0
259 C_CALL1 sys_exit
261 RETURN 1
263 CONST0
264 PUSHENVACC1
265 GETFIELD0
266 APPTERM1 2
268 CONST0
269 PUSHENVACC1
270 APPLY1
271 CONST0
272 PUSHENVACC2
273 APPTERM1 2
275 ENVACC1
276 GETFIELD0
277 PUSHACC0
278 PUSHACC2
279 CLOSURE 2 , 268
282 PUSHENVACC1
283 SETFIELD0
284 RETURN 2
286 ENVACC1
287 C_CALL1 caml_flush
289 ENVACC2
290 C_CALL1 caml_flush
292 RETURN 1
294 CONST0
295 PUSHENVACC1
296 APPLY1
297 C_CALL1 float_of_string
299 RETURN 1
301 CONST0
302 PUSHENVACC1
303 APPLY1
304 C_CALL1 int_of_string
306 RETURN 1
308 ENVACC2
309 C_CALL1 caml_flush
311 ENVACC1
312 PUSHENVACC3
313 APPTERM1 2
315 CONSTINT 13
317 PUSHENVACC1
318 C_CALL2 caml_output_char
320 ENVACC1
321 C_CALL1 caml_flush
323 RETURN 1
325 ACC0
326 PUSHENVACC1
327 PUSHENVACC2
328 APPLY2
329 CONSTINT 13
331 PUSHENVACC1
332 C_CALL2 caml_output_char
334 ENVACC1
335 C_CALL1 caml_flush
337 RETURN 1
339 ACC0
340 PUSHENVACC1
341 APPLY1
342 PUSHENVACC2
343 PUSHENVACC3
344 APPTERM2 3
346 ACC0
347 PUSHENVACC1
348 APPLY1
349 PUSHENVACC2
350 PUSHENVACC3
351 APPTERM2 3
353 ACC0
354 PUSHENVACC1
355 PUSHENVACC2
356 APPTERM2 3
358 ACC0
359 PUSHENVACC1
360 C_CALL2 caml_output_char
362 RETURN 1
364 CONSTINT 13
366 PUSHENVACC1
367 C_CALL2 caml_output_char
369 ENVACC1
370 C_CALL1 caml_flush
372 RETURN 1
374 ACC0
375 PUSHENVACC1
376 PUSHENVACC2
377 APPLY2
378 CONSTINT 13
380 PUSHENVACC1
381 C_CALL2 caml_output_char
383 RETURN 1
385 ACC0
386 PUSHENVACC1
387 APPLY1
388 PUSHENVACC2
389 PUSHENVACC3
390 APPTERM2 3
392 ACC0
393 PUSHENVACC1
394 APPLY1
395 PUSHENVACC2
396 PUSHENVACC3
397 APPTERM2 3
399 ACC0
400 PUSHENVACC1
401 PUSHENVACC2
402 APPTERM2 3
404 ACC0
405 PUSHENVACC1
406 C_CALL2 caml_output_char
408 RETURN 1
410 RESTART
411 GRAB 3
413 CONST0
414 PUSHACC3
415 LTINT
416 BRANCHIF 427
418 ACC1
419 C_CALL1 ml_string_length
421 PUSHACC4
422 PUSHACC4
423 ADDINT
424 GTINT
425 BRANCHIFNOT 432
427 GETGLOBAL " really_input "
429 PUSHENVACC1
430 APPTERM1 5
432 ACC3
433 PUSHACC3
434 PUSHACC3
435 PUSHACC3
436 PUSHENVACC2
437 APPTERM 4 , 8
440 RESTART
441 GRAB 3
443 CONST0
444 PUSHACC3
445 LTINT
446 BRANCHIF 457
448 ACC1
449 C_CALL1 ml_string_length
451 PUSHACC4
452 PUSHACC4
453 ADDINT
454
455 " input "
459 PUSHENVACC1
460 APPTERM1 5
462 ACC3
463 PUSHACC3
464 PUSHACC3
465 PUSHACC3
466 C_CALL4 caml_input
468 RETURN 4
470 ACC0
471 PUSHCONST0
472 PUSHGETGLOBAL < 0>(0 , < 0>(6 , 0 ) )
474 PUSHENVACC1
475 APPTERM3 4
477 ACC0
478 PUSHCONST0
479 PUSHGETGLOBAL < 0>(0 , < 0>(7 , 0 ) )
481 PUSHENVACC1
482 APPTERM3 4
484 RESTART
485 GRAB 2
487 ACC1
488 PUSHACC1
489 PUSHACC4
490 C_CALL3 sys_open
492 C_CALL1 caml_open_descriptor
494 RETURN 3
496 ACC0
497 C_CALL1 caml_flush
499 ACC0
500 C_CALL1 caml_close_channel
502 RETURN 1
504 RESTART
505 GRAB 1
507 CONST0
508 PUSHACC2
509 PUSHACC2
510 C_CALL3 output_value
512 RETURN 2
514 RESTART
515 GRAB 3
517 CONST0
518 PUSHACC3
519 LTINT
520 BRANCHIF 531
522 ACC1
523 C_CALL1 ml_string_length
525 PUSHACC4
526 PUSHACC4
527 ADDINT
528
529 BRANCHIFNOT 536
531 GETGLOBAL " output "
533 PUSHENVACC1
534 APPTERM1 5
536 ACC3
537 PUSHACC3
538 PUSHACC3
539 PUSHACC3
540 C_CALL4 caml_output
542 RETURN 4
544 RESTART
545 GRAB 1
547 ACC1
548 C_CALL1 ml_string_length
550 PUSHCONST0
551 PUSHACC3
552 PUSHACC3
553 C_CALL4 caml_output
555 RETURN 2
557 ACC0
558 PUSHCONSTINT 438
560 PUSHGETGLOBAL < 0>(1 , < 0>(3 , < 0>(4 , < 0>(6 , 0 ) ) ) )
562 PUSHENVACC1
563 APPTERM3 4
565 ACC0
566 PUSHCONSTINT 438
568 PUSHGETGLOBAL < 0>(1 , < 0>(3 , < 0>(4 , < 0>(7 , 0 ) ) ) )
570 PUSHENVACC1
571 APPTERM3 4
573 RESTART
574 GRAB 2
576 ACC1
577 PUSHACC1
578 PUSHACC4
579 C_CALL3 sys_open
581 C_CALL1 caml_open_descriptor
583 RETURN 3
585 ACC0
586 PUSHGETGLOBAL " % .12 g "
588 C_CALL2 format_float
590 RETURN 1
592 ACC0
593 PUSHGETGLOBAL " % d "
595 C_CALL2 format_int
597 RETURN 1
599 " false "
601 PUSHACC1
602 C_CALL2 string_equal
604 BRANCHIFNOT 609
606 CONST0
607 RETURN 1
609 " true "
611 PUSHACC1
612 C_CALL2 string_equal
614 BRANCHIFNOT 619
616 CONST1
617 RETURN 1
619 " bool_of_string "
621 PUSHENVACC1
622 APPTERM1 2
624 ACC0
625 BRANCHIFNOT 631
627 " true "
629 RETURN 1
631 " false "
633 RETURN 1
635
636 PUSHACC1
637 LTINT
638 BRANCHIF 646
640
642 PUSHACC1
643 GTINT
644 BRANCHIFNOT 651
646 " char_of_int "
648 PUSHENVACC1
649 APPTERM1 2
651 ACC0
652 RETURN 1
654 RESTART
655 GRAB 1
657 ACC0
658 C_CALL1 ml_string_length
660 PUSHACC2
661 C_CALL1 ml_string_length
663 PUSHACC0
664 PUSHACC2
665 ADDINT
666 C_CALL1 create_string
668 PUSHACC2
669 PUSHCONST0
670 PUSHACC2
671 PUSHCONST0
672 PUSHACC7
673 C_CALL5 blit_string
675 ACC1
676 PUSHACC3
677 PUSHACC2
678 PUSHCONST0
679 PUSHACC 8
681 C_CALL5 blit_string
683 ACC0
684 RETURN 5
686 -1
688 PUSHACC1
689 XORINT
690 RETURN 1
692
693 PUSHACC1
694 GEINT
695 BRANCHIFNOT 700
697 ACC0
698 RETURN 1
700 ACC0
701 NEGINT
702 RETURN 1
704 RESTART
705 GRAB 1
707 ACC1
708 PUSHACC1
709 C_CALL2 greaterequal
711 BRANCHIFNOT 716
713 ACC0
714 RETURN 2
716 ACC1
717 RETURN 2
719 RESTART
720 GRAB 1
722 ACC1
723 PUSHACC1
724 C_CALL2 lessequal
726 BRANCHIFNOT 731
728 ACC0
729 RETURN 2
731 ACC1
732 RETURN 2
734 ACC0
735
737 MAKEBLOCK2 0
739 RAISE
740 ACC0
741 PUSHGETGLOBAL Failure
743 MAKEBLOCK2 0
745 RAISE
746 CLOSURE 0 , 740
749 PUSH
750 CLOSURE 0 , 734
753 PUSHGETGLOBAL " Pervasives . Exit "
755 MAKEBLOCK1 0
757 PUSHGETGLOBAL " Pervasives . Assert_failure "
759 MAKEBLOCK1 0
761 PUSH
762 CLOSURE 0 , 720
765 PUSH
766 CLOSURE 0 , 705
769 PUSH
770 CLOSURE 0 , 692
773 PUSH
774 CLOSURE 0 , 686
777 PUSHCONST0
778 PUSHCONSTINT 31
780 PUSHCONST1
781 LSLINT
782 EQ
783 BRANCHIFNOT 789
785 CONSTINT 30
787 BRANCH 791
789 CONSTINT 62
791 PUSHCONST1
792 LSLINT
793 PUSHACC0
794 OFFSETINT -1
796 PUSH
797 CLOSURE 0 , 655
800 PUSHACC 9
802 CLOSURE 1 , 635
805 PUSH
806 CLOSURE 0 , 624
809 PUSHACC 11
811 CLOSURE 1 , 599
814 PUSH
815 CLOSURE 0 , 592
818 PUSH
819 CLOSURE 0 , 585
822 PUSH
823 CLOSUREREC 0 , 12
827
828 C_CALL1 caml_open_descriptor
830 PUSHCONST1
831 C_CALL1 caml_open_descriptor
833 PUSHCONST2
834 C_CALL1 caml_open_descriptor
836 PUSH
837 CLOSURE 0 , 574
840 PUSHACC0
841 CLOSURE 1 , 565
844 PUSHACC1
845 CLOSURE 1 , 557
848 PUSH
849 CLOSURE 0 , 545
852 PUSHACC 22
854 CLOSURE 1 , 515
857 PUSH
858 CLOSURE 0 , 505
861 PUSH
862 CLOSURE 0 , 496
865 PUSH
866 CLOSURE 0 , 485
869 PUSHACC0
870 CLOSURE 1 , 477
873 PUSHACC1
874 CLOSURE 1 , 470
877 PUSHACC 28
879 CLOSURE 1 , 441
882 PUSH
883 CLOSUREREC 0 , 32
887 ACC0
888 PUSHACC 31
890 CLOSURE 2 , 411
893 PUSHACC 22
895 CLOSUREREC 1 , 70
899 ACC 15
901 CLOSURE 1 , 404
904 PUSHACC 11
906 PUSHACC 17
908 CLOSURE 2 , 399
911 PUSHACC 12
913 PUSHACC 18
915 PUSHACC 23
917 CLOSURE 3 , 392
920 PUSHACC 13
922 PUSHACC 19
924 PUSHACC 23
926 CLOSURE 3 , 385
929 PUSHACC 14
931 PUSHACC 20
933 CLOSURE 2 , 374
936 PUSHACC 20
938 CLOSURE 1 , 364
941 PUSHACC 20
943 CLOSURE 1 , 358
946 PUSHACC 17
948 PUSHACC 22
950 CLOSURE 2 , 353
953 PUSHACC 18
955 PUSHACC 23
957 PUSHACC 29
959 CLOSURE 3 , 346
962 PUSHACC 19
964 PUSHACC 24
966 PUSHACC 29
968 CLOSURE 3 , 339
971 PUSHACC 20
973 PUSHACC 25
975 CLOSURE 2 , 325
978 PUSHACC 25
980 CLOSURE 1 , 315
983 PUSHACC 12
985 PUSHACC 28
987 PUSHACC 30
989 CLOSURE 3 , 308
992 PUSHACC0
993 CLOSURE 1 , 301
996 PUSHACC1
997 CLOSURE 1 , 294
1000 PUSHACC 29
1002 PUSHACC 31
1004 CLOSURE 2 , 286
1007 MAKEBLOCK1 0
1009 PUSHACC0
1010 CLOSURE 1 , 275
1013 PUSHACC1
1014 CLOSURE 1 , 263
1017 PUSHACC0
1018 CLOSURE 1 , 255
1021 PUSHACC1
1022 PUSHACC 22
1024 PUSHACC4
1025 PUSHACC3
1026 PUSH
1027 CLOSURE 0 , 247
1030 PUSH
1031 CLOSURE 0 , 241
1034 PUSH
1035 CLOSURE 0 , 236
1038 PUSH
1039 CLOSURE 0 , 231
1042 PUSH
1043 CLOSURE 0 , 223
1046 PUSH
1047 CLOSURE 0 , 217
1050 PUSH
1051 CLOSURE 0 , 212
1054 PUSH
1055 CLOSURE 0 , 207
1058 PUSHACC 32
1060 PUSHACC 35
1062 PUSHACC 33
1064 PUSH
1065 CLOSURE 0 , 202
1068 PUSHACC 41
1070 PUSHACC 40
1072 PUSHACC 42
1074 PUSH
1075 CLOSURE 0 , 194
1078 PUSHACC 46
1080 PUSH
1081 CLOSURE 0 , 188
1084 PUSH
1085 CLOSURE 0 , 183
1088 PUSH
1089 CLOSURE 0 , 175
1092 PUSHACC 51
1094 PUSH
1095 CLOSURE 0 , 166
1098 PUSH
1099 CLOSURE 0 , 157
1102 PUSHACC 55
1104 PUSHACC 57
1106 PUSH
1107 CLOSURE 0 , 148
1110 PUSH
1111 CLOSURE 0 , 142
1114 PUSHACC 63
1116 PUSHACC 62
1118 PUSHACC 64
1120 PUSHACC 38
1122 PUSHACC 40
1124 PUSHACC 42
1126 PUSHACC 44
1128 PUSHACC 46
1130 PUSHACC 48
1132 PUSHACC 50
1134 PUSHACC 52
1136 PUSHACC 54
1138 PUSHACC 56
1140 PUSHACC 58
1142 PUSHACC 60
1144 PUSHACC 62
1146 PUSHACC 64
1148 PUSHACC 66
1150 PUSHACC 82
1152 PUSHACC 84
1154 PUSHACC 86
1156 PUSHACC 88
1158 PUSHACC 90
1160 PUSHACC 92
1162 PUSHACC 94
1164 PUSHACC 96
1166 PUSHACC 98
1168 PUSHACC 100
1170 PUSHACC 104
1172 PUSHACC 104
1174 PUSHACC 104
1176 PUSHACC 108
1178 PUSHACC 110
1180 PUSHACC 112
1182 PUSHACC 117
1184 PUSHACC 117
1186 PUSHACC 117
1188 PUSHACC 117
1190 MAKEBLOCK 69 , 0
1193 POP 53
1195 SETGLOBAL Pervasives
1197 BRANCH 2177
1199 RESTART
1200 GRAB 1
1202 ACC1
1203 BRANCHIFNOT 1213
1205 ACC1
1206 GETFIELD1
1207 PUSHACC1
1208 OFFSETINT 1
1210 PUSHOFFSETCLOSURE0
1211 APPTERM2 4
1213 ACC0
1214 RETURN 2
1216 RESTART
1217 GRAB 1
1219 ACC0
1220 BRANCHIFNOT 1251
1222 CONST0
1223 PUSHACC2
1224 EQ
1225 BRANCHIFNOT 1231
1227 ACC0
1228 GETFIELD0
1229 RETURN 2
1231 CONST0
1232 PUSHACC2
1233 GTINT
1234 BRANCHIFNOT 1244
1236 ACC1
1237 OFFSETINT -1
1239 PUSHACC1
1240 GETFIELD1
1241 PUSHOFFSETCLOSURE0
1242 APPTERM2 4
1244 GETGLOBAL " List.nth "
1246 PUSHGETGLOBALFIELD Pervasives , 2
1249 APPTERM1 3
1251 GETGLOBAL " nth "
1253 PUSHGETGLOBALFIELD Pervasives , 3
1256 APPTERM1 3
1258 RESTART
1259 GRAB 1
1261 ACC0
1262 BRANCHIFNOT 1274
1264 ACC1
1265 PUSHACC1
1266 GETFIELD0
1267 MAKEBLOCK2 0
1269 PUSHACC1
1270 GETFIELD1
1271 PUSHOFFSETCLOSURE0
1272 APPTERM2 4
1274 ACC1
1275 RETURN 2
1277 ACC0
1278 BRANCHIFNOT 1291
1280 ACC0
1281 GETFIELD1
1282 PUSHOFFSETCLOSURE0
1283 APPLY1
1284 PUSHACC1
1285 GETFIELD0
1286 PUSHGETGLOBALFIELD Pervasives , 16
1289 APPTERM2 3
1291 RETURN 1
1293 RESTART
1294 GRAB 1
1296 ACC1
1297 BRANCHIFNOT 1313
1299 ACC1
1300 GETFIELD0
1301 PUSHACC1
1302 APPLY1
1303 PUSHACC2
1304 GETFIELD1
1305 PUSHACC2
1306 PUSHOFFSETCLOSURE0
1307 APPLY2
1308 PUSHACC1
1309 MAKEBLOCK2 0
1311 POP 1
1313 RETURN 2
1315 RESTART
1316 GRAB 1
1318 ACC1
1319 BRANCHIFNOT 1331
1321 ACC1
1322 GETFIELD0
1323 PUSHACC1
1324 APPLY1
1325 ACC1
1326 GETFIELD1
1327 PUSHACC1
1328 PUSHOFFSETCLOSURE0
1329 APPTERM2 4
1331 RETURN 2
1333 RESTART
1334 GRAB 2
1336 ACC2
1337 BRANCHIFNOT 1350
1339 ACC2
1340 GETFIELD1
1341 PUSHACC3
1342 GETFIELD0
1343 PUSHACC3
1344 PUSHACC3
1345 APPLY2
1346 PUSHACC2
1347 PUSHOFFSETCLOSURE0
1348 APPTERM3 6
1350 ACC1
1351 RETURN 3
1353 RESTART
1354 GRAB 2
1356 ACC1
1357 BRANCHIFNOT 1370
1359 ACC2
1360 PUSHACC2
1361 GETFIELD1
1362 PUSHACC2
1363 PUSHOFFSETCLOSURE0
1364 APPLY3
1365 PUSHACC2
1366 GETFIELD0
1367 PUSHACC2
1368 APPTERM2 5
1370 ACC2
1371 RETURN 3
1373 RESTART
1374 GRAB 2
1376 ACC1
1377 BRANCHIFNOT 1400
1379 ACC2
1380 BRANCHIFNOT 1407
1382 ACC2
1383 GETFIELD0
1384 PUSHACC2
1385 GETFIELD0
1386 PUSHACC2
1387 APPLY2
1388 PUSHACC3
1389 GETFIELD1
1390 PUSHACC3
1391 GETFIELD1
1392 PUSHACC3
1393 PUSHOFFSETCLOSURE0
1394 APPLY3
1395 PUSHACC1
1396 MAKEBLOCK2 0
1398 RETURN 4
1400 ACC2
1401 BRANCHIFNOT 1405
1403 BRANCH 1407
1405 RETURN 3
1407 GETGLOBAL " List.map2 "
1409 PUSHGETGLOBALFIELD Pervasives , 2
1412 APPTERM1 4
1414 RESTART
1415 GRAB 2
1417 ACC1
1418 BRANCHIFNOT 1437
1420 ACC2
1421 BRANCHIFNOT 1444
1423 ACC2
1424 GETFIELD0
1425 PUSHACC2
1426 GETFIELD0
1427 PUSHACC2
1428 APPLY2
1429 ACC2
1430 GETFIELD1
1431 PUSHACC2
1432 GETFIELD1
1433 PUSHACC2
1434 PUSHOFFSETCLOSURE0
1435 APPTERM3 6
1437 ACC2
1438 BRANCHIFNOT 1442
1440 BRANCH 1444
1442 RETURN 3
1444 GETGLOBAL " List.iter2 "
1446 PUSHGETGLOBALFIELD Pervasives , 2
1449 APPTERM1 4
1451 RESTART
1452 GRAB 3
1454 ACC2
1455 BRANCHIFNOT 1476
1457 ACC3
1458 1461 GETFIELD1
1462 PUSHACC3
1463 GETFIELD1
1464 PUSHACC5
1465 GETFIELD0
1466 PUSHACC5
1467 GETFIELD0
1468 PUSHACC5
1469 PUSHACC5
1470 APPLY3
1471 PUSHACC3
1472 PUSHOFFSETCLOSURE0
1473 APPTERM 4 , 8
1476 ACC3
1477 BRANCHIF 1482
1479 ACC1
1480 RETURN 4
1482 GETGLOBAL " List.fold_left2 "
1484 PUSHGETGLOBALFIELD Pervasives , 2
1487 APPTERM1 5
1489 RESTART
1490 GRAB 3
1492 ACC1
1493 BRANCHIFNOT 1516
1495 ACC2
1496 BRANCHIFNOT 1522
1498 PUSH_RETADDR 1509
1500 ACC6
1501 PUSHACC6
1502 GETFIELD1
1503 PUSHACC6
1504 GETFIELD1
1505 PUSHACC6
1506 PUSHOFFSETCLOSURE0
1507 APPLY 4
1509 PUSHACC3
1510 GETFIELD0
1511 PUSHACC3
1512 GETFIELD0
1513 PUSHACC3
1514 APPTERM3 7
1516 ACC2
1517 BRANCHIF 1522
1519 ACC3
1520 RETURN 4
1522 GETGLOBAL " List.fold_right2 "
1524 PUSHGETGLOBALFIELD Pervasives , 2
1527 APPTERM1 5
1529 RESTART
1530 GRAB 1
1532 ACC1
1533 BRANCHIFNOT 1549
1535 ACC1
1536 GETFIELD0
1537 PUSHACC1
1538 APPLY1
1539 BRANCHIFNOT 1547
1541 ACC1
1542 GETFIELD1
1543 PUSHACC1
1544 PUSHOFFSETCLOSURE0
1545 APPTERM2 4
1547 RETURN 2
1549 CONST1
1550 RETURN 2
1552 RESTART
1553 GRAB 1
1555 ACC1
1556 BRANCHIFNOT 1570
1558 ACC1
1559 GETFIELD0
1560 PUSHACC1
1561 APPLY1
1562 BRANCHIF 1570
1564 ACC1
1565 GETFIELD1
1566 PUSHACC1
1567 PUSHOFFSETCLOSURE0
1568 APPTERM2 4
1570 RETURN 2
1572 RESTART
1573 GRAB 2
1575 ACC1
1576 BRANCHIFNOT 1599
1578 ACC2
1579 BRANCHIFNOT 1605
1581 ACC2
1582 GETFIELD0
1583 PUSHACC2
1584 GETFIELD0
1585 PUSHACC2
1586 APPLY2
1587 BRANCHIFNOT 1597
1589 ACC2
1590 GETFIELD1
1591 PUSHACC2
1592 GETFIELD1
1593 PUSHACC2
1594 PUSHOFFSETCLOSURE0
1595 APPTERM3 6
1597 RETURN 3
1599 ACC2
1600 BRANCHIF 1605
1602 CONST1
1603 RETURN 3
1605 " List.for_all2 "
1607 PUSHGETGLOBALFIELD Pervasives , 2
1610 APPTERM1 4
1612 RESTART
1613 GRAB 2
1615 ACC1
1616 BRANCHIFNOT 1639
1618 ACC2
1619 BRANCHIFNOT 1646
1621 ACC2
1622 GETFIELD0
1623 PUSHACC2
1624 GETFIELD0
1625 PUSHACC2
1626 APPLY2
1627 BRANCHIF 1637
1629 ACC2
1630 GETFIELD1
1631 PUSHACC2
1632 GETFIELD1
1633 PUSHACC2
1634 PUSHOFFSETCLOSURE0
1635 APPTERM3 6
1637 RETURN 3
1639 ACC2
1640 BRANCHIFNOT 1644
1642 BRANCH 1646
1644 RETURN 3
1646 " List.exists2 "
1648 PUSHGETGLOBALFIELD Pervasives , 2
1651 APPTERM1 4
1653 RESTART
1654 GRAB 1
1656 ACC1
1657 BRANCHIFNOT 1672
1659 ACC0
1660 PUSHACC2
1661 GETFIELD0
1662 C_CALL2 equal
1664 BRANCHIF 1672
1666 ACC1
1667 GETFIELD1
1668 PUSHACC1
1669 PUSHOFFSETCLOSURE0
1670 APPTERM2 4
1672 RETURN 2
1674 RESTART
1675 GRAB 1
1677 ACC1
1678 BRANCHIFNOT 1692
1680 ACC0
1681 PUSHACC2
1682 GETFIELD0
1683 EQ
1684 BRANCHIF 1692
1686 ACC1
1687 GETFIELD1
1688 PUSHACC1
1689 PUSHOFFSETCLOSURE0
1690 APPTERM2 4
1692 RETURN 2
1694 RESTART
1695 GRAB 1
1697 ACC1
1698 BRANCHIFNOT 1719
1700 ACC1
1701 GETFIELD0
1702 PUSHACC1
1703 PUSHACC1
1704 GETFIELD0
1705 C_CALL2 equal
1707 BRANCHIFNOT 1713
1709 ACC0
1710 GETFIELD1
1711 RETURN 3
1713 ACC2
1714 GETFIELD1
1715 PUSHACC2
1716 PUSHOFFSETCLOSURE0
1717 APPTERM2 5
1719 GETGLOBAL Not_found
1721 MAKEBLOCK1 0
1723 RAISE
1724 RESTART
1725 GRAB 1
1727 ACC1
1728 BRANCHIFNOT 1748
1730 ACC1
1731 GETFIELD0
1732 PUSHACC1
1733 PUSHACC1
1734 GETFIELD0
1735 EQ
1736 BRANCHIFNOT 1742
1738 ACC0
1739 GETFIELD1
1740 RETURN 3
1742 ACC2
1743 GETFIELD1
1744 PUSHACC2
1745 PUSHOFFSETCLOSURE0
1746 APPTERM2 5
1748 GETGLOBAL Not_found
1750 MAKEBLOCK1 0
1752 RAISE
1753 RESTART
1754 GRAB 1
1756 ACC1
1757 BRANCHIFNOT 1773
1759 ACC0
1760 PUSHACC2
1761 GETFIELD0
1762 GETFIELD0
1763 C_CALL2 equal
1765 BRANCHIF 1773
1767 ACC1
1768 GETFIELD1
1769 PUSHACC1
1770 PUSHOFFSETCLOSURE0
1771 APPTERM2 4
1773 RETURN 2
1775 RESTART
1776 GRAB 1
1778 ACC1
1779 BRANCHIFNOT 1794
1781 ACC0
1782 PUSHACC2
1783 GETFIELD0
1784 GETFIELD0
1785 EQ
1786 BRANCHIF 1794
1788 ACC1
1789 GETFIELD1
1790 PUSHACC1
1791 PUSHOFFSETCLOSURE0
1792 APPTERM2 4
1794 RETURN 2
1796 RESTART
1797 GRAB 1
1799 ACC1
1800 BRANCHIFNOT 1825
1802 ACC1
1803 GETFIELD0
1804 PUSHACC2
1805 GETFIELD1
1806 PUSHACC2
1807 PUSHACC2
1808 GETFIELD0
1809 C_CALL2 equal
1811 BRANCHIFNOT 1816
1813 ACC0
1814 RETURN 4
1816 ACC0
1817 PUSHACC3
1818 PUSHOFFSETCLOSURE0
1819 APPLY2
1820 PUSHACC2
1821 MAKEBLOCK2 0
1823 POP 2
1825 RETURN 2
1827 RESTART
1828 GRAB 1
1830 ACC1
1831 BRANCHIFNOT 1855
1833 ACC1
1834 GETFIELD0
1835 PUSHACC2
1836 GETFIELD1
1837 PUSHACC2
1838 PUSHACC2
1839 GETFIELD0
1840 EQ
1841 BRANCHIFNOT 1846
1843 ACC0
1844 RETURN 4
1846 ACC0
1847 PUSHACC3
1848 PUSHOFFSETCLOSURE0
1849 APPLY2
1850 PUSHACC2
1851 MAKEBLOCK2 0
1853 POP 2
1855 RETURN 2
1857 RESTART
1858 GRAB 1
1860 ACC1
1861 BRANCHIFNOT 1879
1863 ACC1
1864 GETFIELD0
1865 PUSHACC0
1866 PUSHACC2
1867 APPLY1
1868 BRANCHIFNOT 1873
1870 ACC0
1871 RETURN 3
1873 ACC2
1874 GETFIELD1
1875 PUSHACC2
1876 PUSHOFFSETCLOSURE0
1877 APPTERM2 5
1879 GETGLOBAL Not_found
1881 MAKEBLOCK1 0
1883 RAISE
1884 RESTART
1885 GRAB 2
1887 ACC2
1888 BRANCHIFNOT 1917
1890 ACC2
1891 GETFIELD0
1892 PUSHACC3
1893 GETFIELD1
1894 PUSHACC1
1895 PUSHENVACC2
1896 APPLY1
1897 BRANCHIFNOT 1908
1899 ACC0
1900 PUSHACC4
1901 PUSHACC4
1902 PUSHACC4
1903 MAKEBLOCK2 0
1905 PUSHOFFSETCLOSURE0
1906 APPTERM3 8
1908 ACC0
1909 PUSHACC4
1910 PUSHACC3
1911 MAKEBLOCK2 0
1913 PUSHACC4
1914 PUSHOFFSETCLOSURE0
1915 APPTERM3 8
1917 ACC1
1918 PUSHENVACC1
1919 APPLY1
1920 PUSHACC1
1921 PUSHENVACC1
1922 APPLY1
1923 MAKEBLOCK2 0
1925 RETURN 3
1927 RESTART
1928 GRAB 1
1930 ACC0
1931 PUSHENVACC1
1932 CLOSUREREC 2 , 1885
1936 ACC2
1937 PUSHCONST0
1938 PUSHCONST0
1939 PUSHACC3
1940 APPTERM3 6
1942 ACC0
1943 BRANCHIFNOT 1967
1945 ACC0
1946 GETFIELD0
1947 PUSHACC1
1948 GETFIELD1
1949 PUSHOFFSETCLOSURE0
1950 APPLY1
1951 PUSHACC0
1952 GETFIELD1
1953 PUSHACC2
1954 GETFIELD1
1955 MAKEBLOCK2 0
1957 PUSHACC1
1958 GETFIELD0
1959 PUSHACC3
1960 GETFIELD0
1961 MAKEBLOCK2 0
1963 MAKEBLOCK2 0
1965 RETURN 3
1967 < 0>(0 , 0 )
1969 RETURN 1
1971 RESTART
1972 GRAB 1
1974 ACC0
1975 BRANCHIFNOT 1996
1977 ACC1
1978 BRANCHIFNOT 2003
1980 ACC1
1981 GETFIELD1
1982 PUSHACC1
1983 GETFIELD1
1984 PUSHOFFSETCLOSURE0
1985 APPLY2
1986 PUSHACC2
1987 GETFIELD0
1988 PUSHACC2
1989 GETFIELD0
1990 MAKEBLOCK2 0
1992 MAKEBLOCK2 0
1994 RETURN 2
1996 ACC1
1997 BRANCHIFNOT 2001
1999 BRANCH 2003
2001 RETURN 2
2003 " List.combine "
2005 PUSHGETGLOBALFIELD Pervasives , 2
2008 APPTERM1 3
2010 RESTART
2011 GRAB 1
2013 ACC1
2014 BRANCHIFNOT 2038
2016 ACC1
2017 GETFIELD0
2018 PUSHACC2
2019 GETFIELD1
2020 PUSHACC1
2021 PUSHENVACC2
2022 APPLY1
2023 BRANCHIFNOT 2033
2025 ACC0
2026 PUSHACC3
2027 PUSHACC3
2028 MAKEBLOCK2 0
2030 PUSHOFFSETCLOSURE0
2031 APPTERM2 6
2033 ACC0
2034 PUSHACC3
2035 PUSHOFFSETCLOSURE0
2036 APPTERM2 6
2038 ACC0
2039 PUSHENVACC1
2040 APPTERM1 3
2042 ACC0
2043 PUSHENVACC1
2044 CLOSUREREC 2 , 2011
2048 CONST0
2049 PUSHACC1
2050 APPTERM1 3
2052 RESTART
2053 GRAB 2
2055 ACC1
2056 BRANCHIFNOT 2077
2058 ACC2
2059 BRANCHIFNOT 2084
2061 ACC2
2062 GETFIELD1
2063 PUSHACC2
2064 GETFIELD1
2065 PUSHACC2
2066 PUSHACC5
2067 GETFIELD0
2068 PUSHACC5
2069 GETFIELD0
2070 PUSHENVACC1
2071 APPLY2
2072 MAKEBLOCK2 0
2074 PUSHOFFSETCLOSURE0
2075 APPTERM3 6
2077 ACC2
2078 BRANCHIFNOT 2082
2080 BRANCH 2084
2082 RETURN 3
2084 GETGLOBAL " List.rev_map2 "
2086 PUSHGETGLOBALFIELD Pervasives , 2
2089 APPTERM1 4
2091 RESTART
2092 GRAB 2
2094 ACC0
2095 CLOSUREREC 1 , 2053
2099 ACC3
2100 PUSHACC3
2101 PUSHCONST0
2102 PUSHACC3
2103 APPTERM3 7
2105 RESTART
2106 GRAB 1
2108 ACC1
2109 BRANCHIFNOT 2123
2111 ACC1
2112 GETFIELD1
2113 PUSHACC1
2114 PUSHACC3
2115 GETFIELD0
2116 PUSHENVACC1
2117 APPLY1
2118 MAKEBLOCK2 0
2120 PUSHOFFSETCLOSURE0
2121 APPTERM2 4
2123 ACC0
2124 RETURN 2
2126 RESTART
2127 GRAB 1
2129 ACC0
2130 CLOSUREREC 1 , 2106
2134 ACC2
2135 PUSHCONST0
2136 PUSHACC2
2137 APPTERM2 5
2139 CONST0
2140 PUSHACC1
2141 PUSHENVACC1
2142 APPTERM2 3
2144 ACC0
2145 BRANCHIFNOT 2151
2147 ACC0
2148 GETFIELD1
2149 RETURN 1
2151 GETGLOBAL " tl "
2153 PUSHGETGLOBALFIELD Pervasives , 3
2156 APPTERM1 2
2158 ACC0
2159 BRANCHIFNOT 2165
2161 ACC0
2162 GETFIELD0
2163 RETURN 1
2165 GETGLOBAL " hd "
2167 PUSHGETGLOBALFIELD Pervasives , 3
2170 APPTERM1 2
2172 ACC0
2173 PUSHCONST0
2174 PUSHENVACC1
2175 APPTERM2 3
2177 CLOSUREREC 0 , 1200
2181 ACC0
2182 CLOSURE 1 , 2172
2185 PUSH
2186 CLOSURE 0 , 2158
2189 PUSH
2190 CLOSURE 0 , 2144
2193 PUSH
2194 CLOSUREREC 0 , 1217
2198 GETGLOBALFIELD Pervasives , 16
2201 PUSH
2202 CLOSUREREC 0 , 1259
2206 ACC0
2207 CLOSURE 1 , 2139
2210 PUSH
2211 CLOSUREREC 0 , 1277
2215 CLOSUREREC 0 , 1294
2219 CLOSURE 0 , 2127
2222 PUSH
2223 CLOSUREREC 0 , 1316
2227 CLOSUREREC 0 , 1334
2231 CLOSUREREC 0 , 1354
2235 CLOSUREREC 0 , 1374
2239 CLOSURE 0 , 2092
2242 PUSH
2243 CLOSUREREC 0 , 1415
2247 CLOSUREREC 0 , 1452
2251 CLOSUREREC 0 , 1490
2255 CLOSUREREC 0 , 1530
2259 CLOSUREREC 0 , 1553
2263 CLOSUREREC 0 , 1573
2267 CLOSUREREC 0 , 1613
2271 CLOSUREREC 0 , 1654
2275 CLOSUREREC 0 , 1675
2279 CLOSUREREC 0 , 1695
2283 CLOSUREREC 0 , 1725
2287 CLOSUREREC 0 , 1754
2291 CLOSUREREC 0 , 1776
2295 CLOSUREREC 0 , 1797
2299 CLOSUREREC 0 , 1828
2303 CLOSUREREC 0 , 1858
2307 ACC 24
2309 CLOSURE 1 , 2042
2312 PUSHACC 25
2314 CLOSUREREC 1 , 1928
2318 CLOSUREREC 0 , 1942
2322 CLOSUREREC 0 , 1972
2326 ACC0
2327 PUSHACC2
2328 PUSHACC7
2329 PUSHACC 9
2331 PUSHACC 11
2333 PUSHACC 13
2335 PUSHACC 15
2337 PUSHACC 17
2339 PUSHACC 10
2341 PUSHACC 12
2343 PUSHACC 13
2345 PUSHACC 15
2347 PUSHACC 23
2349 PUSHACC 25
2351 PUSHACC 27
2353 PUSHACC 29
2355 PUSHACC 31
2357 PUSHACC 33
2359 PUSHACC 35
2361 PUSHACC 37
2363 PUSHACC 40
2365 PUSHACC 42
2367 PUSHACC 41
2369 PUSHACC 45
2371 PUSHACC 47
2373 PUSHACC 50
2375 PUSHACC 52
2377 PUSHACC 51
2379 PUSHACC 55
2381 PUSHACC 56
2383 PUSHACC 59
2385 PUSHACC 61
2387 PUSHACC 60
2389 PUSHACC 64
2391 PUSHACC 66
2393 PUSHACC 68
2395 PUSHACC 70
2397 MAKEBLOCK 37 , 0
2400 POP 36
2402 SETGLOBAL List
2404 BRANCH 3341
2406 RESTART
2407 GRAB 2
2409 ACC2
2410 PUSHACC2
2411 VECTLENGTH
2412 OFFSETINT -1
2414 PUSHCONST0
2415 PUSH
2416 BRANCH 2433
2418 CHECK_SIGNALS
2419 ACC2
2420 PUSHACC2
2421 PUSHACC6
2422 C_CALL2 array_unsafe_get
2424 PUSHACC5
2425 APPLY2
2426 ASSIGN 2
2428 ACC1
2429 OFFSETINT -1
2431 ASSIGN 1
2433 ACC0
2434 PUSHACC2
2435 GEINT
2436 BRANCHIF 2418
2438 CONST0
2439 POP 2
2441 ACC0
2442 RETURN 4
2444 RESTART
2445 GRAB 2
2447 ACC1
2448 PUSHCONST0
2449 PUSHACC4
2450 VECTLENGTH
2451 OFFSETINT -1
2453 PUSH
2454 BRANCH 2471
2456 CHECK_SIGNALS
2457 ACC1
2458 PUSHACC6
2459 C_CALL2 array_unsafe_get
2461 PUSHACC3
2462 PUSHACC5
2463 APPLY2
2464 ASSIGN 2
2466 ACC1
2467 OFFSETINT 1
2469 ASSIGN 1
2471 ACC0
2472 PUSHACC2
2473 LEINT
2474 BRANCHIF 2456
2476 CONST0
2477 POP 2
2479 ACC0
2480 RETURN 4
2482 RESTART
2483 GRAB 1
2485 ACC1
2486 BRANCHIFNOT 2502
2488 ACC1
2489 GETFIELD0
2490 PUSHACC1
2491 PUSHENVACC1
2492 C_CALL3 array_unsafe_set
2494 ACC1
2495 GETFIELD1
2496 PUSHACC1
2497 OFFSETINT 1
2499 PUSHOFFSETCLOSURE0
2500 APPTERM2 4
2502 ENVACC1
2503 RETURN 2
2505 ACC0
2506 BRANCHIFNOT 2531
2508 ACC0
2509 GETFIELD1
2510 PUSHACC1
2511 GETFIELD0
2512 PUSHACC1
2513 PUSHGETGLOBALFIELD List , 0
2516 APPLY1
2517 OFFSETINT 1
2519 C_CALL2 make_vect
2521 PUSHACC0
2522 CLOSUREREC 1 , 2483
2526 ACC2
2527 PUSHCONST1
2528 PUSHACC2
2529 APPTERM2 6
2531 ATOM0
2532 RETURN 1
2534 RESTART
2535 GRAB 1
2537 CONST0
2538 PUSHACC1
2539 LTINT
2540 BRANCHIFNOT 2545
2542 ACC1
2543 RETURN 2
2545 ACC1
2546 PUSHACC1
2547 PUSHENVACC1
2548 C_CALL2 array_unsafe_get
2550 MAKEBLOCK2 0
2552 PUSHACC1
2553 OFFSETINT -1
2555 PUSHOFFSETCLOSURE0
2556 APPTERM2 4
2558 ACC0
2559 CLOSUREREC 1 , 2535
2563 CONST0
2564 PUSHACC2
2565 VECTLENGTH
2566 OFFSETINT -1
2568 PUSHACC2
2569 APPTERM2 4
2571 RESTART
2572 GRAB 1
2574 ACC1
2575 VECTLENGTH
2576 PUSHCONST0
2577 PUSHACC1
2578 EQ
2579 BRANCHIFNOT 2584
2581 ATOM0
2582 RETURN 3
2584 CONST0
2585 PUSHACC3
2586 C_CALL2 array_unsafe_get
2588 PUSHCONST0
2589 PUSHACC3
2590 APPLY2
2591 PUSHACC1
2592 C_CALL2 make_vect
2594 PUSHCONST1
2595 PUSHACC2
2596 OFFSETINT -1
2598 PUSH
2599 BRANCH 2618
2601 CHECK_SIGNALS
2602 ACC1
2603 PUSHACC6
2604 C_CALL2 array_unsafe_get
2606 PUSHACC2
2607 PUSHACC6
2608 APPLY2
2609 PUSHACC2
2610 PUSHACC4
2611 C_CALL3 array_unsafe_set
2613 ACC1
2614 OFFSETINT 1
2616 ASSIGN 1
2618 ACC0
2619 PUSHACC2
2620 LEINT
2621 BRANCHIF 2601
2623 CONST0
2624 POP 2
2626 ACC0
2627 RETURN 4
2629 RESTART
2630 GRAB 1
2632 CONST0
2633 PUSHACC2
2634 VECTLENGTH
2635 OFFSETINT -1
2637 PUSH
2638 BRANCH 2653
2640 CHECK_SIGNALS
2641 ACC1
2642 PUSHACC4
2643 C_CALL2 array_unsafe_get
2645 PUSHACC2
2646 PUSHACC4
2647 APPLY2
2648 ACC1
2649 OFFSETINT 1
2651 ASSIGN 1
2653 ACC0
2654 PUSHACC2
2655 LEINT
2656 BRANCHIF 2640
2658 CONST0
2659 RETURN 4
2661 RESTART
2662 GRAB 1
2664 ACC1
2665 VECTLENGTH
2666 PUSHCONST0
2667 PUSHACC1
2668 EQ
2669 BRANCHIFNOT 2674
2671 ATOM0
2672 RETURN 3
2674 CONST0
2675 PUSHACC3
2676 C_CALL2 array_unsafe_get
2678 PUSHACC2
2679 APPLY1
2680 PUSHACC1
2681 C_CALL2 make_vect
2683 PUSHCONST1
2684 PUSHACC2
2685 OFFSETINT -1
2687 PUSH
2688 BRANCH 2706
2690 CHECK_SIGNALS
2691 ACC1
2692 PUSHACC6
2693 C_CALL2 array_unsafe_get
2695 PUSHACC5
2696 APPLY1
2697 PUSHACC2
2698 PUSHACC4
2699 C_CALL3 array_unsafe_set
2701 ACC1
2702 OFFSETINT 1
2704 ASSIGN 1
2706 ACC0
2707 PUSHACC2
2708 LEINT
2709 BRANCHIF 2690
2711 CONST0
2712 POP 2
2714 ACC0
2715 RETURN 4
2717 RESTART
2718 GRAB 1
2720 CONST0
2721 PUSHACC2
2722 VECTLENGTH
2723 OFFSETINT -1
2725 PUSH
2726 BRANCH 2740
2728 CHECK_SIGNALS
2729 ACC1
2730 PUSHACC4
2731 C_CALL2 array_unsafe_get
2733 PUSHACC3
2734 APPLY1
2735 ACC1
2736 OFFSETINT 1
2738 ASSIGN 1
2740 ACC0
2741 PUSHACC2
2742 LEINT
2743 BRANCHIF 2728
2745 CONST0
2746 RETURN 4
2748 RESTART
2749 GRAB 4
2751 CONST0
2752 PUSHACC5
2753 LTINT
2754 BRANCHIF 2782
2756 CONST0
2757 PUSHACC2
2758 LTINT
2759 BRANCHIF 2782
2761 ACC0
2762 VECTLENGTH
2763 PUSHACC5
2764 PUSHACC3
2765 ADDINT
2766 GTINT
2767 BRANCHIF 2782
2769 CONST0
2770 PUSHACC4
2771 LTINT
2772 BRANCHIF 2782
2774 ACC2
2775 VECTLENGTH
2776 PUSHACC5
2777 PUSHACC5
2778 ADDINT
2779 GTINT
2780 BRANCHIFNOT 2789
2782 GETGLOBAL " Array.blit "
2784 PUSHGETGLOBALFIELD Pervasives , 2
2787 APPTERM1 6
2789 ACC3
2790 PUSHACC2
2791 LTINT
2792 BRANCHIFNOT 2827
2794 ACC4
2795 OFFSETINT -1
2797 PUSHCONST0
2798 PUSH
2799 BRANCH 2819
2801 CHECK_SIGNALS
2802 ACC1
2803 PUSHACC4
2804 ADDINT
2805 PUSHACC3
2806 C_CALL2 array_unsafe_get
2808 PUSHACC2
2809 PUSHACC7
2810 ADDINT
2811 PUSHACC6
2812 C_CALL3 array_unsafe_set
2814 ACC1
2815 OFFSETINT -1
2817 ASSIGN 1
2819 ACC0
2820 PUSHACC2
2821 GEINT
2822 BRANCHIF 2801
2824 CONST0
2825 RETURN 7
2827 CONST0
2828 PUSHACC5
2829 OFFSETINT -1
2831 PUSH
2832 BRANCH 2852
2834 CHECK_SIGNALS
2835 ACC1
2836 PUSHACC4
2837 ADDINT
2838 PUSHACC3
2839 C_CALL2 array_unsafe_get
2841 PUSHACC2
2842 PUSHACC7
2843 ADDINT
2844 PUSHACC6
2845 C_CALL3 array_unsafe_set
2847 ACC1
2848 OFFSETINT 1
2850 ASSIGN 1
2852 ACC0
2853 PUSHACC2
2854 LEINT
2855 BRANCHIF 2834
2857 CONST0
2858 RETURN 7
2860 RESTART
2861 GRAB 3
2863 CONST0
2864 PUSHACC2
2865 LTINT
2866 BRANCHIF 2881
2868 CONST0
2869 PUSHACC3
2870 LTINT
2871 BRANCHIF 2881
2873 ACC0
2874 VECTLENGTH
2875 PUSHACC3
2876 PUSHACC3
2877 ADDINT
2878 GTINT
2879 BRANCHIFNOT 2888
2881 GETGLOBAL " Array.fill "
2883 PUSHGETGLOBALFIELD Pervasives , 2
2886 APPTERM1 5
2888 ACC1
2889 PUSHACC3
2890 PUSHACC3
2891 ADDINT
2892 OFFSETINT -1
2894 PUSH
2895 BRANCH 2908
2897 CHECK_SIGNALS
2898 ACC5
2899 PUSHACC2
2900 PUSHACC4
2901 C_CALL3 array_unsafe_set
2903 ACC1
2904 OFFSETINT 1
2906 ASSIGN 1
2908 ACC0
2909 PUSHACC2
2910 LEINT
2911 BRANCHIF 2897
2913 CONST0
2914 RETURN 6
2916 RESTART
2917 GRAB 2
2919 CONST0
2920 PUSHACC2
2921 LTINT
2922 BRANCHIF 2937
2924 CONST0
2925 PUSHACC3
2926 LTINT
2927 BRANCHIF 2937
2929 ACC0
2930 VECTLENGTH
2931 PUSHACC3
2932 PUSHACC3
2933 ADDINT
2934 GTINT
2935 BRANCHIFNOT 2944
2937 GETGLOBAL " Array.sub "
2939 PUSHGETGLOBALFIELD Pervasives , 2
2942 APPTERM1 4
2944 CONST0
2945 PUSHACC3
2946 EQ
2947 BRANCHIFNOT 2952
2949 ATOM0
2950 RETURN 3
2952 ACC1
2953 PUSHACC1
2954 C_CALL2 array_unsafe_get
2956 PUSHACC3
2957 C_CALL2 make_vect
2959 PUSHCONST1
2960 PUSHACC4
2961 OFFSETINT -1
2963 PUSH
2964 BRANCH 2982
2966 CHECK_SIGNALS
2967 ACC1
2968 PUSHACC5
2969 ADDINT
2970 PUSHACC4
2971 C_CALL2 array_unsafe_get
2973 PUSHACC2
2974 PUSHACC4
2975 C_CALL3 array_unsafe_set
2977 ACC1
2978 OFFSETINT 1
2980 ASSIGN 1
2982 ACC0
2983 PUSHACC2
2984 LEINT
2985 BRANCHIF 2966
2987 CONST0
2988 POP 2
2990 ACC0
2991 RETURN 4
2993 ACC0
2994 BRANCHIFNOT 3017
2996 ACC0
2997 GETFIELD0
2998 PUSHCONST0
2999 PUSHACC1
3000 VECTLENGTH
3001 GTINT
3002 BRANCHIFNOT 3012
3004 ENVACC2
3005 PUSHCONST0
3006 PUSHACC2
3007 C_CALL2 array_unsafe_get
3009 PUSHENVACC1
3010 APPTERM2 4
3012 ACC1
3013 GETFIELD1
3014 PUSHOFFSETCLOSURE0
3015 APPTERM1 3
3017 ATOM0
3018 RETURN 1
3020 ACC0
3021 PUSHENVACC1
3022 CLOSUREREC 2 , 2993
3026 ACC1
3027 PUSHACC1
3028 APPTERM1 3
3030 CONST0
3031 PUSHACC1
3032 VECTLENGTH
3033 OFFSETINT -1
3035 PUSH
3036 BRANCH 3056
3038 CHECK_SIGNALS
3039 ACC1
3040 PUSHACC3
3041 C_CALL2 array_unsafe_get
3043 PUSHENVACC2
3044 GETFIELD0
3045 PUSHENVACC1
3046 C_CALL3 array_unsafe_set
3048 ENVACC2
3049 OFFSETREF 1
3051 ACC1
3052 OFFSETINT 1
3054 ASSIGN 1
3056 ACC0
3057 PUSHACC2
3058 LEINT
3059 BRANCHIF 3038
3061 CONST0
3062 RETURN 3
3064 RESTART
3065 GRAB 1
3067 ACC1
3068 VECTLENGTH
3069 PUSHACC1
3070 ADDINT
3071 RETURN 2
3073 RESTART
3074 GRAB 1
3076 ACC1
3077 PUSHCONST0
3078 PUSH
3079 CLOSURE 0 , 3065
3082 PUSHGETGLOBALFIELD List , 12
3085 APPLY3
3086 PUSHACC1
3087 PUSHACC1
3088 C_CALL2 make_vect
3090 PUSHCONST0
3091 MAKEBLOCK1 0
3093 PUSHACC4
3094 PUSHACC1
3095 PUSHACC3
3096 CLOSURE 2 , 3030
3099 PUSHGETGLOBALFIELD List , 9
3102 APPLY2
3103 ACC1
3104 RETURN 5
3106 RESTART
3107 GRAB 1
3109 ACC0
3110 VECTLENGTH
3111 PUSHACC2
3112 VECTLENGTH
3113 PUSHCONST0
3114 PUSHACC2
3115 EQ
3116 BRANCHIFNOT 3126
3118 CONST0
3119 PUSHACC1
3120 EQ
3121 BRANCHIFNOT 3126
3123 ATOM0
3124 RETURN 4
3126 CONST0
3127 PUSHCONST0
3128 PUSHACC3
3129 GTINT
3130 BRANCHIFNOT 3135
3132 ACC3
3133 BRANCH 3136
3135 ACC4
3136 C_CALL2 array_unsafe_get
3138 PUSHACC1
3139 PUSHACC3
3140 ADDINT
3141 C_CALL2 make_vect
3143 PUSHCONST0
3144 PUSHACC3
3145 OFFSETINT -1
3147 PUSH
3148 BRANCH 3164
3150 CHECK_SIGNALS
3151 ACC1
3152 PUSHACC6
3153 C_CALL2 array_unsafe_get
3155 PUSHACC2
3156 PUSHACC4
3157 C_CALL3 array_unsafe_set
3159 ACC1
3160 OFFSETINT 1
3162 ASSIGN 1
3164 ACC0
3165 PUSHACC2
3166 LEINT
3167 BRANCHIF 3150
3169 CONST0
3170 POP 2
3172 CONST0
3173 PUSHACC2
3174 OFFSETINT -1
3176 PUSH
3177 BRANCH 3195
3179 CHECK_SIGNALS
3180 ACC1
3181 PUSHACC7
3182 C_CALL2 array_unsafe_get
3184 PUSHACC5
3185 PUSHACC3
3186 ADDINT
3187 PUSHACC4
3188 C_CALL3 array_unsafe_set
3190 ACC1
3191 OFFSETINT 1
3193 ASSIGN 1
3195 ACC0
3196 PUSHACC2
3197 LEINT
3198 BRANCHIF 3179
3200 CONST0
3201 POP 2
3203 ACC0
3204 RETURN 5
3206 ACC0
3207 VECTLENGTH
3208 PUSHCONST0
3209 PUSHACC1
3210 EQ
3211 BRANCHIFNOT 3216
3213 ATOM0
3214 RETURN 2
3216 CONST0
3217 PUSHACC2
3218 C_CALL2 array_unsafe_get
3220 PUSHACC1
3221 C_CALL2 make_vect
3223 PUSHCONST1
3224 PUSHACC2
3225 OFFSETINT -1
3227 PUSH
3228 BRANCH 3244
3230 CHECK_SIGNALS
3231 ACC1
3232 PUSHACC5
3233 C_CALL2 array_unsafe_get
3235 PUSHACC2
3236 PUSHACC4
3237 C_CALL3 array_unsafe_set
3239 ACC1
3240 OFFSETINT 1
3242 ASSIGN 1
3244 ACC0
3245 PUSHACC2
3246 LEINT
3247 BRANCHIF 3230
3249 CONST0
3250 POP 2
3252 ACC0
3253 RETURN 3
3255 RESTART
3256 GRAB 2
3258 ATOM0
3259 PUSHACC1
3260 C_CALL2 make_vect
3262 PUSHCONST0
3263 PUSHACC2
3264 OFFSETINT -1
3266 PUSH
3267 BRANCH 3282
3269 CHECK_SIGNALS
3270 ACC5
3271 PUSHACC5
3272 C_CALL2 make_vect
3274 PUSHACC2
3275 PUSHACC4
3276 SETVECTITEM
3277 ACC1
3278 OFFSETINT 1
3280 ASSIGN 1
3282 ACC0
3283 PUSHACC2
3284 LEINT
3285 BRANCHIF 3269
3287 CONST0
3288 POP 2
3290 ACC0
3291 RETURN 4
3293 RESTART
3294 GRAB 1
3296 CONST0
3297 PUSHACC1
3298 EQ
3299 BRANCHIFNOT 3304
3301 ATOM0
3302 RETURN 2
3304 CONST0
3305 PUSHACC2
3306 APPLY1
3307 PUSHACC1
3308 C_CALL2 make_vect
3310 PUSHCONST1
3311 PUSHACC2
3312 OFFSETINT -1
3314 PUSH
3315 BRANCH 3330
3317 CHECK_SIGNALS
3318 ACC1
3319 PUSHACC5
3320 APPLY1
3321 PUSHACC2
3322 PUSHACC4
3323 C_CALL3 array_unsafe_set
3325 ACC1
3326 OFFSETINT 1
3328 ASSIGN 1
3330 ACC0
3331 PUSHACC2
3332 LEINT
3333 BRANCHIF 3317
3335 CONST0
3336 POP 2
3338 ACC0
3339 RETURN 3
3341 CLOSURE 0 , 3294
3344 PUSH
3345 CLOSURE 0 , 3256
3348 PUSH
3349 CLOSURE 0 , 3206
3352 PUSH
3353 CLOSURE 0 , 3107
3356 PUSH
3357 CLOSURE 0 , 3074
3360 PUSHACC0
3361 CLOSURE 1 , 3020
3364 PUSH
3365 CLOSURE 0 , 2917
3368 PUSH
3369 CLOSURE 0 , 2861
3372 PUSH
3373 CLOSURE 0 , 2749
3376 PUSH
3377 CLOSURE 0 , 2718
3380 PUSH
3381 CLOSURE 0 , 2662
3384 PUSH
3385 CLOSURE 0 , 2630
3388 PUSH
3389 CLOSURE 0 , 2572
3392 PUSH
3393 CLOSURE 0 , 2558
3396 PUSH
3397 CLOSURE 0 , 2505
3400 PUSH
3401 CLOSURE 0 , 2445
3404 PUSH
3405 CLOSURE 0 , 2407
3408 PUSHACC0
3409 PUSHACC2
3410 PUSHACC6
3411 PUSHACC 8
3413 PUSHACC 10
3415 PUSHACC 12
3417 PUSHACC 8
3419 PUSHACC 10
3421 PUSHACC 16
3423 PUSHACC 18
3425 PUSHACC 24
3427 PUSHACC 21
3429 PUSHACC 23
3431 PUSHACC 26
3433 PUSHACC 29
3435 PUSHACC 30
3437 PUSHACC 32
3439 MAKEBLOCK 17 , 0
3442 POP 17
3444 SETGLOBAL Array
3446 BRANCH 3456
3448 ACC0
3449 PUSHENVACC1
3450 GETFIELD0
3451 ADDINT
3452 PUSHENVACC1
3453 SETFIELD0
3454 RETURN 1
3456 CONST2
3457 PUSHCONSTINT 200000
3459 C_CALL2 make_vect
3461 PUSHCONST0
3462 MAKEBLOCK1 0
3464 PUSHACC1
3465 PUSHACC1
3466 CLOSURE 1 , 3448
3469 PUSHGETGLOBALFIELD Array , 11
3472 APPLY2
3473 CONSTINT 400000
3475 PUSHACC1
3476 GETFIELD0
3477 NEQ
3478 BRANCHIFNOT 3485
3480 GETGLOBAL Not_found
3482 MAKEBLOCK1 0
3484 RAISE
3485 POP 2
3487 ATOM0
3488 SETGLOBAL T310 - alloc-2
3490 STOP
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 BRANCH 746
11 RESTART
12 GRAB 1
14 ACC0
15 BRANCHIFNOT 28
17 ACC1
18 PUSHACC1
19 GETFIELD1
20 PUSHOFFSETCLOSURE0
21 APPLY2
22 PUSHACC1
23 GETFIELD0
24 MAKEBLOCK2 0
26 RETURN 2
28 ACC1
29 RETURN 2
31 RESTART
32 GRAB 3
34 CONST0
35 PUSHACC4
36 LEINT
37 BRANCHIFNOT 42
39 CONST0
40 RETURN 4
42 ACC3
43 PUSHACC3
44 PUSHACC3
45 PUSHACC3
46 C_CALL4 caml_input
48 PUSHCONST0
49 PUSHACC1
50 EQ
51 BRANCHIFNOT 58
53 GETGLOBAL End_of_file
55 MAKEBLOCK1 0
57 RAISE
58 ACC0
59 PUSHACC5
60 SUBINT
61 PUSHACC1
62 PUSHACC5
63 ADDINT
64 PUSHACC4
65 PUSHACC4
66 PUSHOFFSETCLOSURE0
67 APPTERM 4, 9
70 ACC0
71 C_CALL1 caml_input_scan_line
73 PUSHCONST0
74 PUSHACC1
75 EQ
76 BRANCHIFNOT 83
78 GETGLOBAL End_of_file
80 MAKEBLOCK1 0
82 RAISE
83 CONST0
84 PUSHACC1
85 GTINT
86 BRANCHIFNOT 107
88 ACC0
89 OFFSETINT -1
91 C_CALL1 create_string
93 PUSHACC1
94 OFFSETINT -1
96 PUSHCONST0
97 PUSHACC2
98 PUSHACC5
99 C_CALL4 caml_input
101 ACC2
102 C_CALL1 caml_input_char
104 ACC0
105 RETURN 3
107 ACC0
108 NEGINT
109 C_CALL1 create_string
111 PUSHACC1
112 NEGINT
113 PUSHCONST0
114 PUSHACC2
115 PUSHACC5
116 C_CALL4 caml_input
118 CONST0
119 PUSHTRAP 130
121 ACC6
122 PUSHOFFSETCLOSURE0
123 APPLY1
124 PUSHACC5
125 PUSHENVACC1
126 APPLY2
127 POPTRAP
128 RETURN 3
130 PUSHGETGLOBAL End_of_file
132 PUSHACC1
133 GETFIELD0
134 EQ
135 BRANCHIFNOT 140
137 ACC1
138 RETURN 4
140 ACC0
141 RAISE
142 ACC0
143 C_CALL1 caml_flush
145 RETURN 1
147 RESTART
148 GRAB 1
150 ACC1
151 PUSHACC1
152 C_CALL2 caml_output_char
154 RETURN 2
156 RESTART
157 GRAB 1
159 ACC1
160 PUSHACC1
161 C_CALL2 caml_output_char
163 RETURN 2
165 RESTART
166 GRAB 1
168 ACC1
169 PUSHACC1
170 C_CALL2 caml_output_int
172 RETURN 2
174 RESTART
175 GRAB 1
177 ACC1
178 PUSHACC1
179 C_CALL2 caml_seek_out
181 RETURN 2
183 ACC0
184 C_CALL1 caml_pos_out
186 RETURN 1
188 ACC0
189 C_CALL1 caml_channel_size
191 RETURN 1
193 RESTART
194 GRAB 1
196 ACC1
197 PUSHACC1
198 C_CALL2 caml_set_binary_mode
200 RETURN 2
202 ACC0
203 C_CALL1 caml_input_char
205 RETURN 1
207 ACC0
208 C_CALL1 caml_input_char
210 RETURN 1
212 ACC0
213 C_CALL1 caml_input_int
215 RETURN 1
217 ACC0
218 C_CALL1 input_value
220 RETURN 1
222 RESTART
223 GRAB 1
225 ACC1
226 PUSHACC1
227 C_CALL2 caml_seek_in
229 RETURN 2
231 ACC0
232 C_CALL1 caml_pos_in
234 RETURN 1
236 ACC0
237 C_CALL1 caml_channel_size
239 RETURN 1
241 ACC0
242 C_CALL1 caml_close_channel
244 RETURN 1
246 RESTART
247 GRAB 1
249 ACC1
250 PUSHACC1
251 C_CALL2 caml_set_binary_mode
253 RETURN 2
255 CONST0
256 PUSHENVACC1
257 APPLY1
258 ACC0
259 C_CALL1 sys_exit
261 RETURN 1
263 CONST0
264 PUSHENVACC1
265 GETFIELD0
266 APPTERM1 2
268 CONST0
269 PUSHENVACC1
270 APPLY1
271 CONST0
272 PUSHENVACC2
273 APPTERM1 2
275 ENVACC1
276 GETFIELD0
277 PUSHACC0
278 PUSHACC2
279 CLOSURE 2, 268
282 PUSHENVACC1
283 SETFIELD0
284 RETURN 2
286 ENVACC1
287 C_CALL1 caml_flush
289 ENVACC2
290 C_CALL1 caml_flush
292 RETURN 1
294 CONST0
295 PUSHENVACC1
296 APPLY1
297 C_CALL1 float_of_string
299 RETURN 1
301 CONST0
302 PUSHENVACC1
303 APPLY1
304 C_CALL1 int_of_string
306 RETURN 1
308 ENVACC2
309 C_CALL1 caml_flush
311 ENVACC1
312 PUSHENVACC3
313 APPTERM1 2
315 CONSTINT 13
317 PUSHENVACC1
318 C_CALL2 caml_output_char
320 ENVACC1
321 C_CALL1 caml_flush
323 RETURN 1
325 ACC0
326 PUSHENVACC1
327 PUSHENVACC2
328 APPLY2
329 CONSTINT 13
331 PUSHENVACC1
332 C_CALL2 caml_output_char
334 ENVACC1
335 C_CALL1 caml_flush
337 RETURN 1
339 ACC0
340 PUSHENVACC1
341 APPLY1
342 PUSHENVACC2
343 PUSHENVACC3
344 APPTERM2 3
346 ACC0
347 PUSHENVACC1
348 APPLY1
349 PUSHENVACC2
350 PUSHENVACC3
351 APPTERM2 3
353 ACC0
354 PUSHENVACC1
355 PUSHENVACC2
356 APPTERM2 3
358 ACC0
359 PUSHENVACC1
360 C_CALL2 caml_output_char
362 RETURN 1
364 CONSTINT 13
366 PUSHENVACC1
367 C_CALL2 caml_output_char
369 ENVACC1
370 C_CALL1 caml_flush
372 RETURN 1
374 ACC0
375 PUSHENVACC1
376 PUSHENVACC2
377 APPLY2
378 CONSTINT 13
380 PUSHENVACC1
381 C_CALL2 caml_output_char
383 RETURN 1
385 ACC0
386 PUSHENVACC1
387 APPLY1
388 PUSHENVACC2
389 PUSHENVACC3
390 APPTERM2 3
392 ACC0
393 PUSHENVACC1
394 APPLY1
395 PUSHENVACC2
396 PUSHENVACC3
397 APPTERM2 3
399 ACC0
400 PUSHENVACC1
401 PUSHENVACC2
402 APPTERM2 3
404 ACC0
405 PUSHENVACC1
406 C_CALL2 caml_output_char
408 RETURN 1
410 RESTART
411 GRAB 3
413 CONST0
414 PUSHACC3
415 LTINT
416 BRANCHIF 427
418 ACC1
419 C_CALL1 ml_string_length
421 PUSHACC4
422 PUSHACC4
423 ADDINT
424 GTINT
425 BRANCHIFNOT 432
427 GETGLOBAL "really_input"
429 PUSHENVACC1
430 APPTERM1 5
432 ACC3
433 PUSHACC3
434 PUSHACC3
435 PUSHACC3
436 PUSHENVACC2
437 APPTERM 4, 8
440 RESTART
441 GRAB 3
443 CONST0
444 PUSHACC3
445 LTINT
446 BRANCHIF 457
448 ACC1
449 C_CALL1 ml_string_length
451 PUSHACC4
452 PUSHACC4
453 ADDINT
454 GTINT
455 BRANCHIFNOT 462
457 GETGLOBAL "input"
459 PUSHENVACC1
460 APPTERM1 5
462 ACC3
463 PUSHACC3
464 PUSHACC3
465 PUSHACC3
466 C_CALL4 caml_input
468 RETURN 4
470 ACC0
471 PUSHCONST0
472 PUSHGETGLOBAL <0>(0, <0>(6, 0))
474 PUSHENVACC1
475 APPTERM3 4
477 ACC0
478 PUSHCONST0
479 PUSHGETGLOBAL <0>(0, <0>(7, 0))
481 PUSHENVACC1
482 APPTERM3 4
484 RESTART
485 GRAB 2
487 ACC1
488 PUSHACC1
489 PUSHACC4
490 C_CALL3 sys_open
492 C_CALL1 caml_open_descriptor
494 RETURN 3
496 ACC0
497 C_CALL1 caml_flush
499 ACC0
500 C_CALL1 caml_close_channel
502 RETURN 1
504 RESTART
505 GRAB 1
507 CONST0
508 PUSHACC2
509 PUSHACC2
510 C_CALL3 output_value
512 RETURN 2
514 RESTART
515 GRAB 3
517 CONST0
518 PUSHACC3
519 LTINT
520 BRANCHIF 531
522 ACC1
523 C_CALL1 ml_string_length
525 PUSHACC4
526 PUSHACC4
527 ADDINT
528 GTINT
529 BRANCHIFNOT 536
531 GETGLOBAL "output"
533 PUSHENVACC1
534 APPTERM1 5
536 ACC3
537 PUSHACC3
538 PUSHACC3
539 PUSHACC3
540 C_CALL4 caml_output
542 RETURN 4
544 RESTART
545 GRAB 1
547 ACC1
548 C_CALL1 ml_string_length
550 PUSHCONST0
551 PUSHACC3
552 PUSHACC3
553 C_CALL4 caml_output
555 RETURN 2
557 ACC0
558 PUSHCONSTINT 438
560 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(6, 0))))
562 PUSHENVACC1
563 APPTERM3 4
565 ACC0
566 PUSHCONSTINT 438
568 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(7, 0))))
570 PUSHENVACC1
571 APPTERM3 4
573 RESTART
574 GRAB 2
576 ACC1
577 PUSHACC1
578 PUSHACC4
579 C_CALL3 sys_open
581 C_CALL1 caml_open_descriptor
583 RETURN 3
585 ACC0
586 PUSHGETGLOBAL "%.12g"
588 C_CALL2 format_float
590 RETURN 1
592 ACC0
593 PUSHGETGLOBAL "%d"
595 C_CALL2 format_int
597 RETURN 1
599 GETGLOBAL "false"
601 PUSHACC1
602 C_CALL2 string_equal
604 BRANCHIFNOT 609
606 CONST0
607 RETURN 1
609 GETGLOBAL "true"
611 PUSHACC1
612 C_CALL2 string_equal
614 BRANCHIFNOT 619
616 CONST1
617 RETURN 1
619 GETGLOBAL "bool_of_string"
621 PUSHENVACC1
622 APPTERM1 2
624 ACC0
625 BRANCHIFNOT 631
627 GETGLOBAL "true"
629 RETURN 1
631 GETGLOBAL "false"
633 RETURN 1
635 CONST0
636 PUSHACC1
637 LTINT
638 BRANCHIF 646
640 CONSTINT 255
642 PUSHACC1
643 GTINT
644 BRANCHIFNOT 651
646 GETGLOBAL "char_of_int"
648 PUSHENVACC1
649 APPTERM1 2
651 ACC0
652 RETURN 1
654 RESTART
655 GRAB 1
657 ACC0
658 C_CALL1 ml_string_length
660 PUSHACC2
661 C_CALL1 ml_string_length
663 PUSHACC0
664 PUSHACC2
665 ADDINT
666 C_CALL1 create_string
668 PUSHACC2
669 PUSHCONST0
670 PUSHACC2
671 PUSHCONST0
672 PUSHACC7
673 C_CALL5 blit_string
675 ACC1
676 PUSHACC3
677 PUSHACC2
678 PUSHCONST0
679 PUSHACC 8
681 C_CALL5 blit_string
683 ACC0
684 RETURN 5
686 CONSTINT -1
688 PUSHACC1
689 XORINT
690 RETURN 1
692 CONST0
693 PUSHACC1
694 GEINT
695 BRANCHIFNOT 700
697 ACC0
698 RETURN 1
700 ACC0
701 NEGINT
702 RETURN 1
704 RESTART
705 GRAB 1
707 ACC1
708 PUSHACC1
709 C_CALL2 greaterequal
711 BRANCHIFNOT 716
713 ACC0
714 RETURN 2
716 ACC1
717 RETURN 2
719 RESTART
720 GRAB 1
722 ACC1
723 PUSHACC1
724 C_CALL2 lessequal
726 BRANCHIFNOT 731
728 ACC0
729 RETURN 2
731 ACC1
732 RETURN 2
734 ACC0
735 PUSHGETGLOBAL Invalid_argument
737 MAKEBLOCK2 0
739 RAISE
740 ACC0
741 PUSHGETGLOBAL Failure
743 MAKEBLOCK2 0
745 RAISE
746 CLOSURE 0, 740
749 PUSH
750 CLOSURE 0, 734
753 PUSHGETGLOBAL "Pervasives.Exit"
755 MAKEBLOCK1 0
757 PUSHGETGLOBAL "Pervasives.Assert_failure"
759 MAKEBLOCK1 0
761 PUSH
762 CLOSURE 0, 720
765 PUSH
766 CLOSURE 0, 705
769 PUSH
770 CLOSURE 0, 692
773 PUSH
774 CLOSURE 0, 686
777 PUSHCONST0
778 PUSHCONSTINT 31
780 PUSHCONST1
781 LSLINT
782 EQ
783 BRANCHIFNOT 789
785 CONSTINT 30
787 BRANCH 791
789 CONSTINT 62
791 PUSHCONST1
792 LSLINT
793 PUSHACC0
794 OFFSETINT -1
796 PUSH
797 CLOSURE 0, 655
800 PUSHACC 9
802 CLOSURE 1, 635
805 PUSH
806 CLOSURE 0, 624
809 PUSHACC 11
811 CLOSURE 1, 599
814 PUSH
815 CLOSURE 0, 592
818 PUSH
819 CLOSURE 0, 585
822 PUSH
823 CLOSUREREC 0, 12
827 CONST0
828 C_CALL1 caml_open_descriptor
830 PUSHCONST1
831 C_CALL1 caml_open_descriptor
833 PUSHCONST2
834 C_CALL1 caml_open_descriptor
836 PUSH
837 CLOSURE 0, 574
840 PUSHACC0
841 CLOSURE 1, 565
844 PUSHACC1
845 CLOSURE 1, 557
848 PUSH
849 CLOSURE 0, 545
852 PUSHACC 22
854 CLOSURE 1, 515
857 PUSH
858 CLOSURE 0, 505
861 PUSH
862 CLOSURE 0, 496
865 PUSH
866 CLOSURE 0, 485
869 PUSHACC0
870 CLOSURE 1, 477
873 PUSHACC1
874 CLOSURE 1, 470
877 PUSHACC 28
879 CLOSURE 1, 441
882 PUSH
883 CLOSUREREC 0, 32
887 ACC0
888 PUSHACC 31
890 CLOSURE 2, 411
893 PUSHACC 22
895 CLOSUREREC 1, 70
899 ACC 15
901 CLOSURE 1, 404
904 PUSHACC 11
906 PUSHACC 17
908 CLOSURE 2, 399
911 PUSHACC 12
913 PUSHACC 18
915 PUSHACC 23
917 CLOSURE 3, 392
920 PUSHACC 13
922 PUSHACC 19
924 PUSHACC 23
926 CLOSURE 3, 385
929 PUSHACC 14
931 PUSHACC 20
933 CLOSURE 2, 374
936 PUSHACC 20
938 CLOSURE 1, 364
941 PUSHACC 20
943 CLOSURE 1, 358
946 PUSHACC 17
948 PUSHACC 22
950 CLOSURE 2, 353
953 PUSHACC 18
955 PUSHACC 23
957 PUSHACC 29
959 CLOSURE 3, 346
962 PUSHACC 19
964 PUSHACC 24
966 PUSHACC 29
968 CLOSURE 3, 339
971 PUSHACC 20
973 PUSHACC 25
975 CLOSURE 2, 325
978 PUSHACC 25
980 CLOSURE 1, 315
983 PUSHACC 12
985 PUSHACC 28
987 PUSHACC 30
989 CLOSURE 3, 308
992 PUSHACC0
993 CLOSURE 1, 301
996 PUSHACC1
997 CLOSURE 1, 294
1000 PUSHACC 29
1002 PUSHACC 31
1004 CLOSURE 2, 286
1007 MAKEBLOCK1 0
1009 PUSHACC0
1010 CLOSURE 1, 275
1013 PUSHACC1
1014 CLOSURE 1, 263
1017 PUSHACC0
1018 CLOSURE 1, 255
1021 PUSHACC1
1022 PUSHACC 22
1024 PUSHACC4
1025 PUSHACC3
1026 PUSH
1027 CLOSURE 0, 247
1030 PUSH
1031 CLOSURE 0, 241
1034 PUSH
1035 CLOSURE 0, 236
1038 PUSH
1039 CLOSURE 0, 231
1042 PUSH
1043 CLOSURE 0, 223
1046 PUSH
1047 CLOSURE 0, 217
1050 PUSH
1051 CLOSURE 0, 212
1054 PUSH
1055 CLOSURE 0, 207
1058 PUSHACC 32
1060 PUSHACC 35
1062 PUSHACC 33
1064 PUSH
1065 CLOSURE 0, 202
1068 PUSHACC 41
1070 PUSHACC 40
1072 PUSHACC 42
1074 PUSH
1075 CLOSURE 0, 194
1078 PUSHACC 46
1080 PUSH
1081 CLOSURE 0, 188
1084 PUSH
1085 CLOSURE 0, 183
1088 PUSH
1089 CLOSURE 0, 175
1092 PUSHACC 51
1094 PUSH
1095 CLOSURE 0, 166
1098 PUSH
1099 CLOSURE 0, 157
1102 PUSHACC 55
1104 PUSHACC 57
1106 PUSH
1107 CLOSURE 0, 148
1110 PUSH
1111 CLOSURE 0, 142
1114 PUSHACC 63
1116 PUSHACC 62
1118 PUSHACC 64
1120 PUSHACC 38
1122 PUSHACC 40
1124 PUSHACC 42
1126 PUSHACC 44
1128 PUSHACC 46
1130 PUSHACC 48
1132 PUSHACC 50
1134 PUSHACC 52
1136 PUSHACC 54
1138 PUSHACC 56
1140 PUSHACC 58
1142 PUSHACC 60
1144 PUSHACC 62
1146 PUSHACC 64
1148 PUSHACC 66
1150 PUSHACC 82
1152 PUSHACC 84
1154 PUSHACC 86
1156 PUSHACC 88
1158 PUSHACC 90
1160 PUSHACC 92
1162 PUSHACC 94
1164 PUSHACC 96
1166 PUSHACC 98
1168 PUSHACC 100
1170 PUSHACC 104
1172 PUSHACC 104
1174 PUSHACC 104
1176 PUSHACC 108
1178 PUSHACC 110
1180 PUSHACC 112
1182 PUSHACC 117
1184 PUSHACC 117
1186 PUSHACC 117
1188 PUSHACC 117
1190 MAKEBLOCK 69, 0
1193 POP 53
1195 SETGLOBAL Pervasives
1197 BRANCH 2177
1199 RESTART
1200 GRAB 1
1202 ACC1
1203 BRANCHIFNOT 1213
1205 ACC1
1206 GETFIELD1
1207 PUSHACC1
1208 OFFSETINT 1
1210 PUSHOFFSETCLOSURE0
1211 APPTERM2 4
1213 ACC0
1214 RETURN 2
1216 RESTART
1217 GRAB 1
1219 ACC0
1220 BRANCHIFNOT 1251
1222 CONST0
1223 PUSHACC2
1224 EQ
1225 BRANCHIFNOT 1231
1227 ACC0
1228 GETFIELD0
1229 RETURN 2
1231 CONST0
1232 PUSHACC2
1233 GTINT
1234 BRANCHIFNOT 1244
1236 ACC1
1237 OFFSETINT -1
1239 PUSHACC1
1240 GETFIELD1
1241 PUSHOFFSETCLOSURE0
1242 APPTERM2 4
1244 GETGLOBAL "List.nth"
1246 PUSHGETGLOBALFIELD Pervasives, 2
1249 APPTERM1 3
1251 GETGLOBAL "nth"
1253 PUSHGETGLOBALFIELD Pervasives, 3
1256 APPTERM1 3
1258 RESTART
1259 GRAB 1
1261 ACC0
1262 BRANCHIFNOT 1274
1264 ACC1
1265 PUSHACC1
1266 GETFIELD0
1267 MAKEBLOCK2 0
1269 PUSHACC1
1270 GETFIELD1
1271 PUSHOFFSETCLOSURE0
1272 APPTERM2 4
1274 ACC1
1275 RETURN 2
1277 ACC0
1278 BRANCHIFNOT 1291
1280 ACC0
1281 GETFIELD1
1282 PUSHOFFSETCLOSURE0
1283 APPLY1
1284 PUSHACC1
1285 GETFIELD0
1286 PUSHGETGLOBALFIELD Pervasives, 16
1289 APPTERM2 3
1291 RETURN 1
1293 RESTART
1294 GRAB 1
1296 ACC1
1297 BRANCHIFNOT 1313
1299 ACC1
1300 GETFIELD0
1301 PUSHACC1
1302 APPLY1
1303 PUSHACC2
1304 GETFIELD1
1305 PUSHACC2
1306 PUSHOFFSETCLOSURE0
1307 APPLY2
1308 PUSHACC1
1309 MAKEBLOCK2 0
1311 POP 1
1313 RETURN 2
1315 RESTART
1316 GRAB 1
1318 ACC1
1319 BRANCHIFNOT 1331
1321 ACC1
1322 GETFIELD0
1323 PUSHACC1
1324 APPLY1
1325 ACC1
1326 GETFIELD1
1327 PUSHACC1
1328 PUSHOFFSETCLOSURE0
1329 APPTERM2 4
1331 RETURN 2
1333 RESTART
1334 GRAB 2
1336 ACC2
1337 BRANCHIFNOT 1350
1339 ACC2
1340 GETFIELD1
1341 PUSHACC3
1342 GETFIELD0
1343 PUSHACC3
1344 PUSHACC3
1345 APPLY2
1346 PUSHACC2
1347 PUSHOFFSETCLOSURE0
1348 APPTERM3 6
1350 ACC1
1351 RETURN 3
1353 RESTART
1354 GRAB 2
1356 ACC1
1357 BRANCHIFNOT 1370
1359 ACC2
1360 PUSHACC2
1361 GETFIELD1
1362 PUSHACC2
1363 PUSHOFFSETCLOSURE0
1364 APPLY3
1365 PUSHACC2
1366 GETFIELD0
1367 PUSHACC2
1368 APPTERM2 5
1370 ACC2
1371 RETURN 3
1373 RESTART
1374 GRAB 2
1376 ACC1
1377 BRANCHIFNOT 1400
1379 ACC2
1380 BRANCHIFNOT 1407
1382 ACC2
1383 GETFIELD0
1384 PUSHACC2
1385 GETFIELD0
1386 PUSHACC2
1387 APPLY2
1388 PUSHACC3
1389 GETFIELD1
1390 PUSHACC3
1391 GETFIELD1
1392 PUSHACC3
1393 PUSHOFFSETCLOSURE0
1394 APPLY3
1395 PUSHACC1
1396 MAKEBLOCK2 0
1398 RETURN 4
1400 ACC2
1401 BRANCHIFNOT 1405
1403 BRANCH 1407
1405 RETURN 3
1407 GETGLOBAL "List.map2"
1409 PUSHGETGLOBALFIELD Pervasives, 2
1412 APPTERM1 4
1414 RESTART
1415 GRAB 2
1417 ACC1
1418 BRANCHIFNOT 1437
1420 ACC2
1421 BRANCHIFNOT 1444
1423 ACC2
1424 GETFIELD0
1425 PUSHACC2
1426 GETFIELD0
1427 PUSHACC2
1428 APPLY2
1429 ACC2
1430 GETFIELD1
1431 PUSHACC2
1432 GETFIELD1
1433 PUSHACC2
1434 PUSHOFFSETCLOSURE0
1435 APPTERM3 6
1437 ACC2
1438 BRANCHIFNOT 1442
1440 BRANCH 1444
1442 RETURN 3
1444 GETGLOBAL "List.iter2"
1446 PUSHGETGLOBALFIELD Pervasives, 2
1449 APPTERM1 4
1451 RESTART
1452 GRAB 3
1454 ACC2
1455 BRANCHIFNOT 1476
1457 ACC3
1458 BRANCHIFNOT 1482
1460 ACC3
1461 GETFIELD1
1462 PUSHACC3
1463 GETFIELD1
1464 PUSHACC5
1465 GETFIELD0
1466 PUSHACC5
1467 GETFIELD0
1468 PUSHACC5
1469 PUSHACC5
1470 APPLY3
1471 PUSHACC3
1472 PUSHOFFSETCLOSURE0
1473 APPTERM 4, 8
1476 ACC3
1477 BRANCHIF 1482
1479 ACC1
1480 RETURN 4
1482 GETGLOBAL "List.fold_left2"
1484 PUSHGETGLOBALFIELD Pervasives, 2
1487 APPTERM1 5
1489 RESTART
1490 GRAB 3
1492 ACC1
1493 BRANCHIFNOT 1516
1495 ACC2
1496 BRANCHIFNOT 1522
1498 PUSH_RETADDR 1509
1500 ACC6
1501 PUSHACC6
1502 GETFIELD1
1503 PUSHACC6
1504 GETFIELD1
1505 PUSHACC6
1506 PUSHOFFSETCLOSURE0
1507 APPLY 4
1509 PUSHACC3
1510 GETFIELD0
1511 PUSHACC3
1512 GETFIELD0
1513 PUSHACC3
1514 APPTERM3 7
1516 ACC2
1517 BRANCHIF 1522
1519 ACC3
1520 RETURN 4
1522 GETGLOBAL "List.fold_right2"
1524 PUSHGETGLOBALFIELD Pervasives, 2
1527 APPTERM1 5
1529 RESTART
1530 GRAB 1
1532 ACC1
1533 BRANCHIFNOT 1549
1535 ACC1
1536 GETFIELD0
1537 PUSHACC1
1538 APPLY1
1539 BRANCHIFNOT 1547
1541 ACC1
1542 GETFIELD1
1543 PUSHACC1
1544 PUSHOFFSETCLOSURE0
1545 APPTERM2 4
1547 RETURN 2
1549 CONST1
1550 RETURN 2
1552 RESTART
1553 GRAB 1
1555 ACC1
1556 BRANCHIFNOT 1570
1558 ACC1
1559 GETFIELD0
1560 PUSHACC1
1561 APPLY1
1562 BRANCHIF 1570
1564 ACC1
1565 GETFIELD1
1566 PUSHACC1
1567 PUSHOFFSETCLOSURE0
1568 APPTERM2 4
1570 RETURN 2
1572 RESTART
1573 GRAB 2
1575 ACC1
1576 BRANCHIFNOT 1599
1578 ACC2
1579 BRANCHIFNOT 1605
1581 ACC2
1582 GETFIELD0
1583 PUSHACC2
1584 GETFIELD0
1585 PUSHACC2
1586 APPLY2
1587 BRANCHIFNOT 1597
1589 ACC2
1590 GETFIELD1
1591 PUSHACC2
1592 GETFIELD1
1593 PUSHACC2
1594 PUSHOFFSETCLOSURE0
1595 APPTERM3 6
1597 RETURN 3
1599 ACC2
1600 BRANCHIF 1605
1602 CONST1
1603 RETURN 3
1605 GETGLOBAL "List.for_all2"
1607 PUSHGETGLOBALFIELD Pervasives, 2
1610 APPTERM1 4
1612 RESTART
1613 GRAB 2
1615 ACC1
1616 BRANCHIFNOT 1639
1618 ACC2
1619 BRANCHIFNOT 1646
1621 ACC2
1622 GETFIELD0
1623 PUSHACC2
1624 GETFIELD0
1625 PUSHACC2
1626 APPLY2
1627 BRANCHIF 1637
1629 ACC2
1630 GETFIELD1
1631 PUSHACC2
1632 GETFIELD1
1633 PUSHACC2
1634 PUSHOFFSETCLOSURE0
1635 APPTERM3 6
1637 RETURN 3
1639 ACC2
1640 BRANCHIFNOT 1644
1642 BRANCH 1646
1644 RETURN 3
1646 GETGLOBAL "List.exists2"
1648 PUSHGETGLOBALFIELD Pervasives, 2
1651 APPTERM1 4
1653 RESTART
1654 GRAB 1
1656 ACC1
1657 BRANCHIFNOT 1672
1659 ACC0
1660 PUSHACC2
1661 GETFIELD0
1662 C_CALL2 equal
1664 BRANCHIF 1672
1666 ACC1
1667 GETFIELD1
1668 PUSHACC1
1669 PUSHOFFSETCLOSURE0
1670 APPTERM2 4
1672 RETURN 2
1674 RESTART
1675 GRAB 1
1677 ACC1
1678 BRANCHIFNOT 1692
1680 ACC0
1681 PUSHACC2
1682 GETFIELD0
1683 EQ
1684 BRANCHIF 1692
1686 ACC1
1687 GETFIELD1
1688 PUSHACC1
1689 PUSHOFFSETCLOSURE0
1690 APPTERM2 4
1692 RETURN 2
1694 RESTART
1695 GRAB 1
1697 ACC1
1698 BRANCHIFNOT 1719
1700 ACC1
1701 GETFIELD0
1702 PUSHACC1
1703 PUSHACC1
1704 GETFIELD0
1705 C_CALL2 equal
1707 BRANCHIFNOT 1713
1709 ACC0
1710 GETFIELD1
1711 RETURN 3
1713 ACC2
1714 GETFIELD1
1715 PUSHACC2
1716 PUSHOFFSETCLOSURE0
1717 APPTERM2 5
1719 GETGLOBAL Not_found
1721 MAKEBLOCK1 0
1723 RAISE
1724 RESTART
1725 GRAB 1
1727 ACC1
1728 BRANCHIFNOT 1748
1730 ACC1
1731 GETFIELD0
1732 PUSHACC1
1733 PUSHACC1
1734 GETFIELD0
1735 EQ
1736 BRANCHIFNOT 1742
1738 ACC0
1739 GETFIELD1
1740 RETURN 3
1742 ACC2
1743 GETFIELD1
1744 PUSHACC2
1745 PUSHOFFSETCLOSURE0
1746 APPTERM2 5
1748 GETGLOBAL Not_found
1750 MAKEBLOCK1 0
1752 RAISE
1753 RESTART
1754 GRAB 1
1756 ACC1
1757 BRANCHIFNOT 1773
1759 ACC0
1760 PUSHACC2
1761 GETFIELD0
1762 GETFIELD0
1763 C_CALL2 equal
1765 BRANCHIF 1773
1767 ACC1
1768 GETFIELD1
1769 PUSHACC1
1770 PUSHOFFSETCLOSURE0
1771 APPTERM2 4
1773 RETURN 2
1775 RESTART
1776 GRAB 1
1778 ACC1
1779 BRANCHIFNOT 1794
1781 ACC0
1782 PUSHACC2
1783 GETFIELD0
1784 GETFIELD0
1785 EQ
1786 BRANCHIF 1794
1788 ACC1
1789 GETFIELD1
1790 PUSHACC1
1791 PUSHOFFSETCLOSURE0
1792 APPTERM2 4
1794 RETURN 2
1796 RESTART
1797 GRAB 1
1799 ACC1
1800 BRANCHIFNOT 1825
1802 ACC1
1803 GETFIELD0
1804 PUSHACC2
1805 GETFIELD1
1806 PUSHACC2
1807 PUSHACC2
1808 GETFIELD0
1809 C_CALL2 equal
1811 BRANCHIFNOT 1816
1813 ACC0
1814 RETURN 4
1816 ACC0
1817 PUSHACC3
1818 PUSHOFFSETCLOSURE0
1819 APPLY2
1820 PUSHACC2
1821 MAKEBLOCK2 0
1823 POP 2
1825 RETURN 2
1827 RESTART
1828 GRAB 1
1830 ACC1
1831 BRANCHIFNOT 1855
1833 ACC1
1834 GETFIELD0
1835 PUSHACC2
1836 GETFIELD1
1837 PUSHACC2
1838 PUSHACC2
1839 GETFIELD0
1840 EQ
1841 BRANCHIFNOT 1846
1843 ACC0
1844 RETURN 4
1846 ACC0
1847 PUSHACC3
1848 PUSHOFFSETCLOSURE0
1849 APPLY2
1850 PUSHACC2
1851 MAKEBLOCK2 0
1853 POP 2
1855 RETURN 2
1857 RESTART
1858 GRAB 1
1860 ACC1
1861 BRANCHIFNOT 1879
1863 ACC1
1864 GETFIELD0
1865 PUSHACC0
1866 PUSHACC2
1867 APPLY1
1868 BRANCHIFNOT 1873
1870 ACC0
1871 RETURN 3
1873 ACC2
1874 GETFIELD1
1875 PUSHACC2
1876 PUSHOFFSETCLOSURE0
1877 APPTERM2 5
1879 GETGLOBAL Not_found
1881 MAKEBLOCK1 0
1883 RAISE
1884 RESTART
1885 GRAB 2
1887 ACC2
1888 BRANCHIFNOT 1917
1890 ACC2
1891 GETFIELD0
1892 PUSHACC3
1893 GETFIELD1
1894 PUSHACC1
1895 PUSHENVACC2
1896 APPLY1
1897 BRANCHIFNOT 1908
1899 ACC0
1900 PUSHACC4
1901 PUSHACC4
1902 PUSHACC4
1903 MAKEBLOCK2 0
1905 PUSHOFFSETCLOSURE0
1906 APPTERM3 8
1908 ACC0
1909 PUSHACC4
1910 PUSHACC3
1911 MAKEBLOCK2 0
1913 PUSHACC4
1914 PUSHOFFSETCLOSURE0
1915 APPTERM3 8
1917 ACC1
1918 PUSHENVACC1
1919 APPLY1
1920 PUSHACC1
1921 PUSHENVACC1
1922 APPLY1
1923 MAKEBLOCK2 0
1925 RETURN 3
1927 RESTART
1928 GRAB 1
1930 ACC0
1931 PUSHENVACC1
1932 CLOSUREREC 2, 1885
1936 ACC2
1937 PUSHCONST0
1938 PUSHCONST0
1939 PUSHACC3
1940 APPTERM3 6
1942 ACC0
1943 BRANCHIFNOT 1967
1945 ACC0
1946 GETFIELD0
1947 PUSHACC1
1948 GETFIELD1
1949 PUSHOFFSETCLOSURE0
1950 APPLY1
1951 PUSHACC0
1952 GETFIELD1
1953 PUSHACC2
1954 GETFIELD1
1955 MAKEBLOCK2 0
1957 PUSHACC1
1958 GETFIELD0
1959 PUSHACC3
1960 GETFIELD0
1961 MAKEBLOCK2 0
1963 MAKEBLOCK2 0
1965 RETURN 3
1967 GETGLOBAL <0>(0, 0)
1969 RETURN 1
1971 RESTART
1972 GRAB 1
1974 ACC0
1975 BRANCHIFNOT 1996
1977 ACC1
1978 BRANCHIFNOT 2003
1980 ACC1
1981 GETFIELD1
1982 PUSHACC1
1983 GETFIELD1
1984 PUSHOFFSETCLOSURE0
1985 APPLY2
1986 PUSHACC2
1987 GETFIELD0
1988 PUSHACC2
1989 GETFIELD0
1990 MAKEBLOCK2 0
1992 MAKEBLOCK2 0
1994 RETURN 2
1996 ACC1
1997 BRANCHIFNOT 2001
1999 BRANCH 2003
2001 RETURN 2
2003 GETGLOBAL "List.combine"
2005 PUSHGETGLOBALFIELD Pervasives, 2
2008 APPTERM1 3
2010 RESTART
2011 GRAB 1
2013 ACC1
2014 BRANCHIFNOT 2038
2016 ACC1
2017 GETFIELD0
2018 PUSHACC2
2019 GETFIELD1
2020 PUSHACC1
2021 PUSHENVACC2
2022 APPLY1
2023 BRANCHIFNOT 2033
2025 ACC0
2026 PUSHACC3
2027 PUSHACC3
2028 MAKEBLOCK2 0
2030 PUSHOFFSETCLOSURE0
2031 APPTERM2 6
2033 ACC0
2034 PUSHACC3
2035 PUSHOFFSETCLOSURE0
2036 APPTERM2 6
2038 ACC0
2039 PUSHENVACC1
2040 APPTERM1 3
2042 ACC0
2043 PUSHENVACC1
2044 CLOSUREREC 2, 2011
2048 CONST0
2049 PUSHACC1
2050 APPTERM1 3
2052 RESTART
2053 GRAB 2
2055 ACC1
2056 BRANCHIFNOT 2077
2058 ACC2
2059 BRANCHIFNOT 2084
2061 ACC2
2062 GETFIELD1
2063 PUSHACC2
2064 GETFIELD1
2065 PUSHACC2
2066 PUSHACC5
2067 GETFIELD0
2068 PUSHACC5
2069 GETFIELD0
2070 PUSHENVACC1
2071 APPLY2
2072 MAKEBLOCK2 0
2074 PUSHOFFSETCLOSURE0
2075 APPTERM3 6
2077 ACC2
2078 BRANCHIFNOT 2082
2080 BRANCH 2084
2082 RETURN 3
2084 GETGLOBAL "List.rev_map2"
2086 PUSHGETGLOBALFIELD Pervasives, 2
2089 APPTERM1 4
2091 RESTART
2092 GRAB 2
2094 ACC0
2095 CLOSUREREC 1, 2053
2099 ACC3
2100 PUSHACC3
2101 PUSHCONST0
2102 PUSHACC3
2103 APPTERM3 7
2105 RESTART
2106 GRAB 1
2108 ACC1
2109 BRANCHIFNOT 2123
2111 ACC1
2112 GETFIELD1
2113 PUSHACC1
2114 PUSHACC3
2115 GETFIELD0
2116 PUSHENVACC1
2117 APPLY1
2118 MAKEBLOCK2 0
2120 PUSHOFFSETCLOSURE0
2121 APPTERM2 4
2123 ACC0
2124 RETURN 2
2126 RESTART
2127 GRAB 1
2129 ACC0
2130 CLOSUREREC 1, 2106
2134 ACC2
2135 PUSHCONST0
2136 PUSHACC2
2137 APPTERM2 5
2139 CONST0
2140 PUSHACC1
2141 PUSHENVACC1
2142 APPTERM2 3
2144 ACC0
2145 BRANCHIFNOT 2151
2147 ACC0
2148 GETFIELD1
2149 RETURN 1
2151 GETGLOBAL "tl"
2153 PUSHGETGLOBALFIELD Pervasives, 3
2156 APPTERM1 2
2158 ACC0
2159 BRANCHIFNOT 2165
2161 ACC0
2162 GETFIELD0
2163 RETURN 1
2165 GETGLOBAL "hd"
2167 PUSHGETGLOBALFIELD Pervasives, 3
2170 APPTERM1 2
2172 ACC0
2173 PUSHCONST0
2174 PUSHENVACC1
2175 APPTERM2 3
2177 CLOSUREREC 0, 1200
2181 ACC0
2182 CLOSURE 1, 2172
2185 PUSH
2186 CLOSURE 0, 2158
2189 PUSH
2190 CLOSURE 0, 2144
2193 PUSH
2194 CLOSUREREC 0, 1217
2198 GETGLOBALFIELD Pervasives, 16
2201 PUSH
2202 CLOSUREREC 0, 1259
2206 ACC0
2207 CLOSURE 1, 2139
2210 PUSH
2211 CLOSUREREC 0, 1277
2215 CLOSUREREC 0, 1294
2219 CLOSURE 0, 2127
2222 PUSH
2223 CLOSUREREC 0, 1316
2227 CLOSUREREC 0, 1334
2231 CLOSUREREC 0, 1354
2235 CLOSUREREC 0, 1374
2239 CLOSURE 0, 2092
2242 PUSH
2243 CLOSUREREC 0, 1415
2247 CLOSUREREC 0, 1452
2251 CLOSUREREC 0, 1490
2255 CLOSUREREC 0, 1530
2259 CLOSUREREC 0, 1553
2263 CLOSUREREC 0, 1573
2267 CLOSUREREC 0, 1613
2271 CLOSUREREC 0, 1654
2275 CLOSUREREC 0, 1675
2279 CLOSUREREC 0, 1695
2283 CLOSUREREC 0, 1725
2287 CLOSUREREC 0, 1754
2291 CLOSUREREC 0, 1776
2295 CLOSUREREC 0, 1797
2299 CLOSUREREC 0, 1828
2303 CLOSUREREC 0, 1858
2307 ACC 24
2309 CLOSURE 1, 2042
2312 PUSHACC 25
2314 CLOSUREREC 1, 1928
2318 CLOSUREREC 0, 1942
2322 CLOSUREREC 0, 1972
2326 ACC0
2327 PUSHACC2
2328 PUSHACC7
2329 PUSHACC 9
2331 PUSHACC 11
2333 PUSHACC 13
2335 PUSHACC 15
2337 PUSHACC 17
2339 PUSHACC 10
2341 PUSHACC 12
2343 PUSHACC 13
2345 PUSHACC 15
2347 PUSHACC 23
2349 PUSHACC 25
2351 PUSHACC 27
2353 PUSHACC 29
2355 PUSHACC 31
2357 PUSHACC 33
2359 PUSHACC 35
2361 PUSHACC 37
2363 PUSHACC 40
2365 PUSHACC 42
2367 PUSHACC 41
2369 PUSHACC 45
2371 PUSHACC 47
2373 PUSHACC 50
2375 PUSHACC 52
2377 PUSHACC 51
2379 PUSHACC 55
2381 PUSHACC 56
2383 PUSHACC 59
2385 PUSHACC 61
2387 PUSHACC 60
2389 PUSHACC 64
2391 PUSHACC 66
2393 PUSHACC 68
2395 PUSHACC 70
2397 MAKEBLOCK 37, 0
2400 POP 36
2402 SETGLOBAL List
2404 BRANCH 3341
2406 RESTART
2407 GRAB 2
2409 ACC2
2410 PUSHACC2
2411 VECTLENGTH
2412 OFFSETINT -1
2414 PUSHCONST0
2415 PUSH
2416 BRANCH 2433
2418 CHECK_SIGNALS
2419 ACC2
2420 PUSHACC2
2421 PUSHACC6
2422 C_CALL2 array_unsafe_get
2424 PUSHACC5
2425 APPLY2
2426 ASSIGN 2
2428 ACC1
2429 OFFSETINT -1
2431 ASSIGN 1
2433 ACC0
2434 PUSHACC2
2435 GEINT
2436 BRANCHIF 2418
2438 CONST0
2439 POP 2
2441 ACC0
2442 RETURN 4
2444 RESTART
2445 GRAB 2
2447 ACC1
2448 PUSHCONST0
2449 PUSHACC4
2450 VECTLENGTH
2451 OFFSETINT -1
2453 PUSH
2454 BRANCH 2471
2456 CHECK_SIGNALS
2457 ACC1
2458 PUSHACC6
2459 C_CALL2 array_unsafe_get
2461 PUSHACC3
2462 PUSHACC5
2463 APPLY2
2464 ASSIGN 2
2466 ACC1
2467 OFFSETINT 1
2469 ASSIGN 1
2471 ACC0
2472 PUSHACC2
2473 LEINT
2474 BRANCHIF 2456
2476 CONST0
2477 POP 2
2479 ACC0
2480 RETURN 4
2482 RESTART
2483 GRAB 1
2485 ACC1
2486 BRANCHIFNOT 2502
2488 ACC1
2489 GETFIELD0
2490 PUSHACC1
2491 PUSHENVACC1
2492 C_CALL3 array_unsafe_set
2494 ACC1
2495 GETFIELD1
2496 PUSHACC1
2497 OFFSETINT 1
2499 PUSHOFFSETCLOSURE0
2500 APPTERM2 4
2502 ENVACC1
2503 RETURN 2
2505 ACC0
2506 BRANCHIFNOT 2531
2508 ACC0
2509 GETFIELD1
2510 PUSHACC1
2511 GETFIELD0
2512 PUSHACC1
2513 PUSHGETGLOBALFIELD List, 0
2516 APPLY1
2517 OFFSETINT 1
2519 C_CALL2 make_vect
2521 PUSHACC0
2522 CLOSUREREC 1, 2483
2526 ACC2
2527 PUSHCONST1
2528 PUSHACC2
2529 APPTERM2 6
2531 ATOM0
2532 RETURN 1
2534 RESTART
2535 GRAB 1
2537 CONST0
2538 PUSHACC1
2539 LTINT
2540 BRANCHIFNOT 2545
2542 ACC1
2543 RETURN 2
2545 ACC1
2546 PUSHACC1
2547 PUSHENVACC1
2548 C_CALL2 array_unsafe_get
2550 MAKEBLOCK2 0
2552 PUSHACC1
2553 OFFSETINT -1
2555 PUSHOFFSETCLOSURE0
2556 APPTERM2 4
2558 ACC0
2559 CLOSUREREC 1, 2535
2563 CONST0
2564 PUSHACC2
2565 VECTLENGTH
2566 OFFSETINT -1
2568 PUSHACC2
2569 APPTERM2 4
2571 RESTART
2572 GRAB 1
2574 ACC1
2575 VECTLENGTH
2576 PUSHCONST0
2577 PUSHACC1
2578 EQ
2579 BRANCHIFNOT 2584
2581 ATOM0
2582 RETURN 3
2584 CONST0
2585 PUSHACC3
2586 C_CALL2 array_unsafe_get
2588 PUSHCONST0
2589 PUSHACC3
2590 APPLY2
2591 PUSHACC1
2592 C_CALL2 make_vect
2594 PUSHCONST1
2595 PUSHACC2
2596 OFFSETINT -1
2598 PUSH
2599 BRANCH 2618
2601 CHECK_SIGNALS
2602 ACC1
2603 PUSHACC6
2604 C_CALL2 array_unsafe_get
2606 PUSHACC2
2607 PUSHACC6
2608 APPLY2
2609 PUSHACC2
2610 PUSHACC4
2611 C_CALL3 array_unsafe_set
2613 ACC1
2614 OFFSETINT 1
2616 ASSIGN 1
2618 ACC0
2619 PUSHACC2
2620 LEINT
2621 BRANCHIF 2601
2623 CONST0
2624 POP 2
2626 ACC0
2627 RETURN 4
2629 RESTART
2630 GRAB 1
2632 CONST0
2633 PUSHACC2
2634 VECTLENGTH
2635 OFFSETINT -1
2637 PUSH
2638 BRANCH 2653
2640 CHECK_SIGNALS
2641 ACC1
2642 PUSHACC4
2643 C_CALL2 array_unsafe_get
2645 PUSHACC2
2646 PUSHACC4
2647 APPLY2
2648 ACC1
2649 OFFSETINT 1
2651 ASSIGN 1
2653 ACC0
2654 PUSHACC2
2655 LEINT
2656 BRANCHIF 2640
2658 CONST0
2659 RETURN 4
2661 RESTART
2662 GRAB 1
2664 ACC1
2665 VECTLENGTH
2666 PUSHCONST0
2667 PUSHACC1
2668 EQ
2669 BRANCHIFNOT 2674
2671 ATOM0
2672 RETURN 3
2674 CONST0
2675 PUSHACC3
2676 C_CALL2 array_unsafe_get
2678 PUSHACC2
2679 APPLY1
2680 PUSHACC1
2681 C_CALL2 make_vect
2683 PUSHCONST1
2684 PUSHACC2
2685 OFFSETINT -1
2687 PUSH
2688 BRANCH 2706
2690 CHECK_SIGNALS
2691 ACC1
2692 PUSHACC6
2693 C_CALL2 array_unsafe_get
2695 PUSHACC5
2696 APPLY1
2697 PUSHACC2
2698 PUSHACC4
2699 C_CALL3 array_unsafe_set
2701 ACC1
2702 OFFSETINT 1
2704 ASSIGN 1
2706 ACC0
2707 PUSHACC2
2708 LEINT
2709 BRANCHIF 2690
2711 CONST0
2712 POP 2
2714 ACC0
2715 RETURN 4
2717 RESTART
2718 GRAB 1
2720 CONST0
2721 PUSHACC2
2722 VECTLENGTH
2723 OFFSETINT -1
2725 PUSH
2726 BRANCH 2740
2728 CHECK_SIGNALS
2729 ACC1
2730 PUSHACC4
2731 C_CALL2 array_unsafe_get
2733 PUSHACC3
2734 APPLY1
2735 ACC1
2736 OFFSETINT 1
2738 ASSIGN 1
2740 ACC0
2741 PUSHACC2
2742 LEINT
2743 BRANCHIF 2728
2745 CONST0
2746 RETURN 4
2748 RESTART
2749 GRAB 4
2751 CONST0
2752 PUSHACC5
2753 LTINT
2754 BRANCHIF 2782
2756 CONST0
2757 PUSHACC2
2758 LTINT
2759 BRANCHIF 2782
2761 ACC0
2762 VECTLENGTH
2763 PUSHACC5
2764 PUSHACC3
2765 ADDINT
2766 GTINT
2767 BRANCHIF 2782
2769 CONST0
2770 PUSHACC4
2771 LTINT
2772 BRANCHIF 2782
2774 ACC2
2775 VECTLENGTH
2776 PUSHACC5
2777 PUSHACC5
2778 ADDINT
2779 GTINT
2780 BRANCHIFNOT 2789
2782 GETGLOBAL "Array.blit"
2784 PUSHGETGLOBALFIELD Pervasives, 2
2787 APPTERM1 6
2789 ACC3
2790 PUSHACC2
2791 LTINT
2792 BRANCHIFNOT 2827
2794 ACC4
2795 OFFSETINT -1
2797 PUSHCONST0
2798 PUSH
2799 BRANCH 2819
2801 CHECK_SIGNALS
2802 ACC1
2803 PUSHACC4
2804 ADDINT
2805 PUSHACC3
2806 C_CALL2 array_unsafe_get
2808 PUSHACC2
2809 PUSHACC7
2810 ADDINT
2811 PUSHACC6
2812 C_CALL3 array_unsafe_set
2814 ACC1
2815 OFFSETINT -1
2817 ASSIGN 1
2819 ACC0
2820 PUSHACC2
2821 GEINT
2822 BRANCHIF 2801
2824 CONST0
2825 RETURN 7
2827 CONST0
2828 PUSHACC5
2829 OFFSETINT -1
2831 PUSH
2832 BRANCH 2852
2834 CHECK_SIGNALS
2835 ACC1
2836 PUSHACC4
2837 ADDINT
2838 PUSHACC3
2839 C_CALL2 array_unsafe_get
2841 PUSHACC2
2842 PUSHACC7
2843 ADDINT
2844 PUSHACC6
2845 C_CALL3 array_unsafe_set
2847 ACC1
2848 OFFSETINT 1
2850 ASSIGN 1
2852 ACC0
2853 PUSHACC2
2854 LEINT
2855 BRANCHIF 2834
2857 CONST0
2858 RETURN 7
2860 RESTART
2861 GRAB 3
2863 CONST0
2864 PUSHACC2
2865 LTINT
2866 BRANCHIF 2881
2868 CONST0
2869 PUSHACC3
2870 LTINT
2871 BRANCHIF 2881
2873 ACC0
2874 VECTLENGTH
2875 PUSHACC3
2876 PUSHACC3
2877 ADDINT
2878 GTINT
2879 BRANCHIFNOT 2888
2881 GETGLOBAL "Array.fill"
2883 PUSHGETGLOBALFIELD Pervasives, 2
2886 APPTERM1 5
2888 ACC1
2889 PUSHACC3
2890 PUSHACC3
2891 ADDINT
2892 OFFSETINT -1
2894 PUSH
2895 BRANCH 2908
2897 CHECK_SIGNALS
2898 ACC5
2899 PUSHACC2
2900 PUSHACC4
2901 C_CALL3 array_unsafe_set
2903 ACC1
2904 OFFSETINT 1
2906 ASSIGN 1
2908 ACC0
2909 PUSHACC2
2910 LEINT
2911 BRANCHIF 2897
2913 CONST0
2914 RETURN 6
2916 RESTART
2917 GRAB 2
2919 CONST0
2920 PUSHACC2
2921 LTINT
2922 BRANCHIF 2937
2924 CONST0
2925 PUSHACC3
2926 LTINT
2927 BRANCHIF 2937
2929 ACC0
2930 VECTLENGTH
2931 PUSHACC3
2932 PUSHACC3
2933 ADDINT
2934 GTINT
2935 BRANCHIFNOT 2944
2937 GETGLOBAL "Array.sub"
2939 PUSHGETGLOBALFIELD Pervasives, 2
2942 APPTERM1 4
2944 CONST0
2945 PUSHACC3
2946 EQ
2947 BRANCHIFNOT 2952
2949 ATOM0
2950 RETURN 3
2952 ACC1
2953 PUSHACC1
2954 C_CALL2 array_unsafe_get
2956 PUSHACC3
2957 C_CALL2 make_vect
2959 PUSHCONST1
2960 PUSHACC4
2961 OFFSETINT -1
2963 PUSH
2964 BRANCH 2982
2966 CHECK_SIGNALS
2967 ACC1
2968 PUSHACC5
2969 ADDINT
2970 PUSHACC4
2971 C_CALL2 array_unsafe_get
2973 PUSHACC2
2974 PUSHACC4
2975 C_CALL3 array_unsafe_set
2977 ACC1
2978 OFFSETINT 1
2980 ASSIGN 1
2982 ACC0
2983 PUSHACC2
2984 LEINT
2985 BRANCHIF 2966
2987 CONST0
2988 POP 2
2990 ACC0
2991 RETURN 4
2993 ACC0
2994 BRANCHIFNOT 3017
2996 ACC0
2997 GETFIELD0
2998 PUSHCONST0
2999 PUSHACC1
3000 VECTLENGTH
3001 GTINT
3002 BRANCHIFNOT 3012
3004 ENVACC2
3005 PUSHCONST0
3006 PUSHACC2
3007 C_CALL2 array_unsafe_get
3009 PUSHENVACC1
3010 APPTERM2 4
3012 ACC1
3013 GETFIELD1
3014 PUSHOFFSETCLOSURE0
3015 APPTERM1 3
3017 ATOM0
3018 RETURN 1
3020 ACC0
3021 PUSHENVACC1
3022 CLOSUREREC 2, 2993
3026 ACC1
3027 PUSHACC1
3028 APPTERM1 3
3030 CONST0
3031 PUSHACC1
3032 VECTLENGTH
3033 OFFSETINT -1
3035 PUSH
3036 BRANCH 3056
3038 CHECK_SIGNALS
3039 ACC1
3040 PUSHACC3
3041 C_CALL2 array_unsafe_get
3043 PUSHENVACC2
3044 GETFIELD0
3045 PUSHENVACC1
3046 C_CALL3 array_unsafe_set
3048 ENVACC2
3049 OFFSETREF 1
3051 ACC1
3052 OFFSETINT 1
3054 ASSIGN 1
3056 ACC0
3057 PUSHACC2
3058 LEINT
3059 BRANCHIF 3038
3061 CONST0
3062 RETURN 3
3064 RESTART
3065 GRAB 1
3067 ACC1
3068 VECTLENGTH
3069 PUSHACC1
3070 ADDINT
3071 RETURN 2
3073 RESTART
3074 GRAB 1
3076 ACC1
3077 PUSHCONST0
3078 PUSH
3079 CLOSURE 0, 3065
3082 PUSHGETGLOBALFIELD List, 12
3085 APPLY3
3086 PUSHACC1
3087 PUSHACC1
3088 C_CALL2 make_vect
3090 PUSHCONST0
3091 MAKEBLOCK1 0
3093 PUSHACC4
3094 PUSHACC1
3095 PUSHACC3
3096 CLOSURE 2, 3030
3099 PUSHGETGLOBALFIELD List, 9
3102 APPLY2
3103 ACC1
3104 RETURN 5
3106 RESTART
3107 GRAB 1
3109 ACC0
3110 VECTLENGTH
3111 PUSHACC2
3112 VECTLENGTH
3113 PUSHCONST0
3114 PUSHACC2
3115 EQ
3116 BRANCHIFNOT 3126
3118 CONST0
3119 PUSHACC1
3120 EQ
3121 BRANCHIFNOT 3126
3123 ATOM0
3124 RETURN 4
3126 CONST0
3127 PUSHCONST0
3128 PUSHACC3
3129 GTINT
3130 BRANCHIFNOT 3135
3132 ACC3
3133 BRANCH 3136
3135 ACC4
3136 C_CALL2 array_unsafe_get
3138 PUSHACC1
3139 PUSHACC3
3140 ADDINT
3141 C_CALL2 make_vect
3143 PUSHCONST0
3144 PUSHACC3
3145 OFFSETINT -1
3147 PUSH
3148 BRANCH 3164
3150 CHECK_SIGNALS
3151 ACC1
3152 PUSHACC6
3153 C_CALL2 array_unsafe_get
3155 PUSHACC2
3156 PUSHACC4
3157 C_CALL3 array_unsafe_set
3159 ACC1
3160 OFFSETINT 1
3162 ASSIGN 1
3164 ACC0
3165 PUSHACC2
3166 LEINT
3167 BRANCHIF 3150
3169 CONST0
3170 POP 2
3172 CONST0
3173 PUSHACC2
3174 OFFSETINT -1
3176 PUSH
3177 BRANCH 3195
3179 CHECK_SIGNALS
3180 ACC1
3181 PUSHACC7
3182 C_CALL2 array_unsafe_get
3184 PUSHACC5
3185 PUSHACC3
3186 ADDINT
3187 PUSHACC4
3188 C_CALL3 array_unsafe_set
3190 ACC1
3191 OFFSETINT 1
3193 ASSIGN 1
3195 ACC0
3196 PUSHACC2
3197 LEINT
3198 BRANCHIF 3179
3200 CONST0
3201 POP 2
3203 ACC0
3204 RETURN 5
3206 ACC0
3207 VECTLENGTH
3208 PUSHCONST0
3209 PUSHACC1
3210 EQ
3211 BRANCHIFNOT 3216
3213 ATOM0
3214 RETURN 2
3216 CONST0
3217 PUSHACC2
3218 C_CALL2 array_unsafe_get
3220 PUSHACC1
3221 C_CALL2 make_vect
3223 PUSHCONST1
3224 PUSHACC2
3225 OFFSETINT -1
3227 PUSH
3228 BRANCH 3244
3230 CHECK_SIGNALS
3231 ACC1
3232 PUSHACC5
3233 C_CALL2 array_unsafe_get
3235 PUSHACC2
3236 PUSHACC4
3237 C_CALL3 array_unsafe_set
3239 ACC1
3240 OFFSETINT 1
3242 ASSIGN 1
3244 ACC0
3245 PUSHACC2
3246 LEINT
3247 BRANCHIF 3230
3249 CONST0
3250 POP 2
3252 ACC0
3253 RETURN 3
3255 RESTART
3256 GRAB 2
3258 ATOM0
3259 PUSHACC1
3260 C_CALL2 make_vect
3262 PUSHCONST0
3263 PUSHACC2
3264 OFFSETINT -1
3266 PUSH
3267 BRANCH 3282
3269 CHECK_SIGNALS
3270 ACC5
3271 PUSHACC5
3272 C_CALL2 make_vect
3274 PUSHACC2
3275 PUSHACC4
3276 SETVECTITEM
3277 ACC1
3278 OFFSETINT 1
3280 ASSIGN 1
3282 ACC0
3283 PUSHACC2
3284 LEINT
3285 BRANCHIF 3269
3287 CONST0
3288 POP 2
3290 ACC0
3291 RETURN 4
3293 RESTART
3294 GRAB 1
3296 CONST0
3297 PUSHACC1
3298 EQ
3299 BRANCHIFNOT 3304
3301 ATOM0
3302 RETURN 2
3304 CONST0
3305 PUSHACC2
3306 APPLY1
3307 PUSHACC1
3308 C_CALL2 make_vect
3310 PUSHCONST1
3311 PUSHACC2
3312 OFFSETINT -1
3314 PUSH
3315 BRANCH 3330
3317 CHECK_SIGNALS
3318 ACC1
3319 PUSHACC5
3320 APPLY1
3321 PUSHACC2
3322 PUSHACC4
3323 C_CALL3 array_unsafe_set
3325 ACC1
3326 OFFSETINT 1
3328 ASSIGN 1
3330 ACC0
3331 PUSHACC2
3332 LEINT
3333 BRANCHIF 3317
3335 CONST0
3336 POP 2
3338 ACC0
3339 RETURN 3
3341 CLOSURE 0, 3294
3344 PUSH
3345 CLOSURE 0, 3256
3348 PUSH
3349 CLOSURE 0, 3206
3352 PUSH
3353 CLOSURE 0, 3107
3356 PUSH
3357 CLOSURE 0, 3074
3360 PUSHACC0
3361 CLOSURE 1, 3020
3364 PUSH
3365 CLOSURE 0, 2917
3368 PUSH
3369 CLOSURE 0, 2861
3372 PUSH
3373 CLOSURE 0, 2749
3376 PUSH
3377 CLOSURE 0, 2718
3380 PUSH
3381 CLOSURE 0, 2662
3384 PUSH
3385 CLOSURE 0, 2630
3388 PUSH
3389 CLOSURE 0, 2572
3392 PUSH
3393 CLOSURE 0, 2558
3396 PUSH
3397 CLOSURE 0, 2505
3400 PUSH
3401 CLOSURE 0, 2445
3404 PUSH
3405 CLOSURE 0, 2407
3408 PUSHACC0
3409 PUSHACC2
3410 PUSHACC6
3411 PUSHACC 8
3413 PUSHACC 10
3415 PUSHACC 12
3417 PUSHACC 8
3419 PUSHACC 10
3421 PUSHACC 16
3423 PUSHACC 18
3425 PUSHACC 24
3427 PUSHACC 21
3429 PUSHACC 23
3431 PUSHACC 26
3433 PUSHACC 29
3435 PUSHACC 30
3437 PUSHACC 32
3439 MAKEBLOCK 17, 0
3442 POP 17
3444 SETGLOBAL Array
3446 BRANCH 3456
3448 ACC0
3449 PUSHENVACC1
3450 GETFIELD0
3451 ADDINT
3452 PUSHENVACC1
3453 SETFIELD0
3454 RETURN 1
3456 CONST2
3457 PUSHCONSTINT 200000
3459 C_CALL2 make_vect
3461 PUSHCONST0
3462 MAKEBLOCK1 0
3464 PUSHACC1
3465 PUSHACC1
3466 CLOSURE 1, 3448
3469 PUSHGETGLOBALFIELD Array, 11
3472 APPLY2
3473 CONSTINT 400000
3475 PUSHACC1
3476 GETFIELD0
3477 NEQ
3478 BRANCHIFNOT 3485
3480 GETGLOBAL Not_found
3482 MAKEBLOCK1 0
3484 RAISE
3485 POP 2
3487 ATOM0
3488 SETGLOBAL T310-alloc-2
3490 STOP
**)
| |
1cf9b2d829748b54394c0f5b339e6045062e4361a4e47497b0a749dd7a5946c7 | avsm/platform | element.ml | TyXML
*
* Copyright ( C ) 2016
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , with linking exception ;
* either version 2.1 of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 51 Franklin Street , Suite 500 , Boston , MA 02111 - 1307 , USA .
*
* Copyright (C) 2016 Anton Bachin
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02111-1307, USA.
*)
(* [parse ~loc ~parent_lang ~name ~attributes children] builds the OCaml
   expression for one markup element encountered by the ppx.
   [name] is the qualified markup name [(ns, name)]; [parent_lang] is the
   language (Html or Svg) of the enclosing element. *)
let parse
    ~loc ~parent_lang
    ~name:((ns, name) as element_name) ~attributes children =
  (* Turn the raw attribute list into labelled arguments. *)
  let attributes = Attributes.parse loc element_name attributes in
  (* Resolve the namespace prefix to a target language and its reflected
     element tables. *)
  let lang, (module Reflected) = Namespace.reflect loc ns in
  (* An svg element nested inside html keeps using the Html combinators;
     the converse nesting is rejected. *)
  let lang = match parent_lang, lang with
    | Common.Html, Svg -> Common.Html
    | Html, Html | Svg, Svg -> lang
    | Svg, Html ->
      Common.error loc
        "Nesting of Html element inside svg element is not supported"
  in
  (* Map the markup name onto the TyXML combinator name, honouring
     explicit renamings first. *)
  let name =
    try List.assoc name Reflected.renamed_elements
    with Not_found -> Tyxml_name.ident name
  in
  let element_function = Common.make ~loc lang name in
  (* Every known element has an assembler that checks/arranges its
     children; an unknown element is a compile-time error. *)
  let assembler =
    try List.assoc name Reflected.element_assemblers
    with Not_found ->
      Common.error loc "Unknown %s element %s" (Common.lang lang) name
  in
  let children = assembler ~lang ~loc ~name children in
  Ast_helper.Exp.apply ~loc element_function (attributes @ children)
(* [comment ~loc ~lang s] builds the expression [tot (Xml.comment s)],
   i.e. a markup comment node in the target language [lang]. *)
let comment ~loc ~lang s =
  let tot = Common.make ~loc lang "tot" in
  let comment = Common.make ~loc lang "Xml.comment" in
  let s = Common.string loc s in
  (* Using metaquot here avoids fiddling with labels. *)
  [%expr [%e tot] ([%e comment] [%e s])][@metaloc loc]
| null | https://raw.githubusercontent.com/avsm/platform/b254e3c6b60f3c0c09dfdcde92eb1abdc267fa1c/duniverse/tyxml.4.3.0/ppx/element.ml | ocaml | Using metaquot here avoids fiddling with labels. | TyXML
*
* Copyright ( C ) 2016
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , with linking exception ;
* either version 2.1 of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 51 Franklin Street , Suite 500 , Boston , MA 02111 - 1307 , USA .
*
* Copyright (C) 2016 Anton Bachin
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02111-1307, USA.
*)
let parse
~loc ~parent_lang
~name:((ns, name) as element_name) ~attributes children =
let attributes = Attributes.parse loc element_name attributes in
let lang, (module Reflected) = Namespace.reflect loc ns in
let lang = match parent_lang, lang with
| Common.Html, Svg -> Common.Html
| Html, Html | Svg, Svg -> lang
| Svg, Html ->
Common.error loc
"Nesting of Html element inside svg element is not supported"
in
let name =
try List.assoc name Reflected.renamed_elements
with Not_found -> Tyxml_name.ident name
in
let element_function = Common.make ~loc lang name in
let assembler =
try List.assoc name Reflected.element_assemblers
with Not_found ->
Common.error loc "Unknown %s element %s" (Common.lang lang) name
in
let children = assembler ~lang ~loc ~name children in
Ast_helper.Exp.apply ~loc element_function (attributes @ children)
let comment ~loc ~lang s =
let tot = Common.make ~loc lang "tot" in
let comment = Common.make ~loc lang "Xml.comment" in
let s = Common.string loc s in
[%expr [%e tot] ([%e comment] [%e s])][@metaloc loc]
|
2e33c884a917901a4fc5ba9c7d8dcdac1b6701a9ba042efe4df7b720926e6f74 | ucsd-progsys/dsolve | heap.mli | (**************************************************************************)
(* *)
Copyright ( C )
(* *)
(* This software is free software; you can redistribute it and/or *)
modify it under the terms of the GNU Library General Public
License version 2 , with the special exception on linking
(* described in file LICENSE. *)
(* *)
(* This software is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *)
(* *)
(**************************************************************************)
(* Heaps *)
(* Element ordering required by both heap implementations; [compare]
   follows the usual convention (negative / zero / positive). *)
module type Ordered = sig
  type t
  val compare : t -> t -> int
end

(* Raised by [maximum], [remove] and [pop_maximum] on an empty heap. *)
exception EmptyHeap

(*S Imperative implementation. *)

module Imperative(X: Ordered) : sig

  (* Type of imperative heaps.
     (In the following [n] refers to the number of elements in the heap) *)
  type t

  (* [create c] creates a new heap, with initial capacity of [c] *)
  val create : int -> t

  (* [is_empty h] checks the emptiness of [h] *)
  val is_empty : t -> bool

  (* [add x h] adds a new element [x] in heap [h]; size of [h] is doubled
     when maximum capacity is reached; complexity $O(log(n))$ *)
  val add : t -> X.t -> unit

  (* [maximum h] returns the maximum element of [h]; raises [EmptyHeap]
     when [h] is empty; complexity $O(1)$ *)
  val maximum : t -> X.t

  (* [remove h] removes the maximum element of [h]; raises [EmptyHeap]
     when [h] is empty; complexity $O(log(n))$ *)
  val remove : t -> unit

  (* [pop_maximum h] removes the maximum element of [h] and returns it;
     raises [EmptyHeap] when [h] is empty; complexity $O(log(n))$ *)
  val pop_maximum : t -> X.t

  (* usual iterators and combinators; elements are presented in
     arbitrary order *)
  val iter : (X.t -> unit) -> t -> unit
  val fold : (X.t -> 'a -> 'a) -> t -> 'a -> 'a

end

(*S Functional implementation. *)

module type FunctionalSig = sig

  (* heap elements *)
  type elt

  (* Type of functional heaps *)
  type t

  (* The empty heap *)
  val empty : t

  (* [add x h] returns a new heap containing the elements of [h], plus [x];
     complexity $O(log(n))$ *)
  val add : elt -> t -> t

  (* [maximum h] returns the maximum element of [h]; raises [EmptyHeap]
     when [h] is empty; complexity $O(1)$ *)
  val maximum : t -> elt

  (* [remove h] returns a new heap containing the elements of [h], except
     the maximum of [h]; raises [EmptyHeap] when [h] is empty;
     complexity $O(log(n))$ *)
  val remove : t -> t

  (* usual iterators and combinators; elements are presented in
     arbitrary order *)
  val iter : (elt -> unit) -> t -> unit
  val fold : (elt -> 'a -> 'a) -> t -> 'a -> 'a

end

module Functional(X: Ordered) : FunctionalSig with type elt = X.t
| null | https://raw.githubusercontent.com/ucsd-progsys/dsolve/bfbbb8ed9bbf352d74561e9f9127ab07b7882c0c/utils/heap.mli | ocaml | ************************************************************************
This software is free software; you can redistribute it and/or
described in file LICENSE.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
************************************************************************
Heaps
Type of imperative heaps.
(In the following [n] refers to the number of elements in the heap)
[create c] creates a new heap, with initial capacity of [c]
[is_empty h] checks the emptiness of [h]
[add x h] adds a new element [x] in heap [h]; size of [h] is doubled
when maximum capacity is reached; complexity $O(log(n))$
[maximum h] returns the maximum element of [h]; raises [EmptyHeap]
when [h] is empty; complexity $O(1)$
[remove h] removes the maximum element of [h]; raises [EmptyHeap]
when [h] is empty; complexity $O(log(n))$
[pop_maximum h] removes the maximum element of [h] and returns it;
raises [EmptyHeap] when [h] is empty; complexity $O(log(n))$
usual iterators and combinators; elements are presented in
arbitrary order
S Functional implementation.
heap elements
Type of functional heaps
The empty heap
[add x h] returns a new heap containing the elements of [h], plus [x];
complexity $O(log(n))$
[maximum h] returns the maximum element of [h]; raises [EmptyHeap]
when [h] is empty; complexity $O(1)$
[remove h] returns a new heap containing the elements of [h], except
the maximum of [h]; raises [EmptyHeap] when [h] is empty;
complexity $O(log(n))$
usual iterators and combinators; elements are presented in
arbitrary order | Copyright ( C )
modify it under the terms of the GNU Library General Public
License version 2 , with the special exception on linking
module type Ordered = sig
type t
val compare : t -> t -> int
end
exception EmptyHeap
S Imperative implementation .
module Imperative(X: Ordered) : sig
type t
val create : int -> t
val is_empty : t -> bool
val add : t -> X.t -> unit
val maximum : t -> X.t
val remove : t -> unit
val pop_maximum : t -> X.t
val iter : (X.t -> unit) -> t -> unit
val fold : (X.t -> 'a -> 'a) -> t -> 'a -> 'a
end
module type FunctionalSig = sig
type elt
type t
val empty : t
val add : elt -> t -> t
val maximum : t -> elt
val remove : t -> t
val iter : (elt -> unit) -> t -> unit
val fold : (elt -> 'a -> 'a) -> t -> 'a -> 'a
end
module Functional(X: Ordered) : FunctionalSig with type elt = X.t
|
9e88dfdb1f7ab066ffd9eccfac0136536b6da5b8437fc188ca0d4277de1166c2 | spawngrid/htoad | erlydtl_runtime.erl | -module(erlydtl_runtime).
-compile(export_all).
-define(IFCHANGED_CONTEXT_VARIABLE, erlydtl_ifchanged_context).
%% find_value(Key, Data) -> Value | undefined.
%%
%% Resolve a template variable Key (an atom) against a context Data.
%% Supported context shapes, in clause order:
%%   - undefined:             always undefined
%%   - fun/1:                 the fun is applied to Key
%%   - proplist:              looked up with the atom key, then the
%%                            string key, then the binary key
%%   - {Size, Tree} gb_tree:  gb_trees:lookup/2
%%   - dict:                  dict:find/2
%%   - other tagged tuples:   treated as a parameterized-module instance;
%%                            Tuple:Key() is called when the module
%%                            exports Key/1 (legacy pre-R16 feature)
find_value(_, undefined) ->
    undefined;
find_value(Key, Fun) when is_function(Fun, 1) ->
    Fun(Key);
find_value(Key, L) when is_list(L) ->
    %% Try atom, string and binary spellings of the key in turn.
    case proplists:get_value(Key, L) of
        undefined ->
            case proplists:get_value(atom_to_list(Key), L) of
                undefined ->
                    proplists:get_value(list_to_binary(atom_to_list(Key)), L);
                Val -> Val
            end;
        Val -> Val
    end;
find_value(Key, {GBSize, GBData}) when is_integer(GBSize) ->
    case gb_trees:lookup(Key, {GBSize, GBData}) of
        {value, Val} ->
            Val;
        _ ->
            undefined
    end;
find_value(Key, Tuple) when is_tuple(Tuple) ->
    %% The first element of a dict (or parameterized-module instance)
    %% names its module.
    Module = element(1, Tuple),
    case Module of
        dict ->
            case dict:find(Key, Tuple) of
                {ok, Val} ->
                    Val;
                _ ->
                    undefined
            end;
        Module ->
            case lists:member({Key, 1}, Module:module_info(exports)) of
                true ->
                    Tuple:Key();
                _ ->
                    undefined
            end
    end.
%% Like find_value/2, but treats an unbound variable as an error:
%% throws {undefined_variable, Key} instead of returning undefined.
fetch_value(Key, Data) ->
    Value = find_value(Key, Data),
    Value =/= undefined orelse throw({undefined_variable, Key}),
    Value.
%% regroup(List, Attribute) -> [[{grouper, G}, {list, Items}]].
%%
%% Implements Django's {% regroup %}: partitions List into runs of
%% consecutive elements whose Attribute value (looked up with
%% find_value/2) is equal.  Element order is preserved within each
%% group; groups appear in first-occurrence order.
regroup(List, Attribute) ->
    regroup(List, Attribute, []).

%% The accumulator holds finished groups in reverse order; the head
%% (open) group's item list is also kept reversed until it is closed.
regroup([], _, []) ->
    [];
regroup([], _, [[{grouper, LastGrouper}, {list, LastList}]|Acc]) ->
    lists:reverse([[{grouper, LastGrouper}, {list, lists:reverse(LastList)}]|Acc]);
regroup([Item|Rest], Attribute, []) ->
    regroup(Rest, Attribute, [[{grouper, find_value(Attribute, Item)}, {list, [Item]}]]);
regroup([Item|Rest], Attribute, [[{grouper, PrevGrouper}, {list, PrevList}]|Acc]) ->
    case find_value(Attribute, Item) of
        Value when Value =:= PrevGrouper ->
            %% Same key as the open group: prepend the item to it.
            regroup(Rest, Attribute, [[{grouper, PrevGrouper}, {list, [Item|PrevList]}]|Acc]);
        Value ->
            %% Key changed: close the open group and start a new one.
            regroup(Rest, Attribute, [[{grouper, Value}, {list, [Item]}], [{grouper, PrevGrouper}, {list, lists:reverse(PrevList)}]|Acc])
    end.
%% translate(String, TranslationFun, Default) -> translated string.
%%
%% Apply the user-supplied translation fun to String; fall back to
%% Default when there is no fun or the fun yields an empty/undefined
%% translation.
translate(_, none, Default) ->
    Default;
translate(String, TranslationFun, Default) when is_function(TranslationFun) ->
    Translated = TranslationFun(String),
    case lists:member(Translated, [undefined, <<"">>, ""]) of
        true -> Default;
        false -> Translated
    end.
%% Loose equality used by the template `==' operator: exact equality,
%% otherwise binaries and integers are coerced to char lists (and an
%% atom is coerced when compared against a list) before retrying.
are_equal(Lhs, Rhs) when Lhs =:= Rhs ->
    true;
are_equal(Lhs, Rhs) when is_binary(Lhs) ->
    are_equal(binary_to_list(Lhs), Rhs);
are_equal(Lhs, Rhs) when is_binary(Rhs) ->
    are_equal(Lhs, binary_to_list(Rhs));
are_equal(Lhs, Rhs) when is_integer(Lhs) ->
    are_equal(integer_to_list(Lhs), Rhs);
are_equal(Lhs, Rhs) when is_integer(Rhs) ->
    are_equal(Lhs, integer_to_list(Rhs));
are_equal(Lhs, Rhs) when is_atom(Lhs), is_list(Rhs) ->
    are_equal(atom_to_list(Lhs), Rhs);
are_equal(Lhs, Rhs) when is_list(Lhs), is_atom(Rhs) ->
    are_equal(Lhs, atom_to_list(Rhs));
are_equal(_, _) ->
    false.
%% Django truthiness: the empty string, false, undefined, the string or
%% binary "0", and the empty binary are all falsy; everything else is
%% truthy.
is_false(Value) ->
    lists:member(Value, ["", false, undefined, "0", <<"0">>, <<>>]).

is_true(Value) ->
    not is_false(Value).
%% 'in'(Needle, Haystack) -> boolean().
%%
%% Membership test behind the template `in' operator.  For two strings
%% it is a substring test; for a list of strings it is (exact) element
%% membership; binaries and atoms are coerced to char lists first.
'in'(Sublist, [Sublist|_]) ->
    %% Exact head match (any element type).
    true;
'in'(Sublist, List) when is_atom(List) ->
    'in'(Sublist, atom_to_list(List));
'in'(Sublist, List) when is_binary(Sublist) ->
    'in'(binary_to_list(Sublist), List);
'in'(Sublist, List) when is_binary(List) ->
    'in'(Sublist, binary_to_list(List));
'in'(Sublist, [C|Rest]) when is_list(Sublist) andalso is_binary(C) ->
    %% Normalise binary elements so the clauses below can match.
    'in'(Sublist, [binary_to_list(C)|Rest]);
'in'(Sublist, [C|Rest]) when is_list(Sublist) andalso is_list(C) ->
    %% C =/= Sublist here (the first clause handles equality), so this
    %% haystack is a list of strings: keep scanning the tail.
    'in'(Sublist, Rest);
'in'(Sublist, List) when is_list(Sublist) andalso is_list(List) ->
    %% Both are plain strings: substring test (string:str/2 is 1-based,
    %% 0 means "not found").
    string:str(List, Sublist) > 0;
'in'(Element, List) when is_list(List) ->
    %% Non-list needle in a list: plain membership.
    lists:member(Element, List);
'in'(_, _) ->
    false.
%% Boolean and comparison operators used by compiled templates.
%% Comparison operands that are strings are converted with
%% list_to_integer/1 before comparing.

'not'(Value) ->
    not is_true(Value).

'or'(Lhs, Rhs) ->
    is_true(Lhs) or is_true(Rhs).

'and'(Lhs, Rhs) ->
    is_true(Lhs) and is_true(Rhs).

'eq'(Lhs, Rhs) ->
    are_equal(Lhs, Rhs).

'ne'(Lhs, Rhs) ->
    not are_equal(Lhs, Rhs).

'le'(Lhs, Rhs) ->
    not 'gt'(Lhs, Rhs).

'ge'(Lhs, Rhs) ->
    not 'lt'(Lhs, Rhs).

'gt'(Lhs, Rhs) when is_list(Lhs) ->
    'gt'(list_to_integer(Lhs), Rhs);
'gt'(Lhs, Rhs) when is_list(Rhs) ->
    'gt'(Lhs, list_to_integer(Rhs));
'gt'(Lhs, Rhs) ->
    Lhs > Rhs.

'lt'(Lhs, Rhs) when is_list(Lhs) ->
    'lt'(list_to_integer(Lhs), Rhs);
'lt'(Lhs, Rhs) when is_list(Rhs) ->
    'lt'(Lhs, list_to_integer(Rhs));
'lt'(Lhs, Rhs) ->
    Lhs < Rhs.
%% stringify_final(IoData, BinaryStrings) -> iolist().
%%
%% Final pass over rendered template output: atoms and tuples embedded
%% in the output tree are converted to strings (binaries when
%% BinaryStrings is true, char lists otherwise); nested lists are
%% walked recursively; everything else is passed through unchanged.
stringify_final(In, BinaryStrings) ->
    stringify_final(In, [], BinaryStrings).

stringify_final([], Out, _) ->
    lists:reverse(Out);
stringify_final([El | Rest], Out, false = BinaryStrings) when is_atom(El) ->
    stringify_final(Rest, [atom_to_list(El) | Out], BinaryStrings);
stringify_final([El | Rest], Out, true = BinaryStrings) when is_atom(El) ->
    stringify_final(Rest, [list_to_binary(atom_to_list(El)) | Out], BinaryStrings);
stringify_final([El | Rest], Out, BinaryStrings) when is_list(El) ->
    stringify_final(Rest, [stringify_final(El, BinaryStrings) | Out], BinaryStrings);
stringify_final([El | Rest], Out, false = BinaryStrings) when is_tuple(El) ->
    %% io_lib:print/1 yields a readable rendering of arbitrary terms.
    stringify_final(Rest, [io_lib:print(El) | Out], BinaryStrings);
stringify_final([El | Rest], Out, true = BinaryStrings) when is_tuple(El) ->
    stringify_final(Rest, [list_to_binary(io_lib:print(El)) | Out], BinaryStrings);
stringify_final([El | Rest], Out, BinaryStrings) ->
    stringify_final(Rest, [El | Out], BinaryStrings).
%% init_counter_stats(List[, Parent]) -> proplist.
%%
%% Build the initial `forloop' counter proplist for iterating over
%% List.  Parent is the enclosing loop's counter proplist (undefined at
%% the top level), exposed to templates as forloop.parentloop.
init_counter_stats(List) ->
    init_counter_stats(List, undefined).

init_counter_stats(List, Parent) when is_list(List) ->
    %% length/1 is O(n); compute it once instead of three times.
    Len = length(List),
    [{counter, 1},
     {counter0, 0},
     {revcounter, Len},
     {revcounter0, Len - 1},
     {first, true},
     {last, Len =:= 1},
     {parentloop, Parent}].

%% Advance the counter proplist by one iteration.  The argument must
%% have the exact shape produced by init_counter_stats/2.
increment_counter_stats([{counter, Counter}, {counter0, Counter0}, {revcounter, RevCounter},
                         {revcounter0, RevCounter0}, {first, _}, {last, _}, {parentloop, Parent}]) ->
    [{counter, Counter + 1},
     {counter0, Counter0 + 1},
     {revcounter, RevCounter - 1},
     {revcounter0, RevCounter0 - 1},
     {first, false}, {last, RevCounter0 =:= 1},
     {parentloop, Parent}].
%% forloop(Fun, Acc0, Values) -> {MappedValues, AccFinal}.
%%
%% Wrapper around lists:mapfoldl/3 used to run a {% for %} body.  A
%% fresh {% ifchanged %} frame is pushed for the duration of the loop
%% so that ifchanged state does not leak between (nested) loops.
forloop(Fun, Acc0, Values) ->
    push_ifchanged_context(),
    Result = lists:mapfoldl(Fun, Acc0, Values),
    pop_ifchanged_context(),
    Result.

%% The ifchanged state is a stack of proplists kept in the process
%% dictionary under ?IFCHANGED_CONTEXT_VARIABLE, one frame per loop.
push_ifchanged_context() ->
    IfChangedContextStack = case get(?IFCHANGED_CONTEXT_VARIABLE) of
        undefined -> [];
        Stack -> Stack
    end,
    put(?IFCHANGED_CONTEXT_VARIABLE, [[]|IfChangedContextStack]).

pop_ifchanged_context() ->
    [_|Rest] = get(?IFCHANGED_CONTEXT_VARIABLE),
    put(?IFCHANGED_CONTEXT_VARIABLE, Rest).

%% ifchanged(SourceText, EvaluatedText, AlternativeText)
%%
%% Returns EvaluatedText when it differs from the value last recorded
%% for SourceText on the current frame (and records the new value);
%% otherwise returns AlternativeText (the {% else %} branch).
ifchanged(SourceText, EvaluatedText, AlternativeText) ->
    [IfChangedContext|Rest] = get(?IFCHANGED_CONTEXT_VARIABLE),
    PreviousText = proplists:get_value(SourceText, IfChangedContext),
    if
        PreviousText =:= EvaluatedText ->
            AlternativeText;
        true ->
            NewContext = [{SourceText, EvaluatedText}|proplists:delete(SourceText, IfChangedContext)],
            put(?IFCHANGED_CONTEXT_VARIABLE, [NewContext|Rest]),
            EvaluatedText
    end.
%% {% cycle %}: pick the entry for the current iteration, round-robin
%% over NamesTuple, driven by the loop's zero-based counter0.
cycle(NamesTuple, Counters) when is_tuple(NamesTuple) ->
    element(fetch_value(counter0, Counters) rem size(NamesTuple) + 1, NamesTuple).
%% {% widthratio %}: scale Numerator/Denominator by Scale and round to
%% the nearest integer.
widthratio(Numerator, Denominator, Scale) ->
    Ratio = Numerator / Denominator,
    round(Ratio * Scale).
%% {% spaceless %}: strip whitespace before the first tag, after the
%% last tag, and between adjacent tags of the flattened output.
spaceless(Contents) ->
    Flat = lists:flatten(Contents),
    Rules = [{"^\s+<", "<", []},
             {">\s+$", ">", []},
             {">\s+<", "><", [global]}],
    lists:foldl(
      fun({Pattern, Replacement, Opts}, Acc) ->
              re:replace(Acc, Pattern, Replacement, Opts ++ [{return, list}])
      end,
      Flat, Rules).
%% read_file(Module, Function, DocRoot, FileName) -> string().
%%
%% Read an included template as a char list via the pluggable reader
%% Module:Function(AbsPath), which is expected to return {ok, Binary}.
%% A relative FileName is resolved against DocRoot; a name that
%% filename:absname/1 leaves unchanged is used as given.
read_file(Module, Function, DocRoot, FileName) ->
    AbsName = case filename:absname(FileName) of
        FileName -> FileName;
        _ -> filename:join([DocRoot, FileName])
    end,
    {ok, Binary} = Module:Function(AbsName),
    binary_to_list(Binary).
| null | https://raw.githubusercontent.com/spawngrid/htoad/f0c7dfbd911b29fb0c406b7c26606f553af11194/deps/erlydtl/src/erlydtl_runtime.erl | erlang | -module(erlydtl_runtime).
-compile(export_all).
-define(IFCHANGED_CONTEXT_VARIABLE, erlydtl_ifchanged_context).
find_value(_, undefined) ->
undefined;
find_value(Key, Fun) when is_function(Fun, 1) ->
Fun(Key);
find_value(Key, L) when is_list(L) ->
case proplists:get_value(Key, L) of
undefined ->
case proplists:get_value(atom_to_list(Key), L) of
undefined ->
proplists:get_value(list_to_binary(atom_to_list(Key)), L);
Val -> Val
end;
Val -> Val
end;
find_value(Key, {GBSize, GBData}) when is_integer(GBSize) ->
case gb_trees:lookup(Key, {GBSize, GBData}) of
{value, Val} ->
Val;
_ ->
undefined
end;
find_value(Key, Tuple) when is_tuple(Tuple) ->
Module = element(1, Tuple),
case Module of
dict ->
case dict:find(Key, Tuple) of
{ok, Val} ->
Val;
_ ->
undefined
end;
Module ->
case lists:member({Key, 1}, Module:module_info(exports)) of
true ->
Tuple:Key();
_ ->
undefined
end
end.
fetch_value(Key, Data) ->
case find_value(Key, Data) of
undefined ->
throw({undefined_variable, Key});
Val ->
Val
end.
regroup(List, Attribute) ->
regroup(List, Attribute, []).
regroup([], _, []) ->
[];
regroup([], _, [[{grouper, LastGrouper}, {list, LastList}]|Acc]) ->
lists:reverse([[{grouper, LastGrouper}, {list, lists:reverse(LastList)}]|Acc]);
regroup([Item|Rest], Attribute, []) ->
regroup(Rest, Attribute, [[{grouper, find_value(Attribute, Item)}, {list, [Item]}]]);
regroup([Item|Rest], Attribute, [[{grouper, PrevGrouper}, {list, PrevList}]|Acc]) ->
case find_value(Attribute, Item) of
Value when Value =:= PrevGrouper ->
regroup(Rest, Attribute, [[{grouper, PrevGrouper}, {list, [Item|PrevList]}]|Acc]);
Value ->
regroup(Rest, Attribute, [[{grouper, Value}, {list, [Item]}], [{grouper, PrevGrouper}, {list, lists:reverse(PrevList)}]|Acc])
end.
translate(_, none, Default) ->
Default;
translate(String, TranslationFun, Default) when is_function(TranslationFun) ->
case TranslationFun(String) of
undefined -> Default;
<<"">> -> Default;
"" -> Default;
Str -> Str
end.
are_equal(Arg1, Arg2) when Arg1 =:= Arg2 ->
true;
are_equal(Arg1, Arg2) when is_binary(Arg1) ->
are_equal(binary_to_list(Arg1), Arg2);
are_equal(Arg1, Arg2) when is_binary(Arg2) ->
are_equal(Arg1, binary_to_list(Arg2));
are_equal(Arg1, Arg2) when is_integer(Arg1) ->
are_equal(integer_to_list(Arg1), Arg2);
are_equal(Arg1, Arg2) when is_integer(Arg2) ->
are_equal(Arg1, integer_to_list(Arg2));
are_equal(Arg1, Arg2) when is_atom(Arg1), is_list(Arg2) ->
are_equal(atom_to_list(Arg1), Arg2);
are_equal(Arg1, Arg2) when is_list(Arg1), is_atom(Arg2) ->
are_equal(Arg1, atom_to_list(Arg2));
are_equal(_, _) ->
false.
is_false("") ->
true;
is_false(false) ->
true;
is_false(undefined) ->
true;
is_false("0") ->
true;
is_false(<<"0">>) ->
true;
is_false(<<>>) ->
true;
is_false(_) ->
false.
is_true(V) ->
not is_false(V).
'in'(Sublist, [Sublist|_]) ->
true;
'in'(Sublist, List) when is_atom(List) ->
'in'(Sublist, atom_to_list(List));
'in'(Sublist, List) when is_binary(Sublist) ->
'in'(binary_to_list(Sublist), List);
'in'(Sublist, List) when is_binary(List) ->
'in'(Sublist, binary_to_list(List));
'in'(Sublist, [C|Rest]) when is_list(Sublist) andalso is_binary(C) ->
'in'(Sublist, [binary_to_list(C)|Rest]);
'in'(Sublist, [C|Rest]) when is_list(Sublist) andalso is_list(C) ->
'in'(Sublist, Rest);
'in'(Sublist, List) when is_list(Sublist) andalso is_list(List) ->
string:str(List, Sublist) > 0;
'in'(Element, List) when is_list(List) ->
lists:member(Element, List);
'in'(_, _) ->
false.
'not'(Value) ->
not is_true(Value).
'or'(Value1, Value2) ->
is_true(Value1) or is_true(Value2).
'and'(Value1, Value2) ->
is_true(Value1) and is_true(Value2).
'eq'(Value1, Value2) ->
are_equal(Value1, Value2).
'ne'(Value1, Value2) ->
not are_equal(Value1, Value2).
'le'(Value1, Value2) ->
not 'gt'(Value1, Value2).
'ge'(Value1, Value2) ->
not 'lt'(Value1, Value2).
'gt'(Value1, Value2) when is_list(Value1) ->
'gt'(list_to_integer(Value1), Value2);
'gt'(Value1, Value2) when is_list(Value2) ->
'gt'(Value1, list_to_integer(Value2));
'gt'(Value1, Value2) when Value1 > Value2 ->
true;
'gt'(_, _) ->
false.
'lt'(Value1, Value2) when is_list(Value1) ->
'lt'(list_to_integer(Value1), Value2);
'lt'(Value1, Value2) when is_list(Value2) ->
'lt'(Value1, list_to_integer(Value2));
'lt'(Value1, Value2) when Value1 < Value2 ->
true;
'lt'(_, _) ->
false.
stringify_final(In, BinaryStrings) ->
stringify_final(In, [], BinaryStrings).
stringify_final([], Out, _) ->
lists:reverse(Out);
stringify_final([El | Rest], Out, false = BinaryStrings) when is_atom(El) ->
stringify_final(Rest, [atom_to_list(El) | Out], BinaryStrings);
stringify_final([El | Rest], Out, true = BinaryStrings) when is_atom(El) ->
stringify_final(Rest, [list_to_binary(atom_to_list(El)) | Out], BinaryStrings);
stringify_final([El | Rest], Out, BinaryStrings) when is_list(El) ->
stringify_final(Rest, [stringify_final(El, BinaryStrings) | Out], BinaryStrings);
stringify_final([El | Rest], Out, false = BinaryStrings) when is_tuple(El) ->
stringify_final(Rest, [io_lib:print(El) | Out], BinaryStrings);
stringify_final([El | Rest], Out, true = BinaryStrings) when is_tuple(El) ->
stringify_final(Rest, [list_to_binary(io_lib:print(El)) | Out], BinaryStrings);
stringify_final([El | Rest], Out, BinaryStrings) ->
stringify_final(Rest, [El | Out], BinaryStrings).
init_counter_stats(List) ->
init_counter_stats(List, undefined).
init_counter_stats(List, Parent) when is_list(List) ->
[{counter, 1},
{counter0, 0},
{revcounter, length(List)},
{revcounter0, length(List) - 1},
{first, true},
{last, length(List) =:= 1},
{parentloop, Parent}].
increment_counter_stats([{counter, Counter}, {counter0, Counter0}, {revcounter, RevCounter},
{revcounter0, RevCounter0}, {first, _}, {last, _}, {parentloop, Parent}]) ->
[{counter, Counter + 1},
{counter0, Counter0 + 1},
{revcounter, RevCounter - 1},
{revcounter0, RevCounter0 - 1},
{first, false}, {last, RevCounter0 =:= 1},
{parentloop, Parent}].
forloop(Fun, Acc0, Values) ->
push_ifchanged_context(),
Result = lists:mapfoldl(Fun, Acc0, Values),
pop_ifchanged_context(),
Result.
push_ifchanged_context() ->
IfChangedContextStack = case get(?IFCHANGED_CONTEXT_VARIABLE) of
undefined -> [];
Stack -> Stack
end,
put(?IFCHANGED_CONTEXT_VARIABLE, [[]|IfChangedContextStack]).
pop_ifchanged_context() ->
[_|Rest] = get(?IFCHANGED_CONTEXT_VARIABLE),
put(?IFCHANGED_CONTEXT_VARIABLE, Rest).
ifchanged(SourceText, EvaluatedText, AlternativeText) ->
[IfChangedContext|Rest] = get(?IFCHANGED_CONTEXT_VARIABLE),
PreviousText = proplists:get_value(SourceText, IfChangedContext),
if
PreviousText =:= EvaluatedText ->
AlternativeText;
true ->
NewContext = [{SourceText, EvaluatedText}|proplists:delete(SourceText, IfChangedContext)],
put(?IFCHANGED_CONTEXT_VARIABLE, [NewContext|Rest]),
EvaluatedText
end.
cycle(NamesTuple, Counters) when is_tuple(NamesTuple) ->
element(fetch_value(counter0, Counters) rem size(NamesTuple) + 1, NamesTuple).
widthratio(Numerator, Denominator, Scale) ->
round(Numerator / Denominator * Scale).
spaceless(Contents) ->
Contents1 = lists:flatten(Contents),
Contents2 = re:replace(Contents1, "^\s+<", "<", [{return,list}]),
Contents3 = re:replace(Contents2, ">\s+$", ">", [{return,list}]),
Contents4 = re:replace(Contents3, ">\s+<", "><", [global, {return,list}]),
Contents4.
read_file(Module, Function, DocRoot, FileName) ->
AbsName = case filename:absname(FileName) of
FileName -> FileName;
_ -> filename:join([DocRoot, FileName])
end,
{ok, Binary} = Module:Function(AbsName),
binary_to_list(Binary).
| |
3a1128ef1b78dd0721b0949427ce4b1734aedd2ca88c7711d0e03af2dc4465a1 | mbenelli/klio | charsets#.scm | ;; charsets#.scm - Srfi-14 char-set library
;;
Copyright ( c ) 2010 by < >
All Rights Reserved .
;; Namespace declaration for the SRFI-14 character-set API: maps every
;; public binding onto the "charsets#" namespace.  Groups follow the
;; section order of the SRFI-14 specification.
(namespace
 ("charsets#"
  ;; Predicates & comparison
  char-set?
  char-set=
  char-set<=
  char-set-hash
  ;; Iterating over character sets
  char-set-cursor
  char-set-ref
  char-set-cursor-next
  end-of-char-set?
  char-set-fold
  char-set-unfold
  char-set-unfold!
  char-set-for-each
  char-set-map
  ;; Creating character sets
  char-set-copy
  char-set
  list->char-set
  list->char-set!
  string->char-set
  string->char-set!
  char-set-filter
  char-set-filter!
  ucs-range->char-set
  ucs-range->char-set!
  ;; Querying character sets
  char-set->list
  char-set->string
  char-set-size
  char-set-count
  char-set-contains?
  char-set-every
  char-set-any
  ;; Character-set algebra
  char-set-adjoin
  char-set-adjoin!
  char-set-delete
  char-set-delete!
  char-set-complement
  char-set-complement!
  char-set-union!
  char-set-union
  char-set-intersection
  char-set-intersection!
  char-set-difference
  char-set-difference!
  char-set-xor
  char-set-xor!
  char-set-diff+intersection
  char-set-diff+intersection!
  ;; Standard character sets
  char-set:empty
  char-set:full
  char-set:lower-case
  char-set:upper-case
  char-set:title-case
  char-set:letter
  char-set:digit
  char-set:hex-digit
  char-set:letter+digit
  char-set:punctuation
  char-set:symbol
  char-set:graphic
  char-set:whitespace
  char-set:printing
  char-set:blank
  char-set:iso-control
  char-set:ascii
  ))
| null | https://raw.githubusercontent.com/mbenelli/klio/33c11700d6080de44a22a27a5147f97899583f6e/klio/charsets%23.scm | scheme | charsets#.scm - Srfi-14 char-set library
predicates & comparison
iterating over character sets
creating character sets
querying character sets
character-set algebra
standard character sets | Copyright ( c ) 2010 by < >
All Rights Reserved .
(namespace
("charsets#"
char-set?
char-set=
char-set<=
char-set-hash
char-set-cursor
char-set-ref
char-set-cursor-next
end-of-char-set?
char-set-fold
char-set-unfold
char-set-unfold!
char-set-for-each
char-set-map
char-set-copy
char-set
list->char-set
list->char-set!
string->char-set
string->char-set!
char-set-filter
char-set-filter!
ucs-range->char-set
ucs-range->char-set!
char-set->list
char-set->string
char-set-size
char-set-count
char-set-contains?
char-set-every
char-set-any
char-set-adjoin
char-set-adjoin!
char-set-delete
char-set-delete!
char-set-complement
char-set-complement!
char-set-union!
char-set-union
char-set-intersection
char-set-intersection!
char-set-difference
char-set-difference!
char-set-xor
char-set-xor!
char-set-diff+intersection
char-set-diff+intersection!
char-set:empty
char-set:full
char-set:lower-case
char-set:upper-case
char-set:title-case
char-set:letter
char-set:digit
char-set:hex-digit
char-set:letter+digit
char-set:punctuation
char-set:symbol
char-set:graphic
char-set:whitespace
char-set:printing
char-set:blank
char-set:iso-control
char-set:ascii
))
|
954e33cece4b76cfb31f4e82d1fd3141b860149d7accf181e246b866fb5a37df | jaydchan/tawny-karyotype | karyotype_test.clj | The contents of this file are subject to the LGPL License , Version 3.0 .
Copyright ( C ) 2013 , Newcastle University
;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;; (at your option) any later version.
;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program . If not , see /.
(ns ncl.karyotype.karyotype_test
(:use [clojure.test])
(:require
[ncl.karyotype.karyotype :as k]
[tawny.owl :as o]
[tawny.reasoner :as r]))
(defn ontology-reasoner-fixture
  "Test fixture: selects the HermiT reasoner, makes the karyotype
  ontology the default ontology for tawny.owl operations in this
  namespace, and silences reasoner progress reporting while the
  wrapped tests run."
  [tests]
  (r/reasoner-factory :hermit)
  (o/ontology-to-namespace k/karyotype)
  (binding [r/*reasoner-progress-monitor*
            (atom
             r/reasoner-progress-monitor-silent)]
    (tests)))

;; Run the fixture once for the whole namespace, not once per test.
(use-fixtures :once ontology-reasoner-fixture)

(deftest Basic
  ;; The karyotype ontology must be consistent and coherent
  ;; (i.e. contain no unsatisfiable classes).
  (is (r/consistent?))
  (is (r/coherent?)))
| null | https://raw.githubusercontent.com/jaydchan/tawny-karyotype/3a875e5ef7de9372aee70d9e6cc8c22fc5187544/test/ncl/karyotype/karyotype_test.clj | clojure | This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details. | The contents of this file are subject to the LGPL License , Version 3.0 .
Copyright ( C ) 2013 , Newcastle University
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
along with this program . If not , see /.
(ns ncl.karyotype.karyotype_test
(:use [clojure.test])
(:require
[ncl.karyotype.karyotype :as k]
[tawny.owl :as o]
[tawny.reasoner :as r]))
(defn ontology-reasoner-fixture [tests]
(r/reasoner-factory :hermit)
(o/ontology-to-namespace k/karyotype)
(binding [r/*reasoner-progress-monitor*
(atom
r/reasoner-progress-monitor-silent)]
(tests)))
(use-fixtures :once ontology-reasoner-fixture)
(deftest Basic
(is (r/consistent?))
(is (r/coherent?)))
|
367c60943d0ae57a02ce15e7aae29a81655a067addc9283075239aabc6008992 | haroldcarr/learn-haskell-coq-ml-etc | philosophers.hs | -- generated by HCPN NetEdit v0.0
module Unnamed where
import SimpleHCPN
import GuiHCPN
import List (intersperse)
-- declarations
-- markings
-- | Marking of the dining-philosophers net: one token list per place.
-- Tokens are colourless, so each @()@ entry is one token.  Each
-- philosopher cycles through ready -> has_right -> has_both -> ready;
-- the @forkN@ places hold the currently free forks.
data Mark = Mark {
    phil1_ready :: [()]
  , phil1_has_both :: [()]
  , phil1_has_right :: [()]
  , fork2 :: [()]
  , fork1 :: [()]
  , fork3 :: [()]
  , phil3_ready :: [()]
  , phil3_has_both :: [()]
  , phil3_has_right :: [()]
  , phil2_ready :: [()]
  , phil2_has_both :: [()]
  , phil2_has_right :: [()]
  } deriving Show
-- transition actions
-- | Philosopher 1 finishes eating: consumes the token on
-- @phil1_has_both@ and returns tokens to @phil1_ready@, @fork1@ and
-- @fork2@.  'select' (from SimpleHCPN -- assumed to remove one token
-- from a marking, enumerating each choice in the list monad) yields
-- one successor marking per enabled binding.
phil1_finished :: Mark -> [Mark]
phil1_finished m =
  do
    let phil1_has_both_marking = phil1_has_both m
    let phil1_ready_marking = phil1_ready m
    let fork1_marking = fork1 m
    let fork2_marking = fork2 m
    ((), phil1_has_both_marking) <- select $ phil1_has_both_marking
    if True  -- generated guard: this transition is unconditional
      then return m{
               phil1_has_both = phil1_has_both_marking
             , phil1_ready = (()) : phil1_ready_marking
             , fork1 = (()) : fork1_marking
             , fork2 = (()) : fork2_marking
             }
      else fail "guard failed"

-- | Philosopher 1 picks up the left fork (@fork2@), moving from
-- @phil1_has_right@ to @phil1_has_both@.
phil1_take_left :: Mark -> [Mark]
phil1_take_left m =
  do
    let fork2_marking = fork2 m
    let phil1_has_right_marking = phil1_has_right m
    let phil1_has_both_marking = phil1_has_both m
    ((), fork2_marking) <- select $ fork2_marking
    ((), phil1_has_right_marking) <- select $ phil1_has_right_marking
    if True  -- generated guard: this transition is unconditional
      then return m{
               fork2 = fork2_marking
             , phil1_has_right = phil1_has_right_marking
             , phil1_has_both = (()) : phil1_has_both_marking
             }
      else fail "guard failed"

-- | Philosopher 1 picks up the right fork (@fork1@), moving from
-- @phil1_ready@ to @phil1_has_right@.
phil1_take_right :: Mark -> [Mark]
phil1_take_right m =
  do
    let fork1_marking = fork1 m
    let phil1_ready_marking = phil1_ready m
    let phil1_has_right_marking = phil1_has_right m
    ((), fork1_marking) <- select $ fork1_marking
    ((), phil1_ready_marking) <- select $ phil1_ready_marking
    if True  -- generated guard: this transition is unconditional
      then return m{
               fork1 = fork1_marking
             , phil1_ready = phil1_ready_marking
             , phil1_has_right = (()) : phil1_has_right_marking
             }
      else fail "guard failed"
-- | Philosopher 3 finishes eating: consumes the token on
-- @phil3_has_both@ and returns tokens to @fork3@, @fork1@ and
-- @phil3_ready@.
phil3_finished :: Mark -> [Mark]
phil3_finished m =
  do
    let phil3_has_both_marking = phil3_has_both m
    let fork3_marking = fork3 m
    let fork1_marking = fork1 m
    let phil3_ready_marking = phil3_ready m
    ((), phil3_has_both_marking) <- select $ phil3_has_both_marking
    if True  -- generated guard: this transition is unconditional
      then return m{
               phil3_has_both = phil3_has_both_marking
             , fork3 = (()) : fork3_marking
             , fork1 = (()) : fork1_marking
             , phil3_ready = (()) : phil3_ready_marking
             }
      else fail "guard failed"

-- | Philosopher 3 picks up the left fork (@fork1@), moving from
-- @phil3_has_right@ to @phil3_has_both@.
phil3_takes_left :: Mark -> [Mark]
phil3_takes_left m =
  do
    let fork1_marking = fork1 m
    let phil3_has_right_marking = phil3_has_right m
    let phil3_has_both_marking = phil3_has_both m
    ((), fork1_marking) <- select $ fork1_marking
    ((), phil3_has_right_marking) <- select $ phil3_has_right_marking
    if True  -- generated guard: this transition is unconditional
      then return m{
               fork1 = fork1_marking
             , phil3_has_right = phil3_has_right_marking
             , phil3_has_both = (()) : phil3_has_both_marking
             }
      else fail "guard failed"

-- | Philosopher 3 picks up the right fork (@fork3@), moving from
-- @phil3_ready@ to @phil3_has_right@.
phil3_takes_right :: Mark -> [Mark]
phil3_takes_right m =
  do
    let fork3_marking = fork3 m
    let phil3_ready_marking = phil3_ready m
    let phil3_has_right_marking = phil3_has_right m
    ((), fork3_marking) <- select $ fork3_marking
    ((), phil3_ready_marking) <- select $ phil3_ready_marking
    if True  -- generated guard: this transition is unconditional
      then return m{
               fork3 = fork3_marking
             , phil3_ready = phil3_ready_marking
             , phil3_has_right = (()) : phil3_has_right_marking
             }
      else fail "guard failed"
phil2_finished :: Mark -> [Mark]
phil2_finished m =
do
let phil2_has_both_marking = phil2_has_both m
let fork2_marking = fork2 m
let fork3_marking = fork3 m
let phil2_ready_marking = phil2_ready m
((), phil2_has_both_marking) <- select $ phil2_has_both_marking
if True
then return m{
phil2_has_both = phil2_has_both_marking
, fork2 = (()) : fork2_marking
, fork3 = (()) : fork3_marking
, phil2_ready = (()) : phil2_ready_marking
}
else fail "guard failed"
phil2_takes_left :: Mark -> [Mark]
phil2_takes_left m =
do
let fork3_marking = fork3 m
let phil2_has_right_marking = phil2_has_right m
let phil2_has_both_marking = phil2_has_both m
((), fork3_marking) <- select $ fork3_marking
((), phil2_has_right_marking) <- select $ phil2_has_right_marking
if True
then return m{
fork3 = fork3_marking
, phil2_has_right = phil2_has_right_marking
, phil2_has_both = (()) : phil2_has_both_marking
}
else fail "guard failed"
phil2_takes_right :: Mark -> [Mark]
phil2_takes_right m =
do
let fork2_marking = fork2 m
let phil2_ready_marking = phil2_ready m
let phil2_has_right_marking = phil2_has_right m
((), fork2_marking) <- select $ fork2_marking
((), phil2_ready_marking) <- select $ phil2_ready_marking
if True
then return m{
fork2 = fork2_marking
, phil2_ready = phil2_ready_marking
, phil2_has_right = (()) : phil2_has_right_marking
}
else fail "guard failed"
-- transitions
net = Net{trans=[ Trans{name="phil1_finished",info=Nothing,action=phil1_finished}
, Trans{name="phil1_take_left",info=Nothing,action=phil1_take_left}
, Trans{name="phil1_take_right",info=Nothing,action=phil1_take_right}
, Trans{name="phil3_finished",info=Nothing,action=phil3_finished}
, Trans{name="phil3_takes_left",info=Nothing,action=phil3_takes_left}
, Trans{name="phil3_takes_right",info=Nothing,action=phil3_takes_right}
, Trans{name="phil2_finished",info=Nothing,action=phil2_finished}
, Trans{name="phil2_takes_left",info=Nothing,action=phil2_takes_left}
, Trans{name="phil2_takes_right",info=Nothing,action=phil2_takes_right}
]}
-- initial marking
mark = Mark{ phil1_ready = [()]
, phil1_has_both = []
, phil1_has_right = []
, fork2 = [()]
, fork1 = [()]
, fork3 = [()]
, phil3_ready = [()]
, phil3_has_both = []
, phil3_has_right = []
, phil2_ready = [()]
, phil2_has_both = []
, phil2_has_right = []
}
-- end of net code
main = simMain "philosophers.hcpn" showMarking net mark
showMarking pmap = let (Just nV_phil1_ready) = lookup "phil1_ready" pmap
(Just nV_phil1_has_both) = lookup "phil1_has_both" pmap
(Just nV_phil1_has_right) = lookup "phil1_has_right" pmap
(Just nV_fork2) = lookup "fork2" pmap
(Just nV_fork1) = lookup "fork1" pmap
(Just nV_fork3) = lookup "fork3" pmap
(Just nV_phil3_ready) = lookup "phil3_ready" pmap
(Just nV_phil3_has_both) = lookup "phil3_has_both" pmap
(Just nV_phil3_has_right) = lookup "phil3_has_right" pmap
(Just nV_phil2_ready) = lookup "phil2_ready" pmap
(Just nV_phil2_has_both) = lookup "phil2_has_both" pmap
(Just nV_phil2_has_right) = lookup "phil2_has_right" pmap
in \setPlaceMark m-> do
setPlaceMark nV_phil1_ready (concat $ intersperse "," $ map show $ phil1_ready m)
setPlaceMark nV_phil1_has_both (concat $ intersperse "," $ map show $ phil1_has_both m)
setPlaceMark nV_phil1_has_right (concat $ intersperse "," $ map show $ phil1_has_right m)
setPlaceMark nV_fork2 (concat $ intersperse "," $ map show $ fork2 m)
setPlaceMark nV_fork1 (concat $ intersperse "," $ map show $ fork1 m)
setPlaceMark nV_fork3 (concat $ intersperse "," $ map show $ fork3 m)
setPlaceMark nV_phil3_ready (concat $ intersperse "," $ map show $ phil3_ready m)
setPlaceMark nV_phil3_has_both (concat $ intersperse "," $ map show $ phil3_has_both m)
setPlaceMark nV_phil3_has_right (concat $ intersperse "," $ map show $ phil3_has_right m)
setPlaceMark nV_phil2_ready (concat $ intersperse "," $ map show $ phil2_ready m)
setPlaceMark nV_phil2_has_both (concat $ intersperse "," $ map show $ phil2_has_both m)
setPlaceMark nV_phil2_has_right (concat $ intersperse "," $ map show $ phil2_has_right m)
| null | https://raw.githubusercontent.com/haroldcarr/learn-haskell-coq-ml-etc/b4e83ec7c7af730de688b7376497b9f49dc24a0e/haskell/playpen/hcpn/examples/philosophers.hs | haskell | generated by HCPN NetEdit v0.0
declarations
markings
transition actions
transitions
initial marking
end of net code
| module Unnamed where
import SimpleHCPN
import GuiHCPN
import List (intersperse)
data Mark = Mark {
phil1_ready :: [()]
, phil1_has_both :: [()]
, phil1_has_right :: [()]
, fork2 :: [()]
, fork1 :: [()]
, fork3 :: [()]
, phil3_ready :: [()]
, phil3_has_both :: [()]
, phil3_has_right :: [()]
, phil2_ready :: [()]
, phil2_has_both :: [()]
, phil2_has_right :: [()]
} deriving Show
phil1_finished :: Mark -> [Mark]
phil1_finished m =
do
let phil1_has_both_marking = phil1_has_both m
let phil1_ready_marking = phil1_ready m
let fork1_marking = fork1 m
let fork2_marking = fork2 m
((), phil1_has_both_marking) <- select $ phil1_has_both_marking
if True
then return m{
phil1_has_both = phil1_has_both_marking
, phil1_ready = (()) : phil1_ready_marking
, fork1 = (()) : fork1_marking
, fork2 = (()) : fork2_marking
}
else fail "guard failed"
phil1_take_left :: Mark -> [Mark]
phil1_take_left m =
do
let fork2_marking = fork2 m
let phil1_has_right_marking = phil1_has_right m
let phil1_has_both_marking = phil1_has_both m
((), fork2_marking) <- select $ fork2_marking
((), phil1_has_right_marking) <- select $ phil1_has_right_marking
if True
then return m{
fork2 = fork2_marking
, phil1_has_right = phil1_has_right_marking
, phil1_has_both = (()) : phil1_has_both_marking
}
else fail "guard failed"
phil1_take_right :: Mark -> [Mark]
phil1_take_right m =
do
let fork1_marking = fork1 m
let phil1_ready_marking = phil1_ready m
let phil1_has_right_marking = phil1_has_right m
((), fork1_marking) <- select $ fork1_marking
((), phil1_ready_marking) <- select $ phil1_ready_marking
if True
then return m{
fork1 = fork1_marking
, phil1_ready = phil1_ready_marking
, phil1_has_right = (()) : phil1_has_right_marking
}
else fail "guard failed"
phil3_finished :: Mark -> [Mark]
phil3_finished m =
do
let phil3_has_both_marking = phil3_has_both m
let fork3_marking = fork3 m
let fork1_marking = fork1 m
let phil3_ready_marking = phil3_ready m
((), phil3_has_both_marking) <- select $ phil3_has_both_marking
if True
then return m{
phil3_has_both = phil3_has_both_marking
, fork3 = (()) : fork3_marking
, fork1 = (()) : fork1_marking
, phil3_ready = (()) : phil3_ready_marking
}
else fail "guard failed"
phil3_takes_left :: Mark -> [Mark]
phil3_takes_left m =
do
let fork1_marking = fork1 m
let phil3_has_right_marking = phil3_has_right m
let phil3_has_both_marking = phil3_has_both m
((), fork1_marking) <- select $ fork1_marking
((), phil3_has_right_marking) <- select $ phil3_has_right_marking
if True
then return m{
fork1 = fork1_marking
, phil3_has_right = phil3_has_right_marking
, phil3_has_both = (()) : phil3_has_both_marking
}
else fail "guard failed"
phil3_takes_right :: Mark -> [Mark]
phil3_takes_right m =
do
let fork3_marking = fork3 m
let phil3_ready_marking = phil3_ready m
let phil3_has_right_marking = phil3_has_right m
((), fork3_marking) <- select $ fork3_marking
((), phil3_ready_marking) <- select $ phil3_ready_marking
if True
then return m{
fork3 = fork3_marking
, phil3_ready = phil3_ready_marking
, phil3_has_right = (()) : phil3_has_right_marking
}
else fail "guard failed"
phil2_finished :: Mark -> [Mark]
phil2_finished m =
do
let phil2_has_both_marking = phil2_has_both m
let fork2_marking = fork2 m
let fork3_marking = fork3 m
let phil2_ready_marking = phil2_ready m
((), phil2_has_both_marking) <- select $ phil2_has_both_marking
if True
then return m{
phil2_has_both = phil2_has_both_marking
, fork2 = (()) : fork2_marking
, fork3 = (()) : fork3_marking
, phil2_ready = (()) : phil2_ready_marking
}
else fail "guard failed"
phil2_takes_left :: Mark -> [Mark]
phil2_takes_left m =
do
let fork3_marking = fork3 m
let phil2_has_right_marking = phil2_has_right m
let phil2_has_both_marking = phil2_has_both m
((), fork3_marking) <- select $ fork3_marking
((), phil2_has_right_marking) <- select $ phil2_has_right_marking
if True
then return m{
fork3 = fork3_marking
, phil2_has_right = phil2_has_right_marking
, phil2_has_both = (()) : phil2_has_both_marking
}
else fail "guard failed"
phil2_takes_right :: Mark -> [Mark]
phil2_takes_right m =
do
let fork2_marking = fork2 m
let phil2_ready_marking = phil2_ready m
let phil2_has_right_marking = phil2_has_right m
((), fork2_marking) <- select $ fork2_marking
((), phil2_ready_marking) <- select $ phil2_ready_marking
if True
then return m{
fork2 = fork2_marking
, phil2_ready = phil2_ready_marking
, phil2_has_right = (()) : phil2_has_right_marking
}
else fail "guard failed"
net = Net{trans=[ Trans{name="phil1_finished",info=Nothing,action=phil1_finished}
, Trans{name="phil1_take_left",info=Nothing,action=phil1_take_left}
, Trans{name="phil1_take_right",info=Nothing,action=phil1_take_right}
, Trans{name="phil3_finished",info=Nothing,action=phil3_finished}
, Trans{name="phil3_takes_left",info=Nothing,action=phil3_takes_left}
, Trans{name="phil3_takes_right",info=Nothing,action=phil3_takes_right}
, Trans{name="phil2_finished",info=Nothing,action=phil2_finished}
, Trans{name="phil2_takes_left",info=Nothing,action=phil2_takes_left}
, Trans{name="phil2_takes_right",info=Nothing,action=phil2_takes_right}
]}
mark = Mark{ phil1_ready = [()]
, phil1_has_both = []
, phil1_has_right = []
, fork2 = [()]
, fork1 = [()]
, fork3 = [()]
, phil3_ready = [()]
, phil3_has_both = []
, phil3_has_right = []
, phil2_ready = [()]
, phil2_has_both = []
, phil2_has_right = []
}
main = simMain "philosophers.hcpn" showMarking net mark
showMarking pmap = let (Just nV_phil1_ready) = lookup "phil1_ready" pmap
(Just nV_phil1_has_both) = lookup "phil1_has_both" pmap
(Just nV_phil1_has_right) = lookup "phil1_has_right" pmap
(Just nV_fork2) = lookup "fork2" pmap
(Just nV_fork1) = lookup "fork1" pmap
(Just nV_fork3) = lookup "fork3" pmap
(Just nV_phil3_ready) = lookup "phil3_ready" pmap
(Just nV_phil3_has_both) = lookup "phil3_has_both" pmap
(Just nV_phil3_has_right) = lookup "phil3_has_right" pmap
(Just nV_phil2_ready) = lookup "phil2_ready" pmap
(Just nV_phil2_has_both) = lookup "phil2_has_both" pmap
(Just nV_phil2_has_right) = lookup "phil2_has_right" pmap
in \setPlaceMark m-> do
setPlaceMark nV_phil1_ready (concat $ intersperse "," $ map show $ phil1_ready m)
setPlaceMark nV_phil1_has_both (concat $ intersperse "," $ map show $ phil1_has_both m)
setPlaceMark nV_phil1_has_right (concat $ intersperse "," $ map show $ phil1_has_right m)
setPlaceMark nV_fork2 (concat $ intersperse "," $ map show $ fork2 m)
setPlaceMark nV_fork1 (concat $ intersperse "," $ map show $ fork1 m)
setPlaceMark nV_fork3 (concat $ intersperse "," $ map show $ fork3 m)
setPlaceMark nV_phil3_ready (concat $ intersperse "," $ map show $ phil3_ready m)
setPlaceMark nV_phil3_has_both (concat $ intersperse "," $ map show $ phil3_has_both m)
setPlaceMark nV_phil3_has_right (concat $ intersperse "," $ map show $ phil3_has_right m)
setPlaceMark nV_phil2_ready (concat $ intersperse "," $ map show $ phil2_ready m)
setPlaceMark nV_phil2_has_both (concat $ intersperse "," $ map show $ phil2_has_both m)
setPlaceMark nV_phil2_has_right (concat $ intersperse "," $ map show $ phil2_has_right m)
|
604ff1654873275c198cc37f875e13aa9fff1d34190ef32ee13204ff7b993864 | mirage/ocaml-git | hkt.ml | * This is a module used to share functionality needed by modules that
contain higher - kinded type behavior .
= Higher - Kinded Types
contain higher-kinded type behavior.
HKT = Higher-Kinded Types *)
module HKT = struct
type t
external inj : 'a -> 'b = "%identity"
external prj : 'a -> 'b = "%identity"
end
module Make_sched (T : sig
type +'a t
end) =
struct
type +'a s = 'a T.t
include HKT
end
module Make_store (T : sig
type ('k, 'v) t
end) =
struct
type ('a, 'b) s = ('a, 'b) T.t
include HKT
end
| null | https://raw.githubusercontent.com/mirage/ocaml-git/37c9ef41944b5b19117c34eee83ca672bb63f482/src/not-so-smart/hkt.ml | ocaml | * This is a module used to share functionality needed by modules that
contain higher - kinded type behavior .
= Higher - Kinded Types
contain higher-kinded type behavior.
HKT = Higher-Kinded Types *)
module HKT = struct
type t
external inj : 'a -> 'b = "%identity"
external prj : 'a -> 'b = "%identity"
end
module Make_sched (T : sig
type +'a t
end) =
struct
type +'a s = 'a T.t
include HKT
end
module Make_store (T : sig
type ('k, 'v) t
end) =
struct
type ('a, 'b) s = ('a, 'b) T.t
include HKT
end
| |
4a61a5beb90a1d7070d2f167fd72f76ab727eb372c056c08128c6ba239fbccf8 | anoma/juvix | Paths.hs | module Juvix.Extra.Paths
( module Juvix.Extra.Paths,
module Juvix.Extra.Paths.Base,
)
where
import Juvix.Extra.Paths.Base
import Juvix.Prelude.Base
import Juvix.Prelude.Path
import Language.Haskell.TH.Syntax
relToProject :: Path Rel a -> Path Abs a
relToProject r = $(projectPath) <//> r
assetsDir :: [(Path Rel File, ByteString)]
assetsDir = map (first relFile) $(assetsDirQ)
cssDir :: [(Path Rel File, ByteString)]
cssDir = map (first relFile) $(cssDirQ)
jsDir :: [(Path Rel File, ByteString)]
jsDir = map (first relFile) $(jsDirQ)
imagesDir :: [(Path Rel File, ByteString)]
imagesDir = map (first relFile) $(imagesDirQ)
-- | Given a relative file from the root of the project, checks that the file
-- exists and returns the absolute path
mkProjFile :: Path Rel File -> Q Exp
mkProjFile r = do
let p = relToProject r
ensureFile p
lift p
| null | https://raw.githubusercontent.com/anoma/juvix/22027f137c96845cb91c08d510e63fa4bc3f06e2/src/Juvix/Extra/Paths.hs | haskell | | Given a relative file from the root of the project, checks that the file
exists and returns the absolute path | module Juvix.Extra.Paths
( module Juvix.Extra.Paths,
module Juvix.Extra.Paths.Base,
)
where
import Juvix.Extra.Paths.Base
import Juvix.Prelude.Base
import Juvix.Prelude.Path
import Language.Haskell.TH.Syntax
relToProject :: Path Rel a -> Path Abs a
relToProject r = $(projectPath) <//> r
assetsDir :: [(Path Rel File, ByteString)]
assetsDir = map (first relFile) $(assetsDirQ)
cssDir :: [(Path Rel File, ByteString)]
cssDir = map (first relFile) $(cssDirQ)
jsDir :: [(Path Rel File, ByteString)]
jsDir = map (first relFile) $(jsDirQ)
imagesDir :: [(Path Rel File, ByteString)]
imagesDir = map (first relFile) $(imagesDirQ)
mkProjFile :: Path Rel File -> Q Exp
mkProjFile r = do
let p = relToProject r
ensureFile p
lift p
|
f589b24da58d27ff4edc8ba4a44643b1867418705ed4a364112ccdf9e79a680d | cjohansen/dumdom | dev.cljs | (ns dumdom.dev
(:require [dumdom.core :as dumdom :refer [defcomponent]]
[dumdom.dom :as d]
[snabbdom :as snabbdom]))
(enable-console-print!)
(def app (js/document.getElementById "app"))
(defonce store (atom {:things [{:text "Thing 1"
:id :t1}
{:text "Thing 2"
:id :t2}
{:text "Thing 3"
:id :t3}]}))
(defn mark-active [things id]
(mapv #(assoc % :active? (= (:id %) id)) things))
(defcomponent Thing
:keyfn :id
[{:keys [id idx active? text]}]
[:div {:style {:cursor "pointer"}
:key (name id)
:onClick (fn [e]
(swap! store update :things mark-active id))}
(if active?
[:strong text]
text)])
(defcomponent App [data]
[:div
[:h1 "HELLO"]
(map Thing (:things data))])
(defn render [state]
(dumdom/render (App state) app))
(add-watch store :render (fn [_ _ _ state]
(println "Render" state)
(render state)))
(render @store)
(def patch (snabbdom/init #js [snabbdom/styleModule]))
(comment
(swap! store assoc :things [])
(swap! store assoc :things [{:text "Thing 1"
:id :t1}
{:text "Thing 2"
:id :t2}
{:text "Thing 3"
:id :t3}])
(swap! store assoc :things [{:text "Thing 1"
:id :t1}
{:text "Thing 2"
:id :t2}
{:text "Thing 3"
:id :t3}
{:text "Thing 4"
:id :t4}
{:text "Thing 5"
:id :t5}])
(require '[quiescent.core :as q]
'[quiescent.dom :as qd])
(dumdom/render [:div {}
nil
[:div "Dumdom"]] app)
(dumdom/render [:div {}
[:div {:style {:opacity 0.3 :transition "opacity 500ms"}} "Hello"]
[:div "Dumdom"]] app)
(dumdom/render [:div {}
[:div {:style {:opacity 0.7 :transition "opacity 500ms"}} "Hello"]
[:div "Dumdom"]] app)
(def qel (js/document.createElement "div"))
(js/document.body.appendChild qel)
(q/render (qd/div {}
nil
(qd/div {} "Quiescent")) qel)
(q/render (qd/div {}
(qd/div {:style {:opacity 0.3 :transition "opacity 500ms"}}
"Hello!")
(qd/div {} "Quiescent")) qel)
(def el (js/document.createElement "div"))
(js/document.body.appendChild el)
(js/console.log #js {:style #js {:opacity 0.3 :transition "opacity 500ms"}})
(def vdom (patch el (snabbdom/h "!" #js {} "nil")))
(def vdom (patch vdom (snabbdom/h "div" #js {} #js ["OK"])))
(def vdom (patch el (snabbdom/vnode "" #js {} #js [])))
(def vdom (patch vdom (snabbdom/h "div" #js {} #js ["OK"])))
(def vdom (patch el (snabbdom/h "div" #js {} #js [(snabbdom/h "div" #js {} #js ["Hello from snabbdom"])])))
(def vdom (patch vdom (snabbdom/h
"div"
#js {}
#js [(snabbdom/h
"div"
#js {:style #js {:opacity 0.3 :transition "opacity 500ms"}}
#js ["Hello from snabbdom"])])))
(set! (.-innerHTML el) "Yo yoyo")
(set! (.. el -style -transition) "opacity 0.5s")
(set! (.. el -style -opacity) "0.3")
)
| null | https://raw.githubusercontent.com/cjohansen/dumdom/75c4d049bd9721a483bdbe0900f9c6f65cded19c/dev/dumdom/dev.cljs | clojure | (ns dumdom.dev
(:require [dumdom.core :as dumdom :refer [defcomponent]]
[dumdom.dom :as d]
[snabbdom :as snabbdom]))
(enable-console-print!)
(def app (js/document.getElementById "app"))
(defonce store (atom {:things [{:text "Thing 1"
:id :t1}
{:text "Thing 2"
:id :t2}
{:text "Thing 3"
:id :t3}]}))
(defn mark-active [things id]
(mapv #(assoc % :active? (= (:id %) id)) things))
(defcomponent Thing
:keyfn :id
[{:keys [id idx active? text]}]
[:div {:style {:cursor "pointer"}
:key (name id)
:onClick (fn [e]
(swap! store update :things mark-active id))}
(if active?
[:strong text]
text)])
(defcomponent App [data]
[:div
[:h1 "HELLO"]
(map Thing (:things data))])
(defn render [state]
(dumdom/render (App state) app))
(add-watch store :render (fn [_ _ _ state]
(println "Render" state)
(render state)))
(render @store)
(def patch (snabbdom/init #js [snabbdom/styleModule]))
(comment
(swap! store assoc :things [])
(swap! store assoc :things [{:text "Thing 1"
:id :t1}
{:text "Thing 2"
:id :t2}
{:text "Thing 3"
:id :t3}])
(swap! store assoc :things [{:text "Thing 1"
:id :t1}
{:text "Thing 2"
:id :t2}
{:text "Thing 3"
:id :t3}
{:text "Thing 4"
:id :t4}
{:text "Thing 5"
:id :t5}])
(require '[quiescent.core :as q]
'[quiescent.dom :as qd])
(dumdom/render [:div {}
nil
[:div "Dumdom"]] app)
(dumdom/render [:div {}
[:div {:style {:opacity 0.3 :transition "opacity 500ms"}} "Hello"]
[:div "Dumdom"]] app)
(dumdom/render [:div {}
[:div {:style {:opacity 0.7 :transition "opacity 500ms"}} "Hello"]
[:div "Dumdom"]] app)
(def qel (js/document.createElement "div"))
(js/document.body.appendChild qel)
(q/render (qd/div {}
nil
(qd/div {} "Quiescent")) qel)
(q/render (qd/div {}
(qd/div {:style {:opacity 0.3 :transition "opacity 500ms"}}
"Hello!")
(qd/div {} "Quiescent")) qel)
(def el (js/document.createElement "div"))
(js/document.body.appendChild el)
(js/console.log #js {:style #js {:opacity 0.3 :transition "opacity 500ms"}})
(def vdom (patch el (snabbdom/h "!" #js {} "nil")))
(def vdom (patch vdom (snabbdom/h "div" #js {} #js ["OK"])))
(def vdom (patch el (snabbdom/vnode "" #js {} #js [])))
(def vdom (patch vdom (snabbdom/h "div" #js {} #js ["OK"])))
(def vdom (patch el (snabbdom/h "div" #js {} #js [(snabbdom/h "div" #js {} #js ["Hello from snabbdom"])])))
(def vdom (patch vdom (snabbdom/h
"div"
#js {}
#js [(snabbdom/h
"div"
#js {:style #js {:opacity 0.3 :transition "opacity 500ms"}}
#js ["Hello from snabbdom"])])))
(set! (.-innerHTML el) "Yo yoyo")
(set! (.. el -style -transition) "opacity 0.5s")
(set! (.. el -style -opacity) "0.3")
)
| |
b584ff65d6d130cc9a909760961c186e6905198375941ad31885ef350e4a9a4a | phillord/tawny-owl | type.clj | The contents of this file are subject to the LGPL License , Version 3.0 .
Copyright ( C ) 2012 , 2013 , 2014 , 2017 , Newcastle University
;; This program is free software: you can redistribute it and/or modify
;; it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;; (at your option) any later version.
;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See they
GNU Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public License
along with this program . If not , see /.
(ns
^{:doc "Type based predicates for OWL."
:author "Phillip Lord"}
tawny.type
(:require [tawny.protocol])
(:import (org.semanticweb.owlapi.model
IRI
OWLAnonymousIndividual
OWLAnnotationProperty
OWLAnnotationValue
OWLClass
OWLClassExpression
OWLDataProperty
OWLDataPropertyExpression
OWLDataRange
OWLDatatype
OWLIndividual
OWLLiteral
OWLNamedObject
OWLObjectProperty
OWLObjectPropertyExpression
OWLOntology)))
(defn- entity-instance?
"Return true if the entity of `e` is an instance of `type`."
[type e]
(instance? type (tawny.protocol/as-entity e)))
(defn iri?
"Return true if `e` is an instance of `IRI`."
[e]
(entity-instance? IRI e))
(defn ann-val?
"Return true if `e` is an instance of `OWLAnnotationValue`."
[e]
(entity-instance? OWLAnnotationValue e))
(defn ann-prop?
"Return true if `e` is an instance of `OWLAnnotationProperty`."
[e]
(entity-instance? OWLAnnotationProperty e))
(defn anonymous?
"Return true if `e` is an instance of `OWLAnonymousIndividual`."
[e]
(entity-instance? OWLAnonymousIndividual e))
(defn owl-class?
"Return true if `e` is an instance of `OWLClass`."
[e]
(entity-instance? OWLClass e))
(defn class-exp?
"Return true if `e` is an instance of `OWLClassExpression`."
[e]
(entity-instance? OWLClassExpression e))
(defn data-prop?
"Return true if `e` is an instance of `OWLDataProperty`."
[e]
(entity-instance? OWLDataProperty e))
(defn data-prop-exp?
"Return true if `e` is an instance of `OWLDataPropertyExpression`."
[e]
(entity-instance? OWLDataPropertyExpression e))
(defn data-range?
"Return true if `e` is an instance of `OWLDataRange`."
[e]
(entity-instance? OWLDataRange e))
(defn data-type?
"Return true if `e` is an instance of `OWLDatatype`."
[e]
(entity-instance? OWLDatatype e))
(defn individual?
"Return true if `e` is an instance of `OWLIndividual`."
[e]
(entity-instance? OWLIndividual e))
(defn literal?
"Return true if `e` is an instance of `OWLLiteral`."
[e]
(entity-instance? OWLLiteral e))
(defn named?
"Return true if `e` is an instance of `OWLNamedObject`."
[e]
(entity-instance? OWLNamedObject e))
(defn obj-prop?
"Return true if `e` is an instance of `OWLObjectProperty`."
[e]
(entity-instance? OWLObjectProperty e))
(defn obj-prop-exp?
"Return true if `e` is an instance of `OWLObjectPropertyExpression`."
[e]
(entity-instance? OWLObjectPropertyExpression e))
(defn ontology?
"Return true if `e` is an instance of `OWLOntology`."
[e]
(entity-instance? OWLOntology e))
| null | https://raw.githubusercontent.com/phillord/tawny-owl/331e14b838a42adebbd325f80f60830fa0915d76/src/tawny/type.clj | clojure | This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See they | The contents of this file are subject to the LGPL License , Version 3.0 .
Copyright ( C ) 2012 , 2013 , 2014 , 2017 , Newcastle University
the Free Software Foundation , either version 3 of the License , or
GNU Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public License
along with this program . If not , see /.
(ns
^{:doc "Type based predicates for OWL."
:author "Phillip Lord"}
tawny.type
(:require [tawny.protocol])
(:import (org.semanticweb.owlapi.model
IRI
OWLAnonymousIndividual
OWLAnnotationProperty
OWLAnnotationValue
OWLClass
OWLClassExpression
OWLDataProperty
OWLDataPropertyExpression
OWLDataRange
OWLDatatype
OWLIndividual
OWLLiteral
OWLNamedObject
OWLObjectProperty
OWLObjectPropertyExpression
OWLOntology)))
(defn- entity-instance?
"Return true if the entity of `e` is an instance of `type`."
[type e]
(instance? type (tawny.protocol/as-entity e)))
(defn iri?
"Return true if `e` is an instance of `IRI`."
[e]
(entity-instance? IRI e))
(defn ann-val?
"Return true if `e` is an instance of `OWLAnnotationValue`."
[e]
(entity-instance? OWLAnnotationValue e))
(defn ann-prop?
"Return true if `e` is an instance of `OWLAnnotationProperty`."
[e]
(entity-instance? OWLAnnotationProperty e))
(defn anonymous?
"Return true if `e` is an instance of `OWLAnonymousIndividual`."
[e]
(entity-instance? OWLAnonymousIndividual e))
(defn owl-class?
"Return true if `e` is an instance of `OWLClass`."
[e]
(entity-instance? OWLClass e))
(defn class-exp?
"Return true if `e` is an instance of `OWLClassExpression`."
[e]
(entity-instance? OWLClassExpression e))
(defn data-prop?
"Return true if `e` is an instance of `OWLDataProperty`."
[e]
(entity-instance? OWLDataProperty e))
(defn data-prop-exp?
"Return true if `e` is an instance of `OWLDataPropertyExpression`."
[e]
(entity-instance? OWLDataPropertyExpression e))
(defn data-range?
"Return true if `e` is an instance of `OWLDataRange`."
[e]
(entity-instance? OWLDataRange e))
(defn data-type?
"Return true if `e` is an instance of `OWLDatatype`."
[e]
(entity-instance? OWLDatatype e))
(defn individual?
"Return true if `e` is an instance of `OWLIndividual`."
[e]
(entity-instance? OWLIndividual e))
(defn literal?
"Return true if `e` is an instance of `OWLLiteral`."
[e]
(entity-instance? OWLLiteral e))
(defn named?
"Return true if `e` is an instance of `OWLNamedObject`."
[e]
(entity-instance? OWLNamedObject e))
(defn obj-prop?
"Return true if `e` is an instance of `OWLObjectProperty`."
[e]
(entity-instance? OWLObjectProperty e))
(defn obj-prop-exp?
"Return true if `e` is an instance of `OWLObjectPropertyExpression`."
[e]
(entity-instance? OWLObjectPropertyExpression e))
(defn ontology?
"Return true if `e` is an instance of `OWLOntology`."
[e]
(entity-instance? OWLOntology e))
|
368209db13c9a29536675c37de0ea7e98d9d8947fefb16e648024b987fca7eb3 | bgamari/io-uring | PVar.hs | # LANGUAGE UnboxedTuples #
# LANGUAGE MagicHash #
{-# LANGUAGE BangPatterns #-}
# LANGUAGE ScopedTypeVariables #
module System.Linux.IO.URing.PVar
( PVar
, newPVar
, Prim(..)
) where
import GHC.Exts
import GHC.IO
import GHC.Word
data PVar a = PVar (MutableByteArray# RealWorld)
class Prim a where
sizeOf :: a -> Int
readPVar :: PVar a -> IO a
writePVar :: PVar a -> a -> IO ()
instance Prim Word32 where
sizeOf _ = 4
readPVar (PVar mba) = IO $ \s ->
case readWord32Array# mba 0# s of (# s', r #) -> (# s', W32# r #)
writePVar (PVar mba) (W32# x) = IO $ \s ->
case writeWord32Array# mba 0# x s of s' -> (# s', () #)
newPVar :: forall a. Prim a => a -> IO (PVar a)
newPVar x = do
pvar <- create
writePVar pvar x
return pvar
where
create =
IO $ \s -> case newByteArray# size s of
(# s', mba #) -> (# s', PVar mba #)
!(I# size) = sizeOf (undefined :: a)
| null | https://raw.githubusercontent.com/bgamari/io-uring/3917e39ac9915daa72b59f523fa0cb118eb10784/src/System/Linux/IO/URing/PVar.hs | haskell | # LANGUAGE BangPatterns # | # LANGUAGE UnboxedTuples #
# LANGUAGE MagicHash #
# LANGUAGE ScopedTypeVariables #
module System.Linux.IO.URing.PVar
( PVar
, newPVar
, Prim(..)
) where
import GHC.Exts
import GHC.IO
import GHC.Word
data PVar a = PVar (MutableByteArray# RealWorld)
class Prim a where
sizeOf :: a -> Int
readPVar :: PVar a -> IO a
writePVar :: PVar a -> a -> IO ()
instance Prim Word32 where
sizeOf _ = 4
readPVar (PVar mba) = IO $ \s ->
case readWord32Array# mba 0# s of (# s', r #) -> (# s', W32# r #)
writePVar (PVar mba) (W32# x) = IO $ \s ->
case writeWord32Array# mba 0# x s of s' -> (# s', () #)
newPVar :: forall a. Prim a => a -> IO (PVar a)
newPVar x = do
pvar <- create
writePVar pvar x
return pvar
where
create =
IO $ \s -> case newByteArray# size s of
(# s', mba #) -> (# s', PVar mba #)
!(I# size) = sizeOf (undefined :: a)
|
7a313b13890d1dbb2cd50fc586a2735c8620b1db614827aa09059542d5499166 | k3nn7/algoliasearch-client-erlang | algolia_index_test.erl | -module(algolia_index_test).
-compile(export_all).
-include_lib("eunit/include/eunit.hrl").
%% Adding an object without an id POSTs it to the index root and
%% returns the API's task info unchanged.
add_object_test() ->
    Object = #{<<"content">> => <<"foo bar">>},
    ExpectedRequest = {write, post, "/1/indexes/baz", Object},
    ExpectedResult = {ok,
        #{<<"createdAt">> => <<"2016-01-24T08:34:47.700Z">>,
          <<"objectID">> => <<"129196290">>,
          <<"taskID">> => 699175850}},
    Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
    Index = algolia:init_index(Client, "baz"),
    ?assertEqual(ExpectedResult, algolia_index:add_object(Index, Object)).
%% An object that already carries an objectID is PUT to a URL that
%% embeds the URL-encoded id.
add_object_with_given_id_test() ->
    Object = #{
        <<"objectID">> => <<"4321 1234">>,
        <<"content">> => <<"foo bar">>
    },
    ExpectedRequest = {write, put, "/1/indexes/baz/4321%201234", Object},
    ExpectedResult = {ok,
        #{<<"createdAt">> => <<"2016-01-24T08:34:47.700Z">>,
          <<"objectID">> => <<"4321 1234">>,
          <<"taskID">> => 699175850}},
    Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
    Index = algolia:init_index(Client, "baz"),
    ?assertEqual(ExpectedResult, algolia_index:add_object(Index, Object)).
%% Updating an object PUTs the full object to its URL-encoded id path.
update_object_test() ->
    Object = #{
        <<"objectID">> => <<"4321 1234">>,
        <<"content">> => <<"foo bar">>
    },
    ExpectedRequest = {write, put, "/1/indexes/baz/4321%201234", Object},
    ExpectedResult = {ok,
        #{<<"objectID">> => <<"4321 1234">>,
          <<"taskID">> => 699180670,
          <<"updatedAt">> => <<"2016-01-24T08:37:05.242Z">>}},
    Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
    Index = algolia:init_index(Client, "baz"),
    ?assertEqual(ExpectedResult, algolia_index:update_object(Index, Object)).
%% A space in the objectID must be percent-encoded in the request path.
update_object_with_escaped_id_test() ->
    Object = #{
        <<"objectID">> => <<"foo bar">>,
        <<"content">> => <<"foo bar">>
    },
    ExpectedRequest = {write, put, "/1/indexes/baz/foo%20bar", Object},
    ExpectedResult = {ok,
        #{<<"objectID">> => <<"4321">>,
          <<"taskID">> => 699180670,
          <<"updatedAt">> => <<"2016-01-24T08:37:05.242Z">>}},
    Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
    Index = algolia:init_index(Client, "baz"),
    ?assertEqual(ExpectedResult, algolia_index:update_object(Index, Object)).
%% A partial update POSTs to the object's /partial sub-resource.
partial_update_object_test() ->
    Object = #{
        <<"objectID">> => <<"4321 1234">>,
        <<"content">> => <<"foo bar">>
    },
    ExpectedRequest = {write, post, "/1/indexes/baz/4321%201234/partial", Object},
    ExpectedResult = {ok,
        #{<<"objectID">> => <<"4321 1234">>,
          <<"taskID">> => 699180670,
          <<"updatedAt">> => <<"2016-01-24T08:37:05.242Z">>}},
    Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
    Index = algolia:init_index(Client, "baz"),
    ?assertEqual(ExpectedResult, algolia_index:partial_update_object(Index, Object)).
%% Deleting an object issues DELETE on its URL-encoded id path.
delete_object_test() ->
    ExpectedRequest = {write, delete, "/1/indexes/baz/4321%201234"},
    ExpectedResult = {ok,
        #{<<"deletedAt">> => <<"2016-01-24T08:40:40.717Z">>,
          <<"objectID">> => <<"4321 1234">>,
          <<"taskID">> => 1012510111}},
    Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
    Index = algolia:init_index(Client, "baz"),
    ?assertEqual(ExpectedResult, algolia_index:delete_object(Index, <<"4321 1234">>)).
%% A plain search POSTs the encoded query to the /query endpoint and
%% passes the hit list through untouched.
search_test() ->
    RequestBody = #{<<"params">> => <<"query=foo">>},
    ExpectedRequest = {read, post, "/1/indexes/baz/query", RequestBody},
    ExpectedResult = {ok, #{<<"hits">> => [
            #{<<"_highlightResult">> => #{<<"content">> => #{<<"matchLevel">> => <<"full">>,
                  <<"matchedWords">> => [<<"foo">>],
                  <<"value">> => <<"<em>foo</em>">>}},
              <<"content">> => <<"foo">>,
              <<"objectID">> => <<"53383650">>}],
        <<"hitsPerPage">> => 20,
        <<"nbHits">> => 1,
        <<"nbPages">> => 1,
        <<"page">> => 0,
        <<"params">> => <<"query=foo">>,
        <<"processingTimeMS">> => 1,
        <<"query">> => <<"foo">>}},
    Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
    Index = algolia:init_index(Client, "baz"),
    ?assertEqual(ExpectedResult, algolia_index:search(Index, <<"foo">>)).
%% The query string itself must be percent-encoded inside "params".
search_encode_query_test() ->
    RequestBody = #{<<"params">> => <<"query=foo%20bar">>},
    ExpectedRequest = {read, post, "/1/indexes/baz/query", RequestBody},
    ExpectedResult = {ok, #{<<"hits">> => [
            #{<<"_highlightResult">> => #{<<"content">> => #{<<"matchLevel">> => <<"full">>,
                  <<"matchedWords">> => [<<"foo">>, <<"bar">>],
                  <<"value">> => <<"<em>foo</em> <em>bar</em>">>}},
              <<"content">> => <<"foo bar">>,
              <<"objectID">> => <<"129196290">>}],
        <<"hitsPerPage">> => 20,
        <<"nbHits">> => 1,
        <<"nbPages">> => 1,
        <<"page">> => 0,
        <<"params">> => <<"query=foo%20bar">>,
        <<"processingTimeMS">> => 1,
        <<"query">> => <<"foo bar">>}},
    Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
    Index = algolia:init_index(Client, "baz"),
    ?assertEqual(ExpectedResult, algolia_index:search(Index, <<"foo bar">>)).
%% Extra search options are merged into "params" as URL-encoded
%% key=value pairs, sorted alphabetically before the query.
search_with_additional_parameters_test() ->
    RequestBody = #{<<"params">> => <<"getRankingInfo=1&hitsPerPage=10&query=foo%20bar&queryType=prefixAll">>},
    ExpectedRequest = {read, post, "/1/indexes/baz/query", RequestBody},
    ExpectedResult = {ok, #{<<"hits">> => [
            #{<<"_highlightResult">> => #{<<"content">> => #{<<"matchLevel">> => <<"full">>,
                  <<"matchedWords">> => [<<"foo">>, <<"bar">>],
                  <<"value">> => <<"<em>foo</em> <em>bar</em>">>}},
              <<"content">> => <<"foo bar">>,
              <<"objectID">> => <<"129196290">>}],
        <<"hitsPerPage">> => 20,
        <<"nbHits">> => 1,
        <<"nbPages">> => 1,
        <<"page">> => 0,
        <<"params">> => <<"query=foo%20bar">>,
        <<"processingTimeMS">> => 1,
        <<"query">> => <<"foo bar">>}},
    Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
    Index = algolia:init_index(Client, "baz"),
    ?assertEqual(
        ExpectedResult,
        algolia_index:search(Index, <<"foo bar">>, #{
            <<"queryType">> => <<"prefixAll">>,
            <<"hitsPerPage">> => 10,
            <<"getRankingInfo">> => 1
        })
    ).
%% Fetching an object GETs its URL-encoded id path.
get_object_test() ->
    ExpectedRequest = {read, get, "/1/indexes/baz/4321%201234"},
    ExpectedResult = {ok,
        #{<<"content">> => <<"foo bar">>, <<"objectID">> => <<"4321 1234">>}},
    Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
    Index = algolia:init_index(Client, "baz"),
    ?assertEqual(ExpectedResult, algolia_index:get_object(Index, <<"4321 1234">>)).
%% A comma-separated attribute filter is passed as an encoded query
%% string parameter (note the singular "attribute" key).
get_object_with_attributes_test() ->
    ExpectedRequest = {read, get, "/1/indexes/baz/4321%201234?attribute=name%2Cage"},
    ExpectedResult = {ok,
        #{<<"content">> => <<"foo bar">>, <<"objectID">> => <<"4321 1234">>}},
    Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
    Index = algolia:init_index(Client, "baz"),
    ?assertEqual(ExpectedResult, algolia_index:get_object(Index, <<"4321 1234">>, <<"name,age">>)).
%% Index settings are read with GET /settings.
get_settings_test() ->
    ExpectedRequest = {read, get, "/1/indexes/baz/settings"},
    ExpectedResult = {ok,
        #{<<"attributeForDistinct">> => null,
          <<"snippetEllipsisText">> => <<>>,
          <<"unretrievableAttributes">> => null}},
    Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
    Index = algolia:init_index(Client, "baz"),
    ?assertEqual(ExpectedResult, algolia_index:get_settings(Index)).
%% Index settings are written with PUT /settings, body passed verbatim.
set_settings_test() ->
    RequestBody = #{
        <<"hitsPerPage">> => 50,
        <<"attributesToIndex">> => [<<"name">>, <<"email">>]
    },
    ExpectedRequest = {write, put, "/1/indexes/baz/settings", RequestBody},
    ExpectedResult = {ok,
        #{<<"taskID">> => 699197950, <<"updatedAt">> => <<"2016-01-24T08:55:13.504Z">>}},
    Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
    Index = algolia:init_index(Client, "baz"),
    ?assertEqual(ExpectedResult, algolia_index:set_settings(Index, RequestBody)).
%% Deleting an index issues DELETE on the index root.
delete_test() ->
    ExpectedRequest = {write, delete, "/1/indexes/baz"},
    ExpectedResult = {ok,
        #{<<"deletedAt">> => <<"2016-01-24T08:57:49.752Z">>, <<"taskID">> => 699201240}},
    Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
    Index = algolia:init_index(Client, "baz"),
    ?assertEqual(ExpectedResult, algolia_index:delete(Index)).
%% The index name itself must be percent-encoded in the path.
delete_escaped_index_name_test() ->
    ExpectedRequest = {write, delete, "/1/indexes/foo%20bar"},
    ExpectedResult = {ok,
        #{<<"deletedAt">> => <<"2016-01-24T08:57:49.752Z">>, <<"taskID">> => 699201240}},
    Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
    Index = algolia:init_index(Client, "foo bar"),
    ?assertEqual(ExpectedResult, algolia_index:delete(Index)).
%% Clearing an index POSTs to its /clear sub-resource.
clear_test() ->
    ExpectedRequest = {write, post, "/1/indexes/baz/clear"},
    ExpectedResult = {ok,
        #{<<"taskID">> => 699197950, <<"updatedAt">> => <<"2016-01-24T08:55:13.504Z">>}},
    Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
    Index = algolia:init_index(Client, "baz"),
    ?assertEqual(ExpectedResult, algolia_index:clear(Index)).
| null | https://raw.githubusercontent.com/k3nn7/algoliasearch-client-erlang/640f3da5161975a77f45f563ebab22edbf1fb021/test/algolia_index_test.erl | erlang | -module(algolia_index_test).
-compile(export_all).
-include_lib("eunit/include/eunit.hrl").
add_object_test() ->
Object = #{<<"content">> => <<"foo bar">>},
ExpectedRequest = {write, post, "/1/indexes/baz", Object},
ExpectedResult = {ok,
#{<<"createdAt">> => <<"2016-01-24T08:34:47.700Z">>,
<<"objectID">> => <<"129196290">>,
<<"taskID">> => 699175850}},
Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
Index = algolia:init_index(Client, "baz"),
?assertEqual(ExpectedResult, algolia_index:add_object(Index, Object)).
add_object_with_given_id_test() ->
Object = #{
<<"objectID">> => <<"4321 1234">>,
<<"content">> => <<"foo bar">>
},
ExpectedRequest = {write, put, "/1/indexes/baz/4321%201234", Object},
ExpectedResult = {ok,
#{<<"createdAt">> => <<"2016-01-24T08:34:47.700Z">>,
<<"objectID">> => <<"4321 1234">>,
<<"taskID">> => 699175850}},
Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
Index = algolia:init_index(Client, "baz"),
?assertEqual(ExpectedResult, algolia_index:add_object(Index, Object)).
update_object_test() ->
Object = #{
<<"objectID">> => <<"4321 1234">>,
<<"content">> => <<"foo bar">>
},
ExpectedRequest = {write, put, "/1/indexes/baz/4321%201234", Object},
ExpectedResult = {ok,
#{<<"objectID">> => <<"4321 1234">>,
<<"taskID">> => 699180670,
<<"updatedAt">> => <<"2016-01-24T08:37:05.242Z">>}},
Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
Index = algolia:init_index(Client, "baz"),
?assertEqual(ExpectedResult, algolia_index:update_object(Index, Object)).
update_object_with_escaped_id_test() ->
Object = #{
<<"objectID">> => <<"foo bar">>,
<<"content">> => <<"foo bar">>
},
ExpectedRequest = {write, put, "/1/indexes/baz/foo%20bar", Object},
ExpectedResult = {ok,
#{<<"objectID">> => <<"4321">>,
<<"taskID">> => 699180670,
<<"updatedAt">> => <<"2016-01-24T08:37:05.242Z">>}},
Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
Index = algolia:init_index(Client, "baz"),
?assertEqual(ExpectedResult, algolia_index:update_object(Index, Object)).
partial_update_object_test() ->
Object = #{
<<"objectID">> => <<"4321 1234">>,
<<"content">> => <<"foo bar">>
},
ExpectedRequest = {write, post, "/1/indexes/baz/4321%201234/partial", Object},
ExpectedResult = {ok,
#{<<"objectID">> => <<"4321 1234">>,
<<"taskID">> => 699180670,
<<"updatedAt">> => <<"2016-01-24T08:37:05.242Z">>}},
Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
Index = algolia:init_index(Client, "baz"),
?assertEqual(ExpectedResult, algolia_index:partial_update_object(Index, Object)).
delete_object_test() ->
ExpectedRequest = {write, delete, "/1/indexes/baz/4321%201234"},
ExpectedResult = {ok,
#{<<"deletedAt">> => <<"2016-01-24T08:40:40.717Z">>,
<<"objectID">> => <<"4321 1234">>,
<<"taskID">> => 1012510111}},
Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
Index = algolia:init_index(Client, "baz"),
?assertEqual(ExpectedResult, algolia_index:delete_object(Index, <<"4321 1234">>)).
search_test() ->
RequestBody = #{<<"params">> => <<"query=foo">>},
ExpectedRequest = {read, post, "/1/indexes/baz/query", RequestBody},
ExpectedResult = {ok, #{<<"hits">> => [
#{<<"_highlightResult">> => #{<<"content">> => #{<<"matchLevel">> => <<"full">>,
<<"matchedWords">> => [<<"foo">>],
<<"value">> => <<"<em>foo</em>">>}},
<<"content">> => <<"foo">>,
<<"objectID">> => <<"53383650">>}],
<<"hitsPerPage">> => 20,
<<"nbHits">> => 1,
<<"nbPages">> => 1,
<<"page">> => 0,
<<"params">> => <<"query=foo">>,
<<"processingTimeMS">> => 1,
<<"query">> => <<"foo">>}},
Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
Index = algolia:init_index(Client, "baz"),
?assertEqual(ExpectedResult, algolia_index:search(Index, <<"foo">>)).
search_encode_query_test() ->
RequestBody = #{<<"params">> => <<"query=foo%20bar">>},
ExpectedRequest = {read, post, "/1/indexes/baz/query", RequestBody},
ExpectedResult = {ok, #{<<"hits">> => [
#{<<"_highlightResult">> => #{<<"content">> => #{<<"matchLevel">> => <<"full">>,
<<"matchedWords">> => [<<"foo">>, <<"bar">>],
<<"value">> => <<"<em>foo</em> <em>bar</em>">>}},
<<"content">> => <<"foo bar">>,
<<"objectID">> => <<"129196290">>}],
<<"hitsPerPage">> => 20,
<<"nbHits">> => 1,
<<"nbPages">> => 1,
<<"page">> => 0,
<<"params">> => <<"query=foo%20bar">>,
<<"processingTimeMS">> => 1,
<<"query">> => <<"foo bar">>}},
Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
Index = algolia:init_index(Client, "baz"),
?assertEqual(ExpectedResult, algolia_index:search(Index, <<"foo bar">>)).
search_with_additional_parameters_test() ->
RequestBody = #{<<"params">> => <<"getRankingInfo=1&hitsPerPage=10&query=foo%20bar&queryType=prefixAll">>},
ExpectedRequest = {read, post, "/1/indexes/baz/query", RequestBody},
ExpectedResult = {ok, #{<<"hits">> => [
#{<<"_highlightResult">> => #{<<"content">> => #{<<"matchLevel">> => <<"full">>,
<<"matchedWords">> => [<<"foo">>, <<"bar">>],
<<"value">> => <<"<em>foo</em> <em>bar</em>">>}},
<<"content">> => <<"foo bar">>,
<<"objectID">> => <<"129196290">>}],
<<"hitsPerPage">> => 20,
<<"nbHits">> => 1,
<<"nbPages">> => 1,
<<"page">> => 0,
<<"params">> => <<"query=foo%20bar">>,
<<"processingTimeMS">> => 1,
<<"query">> => <<"foo bar">>}},
Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
Index = algolia:init_index(Client, "baz"),
?assertEqual(
ExpectedResult,
algolia_index:search(Index, <<"foo bar">>, #{
<<"queryType">> => <<"prefixAll">>,
<<"hitsPerPage">> => 10,
<<"getRankingInfo">> => 1
})
).
get_object_test() ->
ExpectedRequest = {read, get, "/1/indexes/baz/4321%201234"},
ExpectedResult = {ok,
#{<<"content">> => <<"foo bar">>, <<"objectID">> => <<"4321 1234">>}},
Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
Index = algolia:init_index(Client, "baz"),
?assertEqual(ExpectedResult, algolia_index:get_object(Index, <<"4321 1234">>)).
get_object_with_attributes_test() ->
ExpectedRequest = {read, get, "/1/indexes/baz/4321%201234?attribute=name%2Cage"},
ExpectedResult = {ok,
#{<<"content">> => <<"foo bar">>, <<"objectID">> => <<"4321 1234">>}},
Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
Index = algolia:init_index(Client, "baz"),
?assertEqual(ExpectedResult, algolia_index:get_object(Index, <<"4321 1234">>, <<"name,age">>)).
get_settings_test() ->
ExpectedRequest = {read, get, "/1/indexes/baz/settings"},
ExpectedResult = {ok,
#{<<"attributeForDistinct">> => null,
<<"snippetEllipsisText">> => <<>>,
<<"unretrievableAttributes">> => null}},
Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
Index = algolia:init_index(Client, "baz"),
?assertEqual(ExpectedResult, algolia_index:get_settings(Index)).
set_settings_test() ->
RequestBody = #{
<<"hitsPerPage">> => 50,
<<"attributesToIndex">> => [<<"name">>, <<"email">>]
},
ExpectedRequest = {write, put, "/1/indexes/baz/settings", RequestBody},
ExpectedResult = {ok,
#{<<"taskID">> => 699197950, <<"updatedAt">> => <<"2016-01-24T08:55:13.504Z">>}},
Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
Index = algolia:init_index(Client, "baz"),
?assertEqual(ExpectedResult, algolia_index:set_settings(Index, RequestBody)).
delete_test() ->
ExpectedRequest = {write, delete, "/1/indexes/baz"},
ExpectedResult = {ok,
#{<<"deletedAt">> => <<"2016-01-24T08:57:49.752Z">>, <<"taskID">> => 699201240}},
Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
Index = algolia:init_index(Client, "baz"),
?assertEqual(ExpectedResult, algolia_index:delete(Index)).
delete_escaped_index_name_test() ->
ExpectedRequest = {write, delete, "/1/indexes/foo%20bar"},
ExpectedResult = {ok,
#{<<"deletedAt">> => <<"2016-01-24T08:57:49.752Z">>, <<"taskID">> => 699201240}},
Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
Index = algolia:init_index(Client, "foo bar"),
?assertEqual(ExpectedResult, algolia_index:delete(Index)).
clear_test() ->
ExpectedRequest = {write, post, "/1/indexes/baz/clear"},
ExpectedResult = {ok,
#{<<"taskID">> => 699197950, <<"updatedAt">> => <<"2016-01-24T08:55:13.504Z">>}},
Client = algolia_mock_client:make(ExpectedRequest, ExpectedResult),
Index = algolia:init_index(Client, "baz"),
?assertEqual(ExpectedResult, algolia_index:clear(Index)).
| |
3dc652de358601843d812cbc334d0272342440cb6b56a9378f03a26b7a4f6ffb | soulomoon/SICP | Exercise4.35.scm | Exercise 4.35: Write a procedure an-integer-between that returns an integer between two given bounds. This can be used to implement a procedure that finds Pythagorean triples, i.e., triples of integers (i, j, k) between the given bounds such that i ≤ j and i² + j² = k², as follows:
; (define (a-pythagorean-triple-between low high)
; (let ((i (an-integer-between low high)))
; (let ((j (an-integer-between i high)))
; (let ((k (an-integer-between j high)))
; (require (= (+ (* i i) (* j j))
; (* k k)))
; (list i j k)))))
#lang swindle
;; Square of a number.
(define (square n) (* n n))
;; Fail the current nondeterministic branch (backtrack via amb)
;; unless p is true.
(define (require p)
  (if (not p) (amb)))
;; Nondeterministically choose an integer n with low <= n <= high;
;; fails (backtracks) when the range is empty.
(define (an-integer-between low high)
  (require (<= low high))
  (amb low (an-integer-between (+ low 1) high)))
;; Nondeterministically find a triple (i j k) with
;; low <= i <= j <= k <= high and i^2 + j^2 = k^2.
(define (a-pythagorean-triple-between low high)
  (let* ((i (an-integer-between low high))
         (j (an-integer-between i high))
         (k (an-integer-between j high)))
    (require (= (+ (square i) (square j))
                (square k)))
    (list i j k)))
;; Collect and display every triple with components in [1, 100].
(print (amb-collect (a-pythagorean-triple-between 1 100)))
; Welcome to DrRacket, version 6.7 [3m].
; Language: swindle, with debugging; memory limit: 128 MB.
; ( ( 3 4 5 ) ( 5 12 13 ) ( 6 8 10 ) ( 7 24 25 ) ( 8 15 17 ) ( 9 12 15 ) ( 9 40 41 ) ( 10 24 26 ) ( 11 60 61 ) ( 12 16 20 ) ( 12 35 37 ) ( 13 84 85 ) ( 14 48 50 ) ( 15 20 25 ) ( 15 36 39 ) ( 16 30 34 ) ( 16 63 65 ) ( 18 24 30 ) ( 18 80 82 ) ( 20 21 29 ) ... )
; > | null | https://raw.githubusercontent.com/soulomoon/SICP/1c6cbf5ecf6397eaeb990738a938d48c193af1bb/Chapter4/Exercise4.35.scm | scheme | (define (a-pythagorean-triple-between low high)
(let ((i (an-integer-between low high)))
(let ((j (an-integer-between i high)))
(let ((k (an-integer-between j high)))
(require (= (+ (* i i) (* j j))
(* k k)))
(list i j k)))))
memory limit : 128 MB .
> | Exercise 4.35 : Write a procedure an - integer - between that returns an integer between two given bounds . This can be used to implement a procedure that finds Pythagorean triples , i.e. , triples of integers ( i , j , ) between the given bounds such that i≤ji≤j and i2+j2 = k2i2+j2 = k2 , as follows :
#lang swindle
(define (square x) (* x x))
(define (require p)
(if (not p) (amb)))
(define (an-integer-between low high)
(require (<= low high))
(amb low (an-integer-between (+ low 1) high)))
(define (a-pythagorean-triple-between low high)
(let* ((i (an-integer-between low high))
(j (an-integer-between i high))
(k (an-integer-between j high)))
(require (= (+ (square i) (square j))
(square k)))
(list i j k)))
(print (amb-collect (a-pythagorean-triple-between 1 100)))
Welcome to , version 6.7 [ 3 m ] .
( ( 3 4 5 ) ( 5 12 13 ) ( 6 8 10 ) ( 7 24 25 ) ( 8 15 17 ) ( 9 12 15 ) ( 9 40 41 ) ( 10 24 26 ) ( 11 60 61 ) ( 12 16 20 ) ( 12 35 37 ) ( 13 84 85 ) ( 14 48 50 ) ( 15 20 25 ) ( 15 36 39 ) ( 16 30 34 ) ( 16 63 65 ) ( 18 24 30 ) ( 18 80 82 ) ( 20 21 29 ) ... ) |
d51411749b4eb39e2fc2ecc7014e143c9ac940726ebef1f87f269ef0406ebeb6 | AstRonin/sgi | sgi.erl | -module(sgi).
-compile(export_all).
%% Look up `Key` in proplist `List`; `undefined` when absent.
-spec pv(Key, List) -> term() when
      Key :: term(),
      List :: [term()].
pv(Key, List) -> pv(Key, List, undefined).

%% Look up `Key` in proplist `List`, falling back to `Default`.
-spec pv(Key, List, Default) -> term() when
      Key :: term(),
      List :: [term()],
      Default :: term().
pv(Key, List, Default) -> proplists:get_value(Key, List, Default).
%% Look up `Key` in map `Map`; `undefined` when absent.
-spec mv(Key, Map) -> term() when
      Key :: term(),
      Map :: map().
mv(Key, Map) -> mv(Key, Map, undefined).

%% Look up `Key` in map `Map`, falling back to `Default` when the key
%% is absent or `Map` is not actually a map.
-spec mv(Key, Map, Default) -> term() when
      Key :: term(),
      Map :: map(),
      Default :: term().
mv(Key, Map, Default) ->
    try maps:get(Key, Map, Default)
    catch
        _:_ -> Default
    end.
%% Cancel a timer created with erlang:send_after/3 or
%% erlang:start_timer/3. With {async, true} and {info, false},
%% erlang:cancel_timer/2 always returns `ok`, so ct/1 returns `ok`
%% for every input; the old spec advertising `Time | false` results
%% was unreachable.
-spec ct(TimerRef :: reference() | term()) -> ok.
ct(Timer) when is_reference(Timer) ->
    erlang:cancel_timer(Timer, [{async, true}, {info, false}]);
ct(_) ->
    ok.
%% True when the argument is a live pid, or an atom registered as the
%% name of a live process. Anything else (including `undefined`, which
%% erlang:whereis/1 returns for unregistered names) yields false.
%% The previous spec only admitted pid() although the second clause
%% accepts registered names; widened to match the implementation.
-spec is_alive(pid() | atom() | term()) -> boolean().
is_alive(Pid) when is_pid(Pid) -> erlang:is_process_alive(Pid);
is_alive(RegName) when is_atom(RegName) andalso RegName =/= undefined -> is_alive(erlang:whereis(RegName));
is_alive(_) -> false.
%% Current Erlang system time in whole seconds (Unix epoch).
-spec time_now() -> non_neg_integer().
time_now() ->
    %% `second` is the canonical time unit; the plural `seconds` used
    %% before is a deprecated alias with identical behaviour.
    erlang:system_time(second).
%% Increment the framework state value stored under key `K` by `Num`
%% via wf:state (NOTE(review): wf comes from the enclosing web
%% framework — confirm Nitrogen/N2O semantics against callers).
%% Initialises the slot to `Num` when unset and returns the new
%% value; returns `ok` and leaves the slot untouched when the stored
%% value is not a number.
inc_state(K, Num) ->
    case wf:state(K) of
        undefined ->
            wf:state(K, Num),
            Num;
        S when is_number(S) ->
            wf:state(K, S+Num),
            S+Num;
        _ -> ok
    end.
| null | https://raw.githubusercontent.com/AstRonin/sgi/3854b62b6ce46ba71abd1e76e9cf5200dc0b1df3/src/sgi.erl | erlang | -module(sgi).
-compile(export_all).
-spec pv(Key, List) -> term() when
Key :: term(),
List :: [term()].
pv(K, L) -> pv(K, L, undefined).
-spec pv(Key, List, Default) -> term() when
Key :: term(),
List :: [term()],
Default :: term().
pv(K, L, D) -> proplists:get_value(K, L, D).
-spec mv(Key, Map) -> term() when
Key :: term(),
Map :: map().
mv(Key, Map) -> mv(Key, Map, undefined).
-spec mv(Key, Map, Default) -> term() when
Key :: term(),
Map :: map(),
Default :: term().
mv(Key, Map, Default) ->
try
maps:get(Key, Map, Default)
catch
_:_ ->
Default
end.
-spec ct(TimerRef) -> Result | ok when
TimerRef :: reference(),
Time :: non_neg_integer(),
Result :: Time | false.
ct(Timer) ->
case is_reference(Timer) of true -> erlang:cancel_timer(Timer, [{async, true}, {info, false}]); _ -> ok end.
-spec is_alive(Pid :: pid()) -> boolean().
is_alive(Pid) when is_pid(Pid) -> erlang:is_process_alive(Pid);
is_alive(RegName) when is_atom(RegName) andalso RegName =/= undefined -> is_alive(erlang:whereis(RegName));
is_alive(_) -> false.
-spec time_now() -> non_neg_integer().
time_now() ->
erlang:system_time(seconds).
inc_state(K, Num) ->
case wf:state(K) of
undefined ->
wf:state(K, Num),
Num;
S when is_number(S) ->
wf:state(K, S+Num),
S+Num;
_ -> ok
end.
| |
2ac7ae473dc32dbe01e3f17efa02fd82d2ba6f48eae36482bb6f8ea79051c311 | tqtezos/minter-sdk | Collections.hs | module Test.Swaps.Collections where
import Prelude hiding (swap, toStrict)
import qualified Data.Sized as Sized (toList)
import qualified Data.Set as Set
import Hedgehog (Gen, Property, forAll, property)
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import qualified Lorentz.Contracts.Spec.FA2Interface as FA2I
import qualified Indigo.Contracts.FA2Sample as FA2
import Michelson.Interpret.Pack
import GHC.Integer (negateInteger)
import Test.Tasty (TestTree, testGroup)
import Morley.Nettest
import Morley.Nettest.Tasty (nettestScenarioCaps)
import Lorentz.Address
import qualified Lorentz.Contracts.Swaps.Basic as Basic
import Lorentz.Contracts.Swaps.Collections
import Lorentz.Test (contractConsumer)
import Lorentz.Value
import Lorentz.Contracts.Spec.FA2Interface (TokenId(..))
import Test.NonPausableSimpleAdmin
import Test.Swaps.Util
import Test.Util
import Tezos.Address (unsafeParseAddress)
import Tezos.Crypto
-- | Randomised parameters shared by the property tests below.
data TestData = TestData
  { numOffers :: Natural    -- ^ number of identical offers to open
  , token1Offer :: Natural  -- ^ amount of the first token per offer
  , token2Offer :: Natural  -- ^ amount of the second token per offer
  }
  deriving stock (Show)
-- | Generate test parameters; every field is drawn independently
-- from the integral range [1, 20].
genTestData :: Gen TestData
genTestData =
  let genNat = Gen.integral (Range.constant 1 20)
  in TestData
       <$> genNat
       <*> genNat
       <*> genNat
----------------------------------------------------------------------------
-- Permit Tests
----------------------------------------------------------------------------
-- | An off-chain accept carrying a forged (wrongly signed) permit is
-- rejected by the contract with MISSIGNED plus the offending bytes.
hprop_Sending_fake_permit_to_offchain_accept_fails :: Property
hprop_Sending_fake_permit_to_offchain_accept_fails =
  property $ do
    clevelandProp $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let adminToken ::< tokenId1 ::< tokenId2 ::< tokenId3 ::< tokenId4 ::< tokenId5 ::< SNil = sTokens setup
      let tokensList = [tokenId1, tokenId2, tokenId3, tokenId4, tokenId5]
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $
        addCollection (Set.fromList tokensList) swap
      withSender alice $
        addOperatorOnTokens tokensList (toAddress swap) alice fa2
      withSender admin $ do
        addOperatorOnTokens [adminToken] (toAddress swap) admin fa2
        -- Open a swap that only requests a token from the collection.
        call swap (Call @"Start") $ mkSingleOffer SwapOffer
          { assetsOffered = []
          , assetsRequested = [initCollectionId]
          }
      let acceptParam = AcceptParam
            { swapId = Basic.initSwapId
            , tokensSent = one $ (initCollectionId, tokenId1)
            }
      -- Forge a permit signed by the wrong key and capture its bytes.
      missignedBytes <- fst <$> mkPermitToForge acceptParam swap
      withSender admin $ do
        (offchainAcceptForged acceptParam alice swap)
          & expectTransferFailure
              [failedWith $ constant ([mt|MISSIGNED|], missignedBytes)]
-- | Only the admin may submit off-chain accepts; a non-admin sender
-- is rejected with NOT_ADMIN.
hprop_Offchain_accept_not_admin_submitted_fails :: Property
hprop_Offchain_accept_not_admin_submitted_fails =
  property $ do
    clevelandProp $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let adminToken ::< tokenId1 ::< tokenId2 ::< tokenId3 ::< tokenId4 ::< tokenId5 ::< SNil = sTokens setup
      let tokensList = [tokenId1, tokenId2, tokenId3, tokenId4, tokenId5]
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $
        addCollection (Set.fromList tokensList) swap
      withSender alice $
        addOperatorOnTokens tokensList (toAddress swap) alice fa2
      withSender admin $ do
        addOperatorOnTokens [adminToken] (toAddress swap) admin fa2
        call swap (Call @"Start") $ mkSingleOffer SwapOffer
          { assetsOffered = []
          , assetsRequested = [initCollectionId]
          }
      let tokensSent = one $ (initCollectionId, tokenId1)
      -- alice is not the admin, so the accept must fail.
      withSender alice $ do
        offchainAccept tokensSent alice swap
          & expectTransferFailure [failedWith $ constant errNotAdmin]
-- | Accepting N offers one by one and accepting them as a batch must
-- leave identical swap storage in two otherwise-equal contracts.
hprop_Consecutive_offchain_accept_equals_iterative_accept :: Property
hprop_Consecutive_offchain_accept_equals_iterative_accept =
  property $ do
    TestData{numOffers,token1Offer, token2Offer} <- forAll genTestData
    clevelandProp $ do
      setup <- doFA2Setup @("addresses" :# 50) @("tokens" :# 2)
      let admin1 ::< admin2 ::< remainingAddresses = sAddresses setup
      let addresses = take (fromIntegral numOffers) (Sized.toList remainingAddresses)
      let tokenId1 ::< tokenId2 ::< SNil = sTokens setup
      -- Two independent FA2 + swap pairs, one per acceptance strategy.
      fa2_1 <- originateFA2 "fa2_1" setup []
      fa2_2 <- originateFA2 "fa2_2" setup []
      swap1 <- originateOffchainCollections admin1 (toAddress fa2_1)
      swap2 <- originateOffchainCollections admin2 (toAddress fa2_2)
      withSender admin1 $ do
        addOperatorOnTokens [tokenId1, tokenId2] (toAddress swap1) admin1 fa2_1
      withSender admin2 $ do
        addOperatorOnTokens [tokenId1, tokenId2] (toAddress swap2) admin2 fa2_2
      withSender admin1 $ do
        call swap1 (Call @"Start") $ mkNOffers numOffers SwapOffer
          { assetsOffered = Basic.tokens $ mkFA2Assets fa2_1 [(tokenId1, token1Offer), (tokenId2, token2Offer)]
          , assetsRequested = []
          }
      withSender admin2 $ do
        call swap2 (Call @"Start") $ mkNOffers numOffers SwapOffer
          { assetsOffered = Basic.tokens $ mkFA2Assets fa2_2 [(tokenId1, token1Offer), (tokenId2, token2Offer)]
          , assetsRequested = []
          }
      let acceptParam = AcceptParam
            { swapId = Basic.initSwapId
            , tokensSent = Set.empty
            }
      withSender admin1 $ do
        offchainAcceptAllConsecutive Set.empty addresses swap1
      withSender admin2 $ do
        offchainAcceptBatch acceptParam addresses swap2
      -- Both strategies must converge to the same swaps storage.
      swapStorage1 <- toVal . swaps <$> getStorage' swap1
      swapStorage2 <- toVal . swaps <$> getStorage' swap2
      swapStorage1 @== swapStorage2
------------------------------------------------------------------------------
---- Swap + Burn Tests Using Offchain_accept
------------------------------------------------------------------------------
-- | An accepter holding none of the requested tokens cannot accept:
-- the FA2 balance check fails with expected 1 / actual 0.
hprop_Accepting_with_zero_balance_fails :: Property
hprop_Accepting_with_zero_balance_fails =
  property $ do
    clevelandProp $ do
      setup <- doFA2Setup
      let admin ::< SNil = sAddresses setup
      let adminToken ::< tokenId1 ::< tokenId2 ::< SNil = sTokens setup
      let tokensList = [tokenId1, tokenId2]
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      -- Fresh address that owns no FA2 tokens at all.
      addressWithZeroBalance <- newAddress "test"
      withSender admin $
        addCollection (Set.fromList tokensList) swap
      withSender addressWithZeroBalance $
        addOperatorOnTokens tokensList (toAddress swap) addressWithZeroBalance fa2
      withSender admin $ do
        addOperatorOnTokens [adminToken] (toAddress swap) admin fa2
      withSender admin $
        call swap (Call @"Start") $ mkSingleOffer SwapOffer
          { assetsOffered = Basic.tokens $ mkFA2Assets fa2 [(adminToken, 10)]
          , assetsRequested = [initCollectionId, initCollectionId]
          }
      let tokensSent = Set.fromList [(initCollectionId, tokenId1), (initCollectionId, tokenId2)]
      withSender admin
        (offchainAccept tokensSent addressWithZeroBalance swap
          & expectTransferFailure [failedWith $ constant (errSwapRequestedFA2BalanceInvalid 1 0)])
-- | Only the admin may open offers: Start from any other sender is
-- rejected with NOT_ADMIN.
hprop_Start_callable_by_admin_only :: Property
hprop_Start_callable_by_admin_only =
  property $ do
    TestData{numOffers} <- forAll genTestData
    clevelandProp $ do
      setup <- doFA2Setup
      let admin ::< nonAdmin ::< SNil = sAddresses setup
      let !SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender nonAdmin
        (call swap (Call @"Start") (mkNOffers numOffers SwapOffer
          { assetsOffered = []
          , assetsRequested = []
          }) & expectError errNotAdmin)
-- | After a successful accept: the admin paid out one offer's worth
-- of tokens, one requested token per collection entry went to the
-- null (burn) address, and the accepter keeps the offered amounts
-- minus the tokens they sent in.
hprop_Correct_final_balances_on_acceptance :: Property
hprop_Correct_final_balances_on_acceptance =
  property $ do
    TestData{numOffers, token1Offer, token2Offer} <- forAll genTestData
    clevelandProp $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let tokenId1 ::< tokenId2 ::< tokenId3 ::< tokenId4 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender alice $
        addOperatorOnTokens' [tokenId1, tokenId2] (toAddress swap) alice fa2
      withSender admin $ do
        addOperatorOnTokens' [tokenId1, tokenId2] (toAddress swap) admin fa2
        -- Two collections, two tokens each.
        addCollection' (Set.fromList [tokenId1, tokenId2]) swap
        addCollection' (Set.fromList [tokenId3, tokenId4]) swap
      assertingBurnAddressUnchanged swap $ do
        assertingBalanceDeltas fa2
          [ (admin, tokenId1) -: negateInteger (fromIntegral $ token1Offer * numOffers)
          , (admin, tokenId2) -: negateInteger (fromIntegral $ token2Offer * numOffers)
          , (nullAddress, tokenId1) -: 1
          , (nullAddress, tokenId2) -: 1
          , (nullAddress, tokenId3) -: 1
          , (nullAddress, tokenId4) -: 1
          , (alice, tokenId1) -: fromIntegral (token1Offer - 1)
          , (alice, tokenId2) -: fromIntegral (token2Offer - 1)
          ] $ do
            withSender admin $
              call swap (Call @"Start") $ mkNOffers numOffers SwapOffer
                { assetsOffered = Basic.tokens $ mkFA2Assets fa2 [(tokenId1, token1Offer), (tokenId2, token2Offer)]
                , assetsRequested = [initCollectionId, initCollectionId, incrementCollectionId initCollectionId, incrementCollectionId initCollectionId]
                }
            let tokensSent = Set.fromList [(initCollectionId, tokenId1), (initCollectionId, tokenId2),
                  (incrementCollectionId initCollectionId, tokenId3),
                  (incrementCollectionId initCollectionId, tokenId4)]
            withSender admin $
              offchainAccept tokensSent alice swap
-- | Property: cancelling a swap refunds the escrowed tokens, so over a
-- start-then-cancel cycle every tracked balance nets out to zero and
-- nothing reaches the burn address.
hprop_Correct_final_balances_on_cancel :: Property
hprop_Correct_final_balances_on_cancel =
  property $ do
    TestData{numOffers, token1Offer, token2Offer} <- forAll genTestData
    clevelandProp $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let tokenId1 ::< tokenId2 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $
        addOperatorOnTokens' [tokenId1, tokenId2] (toAddress swap) admin fa2
      assertingBurnAddressUnchanged swap $ do
        -- All deltas are zero: the escrow taken at Start comes back on Cancel.
        assertingBalanceDeltas fa2
          [ (admin, tokenId1) -: 0
          , (admin, tokenId2) -: 0
          , (alice, tokenId1) -: 0
          , (alice, tokenId2) -: 0
          , (nullAddress, tokenId1) -: 0
          , (nullAddress, tokenId2) -: 0
          ] $ do
          withSender admin $
            call swap (Call @"Start") $ mkNOffers numOffers SwapOffer
              { assetsOffered = Basic.tokens $ mkFA2Assets fa2 [(tokenId1, token1Offer), (tokenId2, token2Offer)]
              , assetsRequested = []
              }
          withSender admin $
            call swap (Call @"Cancel") Basic.initSwapId
--
-- | Property: @Start@ escrows exactly @amount * numOffers@ of each
-- offered token from the admin (the whole batch is collateralized up
-- front).
hprop_Correct_num_tokens_transferred_to_contract_on_start :: Property
hprop_Correct_num_tokens_transferred_to_contract_on_start =
  property $ do
    TestData{numOffers, token1Offer, token2Offer} <- forAll genTestData
    clevelandProp $ do
      setup <- doFA2Setup
      let admin ::< SNil = sAddresses setup
      let tokenId1 ::< tokenId2 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $
        addOperatorOnTokens' [tokenId1, tokenId2] (toAddress swap) admin fa2
      assertingBurnAddressUnchanged swap $ do
        assertingBalanceDeltas fa2
          [ (admin, tokenId1) -: negateInteger (fromIntegral $ token1Offer * numOffers)
          , (admin, tokenId2) -: negateInteger (fromIntegral $ token2Offer * numOffers)
          ] $ do
          withSender admin $
            call swap (Call @"Start") $ mkNOffers numOffers SwapOffer
              { assetsOffered = Basic.tokens $ mkFA2Assets fa2 [(tokenId1, token1Offer), (tokenId2, token2Offer)]
              , assetsRequested = []
              }
-- | Property: once every one of the N offers has been accepted, the swap
-- contract itself holds none of the offered tokens (escrow fully drained).
hprop_Contract_balance_goes_to_zero_when_sale_concludes :: Property
hprop_Contract_balance_goes_to_zero_when_sale_concludes =
  property $ do
    TestData{numOffers, token1Offer, token2Offer} <- forAll genTestData
    clevelandProp $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let tokenId1 ::< tokenId2 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      let swapAddress = toAddress swap
      withSender alice $
        addOperatorOnTokens' [tokenId1, tokenId2] (toAddress swap) alice fa2
      withSender admin $
        addOperatorOnTokens' [tokenId1, tokenId2] (toAddress swap) admin fa2
      assertingBurnAddressUnchanged swap $ do
        -- Net delta of the contract's own balances over the whole
        -- start-then-accept-everything cycle must be zero.
        assertingBalanceDeltas fa2
          [ (swapAddress, tokenId1) -: 0
          , (swapAddress, tokenId2) -: 0
          ] $ do
          withSender admin $ do
            addCollection' (Set.fromList [tokenId1, tokenId2]) swap
            call swap (Call @"Start") $ mkNOffers numOffers SwapOffer
              { assetsOffered = Basic.tokens $ mkFA2Assets fa2 [(tokenId1, token1Offer), (tokenId2, token2Offer)]
              , assetsRequested = [initCollectionId, initCollectionId]
              }
          -- Accept each of the N offers in turn.
          withSender admin $
            replicateM_ (fromIntegral numOffers) $ do
              offchainAccept (Set.fromList [(initCollectionId, tokenId1), (initCollectionId, tokenId2)]) alice swap
-- | Umbrella group for the collections-swap integrational scenarios.
-- (Fixes the typo "colections" in the group label.)
test_CollectionsIntegrational :: TestTree
test_CollectionsIntegrational = testGroup "Basic collections functionality"
  [ statusChecks
  , swapIdChecks
  , authorizationChecks
  , invalidFA2sChecks
  , swapTokensSentChecks
  ]
-- | Lifecycle checks: a swap that has been fully accepted or cancelled
-- cannot be accepted or cancelled again.
statusChecks :: TestTree
statusChecks = testGroup "Statuses"
  [ nettestScenarioCaps "Operations with accepted swap fail" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let !SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $ do
        -- A single empty offer: one acceptance exhausts it.
        call swap (Call @"Start") $ mkSingleOffer $ SwapOffer [] []
        let tokensSent = Set.empty
        offchainAccept' tokensSent alice swap
        -- Any further acceptance must fail.
        offchainAccept' tokensSent alice swap
          & expectError errSwapFinished
        offchainAccept' tokensSent alice swap
          & expectError errSwapFinished
  , nettestScenarioCaps "Operations with cancelled swap fail" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let !SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $ do
        call swap (Call @"Start") $ mkSingleOffer $ SwapOffer [] []
        let tokensSent = Set.empty
        call swap (Call @"Cancel") Basic.initSwapId
        -- After cancellation neither acceptance nor a second cancel works.
        offchainAccept' tokensSent alice swap
          & expectError errSwapCancelled
        call swap (Call @"Cancel") Basic.initSwapId
          & expectError errSwapCancelled
  ]
-- | The token set the buyer sends must exactly satisfy the requested
-- collections: too few tokens, too many tokens, or a token outside the
-- requested collection all fail with 'errTokensSentInvalid'.
swapTokensSentChecks :: TestTree
swapTokensSentChecks = testGroup "TokensSent"
  [ nettestScenarioCaps "Sending too few tokens fails" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let tokenId1 ::< tokenId2 ::< tokenId3 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $ do
        addCollection' (Set.fromList [tokenId1, tokenId2, tokenId3]) swap
      withSender alice $
        addOperatorOnTokens' [tokenId1, tokenId2, tokenId3] (toAddress swap) alice fa2
      withSender admin $
        call swap (Call @"Start") $ mkSingleOffer SwapOffer
          { assetsOffered = []
          , assetsRequested = [initCollectionId, initCollectionId, initCollectionId]
          }
      -- Three collection entries requested, only two tokens sent.
      let tokensSent = Set.fromList [(initCollectionId, tokenId1), (initCollectionId, tokenId3)]
      withSender admin
        (offchainAccept' tokensSent alice swap)
        & expectError errTokensSentInvalid
  , nettestScenarioCaps "Sending too many tokens fails" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let tokenId1 ::< tokenId2 ::< tokenId3 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $ do
        addCollection' (Set.fromList [tokenId1, tokenId2, tokenId3]) swap
      withSender alice $
        addOperatorOnTokens' [tokenId1, tokenId2, tokenId3] (toAddress swap) alice fa2
      withSender admin $
        call swap (Call @"Start") $ mkSingleOffer SwapOffer
          { assetsOffered = []
          , assetsRequested = [initCollectionId, initCollectionId]
          }
      -- Two collection entries requested, three tokens sent.
      let tokensSent = Set.fromList [(initCollectionId, tokenId1), (initCollectionId, tokenId2), (initCollectionId, tokenId3)]
      withSender admin
        (offchainAccept' tokensSent alice swap)
        & expectError errTokensSentInvalid
  , nettestScenarioCaps "Sending incorrect tokens fails" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let tokenId1 ::< tokenId2 ::< tokenId3 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $ do
        -- Only {token1, token2} are in the collection; token3 is not.
        addCollection' (Set.fromList [tokenId1, tokenId2]) swap
      withSender alice $
        addOperatorOnTokens' [tokenId1, tokenId2, tokenId3] (toAddress swap) alice fa2
      withSender admin $
        call swap (Call @"Start") $ mkSingleOffer SwapOffer
          { assetsOffered = []
          , assetsRequested = [initCollectionId, initCollectionId]
          }
      -- token3 does not belong to the requested collection.
      let tokensSent = Set.fromList [(initCollectionId, tokenId1), (initCollectionId, tokenId3)]
      withSender admin
        (offchainAccept' tokensSent alice swap)
        & expectError errTokensSentInvalid
  ]
-- | Swap ids are assigned sequentially from 'Basic.initSwapId', every
-- entrypoint addresses swaps by id, and unknown ids fail with
-- 'errSwapNotExist'.
--
-- The original wrapped each call in a redundant immediately-applied
-- lambda, @(\\p -> offchainAcceptSwapId' p alice swap) AcceptParam{..}@;
-- this applies the function directly.
swapIdChecks :: TestTree
swapIdChecks = testGroup "SwapIds"
  [ nettestScenarioCaps "Swap ids are properly assigned and can be worked with" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let tokenId1 ::< tokenId2 ::< tokenId3 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      -- Three singleton collections.
      withSender admin $ do
        addCollection' (Set.fromList [tokenId1]) swap
        addCollection' (Set.fromList [tokenId2]) swap
        addCollection' (Set.fromList [tokenId3]) swap
      withSender alice $
        addOperatorOnTokens' [tokenId1, tokenId2, tokenId3] (toAddress swap) alice fa2
      -- One swap per collection; swap ids are assigned in order.
      withSender admin $
        for_ [CollectionId 1, CollectionId 2, CollectionId 3] $ \collectionId ->
          call swap (Call @"Start") $ mkSingleOffer SwapOffer
            { assetsOffered = []
            , assetsRequested = [collectionId]
            }
      -- Accept swaps 1 and 3 but not 2: only token1 and token3 move.
      assertingBalanceDeltas fa2
        [ (alice, tokenId1) -: -1
        , (alice, tokenId2) -: 0
        , (alice, tokenId3) -: -1
        ] $ do
        withSender admin $ do
          offchainAcceptSwapId' AcceptParam
            { swapId = Basic.initSwapId
            , tokensSent = one (initCollectionId, tokenId1)
            } alice swap
          offchainAcceptSwapId' AcceptParam
            { swapId = Basic.incrementSwapId $ Basic.incrementSwapId Basic.initSwapId
            , tokensSent = one (incrementCollectionId $ incrementCollectionId initCollectionId, tokenId3)
            } alice swap
  , nettestScenarioCaps "Accessing non-existing swap fails respectively" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let !SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      -- No swap started yet: id 1 does not exist.
      withSender admin $
        offchainAccept' Set.empty alice swap
          & expectError errSwapNotExist
      withSender admin
        (call swap (Call @"Cancel") Basic.initSwapId
          & expectError errSwapNotExist)
      withSender admin $ do
        call swap (Call @"Start") $ mkSingleOffer (SwapOffer [] [])
        -- Id 2 was never assigned.
        offchainAcceptSwapId' AcceptParam
          { swapId = Basic.incrementSwapId Basic.initSwapId
          , tokensSent = Set.empty
          } alice swap
          & expectError errSwapNotExist
        call swap (Call @"Cancel") (Basic.incrementSwapId Basic.initSwapId)
          & expectError errSwapNotExist
        -- The one existing swap (id 1) still works.
        offchainAcceptSwapId' AcceptParam
          { swapId = Basic.initSwapId
          , tokensSent = Set.empty
          } alice swap
  ]
-- | Cancellation rights: in the offchain-collections contract the admin
-- is the seller, so both the implicit default sender and alice are
-- rejected with 'errNotAdmin'.
authorizationChecks :: TestTree
authorizationChecks = testGroup "Authorization checks"
  [ nettestScenarioCaps "Swap can be cancelled by seller only" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let !SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $
        call swap (Call @"Start") $ mkSingleOffer $ SwapOffer [] []
      -- Implicit default sender (not the admin):
      call swap (Call @"Cancel") Basic.initSwapId
        & expectError errNotAdmin
      withSender alice $
        call swap (Call @"Cancel") Basic.initSwapId
          & expectError errNotAdmin
  ]
-- | Swaps against an address that is not a working FA2 contract must
-- fail on the transfer attempt, both on the offered and the requested
-- side.  (The scenario label used to read "Swap can be cancelled by
-- seller only" — copy-pasted from 'authorizationChecks' and misleading.)
invalidFA2sChecks :: TestTree
invalidFA2sChecks = testGroup "Invalid FA2s"
  [ nettestScenarioCaps "Swaps over invalid FA2s fail" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let tokenId1 ::< SNil = sTokens setup
      -- A contract that is not an FA2 at all, and an address with no
      -- contract behind it.
      fakeFa2 <- originateSimple "fake-fa2" ([] :: [Integer]) contractConsumer
      let nonExistingFa2 = ContractHandler "non-existing FA2"
            (unsafeParseAddress "tz1b7p3PPBd3vxmMHZkvtC61C7ttYE6g683F")
      let pseudoFa2s = [("fake FA2", fakeFa2), ("non existing FA2", nonExistingFa2)]
      for_ pseudoFa2s $ \(desc, fa2) -> do
        comment $ "Trying " <> desc
        let fa2Address = toAddress fa2
        swap <- originateOffchainCollections admin fa2Address
        withSender admin $
          addCollection' (Set.fromList [tokenId1]) swap
        comment "Checking offered FA2"
        withSender admin $
          call swap (Call @"Start") (mkSingleOffer SwapOffer
            { assetsOffered = Basic.tokens $ mkFA2Assets fa2 [(tokenId1, 1)]
            , assetsRequested = []
            })
            & expectError errSwapOfferedFA2Invalid
        comment "Checking requested FA2"
        withSender admin $ do
          call swap (Call @"Start") $ mkSingleOffer SwapOffer
            { assetsOffered = []
            , assetsRequested = [initCollectionId]
            }
          let tokensSent = Set.fromList [(initCollectionId, tokenId1)]
          withSender admin
            (offchainAccept' tokensSent alice swap)
            & expectError errSwapRequestedFA2Invalid
  ]
----------------------------------------------------------------------------
-- Swap + Burn Tests Using normal Accept
----------------------------------------------------------------------------
-- | End-to-end scenario using the on-chain @Accept@ entrypoint: the
-- buyer calls it directly, no permit involved.
test_Integrational :: TestTree
test_Integrational = testGroup "Integrational"
  [ -- Check that storage updates work
    nettestScenarioCaps "Simple accepted swap" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let adminToken ::< tokenId1 ::< tokenId2 ::< tokenId3 ::< tokenId4 ::< tokenId5 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender alice $
        addOperatorOnTokens' [tokenId1, tokenId2, tokenId3, tokenId4, tokenId5] (toAddress swap) alice fa2
      withSender admin $
        addOperatorOnTokens' [adminToken] (toAddress swap) admin fa2
      -- alice burns one token1 and one token2 (they land at nullAddress)
      -- and receives the 10 offered adminToken.  token5's delta is not
      -- asserted here.
      assertingBalanceDeltas fa2
        [ (admin, adminToken) -: -10
        , (admin, tokenId1) -: 0
        , (admin, tokenId2) -: 0
        , (nullAddress, tokenId1) -: 1
        , (nullAddress, tokenId2) -: 1
        , (nullAddress, adminToken) -: 0
        , (alice, tokenId1) -: -1
        , (alice, tokenId2) -: -1
        , (alice, adminToken) -: 10
        ] $ do
        withSender admin $ do
          addCollection' (Set.fromList [tokenId1, tokenId2, tokenId3, tokenId4, tokenId5]) swap
          addCollection' (Set.fromList [tokenId1, tokenId4, tokenId5]) swap
          call swap (Call @"Start") $ mkSingleOffer SwapOffer
            { assetsOffered = Basic.tokens $ mkFA2Assets fa2 [(adminToken, 10)]
            , assetsRequested = [initCollectionId, initCollectionId, incrementCollectionId initCollectionId]
            }
        withSender alice $
          call swap (Call @"Accept") AcceptParam
            { swapId = Basic.initSwapId
            , tokensSent = Set.fromList [(initCollectionId, tokenId1), (initCollectionId, tokenId2), (incrementCollectionId initCollectionId, tokenId5)]
            }
  ]
-- | Same scenario as 'test_Integrational', but the swap contract is made
-- a global operator on the FA2 instead of per-token operator updates.
-- The group label is disambiguated — previously both groups were named
-- "Integrational", making test output ambiguous.
test_IntegrationalWithFA2GlobalOperators :: TestTree
test_IntegrationalWithFA2GlobalOperators = testGroup "Integrational (global operators)"
  [ -- Check that storage updates work
    nettestScenarioCaps "Simple accepted swap" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let adminToken ::< tokenId1 ::< tokenId2 ::< tokenId3 ::< tokenId4 ::< tokenId5 ::< SNil = sTokens setup
      fa2 <- originateFA2WithGlobalOperators "fa2" setup Set.empty admin []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      -- A single global-operator grant replaces all per-token operator updates.
      withSender admin $
        call fa2 (Call @"Update_global_operators") (one $ toAddress swap)
      assertingBalanceDeltas' fa2
        [ (admin, adminToken) -: -10
        , (admin, tokenId1) -: 0
        , (admin, tokenId2) -: 0
        , (nullAddress, tokenId1) -: 1
        , (nullAddress, tokenId2) -: 1
        , (nullAddress, adminToken) -: 0
        , (alice, tokenId1) -: -1
        , (alice, tokenId2) -: -1
        , (alice, adminToken) -: 10
        ] $ do
        withSender admin $ do
          addCollection' (Set.fromList [tokenId1, tokenId2, tokenId3, tokenId4, tokenId5]) swap
          addCollection' (Set.fromList [tokenId1, tokenId4, tokenId5]) swap
          call swap (Call @"Start") $ mkSingleOffer SwapOffer
            { assetsOffered = Basic.tokens $ mkFA2Assets fa2 [(adminToken, 10)]
            , assetsRequested = [initCollectionId, initCollectionId, incrementCollectionId initCollectionId]
            }
        withSender alice $
          call swap (Call @"Accept") AcceptParam
            { swapId = Basic.initSwapId
            , tokensSent = Set.fromList [(initCollectionId, tokenId1), (initCollectionId, tokenId2), (incrementCollectionId initCollectionId, tokenId5)]
            }
  ]
----------------------------------------------------------------------------
-- Admin Checks
----------------------------------------------------------------------------
-- | Reuse the generic admin-ownership-transfer scenarios against an
-- offchain-collections contract originated with a sample FA2 address.
test_AdminChecks :: TestTree
test_AdminChecks =
  adminOwnershipTransferChecks
    (\admin -> originateOffchainCollections admin exampleFA2Address)
----------------------------------------------------------------------------
-- Helpers
----------------------------------------------------------------------------
-- | Register a new collection of token ids on the swap contract
-- ('MonadNettest' variant).
addCollection' :: (HasCallStack, MonadNettest caps base m) => Set TokenId -> ContractHandler OffchainCollectionsEntrypoints st -> m ()
addCollection' tokens swapContract = call swapContract (Call @"Add_collection") tokens
-- | Add every non-empty subset of the given tokens as its own collection
-- and return the (sorted) subsets that were added.
-- @filterM (const [True, False])@ in the list monad enumerates the
-- powerset of @tokenIds@.
-- NOTE(review): despite the prime suffix this variant runs in
-- 'MonadEmulated' (elsewhere in this file the prime marks 'MonadNettest'
-- variants) — confirm the intended naming; it duplicates 'addCollections'.
addCollections' :: (HasCallStack, MonadEmulated caps base m) => [TokenId] -> ContractHandler OffchainCollectionsEntrypoints st -> m [[TokenId]]
addCollections' tokenIds contract = do
  let collections = map sort (filter (not . null) (filterM (const [True, False]) tokenIds))
  mapM_ (\collection -> addCollection (Set.fromList collection) contract) collections
  return collections
-- | Register a new collection of token ids on the swap contract
-- ('MonadEmulated' variant).
addCollection :: (HasCallStack, MonadEmulated caps base m) => Set TokenId -> ContractHandler OffchainCollectionsEntrypoints st -> m ()
addCollection tokens swapContract = call swapContract (Call @"Add_collection") tokens
-- | Add every non-empty subset of the given tokens as its own collection
-- and return the (sorted) subsets that were added.
-- @filterM (const [True, False])@ in the list monad enumerates the
-- powerset of @tokenIds@.
addCollections :: (HasCallStack, MonadEmulated caps base m) => [TokenId] -> ContractHandler OffchainCollectionsEntrypoints st -> m [[TokenId]]
addCollections tokenIds contract = do
  let collections = map sort (filter (not . null) (filterM (const [True, False]) tokenIds))
  mapM_ (\collection -> addCollection (Set.fromList collection) contract) collections
  return collections
-- | Make @operator@ an FA2 operator of @owner@ for each of the given
-- token ids, in one @Update_operators@ call ('MonadEmulated' variant).
addOperatorOnTokens :: (HasCallStack, MonadEmulated caps base m) => [TokenId] -> Address -> Address -> ContractHandler FA2.FA2SampleParameter st -> m()
addOperatorOnTokens tokens operator owner fa2 =
  call fa2 (Call @"Update_operators")
    [ FA2I.AddOperator FA2I.OperatorParam
        { opOwner = owner
        , opOperator = toAddress operator
        , opTokenId = tokenId
        }
    | tokenId <- tokens
    ]
-- | Make @operator@ an FA2 operator of @owner@ for each of the given
-- token ids, in one @Update_operators@ call ('MonadNettest' variant).
addOperatorOnTokens' :: (HasCallStack, MonadNettest caps base m) => [TokenId] -> Address -> Address -> ContractHandler FA2.FA2SampleParameter st -> m()
addOperatorOnTokens' tokens operator owner fa2 =
  call fa2 (Call @"Update_operators")
    [ FA2I.AddOperator FA2I.OperatorParam
        { opOwner = owner
        , opOperator = toAddress operator
        , opTokenId = tokenId
        }
    | tokenId <- tokens
    ]
-- | Each of the given addresses accepts an offer of the initial swap in
-- turn — one offchain accept call per buyer.
offchainAcceptAllConsecutive :: (HasCallStack, MonadEmulated caps base m) => Set (CollectionId, TokenId) -> [Address] -> ContractHandler OffchainCollectionsEntrypoints st -> m ()
offchainAcceptAllConsecutive tokensSent addresses contract =
  for_ addresses $ \buyer -> offchainAccept tokensSent buyer contract
-- | All buyers accept the same offer in one batched @Offchain_accept@
-- call; each buyer signs the (identical) permit payload with their own
-- key.
offchainAcceptBatch :: (HasCallStack, MonadEmulated caps base m) => AcceptParam -> [Address] -> ContractHandler OffchainCollectionsEntrypoints st -> m ()
offchainAcceptBatch acceptParam buyers contract = do
  -- The permit payload depends only on the accept parameter and the
  -- contract, so build it once instead of once per buyer.
  unsigned <- mkPermitToSign acceptParam contract
  params <- forM buyers $ \buyer -> do
    buyerPK <- getPublicKey buyer
    signature <- signBytes unsigned buyer
    pure OffchainAcceptParam
      { acceptParam = acceptParam
      , permit = Permit
          { signerKey = buyerPK
          , signature = signature
          }
      }
  -- 'params' is already a list; the previous 'toList' was the identity.
  call contract (Call @"Offchain_accept") params
-- | Build the unsigned permit bytes together with the public key of a
-- freshly created ("forged") address that did NOT sign them — used to
-- provoke MISSIGNED failures.
mkPermitToForge :: (HasCallStack, MonadEmulated caps base m) => AcceptParam -> ContractHandler OffchainCollectionsEntrypoints st -> m (ByteString, PublicKey)
mkPermitToForge acceptParam contract = do
  forgedAddress <- newAddress "forged"
  forgedPK <- getPublicKey forgedAddress
  unsigned <- mkPermitToSign acceptParam contract
  pure (unsigned, forgedPK)
-- | Build the bytes a buyer must sign to authorize an offchain accept:
-- the packed pair of (chain id, contract address) and (permit counter,
-- blake2b hash of the packed 'AcceptParam') — a permit-style payload.
-- NOTE(review): the counter is hard-coded to 0, i.e. this assumes no
-- permit has been consumed yet for the signer — confirm before reusing
-- across multiple permits for one key.
mkPermitToSign :: (HasCallStack, MonadEmulated caps base m) => AcceptParam -> ContractHandler OffchainCollectionsEntrypoints st -> m ByteString
mkPermitToSign acceptParam contract = do
  marketplaceChainId <- getChainId
  let unsigned = packValue' $ toVal ((marketplaceChainId, contractAddress), (0 :: Natural, acceptParamHash))
  pure unsigned
  where
    acceptParamHash = blake2b $ packValue' $ toVal acceptParam
    contractAddress = toAddress contract
-- | 'MonadNettest' counterpart of 'mkPermitToSign' (same payload:
-- packed ((chain id, contract address), (counter 0, blake2b of packed
-- 'AcceptParam'))).  Keep the two definitions in sync.
mkPermitToSign' :: (HasCallStack, MonadNettest caps base m) => AcceptParam -> ContractHandler OffchainCollectionsEntrypoints st -> m ByteString
mkPermitToSign' acceptParam contract = do
  marketplaceChainId <- getChainId
  let unsigned = packValue' $ toVal ((marketplaceChainId, contractAddress), (0 :: Natural, acceptParamHash))
  pure unsigned
  where
    acceptParamHash = blake2b $ packValue' $ toVal acceptParam
    contractAddress = toAddress contract
-- | Have @buyer@ sign a permit for the given accept parameter and submit
-- it as a single-element @Offchain_accept@ batch ('MonadEmulated').
offchainAcceptSwapId :: (HasCallStack, MonadEmulated caps base m) => AcceptParam -> Address -> ContractHandler OffchainCollectionsEntrypoints st -> m ()
offchainAcceptSwapId acceptParam buyer contract = do
  buyerPK <- getPublicKey buyer
  unsigned <- mkPermitToSign acceptParam contract
  sig <- signBytes unsigned buyer
  let offchainParam = OffchainAcceptParam
        { acceptParam = acceptParam
        , permit = Permit { signerKey = buyerPK, signature = sig }
        }
  call contract (Call @"Offchain_accept") [offchainParam]
-- | Have @buyer@ sign a permit for the given accept parameter and submit
-- it as a single-element @Offchain_accept@ batch ('MonadNettest').
offchainAcceptSwapId' :: (HasCallStack, MonadNettest caps base m) => AcceptParam -> Address -> ContractHandler OffchainCollectionsEntrypoints st -> m ()
offchainAcceptSwapId' acceptParam buyer contract = do
  buyerPK <- getPublicKey buyer
  unsigned <- mkPermitToSign' acceptParam contract
  sig <- signBytes unsigned buyer
  let offchainParam = OffchainAcceptParam
        { acceptParam = acceptParam
        , permit = Permit { signerKey = buyerPK, signature = sig }
        }
  call contract (Call @"Offchain_accept") [offchainParam]
-- | Offchain accept of the initial swap id with the given set of sent
-- tokens ('MonadEmulated').
offchainAccept :: (HasCallStack, MonadEmulated caps base m) => Set (CollectionId, TokenId) -> Address -> ContractHandler OffchainCollectionsEntrypoints st -> m ()
offchainAccept tokensSent buyer contract =
  offchainAcceptSwapId
    AcceptParam { swapId = Basic.initSwapId, tokensSent = tokensSent }
    buyer
    contract
-- | Offchain accept of the initial swap id with the given set of sent
-- tokens ('MonadNettest').
offchainAccept' :: (HasCallStack, MonadNettest caps base m) => Set (CollectionId, TokenId) -> Address -> ContractHandler OffchainCollectionsEntrypoints st -> m ()
offchainAccept' tokensSent buyer contract =
  offchainAcceptSwapId'
    AcceptParam { swapId = Basic.initSwapId, tokensSent = tokensSent }
    buyer
    contract
-- | Submit an accept whose permit bytes were signed by @buyer@ but whose
-- signer key belongs to a freshly forged address.  Returns the signed
-- bytes so callers can assert the contract fails with @MISSIGNED@ on
-- exactly them.  Uses @x <\$ action@ instead of the original
-- @(\\() -> x) <\$> action@.
offchainAcceptForged :: (HasCallStack, MonadEmulated caps base m) => AcceptParam -> Address -> ContractHandler OffchainCollectionsEntrypoints st -> m ByteString
offchainAcceptForged acceptParam buyer contract = do
  (unsigned, forgedPK) <- mkPermitToForge acceptParam contract
  signature <- signBytes unsigned buyer
  unsigned <$ call contract (Call @"Offchain_accept")
    [ OffchainAcceptParam
        { acceptParam = acceptParam
        , permit = Permit
            { signerKey = forgedPK
            , signature = signature
            }
        }
    ]
-- | Run @action@ and check, with the supplied comparison (e.g. '@=='
-- or '@/='), how the burn address stored in the contract evolved
-- across it.
assertingBurnAddressStatus
  :: (MonadEmulated caps base m, HasCallStack)
  => ContractHandler b CollectionsStorage
  -> m a
  -> (Address -> Address -> m ())
  -> m a
assertingBurnAddressStatus swapContract action changedStatus = do
  before <- fetchBurnAddress
  result <- action
  after <- fetchBurnAddress
  before `changedStatus` after
  pure result
  where
    fetchBurnAddress = burnAddress <$> getStorage' swapContract
-- | Assert the stored burn address is identical before and after @action@.
assertingBurnAddressUnchanged
  :: (MonadEmulated caps base m, HasCallStack)
  => ContractHandler b CollectionsStorage
  -> m a
  -> m a
assertingBurnAddressUnchanged swapContract action =
  assertingBurnAddressStatus swapContract action (@==)
-- | Assert the stored burn address differs before and after @action@.
assertingBurnAddressChanged
  :: (MonadEmulated caps base m, HasCallStack)
  => ContractHandler b CollectionsStorage
  -> m a
  -> m a
assertingBurnAddressChanged swapContract action =
  assertingBurnAddressStatus swapContract action (@/=)
| null | https://raw.githubusercontent.com/tqtezos/minter-sdk/6239f6ee8435977085c00c194224d4223386841a/packages/minter-contracts/test-hs/Test/Swaps/Collections.hs | haskell | --------------------------------------------------------------------------
Permit Tests
--------------------------------------------------------------------------
----------------------------------------------------------------------------
-- Swap + Burn Tests Using Offchain_accept
----------------------------------------------------------------------------
--------------------------------------------------------------------------
Swap + Burn Tests Using normal Accept
--------------------------------------------------------------------------
Check that storage updates work
Check that storage updates work
--------------------------------------------------------------------------
Admin Checks
--------------------------------------------------------------------------
--------------------------------------------------------------------------
Helpers
--------------------------------------------------------------------------
N addresses accept all N assets in a sale conseutively, and then all N are confirmed | module Test.Swaps.Collections where
import Prelude hiding (swap, toStrict)
import qualified Data.Sized as Sized (toList)
import qualified Data.Set as Set
import Hedgehog (Gen, Property, forAll, property)
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import qualified Lorentz.Contracts.Spec.FA2Interface as FA2I
import qualified Indigo.Contracts.FA2Sample as FA2
import Michelson.Interpret.Pack
import GHC.Integer (negateInteger)
import Test.Tasty (TestTree, testGroup)
import Morley.Nettest
import Morley.Nettest.Tasty (nettestScenarioCaps)
import Lorentz.Address
import qualified Lorentz.Contracts.Swaps.Basic as Basic
import Lorentz.Contracts.Swaps.Collections
import Lorentz.Test (contractConsumer)
import Lorentz.Value
import Lorentz.Contracts.Spec.FA2Interface (TokenId(..))
import Test.NonPausableSimpleAdmin
import Test.Swaps.Util
import Test.Util
import Tezos.Address (unsafeParseAddress)
import Tezos.Crypto
-- | Randomised dimensions for the property tests.
data TestData = TestData
  { numOffers :: Natural    -- ^ How many identical offers the admin starts.
  , token1Offer :: Natural  -- ^ Amount of the first token in each offer.
  , token2Offer :: Natural  -- ^ Amount of the second token in each offer.
  }
  deriving stock (Show)
-- | Generate test dimensions, each drawn uniformly from [1 .. 20].
-- Field order matches the 'TestData' declaration.
genTestData :: Gen TestData
genTestData = do
  let genNat = Gen.integral (Range.constant 1 20)
  TestData
    <$> genNat  -- numOffers
    <*> genNat  -- token1Offer
    <*> genNat  -- token2Offer
-- | Property: a permit signed by one key but submitted with a different
-- (forged) signer key is rejected with @MISSIGNED@, carrying the exact
-- bytes that failed signature verification.
hprop_Sending_fake_permit_to_offchain_accept_fails :: Property
hprop_Sending_fake_permit_to_offchain_accept_fails =
  property $ do
    clevelandProp $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let adminToken ::< tokenId1 ::< tokenId2 ::< tokenId3 ::< tokenId4 ::< tokenId5 ::< SNil = sTokens setup
      let tokensList = [tokenId1, tokenId2, tokenId3, tokenId4, tokenId5]
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $
        addCollection (Set.fromList tokensList) swap
      withSender alice $
        addOperatorOnTokens tokensList (toAddress swap) alice fa2
      withSender admin $ do
        addOperatorOnTokens [adminToken] (toAddress swap) admin fa2
        call swap (Call @"Start") $ mkSingleOffer SwapOffer
          { assetsOffered = []
          , assetsRequested = [initCollectionId]
          }
      let acceptParam = AcceptParam
            { swapId = Basic.initSwapId
            , tokensSent = one $ (initCollectionId, tokenId1)
            }
      -- The same unsigned bytes 'offchainAcceptForged' signs; they must
      -- appear verbatim in the MISSIGNED error.
      missignedBytes <- fst <$> mkPermitToForge acceptParam swap
      withSender admin $ do
        (offchainAcceptForged acceptParam alice swap)
          & expectTransferFailure
            [failedWith $ constant ([mt|MISSIGNED|], missignedBytes)]
-- | Property: @Offchain_accept@ may only be submitted by the admin — a
-- buyer submitting their own (validly signed) permit is rejected with
-- 'errNotAdmin'.
hprop_Offchain_accept_not_admin_submitted_fails :: Property
hprop_Offchain_accept_not_admin_submitted_fails =
  property $ do
    clevelandProp $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let adminToken ::< tokenId1 ::< tokenId2 ::< tokenId3 ::< tokenId4 ::< tokenId5 ::< SNil = sTokens setup
      let tokensList = [tokenId1, tokenId2, tokenId3, tokenId4, tokenId5]
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $
        addCollection (Set.fromList tokensList) swap
      withSender alice $
        addOperatorOnTokens tokensList (toAddress swap) alice fa2
      withSender admin $ do
        addOperatorOnTokens [adminToken] (toAddress swap) admin fa2
        call swap (Call @"Start") $ mkSingleOffer SwapOffer
          { assetsOffered = []
          , assetsRequested = [initCollectionId]
          }
      let tokensSent = one $ (initCollectionId, tokenId1)
      -- alice (not the admin) submits the permit herself.
      withSender alice $ do
        offchainAccept tokensSent alice swap
          & expectTransferFailure [failedWith $ constant errNotAdmin]
-- | Property: N buyers accepting one at a time (one @Offchain_accept@
-- call per permit) leaves the same final swaps storage as a single
-- batched @Offchain_accept@ call carrying all N permits.
hprop_Consecutive_offchain_accept_equals_iterative_accept :: Property
hprop_Consecutive_offchain_accept_equals_iterative_accept =
  property $ do
    TestData{numOffers, token1Offer, token2Offer} <- forAll genTestData
    clevelandProp $ do
      -- Two independent FA2/swap pairs: swap1 is driven consecutively,
      -- swap2 in one batch.
      setup <- doFA2Setup @("addresses" :# 50) @("tokens" :# 2)
      let admin1 ::< admin2 ::< remainingAddresses = sAddresses setup
      let addresses = take (fromIntegral numOffers) (Sized.toList remainingAddresses)
      let tokenId1 ::< tokenId2 ::< SNil = sTokens setup
      fa2_1 <- originateFA2 "fa2_1" setup []
      fa2_2 <- originateFA2 "fa2_2" setup []
      swap1 <- originateOffchainCollections admin1 (toAddress fa2_1)
      swap2 <- originateOffchainCollections admin2 (toAddress fa2_2)
      withSender admin1 $ do
        addOperatorOnTokens [tokenId1, tokenId2] (toAddress swap1) admin1 fa2_1
      withSender admin2 $ do
        addOperatorOnTokens [tokenId1, tokenId2] (toAddress swap2) admin2 fa2_2
      withSender admin1 $ do
        call swap1 (Call @"Start") $ mkNOffers numOffers SwapOffer
          { assetsOffered = Basic.tokens $ mkFA2Assets fa2_1 [(tokenId1, token1Offer), (tokenId2, token2Offer)]
          , assetsRequested = []
          }
      withSender admin2 $ do
        call swap2 (Call @"Start") $ mkNOffers numOffers SwapOffer
          { assetsOffered = Basic.tokens $ mkFA2Assets fa2_2 [(tokenId1, token1Offer), (tokenId2, token2Offer)]
          , assetsRequested = []
          }
      let acceptParam = AcceptParam
            { swapId = Basic.initSwapId
            , tokensSent = Set.empty
            }
      withSender admin1 $ do
        offchainAcceptAllConsecutive Set.empty addresses swap1
      withSender admin2 $ do
        offchainAcceptBatch acceptParam addresses swap2
      -- Both executions must converge on identical swap storage.
      swapStorage1 <- toVal . swaps <$> getStorage' swap1
      swapStorage2 <- toVal . swaps <$> getStorage' swap2
      swapStorage1 @== swapStorage2
-- | Property: an offchain accept on behalf of a buyer who holds none of
-- a requested token fails with @errSwapRequestedFA2BalanceInvalid 1 0@
-- (1 required, 0 held).
hprop_Accepting_with_zero_balance_fails :: Property
hprop_Accepting_with_zero_balance_fails =
  property $ do
    clevelandProp $ do
      setup <- doFA2Setup
      let admin ::< SNil = sAddresses setup
      let adminToken ::< tokenId1 ::< tokenId2 ::< SNil = sTokens setup
      let tokensList = [tokenId1, tokenId2]
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      -- Fresh address: owns none of the FA2 tokens.
      addressWithZeroBalance <- newAddress "test"
      withSender admin $
        addCollection (Set.fromList tokensList) swap
      withSender addressWithZeroBalance $
        addOperatorOnTokens tokensList (toAddress swap) addressWithZeroBalance fa2
      withSender admin $ do
        addOperatorOnTokens [adminToken] (toAddress swap) admin fa2
      withSender admin $
        call swap (Call @"Start") $ mkSingleOffer SwapOffer
          { assetsOffered = Basic.tokens $ mkFA2Assets fa2 [(adminToken, 10)]
          , assetsRequested = [initCollectionId, initCollectionId]
          }
      let tokensSent = Set.fromList [(initCollectionId, tokenId1), (initCollectionId, tokenId2)]
      withSender admin
        (offchainAccept tokensSent addressWithZeroBalance swap
          & expectTransferFailure [failedWith $ constant (errSwapRequestedFA2BalanceInvalid 1 0)])
-- | Property: only the admin may call @Start@; a call from any other
-- address must fail with 'errNotAdmin'.
hprop_Start_callable_by_admin_only :: Property
hprop_Start_callable_by_admin_only =
  property $ do
    TestData{numOffers} <- forAll genTestData
    clevelandProp $ do
      setup <- doFA2Setup
      let admin ::< nonAdmin ::< SNil = sAddresses setup
      let !SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      -- A non-admin trying to start a (content-free) batch of offers is rejected.
      withSender nonAdmin
        (call swap (Call @"Start") (mkNOffers numOffers SwapOffer
          { assetsOffered = []
          , assetsRequested = []
          }) & expectError errNotAdmin)
-- | Property: accepting one offer pays the offered tokens to the buyer,
-- while the tokens the buyer sends to satisfy the requested collections
-- end up at 'nullAddress' (i.e. are burned).  The burn address recorded
-- in contract storage itself stays unchanged.
hprop_Correct_final_balances_on_acceptance :: Property
hprop_Correct_final_balances_on_acceptance =
  property $ do
    TestData{numOffers, token1Offer, token2Offer} <- forAll genTestData
    clevelandProp $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let tokenId1 ::< tokenId2 ::< tokenId3 ::< tokenId4 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      -- Both parties make the swap contract an operator of the tokens it
      -- must move on their behalf.
      withSender alice $
        addOperatorOnTokens' [tokenId1, tokenId2] (toAddress swap) alice fa2
      withSender admin $ do
        addOperatorOnTokens' [tokenId1, tokenId2] (toAddress swap) admin fa2
        -- Collection 1 = {token1, token2}; collection 2 = {token3, token4}.
        addCollection' (Set.fromList [tokenId1, tokenId2]) swap
        addCollection' (Set.fromList [tokenId3, tokenId4]) swap
      assertingBurnAddressUnchanged swap $ do
        -- The admin escrows the full offered amounts for all offers up
        -- front; one of each token alice sends is burned, and she keeps
        -- what a single acceptance pays out minus the tokens she sent.
        assertingBalanceDeltas fa2
          [ (admin, tokenId1) -: negateInteger (fromIntegral $ token1Offer * numOffers)
          , (admin, tokenId2) -: negateInteger (fromIntegral $ token2Offer * numOffers)
          , (nullAddress, tokenId1) -: 1
          , (nullAddress, tokenId2) -: 1
          , (nullAddress, tokenId3) -: 1
          , (nullAddress, tokenId4) -: 1
          , (alice, tokenId1) -: fromIntegral (token1Offer - 1)
          , (alice, tokenId2) -: fromIntegral (token2Offer - 1)
          ] $ do
          withSender admin $
            call swap (Call @"Start") $ mkNOffers numOffers SwapOffer
              { assetsOffered = Basic.tokens $ mkFA2Assets fa2 [(tokenId1, token1Offer), (tokenId2, token2Offer)]
              , assetsRequested = [initCollectionId, initCollectionId, incrementCollectionId initCollectionId, incrementCollectionId initCollectionId]
              }
          let tokensSent = Set.fromList
                [ (initCollectionId, tokenId1), (initCollectionId, tokenId2)
                , (incrementCollectionId initCollectionId, tokenId3)
                , (incrementCollectionId initCollectionId, tokenId4)
                ]
          -- The admin submits alice's signed permit (offchain accept).
          withSender admin $
            offchainAccept tokensSent alice swap
-- | Property: starting a batch of offers and then cancelling it is a no-op
-- balance-wise for every tracked address, including the burn address.
hprop_Correct_final_balances_on_cancel :: Property
hprop_Correct_final_balances_on_cancel =
  property $ do
    TestData{numOffers, token1Offer, token2Offer} <- forAll genTestData
    clevelandProp $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let tokenId1 ::< tokenId2 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $
        addOperatorOnTokens' [tokenId1, tokenId2] (toAddress swap) admin fa2
      assertingBurnAddressUnchanged swap $ do
        -- All deltas must be zero after Start followed by Cancel.
        assertingBalanceDeltas fa2
          [ (admin, tokenId1) -: 0
          , (admin, tokenId2) -: 0
          , (alice, tokenId1) -: 0
          , (alice, tokenId2) -: 0
          , (nullAddress, tokenId1) -: 0
          , (nullAddress, tokenId2) -: 0
          ] $ do
            withSender admin $
              call swap (Call @"Start") $ mkNOffers numOffers SwapOffer
                { assetsOffered = Basic.tokens $ mkFA2Assets fa2 [(tokenId1, token1Offer), (tokenId2, token2Offer)]
                , assetsRequested = []
                }
            withSender admin $
              call swap (Call @"Cancel") Basic.initSwapId
-- | Property: starting a batch of offers debits exactly
-- @offer * numOffers@ of each offered token from the admin.
hprop_Correct_num_tokens_transferred_to_contract_on_start :: Property
hprop_Correct_num_tokens_transferred_to_contract_on_start =
  property $ do
    TestData{numOffers, token1Offer, token2Offer} <- forAll genTestData
    clevelandProp $ do
      setup <- doFA2Setup
      let admin ::< SNil = sAddresses setup
      let tokenId1 ::< tokenId2 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $
        addOperatorOnTokens' [tokenId1, tokenId2] (toAddress swap) admin fa2
      assertingBurnAddressUnchanged swap $ do
        assertingBalanceDeltas fa2
          [ (admin, tokenId1) -: negateInteger (fromIntegral $ token1Offer * numOffers)
          , (admin, tokenId2) -: negateInteger (fromIntegral $ token2Offer * numOffers)
          ] $ do
            withSender admin $
              call swap (Call @"Start") $ mkNOffers numOffers SwapOffer
                { assetsOffered = Basic.tokens $ mkFA2Assets fa2 [(tokenId1, token1Offer), (tokenId2, token2Offer)]
                , assetsRequested = []
                }
-- | Property: after every offer in the batch has been accepted, the swap
-- contract's own net balance in the offered tokens is zero.
hprop_Contract_balance_goes_to_zero_when_sale_concludes :: Property
hprop_Contract_balance_goes_to_zero_when_sale_concludes =
  property $ do
    TestData{numOffers, token1Offer, token2Offer} <- forAll genTestData
    clevelandProp $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let tokenId1 ::< tokenId2 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      let swapAddress = toAddress swap
      withSender alice $
        addOperatorOnTokens' [tokenId1, tokenId2] (toAddress swap) alice fa2
      withSender admin $
        addOperatorOnTokens' [tokenId1, tokenId2] (toAddress swap) admin fa2
      assertingBurnAddressUnchanged swap $ do
        assertingBalanceDeltas fa2
          [ (swapAddress, tokenId1) -: 0
          , (swapAddress, tokenId2) -: 0
          ] $ do
            withSender admin $ do
              addCollection' (Set.fromList [tokenId1, tokenId2]) swap
              call swap (Call @"Start") $ mkNOffers numOffers SwapOffer
                { assetsOffered = Basic.tokens $ mkFA2Assets fa2 [(tokenId1, token1Offer), (tokenId2, token2Offer)]
                , assetsRequested = [initCollectionId, initCollectionId]
                }
            -- Accept each of the `numOffers` offers one at a time.
            withSender admin $
              replicateM_ (fromIntegral numOffers) $ do
                offchainAccept (Set.fromList [(initCollectionId, tokenId1), (initCollectionId, tokenId2)]) alice swap
-- | Umbrella suite collecting the basic collections-functionality checks.
test_CollectionsIntegrational :: TestTree
test_CollectionsIntegrational =
  testGroup "Basic colections functionality" subtrees
  where
    subtrees =
      [ statusChecks
      , swapIdChecks
      , authorizationChecks
      , invalidFA2sChecks
      , swapTokensSentChecks
      ]
-- | Operations on a swap that is already finished or cancelled must fail
-- with the corresponding status error.
statusChecks :: TestTree
statusChecks = testGroup "Statuses"
  [ nettestScenarioCaps "Operations with accepted swap fail" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let !SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $ do
        -- A single empty offer: the first accept consumes it...
        call swap (Call @"Start") $ mkSingleOffer $ SwapOffer [] []
        let tokensSent = Set.empty
        offchainAccept' tokensSent alice swap
        -- ...so every further accept must report the swap as finished.
        offchainAccept' tokensSent alice swap
          & expectError errSwapFinished
        offchainAccept' tokensSent alice swap
          & expectError errSwapFinished
  , nettestScenarioCaps "Operations with cancelled swap fail" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let !SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $ do
        call swap (Call @"Start") $ mkSingleOffer $ SwapOffer [] []
        let tokensSent = Set.empty
        call swap (Call @"Cancel") Basic.initSwapId
        -- Accepting or re-cancelling a cancelled swap must both fail.
        offchainAccept' tokensSent alice swap
          & expectError errSwapCancelled
        call swap (Call @"Cancel") Basic.initSwapId
          & expectError errSwapCancelled
  ]
-- | The set of tokens sent with an accept must match the requested
-- collections exactly: not fewer, not more, and not different tokens.
swapTokensSentChecks :: TestTree
swapTokensSentChecks = testGroup "TokensSent"
  [ nettestScenarioCaps "Sending too few tokens fails" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let tokenId1 ::< tokenId2 ::< tokenId3 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $ do
        addCollection' (Set.fromList [tokenId1, tokenId2, tokenId3]) swap
      withSender alice $
        addOperatorOnTokens' [tokenId1, tokenId2, tokenId3] (toAddress swap) alice fa2
      withSender admin $
        call swap (Call @"Start") $ mkSingleOffer SwapOffer
          { assetsOffered = []
          , assetsRequested = [initCollectionId, initCollectionId, initCollectionId]
          }
      -- Three tokens requested, only two sent.
      let tokensSent = Set.fromList [(initCollectionId , tokenId1), (initCollectionId , tokenId3)]
      withSender admin
        (offchainAccept' tokensSent alice swap)
        & expectError errTokensSentInvalid
  , nettestScenarioCaps "Sending too many tokens fails" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let tokenId1 ::< tokenId2 ::< tokenId3 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $ do
        addCollection' (Set.fromList [tokenId1, tokenId2, tokenId3]) swap
      withSender alice $
        addOperatorOnTokens' [tokenId1, tokenId2, tokenId3] (toAddress swap) alice fa2
      withSender admin $
        call swap (Call @"Start") $ mkSingleOffer SwapOffer
          { assetsOffered = []
          , assetsRequested = [initCollectionId, initCollectionId]
          }
      -- Two tokens requested, three sent.
      let tokensSent = Set.fromList [(initCollectionId , tokenId1), (initCollectionId , tokenId2), (initCollectionId , tokenId3)]
      withSender admin
        (offchainAccept' tokensSent alice swap)
        & expectError errTokensSentInvalid
  , nettestScenarioCaps "Sending incorrect tokens fails" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let tokenId1 ::< tokenId2 ::< tokenId3 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      -- The registered collection holds token1/token2 only; token3 is not in it.
      withSender admin $ do
        addCollection' (Set.fromList [tokenId1, tokenId2]) swap
      withSender alice $
        addOperatorOnTokens' [tokenId1, tokenId2, tokenId3] (toAddress swap) alice fa2
      withSender admin $
        call swap (Call @"Start") $ mkSingleOffer SwapOffer
          { assetsOffered = []
          , assetsRequested = [initCollectionId, initCollectionId]
          }
      let tokensSent = Set.fromList [(initCollectionId , tokenId1), (initCollectionId , tokenId3)]
      withSender admin
        (offchainAccept' tokensSent alice swap)
        & expectError errTokensSentInvalid
  ]
-- | Swap ids are assigned consecutively starting from `initSwapId`, and
-- operations on ids that were never created fail with `errSwapNotExist`.
swapIdChecks :: TestTree
swapIdChecks = testGroup "SwapIds"
  [ nettestScenarioCaps "Swap ids are properly assigned and can be worked with" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let tokenId1 ::< tokenId2 ::< tokenId3 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      -- Three singleton collections, one per token.
      withSender admin $ do
        addCollection' (Set.fromList [tokenId1]) swap
        addCollection' (Set.fromList [tokenId2]) swap
        addCollection' (Set.fromList [tokenId3]) swap
      withSender alice $
        addOperatorOnTokens' [tokenId1, tokenId2, tokenId3] (toAddress swap) alice fa2
      -- One swap per collection; the swaps receive consecutive ids.
      withSender admin $
        for_ [CollectionId 1, CollectionId 2, CollectionId 3] $ \collectionId ->
          call swap (Call @"Start") $ mkSingleOffer SwapOffer
            { assetsOffered = []
            , assetsRequested = [collectionId]
            }
      -- Accept only the first and third swaps: token2 must stay untouched.
      assertingBalanceDeltas fa2
        [ (alice, tokenId1) -: -1
        , (alice, tokenId2) -: 0
        , (alice, tokenId3) -: -1
        ] $ do
          withSender admin $ do
            (\acceptParam -> offchainAcceptSwapId' acceptParam alice swap) AcceptParam {
              swapId = Basic.initSwapId ,
              tokensSent = one $ (initCollectionId, tokenId1)
              }
            (\acceptParam -> offchainAcceptSwapId' acceptParam alice swap) AcceptParam {
              swapId = Basic.incrementSwapId $ Basic.incrementSwapId Basic.initSwapId ,
              tokensSent = one $ (incrementCollectionId $ incrementCollectionId $ initCollectionId, tokenId3)
              }
  , nettestScenarioCaps "Accessing non-existing swap fails respectively" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let !SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      -- Before any swap exists, both accept and cancel must fail.
      withSender admin $
        offchainAccept' Set.empty alice swap
          & expectError errSwapNotExist
      withSender admin
        (call swap (Call @"Cancel") Basic.initSwapId
          & expectError errSwapNotExist)
      withSender admin $ do
        call swap (Call @"Start") $ mkSingleOffer (SwapOffer [] [])
        -- The only valid id is `initSwapId`; its successor does not exist.
        (\acceptParam -> offchainAcceptSwapId' acceptParam alice swap) AcceptParam {
          swapId = Basic.incrementSwapId Basic.initSwapId ,
          tokensSent = Set.empty
          }
          & expectError errSwapNotExist
        call swap (Call @"Cancel") (Basic.incrementSwapId Basic.initSwapId)
          & expectError errSwapNotExist
        -- The existing id still works fine.
        (\acceptParam -> offchainAcceptSwapId' acceptParam alice swap) AcceptParam {
          swapId = Basic.initSwapId ,
          tokensSent = Set.empty
          }
  ]
-- | Only the configured admin may cancel a swap; other senders are rejected
-- with `errNotAdmin`.
authorizationChecks :: TestTree
authorizationChecks = testGroup "Authorization checks"
  [ nettestScenarioCaps "Swap can be cancelled by seller only" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let !SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender admin $
        call swap (Call @"Start") $ mkSingleOffer $ SwapOffer [] []
      -- No `withSender` here: this call runs as the scenario's default
      -- sender, which is not the admin.
      call swap (Call @"Cancel") Basic.initSwapId
        & expectError errNotAdmin
      withSender alice $
        call swap (Call @"Cancel") Basic.initSwapId
          & expectError errNotAdmin
  ]
-- | Swaps pointing at a contract that is not an FA2, or at an address with
-- no contract at all, must be rejected both for offered and requested assets.
invalidFA2sChecks :: TestTree
invalidFA2sChecks = testGroup "Invalid FA2s"
  [ nettestScenarioCaps "Swap can be cancelled by seller only" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let tokenId1 ::< SNil = sTokens setup
      -- A contract that is not an FA2 at all...
      fakeFa2 <- originateSimple "fake-fa2" ([] :: [Integer]) contractConsumer
      -- ...and an address with no contract behind it.
      let nonExistingFa2 = ContractHandler "non-existing FA2"
            (unsafeParseAddress "tz1b7p3PPBd3vxmMHZkvtC61C7ttYE6g683F")
      let pseudoFa2s = [("fake FA2", fakeFa2), ("non existing FA2", nonExistingFa2)]
      for_ pseudoFa2s $ \(desc, fa2) -> do
        comment $ "Trying " <> desc
        let fa2Address = toAddress fa2
        swap <- originateOffchainCollections admin fa2Address
        withSender admin $
          addCollection' (Set.fromList [tokenId1]) swap
        comment "Checking offered FA2"
        withSender admin $
          call swap (Call @"Start") (mkSingleOffer SwapOffer
            { assetsOffered = Basic.tokens $ mkFA2Assets fa2 [(tokenId1, 1)]
            , assetsRequested = []
            })
            & expectError errSwapOfferedFA2Invalid
        comment "Checking requested FA2"
        withSender admin $ do
          call swap (Call @"Start") $ mkSingleOffer SwapOffer
            { assetsOffered = []
            , assetsRequested = [initCollectionId]
            }
          let tokensSent = Set.fromList [(initCollectionId , tokenId1)]
          withSender admin
            (offchainAccept' tokensSent alice swap)
            & expectError errSwapRequestedFA2Invalid
  ]
-- | End-to-end happy path: the admin offers 10 of `adminToken` for three
-- tokens drawn from two collections, and alice accepts on-chain via @Accept@.
--
-- Fix: `testGroup` expects a list of trees; the opening `[` was missing in
-- the original, leaving the trailing `]` unbalanced (a parse error).
test_Integrational :: TestTree
test_Integrational = testGroup "Integrational"
  [ nettestScenarioCaps "Simple accepted swap" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let adminToken ::< tokenId1 ::< tokenId2 ::< tokenId3 ::< tokenId4 ::< tokenId5 ::< SNil = sTokens setup
      fa2 <- originateFA2 "fa2" setup []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      withSender alice $
        addOperatorOnTokens' [tokenId1, tokenId2, tokenId3, tokenId4, tokenId5] (toAddress swap) alice fa2
      withSender admin $
        addOperatorOnTokens' [adminToken] (toAddress swap) admin fa2
      assertingBalanceDeltas fa2
        [ (admin, adminToken) -: -10
        , (admin, tokenId1) -: 0
        , (admin, tokenId2) -: 0
        , (nullAddress, tokenId1) -: 1
        , (nullAddress, tokenId2) -: 1
        , (nullAddress, adminToken) -: 0
        , (alice, tokenId1) -: -1
        , (alice, tokenId2) -: -1
        , (alice, adminToken) -: 10
        ] $ do
          withSender admin $ do
            addCollection' (Set.fromList [tokenId1, tokenId2, tokenId3, tokenId4, tokenId5]) swap
            addCollection' (Set.fromList [tokenId1, tokenId4, tokenId5]) swap
            call swap (Call @"Start") $ mkSingleOffer SwapOffer
              { assetsOffered = Basic.tokens $ mkFA2Assets fa2 [(adminToken, 10)]
              , assetsRequested = [initCollectionId, initCollectionId, incrementCollectionId initCollectionId]
              }
          withSender alice $
            call swap (Call @"Accept") AcceptParam
              {
                swapId = Basic.initSwapId ,
                tokensSent = Set.fromList [(initCollectionId, tokenId1), (initCollectionId, tokenId2), (incrementCollectionId initCollectionId, tokenId5)]
              }
  ]
-- | Same happy path as 'test_Integrational', but the swap contract is
-- authorized through the FA2's global-operators mechanism instead of
-- per-token operator updates.
--
-- Fix: `testGroup` expects a list of trees; the opening `[` was missing in
-- the original, leaving the trailing `]` unbalanced (a parse error).
test_IntegrationalWithFA2GlobalOperators :: TestTree
test_IntegrationalWithFA2GlobalOperators = testGroup "Integrational"
  [ nettestScenarioCaps "Simple accepted swap" $ do
      setup <- doFA2Setup
      let admin ::< alice ::< SNil = sAddresses setup
      let adminToken ::< tokenId1 ::< tokenId2 ::< tokenId3 ::< tokenId4 ::< tokenId5 ::< SNil = sTokens setup
      fa2 <- originateFA2WithGlobalOperators "fa2" setup Set.empty admin []
      let fa2Address = toAddress fa2
      swap <- originateOffchainCollections admin fa2Address
      -- One global-operator registration replaces all per-token updates.
      withSender admin $
        call fa2 (Call @"Update_global_operators") (one $ toAddress swap)
      assertingBalanceDeltas' fa2
        [ (admin, adminToken) -: -10
        , (admin, tokenId1) -: 0
        , (admin, tokenId2) -: 0
        , (nullAddress, tokenId1) -: 1
        , (nullAddress, tokenId2) -: 1
        , (nullAddress, adminToken) -: 0
        , (alice, tokenId1) -: -1
        , (alice, tokenId2) -: -1
        , (alice, adminToken) -: 10
        ] $ do
          withSender admin $ do
            addCollection' (Set.fromList [tokenId1, tokenId2, tokenId3, tokenId4, tokenId5]) swap
            addCollection' (Set.fromList [tokenId1, tokenId4, tokenId5]) swap
            call swap (Call @"Start") $ mkSingleOffer SwapOffer
              { assetsOffered = Basic.tokens $ mkFA2Assets fa2 [(adminToken, 10)]
              , assetsRequested = [initCollectionId, initCollectionId, incrementCollectionId initCollectionId]
              }
          withSender alice $
            call swap (Call @"Accept") AcceptParam
              {
                swapId = Basic.initSwapId
              , tokensSent = Set.fromList [(initCollectionId, tokenId1), (initCollectionId, tokenId2), (incrementCollectionId initCollectionId, tokenId5)]
              }
  ]
-- | Reuses the generic admin-ownership-transfer checks, instantiated with
-- this module's swap-contract originator.
test_AdminChecks :: TestTree
test_AdminChecks =
  adminOwnershipTransferChecks
    (\admin -> originateOffchainCollections admin exampleFA2Address)
-- | Call the @Add_collection@ entrypoint with the given set of token ids
-- (nettest-scenario variant).
addCollection' :: (HasCallStack, MonadNettest caps base m) => Set TokenId -> ContractHandler OffchainCollectionsEntrypoints st -> m ()
addCollection' tokenIds swapContract =
  call swapContract (Call @"Add_collection") tokenIds
-- | Register every non-empty subset of the given tokens as a collection and
-- return the registered collections.
--
-- Fix: this was a byte-for-byte duplicate of 'addCollections' (identical
-- constraints and behavior); delegate instead of maintaining two copies.
addCollections' :: (HasCallStack, MonadEmulated caps base m) => [TokenId] -> ContractHandler OffchainCollectionsEntrypoints st -> m [[TokenId]]
addCollections' = addCollections
-- | Call the @Add_collection@ entrypoint with the given set of token ids
-- (emulated-scenario variant).
addCollection :: (HasCallStack, MonadEmulated caps base m) => Set TokenId -> ContractHandler OffchainCollectionsEntrypoints st -> m ()
addCollection tokenIds swapContract =
  call swapContract (Call @"Add_collection") tokenIds
-- | Register every non-empty subset of the given tokens as a collection
-- (sorted, in power-set enumeration order) and return those subsets.
addCollections :: (HasCallStack, MonadEmulated caps base m) => [TokenId] -> ContractHandler OffchainCollectionsEntrypoints st -> m [[TokenId]]
addCollections tokenIds swapContract = do
  -- `filterM (const [True, False])` enumerates every sublist of `tokenIds`
  -- (the power set); empty sublists are dropped and each one is sorted.
  let subsets = filterM (const [True, False]) tokenIds
      collections = map sort (filter (not . null) subsets)
  forM_ collections $ \collection ->
    addCollection (Set.fromList collection) swapContract
  return collections
-- | Make 'operator' an FA2 operator on behalf of 'owner' for each of the
-- given token ids (emulated-scenario variant).
addOperatorOnTokens :: (HasCallStack, MonadEmulated caps base m) => [TokenId] -> Address -> Address -> ContractHandler FA2.FA2SampleParameter st -> m ()
addOperatorOnTokens tokens operator owner fa2 =
  call fa2 (Call @"Update_operators")
    [ FA2I.AddOperator FA2I.OperatorParam
        { opOwner = owner
        , opOperator = toAddress operator
        , opTokenId = tokenId
        }
    | tokenId <- tokens
    ]
-- | Make 'operator' an FA2 operator on behalf of 'owner' for each of the
-- given token ids (nettest-scenario variant).
addOperatorOnTokens' :: (HasCallStack, MonadNettest caps base m) => [TokenId] -> Address -> Address -> ContractHandler FA2.FA2SampleParameter st -> m ()
addOperatorOnTokens' tokens operator owner fa2 =
  call fa2 (Call @"Update_operators")
    [ FA2I.AddOperator FA2I.OperatorParam
        { opOwner = owner
        , opOperator = toAddress operator
        , opTokenId = tokenId
        }
    | tokenId <- tokens
    ]
-- | Have each address in turn accept the first swap off-chain with the same
-- set of tokens.
offchainAcceptAllConsecutive :: (HasCallStack, MonadEmulated caps base m) => Set (CollectionId, TokenId) -> [Address] -> ContractHandler OffchainCollectionsEntrypoints st -> m ()
offchainAcceptAllConsecutive tokensSent buyers contract =
  mapM_ (\buyer -> offchainAccept tokensSent buyer contract) buyers
-- | Build one signed off-chain permit per buyer for the same 'AcceptParam'
-- and submit them all in a single @Offchain_accept@ call.
-- NOTE(review): every permit signs the same bytes (see 'mkPermitToSign',
-- which uses a fixed counter) — confirm this matches the contract's scheme.
offchainAcceptBatch :: (HasCallStack, MonadEmulated caps base m) => AcceptParam -> [Address] -> ContractHandler OffchainCollectionsEntrypoints st -> m ()
offchainAcceptBatch acceptParam buyers contract = do
  param <- forM buyers $ \buyer -> do
    buyerPK <- getPublicKey buyer
    unsigned <- mkPermitToSign acceptParam contract
    signature <- signBytes unsigned buyer
    return OffchainAcceptParam {
      acceptParam = acceptParam
      , permit = Permit
        {
          signerKey = buyerPK
        , signature = signature
        }
      }
  call contract (Call @"Offchain_accept") (toList param)
-- | Produce the permit bytes together with the public key of a *different*,
-- freshly created address, so callers can build a permit whose signer key
-- will not match the actual signature.
mkPermitToForge :: (HasCallStack, MonadEmulated caps base m) => AcceptParam -> ContractHandler OffchainCollectionsEntrypoints st -> m (ByteString, PublicKey)
mkPermitToForge acceptParam contract = do
  aliasAddress <- newAddress "forged"
  aliasPK <- getPublicKey aliasAddress
  unsignedPermit <- mkPermitToSign acceptParam contract
  pure (unsignedPermit, aliasPK)
-- | Bytes a buyer must sign for an off-chain accept: the packed pair of
-- (chain id, contract address) and (counter, blake2b of the packed
-- 'AcceptParam').
-- NOTE(review): the permit counter is hard-coded to 0 — presumably fine for
-- single-use test permits; confirm against the contract's permit scheme.
mkPermitToSign :: (HasCallStack, MonadEmulated caps base m) => AcceptParam -> ContractHandler OffchainCollectionsEntrypoints st -> m ByteString
mkPermitToSign acceptParam contract = do
  marketplaceChainId <- getChainId
  let unsigned = packValue' $ toVal ((marketplaceChainId, contractAddress), (0 :: Natural, acceptParamHash))
  pure unsigned
  where acceptParamHash = blake2b $ packValue' $ toVal acceptParam
        contractAddress = toAddress contract
-- | Duplicate of 'mkPermitToSign' under the weaker 'MonadNettest'
-- constraint (the two cannot share code in the other direction).
mkPermitToSign' :: (HasCallStack, MonadNettest caps base m) => AcceptParam -> ContractHandler OffchainCollectionsEntrypoints st -> m ByteString
mkPermitToSign' acceptParam contract = do
  marketplaceChainId <- getChainId
  let unsigned = packValue' $ toVal ((marketplaceChainId, contractAddress), (0 :: Natural, acceptParamHash))
  pure unsigned
  where acceptParamHash = blake2b $ packValue' $ toVal acceptParam
        contractAddress = toAddress contract
-- | Sign the accept permit as 'buyer' and submit it through the
-- @Offchain_accept@ entrypoint; the transaction itself is sent by the
-- current scenario sender, not the buyer.
offchainAcceptSwapId :: (HasCallStack, MonadEmulated caps base m) => AcceptParam -> Address -> ContractHandler OffchainCollectionsEntrypoints st -> m ()
offchainAcceptSwapId acceptParam buyer contract = do
  buyerPK <- getPublicKey buyer
  unsigned <- mkPermitToSign acceptParam contract
  signature <- signBytes unsigned buyer
  call contract (Call @"Offchain_accept")
    [OffchainAcceptParam
      {
        acceptParam = acceptParam
      , permit = Permit
        {
          signerKey = buyerPK
        , signature = signature
        }
      }
    ]
-- | Nettest variant of 'offchainAcceptSwapId' (uses 'mkPermitToSign'' for
-- the weaker constraint); otherwise identical.
offchainAcceptSwapId' :: (HasCallStack, MonadNettest caps base m) => AcceptParam -> Address -> ContractHandler OffchainCollectionsEntrypoints st -> m ()
offchainAcceptSwapId' acceptParam buyer contract = do
  buyerPK <- getPublicKey buyer
  unsigned <- mkPermitToSign' acceptParam contract
  signature <- signBytes unsigned buyer
  call contract (Call @"Offchain_accept")
    [OffchainAcceptParam
      {
        acceptParam = acceptParam
      , permit = Permit
        {
          signerKey = buyerPK
        , signature = signature
        }
      }
    ]
-- | Off-chain accept of the first swap (`initSwapId`) with the given tokens.
offchainAccept :: (HasCallStack, MonadEmulated caps base m) => Set (CollectionId, TokenId) -> Address -> ContractHandler OffchainCollectionsEntrypoints st -> m ()
offchainAccept sent buyer contract =
  offchainAcceptSwapId
    AcceptParam { swapId = Basic.initSwapId, tokensSent = sent }
    buyer
    contract
-- | Nettest variant of 'offchainAccept': accept the first swap
-- (`initSwapId`) with the given tokens.
offchainAccept' :: (HasCallStack, MonadNettest caps base m) => Set (CollectionId, TokenId) -> Address -> ContractHandler OffchainCollectionsEntrypoints st -> m ()
offchainAccept' sent buyer contract =
  offchainAcceptSwapId'
    AcceptParam { swapId = Basic.initSwapId, tokensSent = sent }
    buyer
    contract
-- | Like 'offchainAcceptSwapId', but the permit carries the public key of a
-- freshly forged address while the signature is made by 'buyer', producing a
-- deliberately inconsistent permit; returns the bytes that were signed.
offchainAcceptForged :: (HasCallStack, MonadEmulated caps base m) => AcceptParam -> Address -> ContractHandler OffchainCollectionsEntrypoints st -> m ByteString
offchainAcceptForged acceptParam buyer contract = do
  (unsigned, forgedPK) <- mkPermitToForge acceptParam contract
  signature <- signBytes unsigned buyer
  -- `unsigned <$ action` runs the call and returns the permit bytes —
  -- the idiomatic form of the original `(\() -> unsigned) <$> action`.
  unsigned <$ call contract (Call @"Offchain_accept")
    [ OffchainAcceptParam
        { acceptParam = acceptParam
        , permit = Permit
            { signerKey = forgedPK
            , signature = signature
            }
        }
    ]
-- | Run an action and relate the contract's `burnAddress` (read from
-- storage) before and after it with the supplied comparison
-- (e.g. '@==' or '@/=').
assertingBurnAddressStatus
  :: (MonadEmulated caps base m, HasCallStack)
  => ContractHandler b CollectionsStorage
  -> m a
  -> (Address -> Address -> m ())
  -> m a
assertingBurnAddressStatus swapContract action changedStatus = do
  initBurnAddress <- getBurnAddress swapContract
  res <- action
  finalBurnAddress <- getBurnAddress swapContract
  initBurnAddress `changedStatus` finalBurnAddress
  return res
  where
    getBurnAddress c =
      burnAddress <$> getStorage' c
-- | Assert that running the action leaves the contract's burn address
-- untouched.
assertingBurnAddressUnchanged
  :: (MonadEmulated caps base m, HasCallStack)
  => ContractHandler b CollectionsStorage
  -> m a
  -> m a
assertingBurnAddressUnchanged contract act =
  assertingBurnAddressStatus contract act (@==)
-- | Assert that running the action changes the contract's burn address.
assertingBurnAddressChanged
  :: (MonadEmulated caps base m, HasCallStack)
  => ContractHandler b CollectionsStorage
  -> m a
  -> m a
assertingBurnAddressChanged contract act =
  assertingBurnAddressStatus contract act (@/=)
|
389ebbc832081f58125398524a47111a1b76f805208686f6666ae31edd3bf9ef | exoscale/clojure-kubernetes-client | v1_event_list.clj | (ns clojure-kubernetes-client.specs.v1-event-list
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-event :refer :all]
[clojure-kubernetes-client.specs.v1-list-meta :refer :all]
)
(:import (java.io File)))
(declare v1-event-list-data v1-event-list)
;; Data-spec template for a Kubernetes v1 EventList: `items` is required,
;; the remaining top-level fields are optional.
(def v1-event-list-data
  {
   (ds/opt :apiVersion) string?
   (ds/req :items) (s/coll-of v1-event)
   (ds/opt :kind) string?
   (ds/opt :metadata) v1-list-meta
   })
;; Named spec derived from `v1-event-list-data` via spec-tools.
(def v1-event-list
  (ds/spec
    {:name ::v1-event-list
     :spec v1-event-list-data}))
| null | https://raw.githubusercontent.com/exoscale/clojure-kubernetes-client/79d84417f28d048c5ac015c17e3926c73e6ac668/src/clojure_kubernetes_client/specs/v1_event_list.clj | clojure | (ns clojure-kubernetes-client.specs.v1-event-list
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-event :refer :all]
[clojure-kubernetes-client.specs.v1-list-meta :refer :all]
)
(:import (java.io File)))
(declare v1-event-list-data v1-event-list)
;; Data-spec template for a Kubernetes v1 EventList: `items` is required,
;; the remaining top-level fields are optional.
(def v1-event-list-data
  {
   (ds/opt :apiVersion) string?
   (ds/req :items) (s/coll-of v1-event)
   (ds/opt :kind) string?
   (ds/opt :metadata) v1-list-meta
   })
;; Named spec derived from `v1-event-list-data` via spec-tools.
(def v1-event-list
  (ds/spec
    {:name ::v1-event-list
     :spec v1-event-list-data}))
| |
21a6ae5ddd6cbb1ccf2fc8c3242129eef43647859fbf9ae873a24b691a4225fe | pkamenarsky/knit | Eot.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
-- | @generics-eot@ tries to be a library for datatype generic programming
-- that is easy to understand. "eot" stands for "eithers of tuples".
--
-- A tutorial on how to use @generics-eot@ can be found here:
-- https://generics-eot.readthedocs.io/.
module Generics.Eot (
HasEot(..),
Named(..),
-- * Meta Information
Datatype(..),
Constructor(..),
Fields(..),
-- * Useful Re-exports
Generic,
Proxy(..),
Void,
absurd,
) where
import Data.Proxy
import Data.Void
import GHC.Exts (Constraint)
import GHC.Generics hiding (Datatype, Constructor)
import Generics.Eot.Datatype
import Generics.Eot.Eot
-- | An instance (@'HasEot' a@) allows us to
--
-- - convert values of an arbitrary algebraic datatype @a@ to and from a generic
-- representation (@'Eot' a@) (see 'toEot' and 'fromEot').
-- - extract meta information about the type @a@ (see 'datatype').
--
-- Once an algebraic datatype has an instance for 'GHC.Generics.Generic' it
-- automatically gets one for 'HasEot'.
-- Fix: several haddock lines inside the class body had lost their `--`
-- markers during extraction (leaving syntactically invalid residue) and some
-- were garbled; the comments are restored, all code lines are unchanged.
class HasEot a where
  -- | 'Eot' is a type level function that maps arbitrary ADTs to isomorphic
  -- generic representations. Here's an example:
  --
  -- > data Foo = A Int Bool | B String
  --
  -- would be mapped to:
  --
  -- > Either (Int, (Bool, ())) (Either (String, ()) Void)
  --
  -- These representations follow these rules:
  --
  -- - The choice between constructors is mapped to right-nested 'Either's.
  -- - There's always a so-called end-marker 'Void'. It's an invalid choice (and
  --   'Void' is uninhabited to make sure you don't accidentally create such a value).
  --   So e.g. @data Foo = A@ would be mapped to @Either () Void@, and a type
  --   with no constructors is mapped to @Void@.
  -- - The fields of one constructor are mapped to right-nested tuples.
  -- - Again there's always an end-marker, this time of type @()@.
  --   A constructor with three fields @a@, @b@, @c@ is mapped to
  --   @(a, (b, (c, ())))@, one field @a@ is mapped to @(a, ())@, and no
  --   fields are mapped to @()@ (just the end-marker).
  --
  -- These rules (and the end-markers) are necessary to make sure generic
  -- functions know exactly which parts of the generic representation are field
  -- types and which parts belong to the generic skeleton.
  type Eot a :: *
  -- | Convert a value of type @a@ to its generic representation.
  toEot :: a -> Eot a
  -- | Convert a value in a generic representation to @a@ (inverse of 'toEot').
  fromEot :: Eot a -> a
  -- | Extract meta information about the ADT.
  datatype :: Proxy a -> Datatype
-- Every 'Generic' type automatically gets a 'HasEot' instance by reusing
-- the conversion machinery from "Generics.Eot.Eot" and the meta-information
-- machinery from "Generics.Eot.Datatype".
instance (Generic a, ImpliedByGeneric a c f) => HasEot a where
  type Eot a = EotG (Rep a)
  toEot = toEotG . from
  fromEot = to . fromEotG
  datatype Proxy = datatypeC (Proxy :: Proxy (Rep a))
-- Constraints that every 'Generic' representation satisfies, bundled into a
-- single type family so the instance head above stays small.
type family ImpliedByGeneric a c f :: Constraint where
  ImpliedByGeneric a c f =
    (GenericDatatype (Rep a),
     Rep a ~ D1 c f,
     GenericConstructors f,
     HasEotG (Rep a))
| null | https://raw.githubusercontent.com/pkamenarsky/knit/fc1f73f46f34db871cf3a5c7639d336cafe93cb9/vendor/generics-eot/src/Generics/Eot.hs | haskell | | @generics-eot@ tries to be a library for datatype generic programming
that is easy to understand. "eot" stands for "eithers of tuples".
-eot.readthedocs.io/.
* Meta Information
* Useful Re-exports
| An instance (@'HasEot' a@) allows us to
- convert values of an arbitrary algebraic datatype @a@ to and from a generic
representation (@'Eot' a@) (see 'toEot' and 'fromEot').
- extract meta information about the type @a@ (see 'datatype').
Once an algebraic datatype has an instance for 'GHC.Generics.Generic' it
automatically gets one for 'HasEot'.
generic representations. Here's an example:
> data Foo = A Int Bool | B String
would be mapped to:
These representations follow these rules:
- The choice between constructors is mapped to right-nested 'Either's.
- There's always a so-called end-marker 'Void'. It's an invalid choice (and
'Void' is uninhabited to make sure you don't accidentally create such a value).
with no constructors is mapped to @Void@.
These rules (and the end-markers) are necessary to make sure generic
functions know exactly which parts of the generic representation are field
types and which parts belong to the generic skeleton.
| Convert a value of type @a@ to its generic representation.
| Convert a value in a generic representation to @a@ (inverse of 'toEot'). | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
A tutorial on how to use @generics - eot@ can be found here :
module Generics.Eot (
HasEot(..),
Named(..),
Datatype(..),
Constructor(..),
Fields(..),
Generic,
Proxy(..),
Void,
absurd,
) where
import Data.Proxy
import Data.Void
import GHC.Exts (Constraint)
import GHC.Generics hiding (Datatype, Constructor)
import Generics.Eot.Datatype
import Generics.Eot.Eot
-- Fix: the comment lines inside this class body had lost their `--` markers
-- during extraction (syntactically invalid residue) and were garbled; they
-- are restored as proper haddock, all code lines are unchanged.
class HasEot a where
  -- | 'Eot' is a type level function that maps arbitrary ADTs to isomorphic
  -- generic representations ("eithers of tuples"), e.g.
  --
  -- > Either (Int, (Bool, ())) (Either (String, ()) Void)
  --
  -- So e.g. @data Foo = A@ would be mapped to @Either () Void@, and a type
  -- with no constructors is mapped to @Void@.
  -- The fields of one constructor are mapped to right-nested tuples, always
  -- closed by an end-marker of type @()@: a constructor with three fields
  -- @a@, @b@, @c@ is mapped to @(a, (b, (c, ())))@, one field @a@ is mapped
  -- to @(a, ())@, and no fields are mapped to @()@ (just the end-marker).
  type Eot a :: *
  -- | Convert a value of type @a@ to its generic representation.
  toEot :: a -> Eot a
  -- | Convert a value in a generic representation to @a@ (inverse of 'toEot').
  fromEot :: Eot a -> a
  -- | Extract meta information about the ADT.
  datatype :: Proxy a -> Datatype
-- Every 'Generic' type automatically gets a 'HasEot' instance by reusing
-- the conversion machinery from "Generics.Eot.Eot" and the meta-information
-- machinery from "Generics.Eot.Datatype".
instance (Generic a, ImpliedByGeneric a c f) => HasEot a where
  type Eot a = EotG (Rep a)
  toEot = toEotG . from
  fromEot = to . fromEotG
  datatype Proxy = datatypeC (Proxy :: Proxy (Rep a))
-- Constraints that every 'Generic' representation satisfies, bundled into a
-- single type family so the instance head above stays small.
type family ImpliedByGeneric a c f :: Constraint where
  ImpliedByGeneric a c f =
    (GenericDatatype (Rep a),
     Rep a ~ D1 c f,
     GenericConstructors f,
     HasEotG (Rep a))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.