_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
f0203dccdd454430e60ee0f7c9ca4461c3902d300e842d720e2115f84f9d7790 | Engelberg/instaparse | core.cljc | (ns instaparse.core
(#?(:clj :require :cljs :require-macros)
[instaparse.macros :refer [defclone
set-global-var!]])
(:require [clojure.walk :as walk]
[instaparse.gll :as gll]
[instaparse.cfg :as cfg]
[instaparse.failure :as fail]
[instaparse.print :as print]
[instaparse.reduction :as red]
[instaparse.transform :as t]
[instaparse.abnf :as abnf]
[instaparse.repeat :as repeat]
[instaparse.combinators-source :as c]
[instaparse.line-col :as lc]
[instaparse.viz :as viz]
[instaparse.util :refer [throw-illegal-argument-exception]]))
(def ^:dynamic *default-output-format* :hiccup)
(defn set-default-output-format!
"Changes the default output format. Input should be :hiccup or :enlive"
[type]
{:pre [(#{:hiccup :enlive} type)]}
(set-global-var! *default-output-format* type))
(def ^:dynamic *default-input-format* :ebnf)
(defn set-default-input-format!
"Changes the default input format. Input should be :abnf or :ebnf"
[type]
{:pre [(#{:abnf :ebnf} type)]}
(set-global-var! *default-input-format* type))
(declare failure? standard-whitespace-parsers enable-tracing!)
(defn- unhide-parser [parser unhide]
(case unhide
nil parser
:content
(assoc parser :grammar (c/unhide-all-content (:grammar parser)))
:tags
(assoc parser :grammar (c/unhide-tags (:output-format parser)
(:grammar parser)))
:all
(assoc parser :grammar (c/unhide-all (:output-format parser)
(:grammar parser)))))
(defn parse
"Use parser to parse the text. Returns first parse tree found
that completely parses the text. If no parse tree is possible, returns
a Failure object.
Optional keyword arguments:
:start :keyword (where :keyword is name of starting production rule)
:partial true (parses that don't consume the whole string are okay)
:total true (if parse fails, embed failure node in tree)
:unhide <:tags or :content or :all> (for this parse, disable hiding)
:optimize :memory (when possible, employ strategy to use less memory)
Clj only:
:trace true (print diagnostic trace while parsing)"
[parser text &{:as options}]
{:pre [(contains? #{:tags :content :all nil} (get options :unhide))
(contains? #{:memory nil} (get options :optimize))]}
(let [start-production
(get options :start (:start-production parser)),
partial?
(get options :partial false)
optimize?
(get options :optimize false)
unhide
(get options :unhide)
trace?
(get options :trace false)
#?@(:clj [_ (when (and trace? (not gll/TRACE)) (enable-tracing!))])
parser (unhide-parser parser unhide)]
(->> (cond
(:total options)
(gll/parse-total (:grammar parser) start-production text
partial? (red/node-builders (:output-format parser)))
(and optimize? (not partial?))
(let [result (repeat/try-repeating-parse-strategy parser text start-production)]
(if (failure? result)
(gll/parse (:grammar parser) start-production text partial?)
result))
:else
(gll/parse (:grammar parser) start-production text partial?))
#?(:clj (gll/bind-trace trace?)))))
(defn parses
"Use parser to parse the text. Returns lazy seq of all parse trees
that completely parse the text. If no parse tree is possible, returns
() with a Failure object attached as metadata.
Optional keyword arguments:
:start :keyword (where :keyword is name of starting production rule)
:partial true (parses that don't consume the whole string are okay)
:total true (if parse fails, embed failure node in tree)
:unhide <:tags or :content or :all> (for this parse, disable hiding)
Clj only:
:trace true (print diagnostic trace while parsing)"
[parser text &{:as options}]
{:pre [(contains? #{:tags :content :all nil} (get options :unhide))]}
(let [start-production
(get options :start (:start-production parser)),
partial?
(get options :partial false)
unhide
(get options :unhide)
trace?
(get options :trace false)
#?@(:clj [_ (when (and trace? (not gll/TRACE)) (enable-tracing!))])
parser (unhide-parser parser unhide)]
(->> (cond
(:total options)
(gll/parses-total (:grammar parser) start-production text
partial? (red/node-builders (:output-format parser)))
:else
(gll/parses (:grammar parser) start-production text partial?))
#?(:clj (gll/bind-trace trace?)))))
(defrecord Parser [grammar start-production output-format]
#?@(:clj
[clojure.lang.IFn
(invoke [parser text] (parse parser text))
(invoke [parser text key1 val1] (parse parser text key1 val1))
(invoke [parser text key1 val1 key2 val2] (parse parser text key1 val1 key2 val2))
(invoke [parser text key1 val1 key2 val2 key3 val3] (parse parser text key1 val1 key2 val2 key3 val3))
(applyTo [parser args] (apply parse parser args))]
:cljs
[IFn
(-invoke [parser text] (parse parser text))
(-invoke [parser text key1 val1] (parse parser text key1 val1))
(-invoke [parser text key1 val1 key2 val2] (parse parser text key1 val1 key2 val2))
(-invoke [parser text key1 val1 key2 val2 key3 val3] (parse parser text key1 val1 key2 val2 key3 val3))
(-invoke [parser text a b c d e f g h] (parse parser text a b c d e f g h))
(-invoke [parser text a b c d e f g h i j] (parse parser text a b c d e f g h i j))
(-invoke [parser text a b c d e f g h i j k l] (parse parser text a b c d e f g h i j k l))
(-invoke [parser text a b c d e f g h i j k l m n] (parse parser text a b c d e f g h i j k l m n))
(-invoke [parser text a b c d e f g h i j k l m n o p] (parse parser text a b c d e f g h i j k l m n o p))
(-invoke [parser text a b c d e f g h i j k l m n o p q r] (parse parser text a b c d e f g h i j k l m n o p))
(-invoke [parser text a b c d e f g h i j k l m n o p q r s more] (apply parse parser text a b c d e f g h i j k l m n o p q r s more))]))
#?(:clj
(defmethod clojure.core/print-method Parser [x writer]
(binding [*out* writer]
(println (print/Parser->str x))))
:cljs
(extend-protocol IPrintWithWriter
instaparse.core/Parser
(-pr-writer [parser writer _]
(-write writer (print/Parser->str parser)))))
(defn parser
"Takes a string specification of a context-free grammar,
or a URI for a text file containing such a specification (Clj only),
or a map of parser combinators and returns a parser for that grammar.
Optional keyword arguments:
:input-format :ebnf
or
:input-format :abnf
:output-format :enlive
or
:output-format :hiccup
:start :keyword (where :keyword is name of starting production rule)
:string-ci true (treat all string literals as case insensitive)
:auto-whitespace (:standard or :comma)
or
:auto-whitespace custom-whitespace-parser
Clj only:
:no-slurp true (disables use of slurp to auto-detect whether
input is a URI. When using this option, input
must be a grammar string or grammar map. Useful
for platforms where slurp is slow or not available.)"
[grammar-specification &{:as options}]
{:pre [(contains? #{:abnf :ebnf nil} (get options :input-format))
(contains? #{:enlive :hiccup nil} (get options :output-format))
(let [ws-parser (get options :auto-whitespace)]
(or (nil? ws-parser)
(contains? standard-whitespace-parsers ws-parser)
(and
(map? ws-parser)
(contains? ws-parser :grammar)
(contains? ws-parser :start-production))))]}
(let [input-format (get options :input-format *default-input-format*)
build-parser
(fn [spec output-format]
(binding [cfg/*case-insensitive-literals* (:string-ci options :default)]
(case input-format
:abnf (abnf/build-parser spec output-format)
:ebnf (cfg/build-parser spec output-format))))
output-format (get options :output-format *default-output-format*)
start (get options :start nil)
built-parser
(cond
(string? grammar-specification)
(let [parser
#?(:clj
(if (get options :no-slurp)
;; if :no-slurp is set to true, string is a grammar spec
(build-parser grammar-specification output-format)
otherwise , grammar - specification might be a URI ,
;; let's slurp to see
(try (let [spec (slurp grammar-specification)]
(build-parser spec output-format))
(catch java.io.FileNotFoundException e
(build-parser grammar-specification output-format))))
:cljs
(build-parser grammar-specification output-format))]
(if start (map->Parser (assoc parser :start-production start))
(map->Parser parser)))
(map? grammar-specification)
(let [parser
(cfg/build-parser-from-combinators grammar-specification
output-format
start)]
(map->Parser parser))
(vector? grammar-specification)
(let [start (if start start (grammar-specification 0))
parser
(cfg/build-parser-from-combinators (apply hash-map grammar-specification)
output-format
start)]
(map->Parser parser))
:else
#?(:clj
(let [spec (slurp grammar-specification)
parser (build-parser spec output-format)]
(if start (map->Parser (assoc parser :start-production start))
(map->Parser parser)))
:cljs
(throw-illegal-argument-exception
"Expected string, map, or vector as grammar specification, got "
(pr-str grammar-specification))))]
(let [auto-whitespace (get options :auto-whitespace)
; auto-whitespace is keyword, parser, or nil
whitespace-parser (if (keyword? auto-whitespace)
(get standard-whitespace-parsers auto-whitespace)
auto-whitespace)]
(if-let [{ws-grammar :grammar ws-start :start-production} whitespace-parser]
(assoc built-parser :grammar
(c/auto-whitespace (:grammar built-parser) (:start-production built-parser)
ws-grammar ws-start))
built-parser))))
#?(:clj
(defmacro defparser
"Takes a string specification of a context-free grammar,
or a string URI for a text file containing such a specification,
or a map/vector of parser combinators, and sets a variable to a parser for that grammar.
String specifications are processed at macro-time, not runtime, so this is an
appealing alternative to (def _ (parser \"...\")) for ClojureScript users.
Optional keyword arguments unique to `defparser`:
- :instaparse.abnf/case-insensitive true"
[name grammar & {:as opts}]
;; For each of the macro-time opts, ensure that they are the data
;; types we expect, not more complex quoted expressions.
{:pre [(or (nil? (:input-format opts))
(keyword? (:input-format opts)))
(or (nil? (:output-format opts))
(keyword? (:output-format opts)))
(contains? #{true false nil} (:string-ci opts))
(contains? #{true false nil} (:no-slurp opts))]}
(if (string? grammar)
`(def ~name
(map->Parser
~(binding [abnf/*case-insensitive* (:instaparse.abnf/case-insensitive opts false)]
(let [macro-time-opts (select-keys opts [:input-format
:output-format
:string-ci
:no-slurp])
runtime-opts (dissoc opts :start)
macro-time-parser (apply parser grammar (apply concat macro-time-opts))
pre-processed-grammar (:grammar macro-time-parser)
grammar-producing-code
(->> pre-processed-grammar
(walk/postwalk
(fn [form]
(cond
;; Lists cannot be evaluated verbatim
(seq? form)
(list* 'list form)
Regexp terminals are handled differently in cljs
(= :regexp (:tag form))
`(merge (c/regexp ~(str (:regexp form)))
~(dissoc form :tag :regexp))
:else form))))
start-production
(or (:start opts) (:start-production macro-time-parser))]
`(parser ~grammar-producing-code
:start ~start-production
~@(apply concat runtime-opts))))))
`(def ~name (parser ~grammar ~@(apply concat opts))))))
(defn failure?
"Tests whether a parse result is a failure."
[result]
(or
(instance? gll/failure-type result)
(instance? gll/failure-type (meta result))))
(defn get-failure
"Extracts failure object from failed parse result."
[result]
(cond
(instance? gll/failure-type result)
result
(instance? gll/failure-type (meta result))
(meta result)
:else
nil))
(def ^:private standard-whitespace-parsers
{:standard (parser "whitespace = #'\\s+'")
:comma (parser "whitespace = #'[,\\s]+'")})
#?(:clj
(defn enable-tracing!
"Recompiles instaparse with tracing enabled.
This is called implicitly the first time you invoke a parser with
`:trace true` so usually you will not need to call this directly."
[]
(alter-var-root #'gll/TRACE (constantly true))
(alter-var-root #'gll/PROFILE (constantly true))
(require 'instaparse.gll :reload)))
#?(:clj
(defn disable-tracing!
"Recompiles instaparse with tracing disabled.
Call this to restore regular performance characteristics, eliminating
the small performance hit imposed by tracing."
[]
(alter-var-root #'gll/TRACE (constantly false))
(alter-var-root #'gll/PROFILE (constantly false))
(require 'instaparse.gll :reload)))
(defclone transform t/transform)
(defclone add-line-and-column-info-to-metadata lc/add-line-col-spans)
(defclone span viz/span)
#?(:clj (defclone visualize viz/tree-viz))
| null | https://raw.githubusercontent.com/Engelberg/instaparse/804b1651aac935c884dd3a911662ac0b5e181200/src/instaparse/core.cljc | clojure | if :no-slurp is set to true, string is a grammar spec
let's slurp to see
auto-whitespace is keyword, parser, or nil
For each of the macro-time opts, ensure that they are the data
types we expect, not more complex quoted expressions.
Lists cannot be evaluated verbatim | (ns instaparse.core
(#?(:clj :require :cljs :require-macros)
[instaparse.macros :refer [defclone
set-global-var!]])
(:require [clojure.walk :as walk]
[instaparse.gll :as gll]
[instaparse.cfg :as cfg]
[instaparse.failure :as fail]
[instaparse.print :as print]
[instaparse.reduction :as red]
[instaparse.transform :as t]
[instaparse.abnf :as abnf]
[instaparse.repeat :as repeat]
[instaparse.combinators-source :as c]
[instaparse.line-col :as lc]
[instaparse.viz :as viz]
[instaparse.util :refer [throw-illegal-argument-exception]]))
(def ^:dynamic *default-output-format* :hiccup)
(defn set-default-output-format!
"Changes the default output format. Input should be :hiccup or :enlive"
[type]
{:pre [(#{:hiccup :enlive} type)]}
(set-global-var! *default-output-format* type))
(def ^:dynamic *default-input-format* :ebnf)
(defn set-default-input-format!
"Changes the default input format. Input should be :abnf or :ebnf"
[type]
{:pre [(#{:abnf :ebnf} type)]}
(set-global-var! *default-input-format* type))
(declare failure? standard-whitespace-parsers enable-tracing!)
(defn- unhide-parser [parser unhide]
(case unhide
nil parser
:content
(assoc parser :grammar (c/unhide-all-content (:grammar parser)))
:tags
(assoc parser :grammar (c/unhide-tags (:output-format parser)
(:grammar parser)))
:all
(assoc parser :grammar (c/unhide-all (:output-format parser)
(:grammar parser)))))
(defn parse
"Use parser to parse the text. Returns first parse tree found
that completely parses the text. If no parse tree is possible, returns
a Failure object.
Optional keyword arguments:
:start :keyword (where :keyword is name of starting production rule)
:partial true (parses that don't consume the whole string are okay)
:total true (if parse fails, embed failure node in tree)
:unhide <:tags or :content or :all> (for this parse, disable hiding)
:optimize :memory (when possible, employ strategy to use less memory)
Clj only:
:trace true (print diagnostic trace while parsing)"
[parser text &{:as options}]
{:pre [(contains? #{:tags :content :all nil} (get options :unhide))
(contains? #{:memory nil} (get options :optimize))]}
(let [start-production
(get options :start (:start-production parser)),
partial?
(get options :partial false)
optimize?
(get options :optimize false)
unhide
(get options :unhide)
trace?
(get options :trace false)
#?@(:clj [_ (when (and trace? (not gll/TRACE)) (enable-tracing!))])
parser (unhide-parser parser unhide)]
(->> (cond
(:total options)
(gll/parse-total (:grammar parser) start-production text
partial? (red/node-builders (:output-format parser)))
(and optimize? (not partial?))
(let [result (repeat/try-repeating-parse-strategy parser text start-production)]
(if (failure? result)
(gll/parse (:grammar parser) start-production text partial?)
result))
:else
(gll/parse (:grammar parser) start-production text partial?))
#?(:clj (gll/bind-trace trace?)))))
(defn parses
"Use parser to parse the text. Returns lazy seq of all parse trees
that completely parse the text. If no parse tree is possible, returns
() with a Failure object attached as metadata.
Optional keyword arguments:
:start :keyword (where :keyword is name of starting production rule)
:partial true (parses that don't consume the whole string are okay)
:total true (if parse fails, embed failure node in tree)
:unhide <:tags or :content or :all> (for this parse, disable hiding)
Clj only:
:trace true (print diagnostic trace while parsing)"
[parser text &{:as options}]
{:pre [(contains? #{:tags :content :all nil} (get options :unhide))]}
(let [start-production
(get options :start (:start-production parser)),
partial?
(get options :partial false)
unhide
(get options :unhide)
trace?
(get options :trace false)
#?@(:clj [_ (when (and trace? (not gll/TRACE)) (enable-tracing!))])
parser (unhide-parser parser unhide)]
(->> (cond
(:total options)
(gll/parses-total (:grammar parser) start-production text
partial? (red/node-builders (:output-format parser)))
:else
(gll/parses (:grammar parser) start-production text partial?))
#?(:clj (gll/bind-trace trace?)))))
(defrecord Parser [grammar start-production output-format]
#?@(:clj
[clojure.lang.IFn
(invoke [parser text] (parse parser text))
(invoke [parser text key1 val1] (parse parser text key1 val1))
(invoke [parser text key1 val1 key2 val2] (parse parser text key1 val1 key2 val2))
(invoke [parser text key1 val1 key2 val2 key3 val3] (parse parser text key1 val1 key2 val2 key3 val3))
(applyTo [parser args] (apply parse parser args))]
:cljs
[IFn
(-invoke [parser text] (parse parser text))
(-invoke [parser text key1 val1] (parse parser text key1 val1))
(-invoke [parser text key1 val1 key2 val2] (parse parser text key1 val1 key2 val2))
(-invoke [parser text key1 val1 key2 val2 key3 val3] (parse parser text key1 val1 key2 val2 key3 val3))
(-invoke [parser text a b c d e f g h] (parse parser text a b c d e f g h))
(-invoke [parser text a b c d e f g h i j] (parse parser text a b c d e f g h i j))
(-invoke [parser text a b c d e f g h i j k l] (parse parser text a b c d e f g h i j k l))
(-invoke [parser text a b c d e f g h i j k l m n] (parse parser text a b c d e f g h i j k l m n))
(-invoke [parser text a b c d e f g h i j k l m n o p] (parse parser text a b c d e f g h i j k l m n o p))
(-invoke [parser text a b c d e f g h i j k l m n o p q r] (parse parser text a b c d e f g h i j k l m n o p))
(-invoke [parser text a b c d e f g h i j k l m n o p q r s more] (apply parse parser text a b c d e f g h i j k l m n o p q r s more))]))
#?(:clj
(defmethod clojure.core/print-method Parser [x writer]
(binding [*out* writer]
(println (print/Parser->str x))))
:cljs
(extend-protocol IPrintWithWriter
instaparse.core/Parser
(-pr-writer [parser writer _]
(-write writer (print/Parser->str parser)))))
(defn parser
"Takes a string specification of a context-free grammar,
or a URI for a text file containing such a specification (Clj only),
or a map of parser combinators and returns a parser for that grammar.
Optional keyword arguments:
:input-format :ebnf
or
:input-format :abnf
:output-format :enlive
or
:output-format :hiccup
:start :keyword (where :keyword is name of starting production rule)
:string-ci true (treat all string literals as case insensitive)
:auto-whitespace (:standard or :comma)
or
:auto-whitespace custom-whitespace-parser
Clj only:
:no-slurp true (disables use of slurp to auto-detect whether
input is a URI. When using this option, input
must be a grammar string or grammar map. Useful
for platforms where slurp is slow or not available.)"
[grammar-specification &{:as options}]
{:pre [(contains? #{:abnf :ebnf nil} (get options :input-format))
(contains? #{:enlive :hiccup nil} (get options :output-format))
(let [ws-parser (get options :auto-whitespace)]
(or (nil? ws-parser)
(contains? standard-whitespace-parsers ws-parser)
(and
(map? ws-parser)
(contains? ws-parser :grammar)
(contains? ws-parser :start-production))))]}
(let [input-format (get options :input-format *default-input-format*)
build-parser
(fn [spec output-format]
(binding [cfg/*case-insensitive-literals* (:string-ci options :default)]
(case input-format
:abnf (abnf/build-parser spec output-format)
:ebnf (cfg/build-parser spec output-format))))
output-format (get options :output-format *default-output-format*)
start (get options :start nil)
built-parser
(cond
(string? grammar-specification)
(let [parser
#?(:clj
(if (get options :no-slurp)
(build-parser grammar-specification output-format)
otherwise , grammar - specification might be a URI ,
(try (let [spec (slurp grammar-specification)]
(build-parser spec output-format))
(catch java.io.FileNotFoundException e
(build-parser grammar-specification output-format))))
:cljs
(build-parser grammar-specification output-format))]
(if start (map->Parser (assoc parser :start-production start))
(map->Parser parser)))
(map? grammar-specification)
(let [parser
(cfg/build-parser-from-combinators grammar-specification
output-format
start)]
(map->Parser parser))
(vector? grammar-specification)
(let [start (if start start (grammar-specification 0))
parser
(cfg/build-parser-from-combinators (apply hash-map grammar-specification)
output-format
start)]
(map->Parser parser))
:else
#?(:clj
(let [spec (slurp grammar-specification)
parser (build-parser spec output-format)]
(if start (map->Parser (assoc parser :start-production start))
(map->Parser parser)))
:cljs
(throw-illegal-argument-exception
"Expected string, map, or vector as grammar specification, got "
(pr-str grammar-specification))))]
(let [auto-whitespace (get options :auto-whitespace)
whitespace-parser (if (keyword? auto-whitespace)
(get standard-whitespace-parsers auto-whitespace)
auto-whitespace)]
(if-let [{ws-grammar :grammar ws-start :start-production} whitespace-parser]
(assoc built-parser :grammar
(c/auto-whitespace (:grammar built-parser) (:start-production built-parser)
ws-grammar ws-start))
built-parser))))
#?(:clj
(defmacro defparser
"Takes a string specification of a context-free grammar,
or a string URI for a text file containing such a specification,
or a map/vector of parser combinators, and sets a variable to a parser for that grammar.
String specifications are processed at macro-time, not runtime, so this is an
appealing alternative to (def _ (parser \"...\")) for ClojureScript users.
Optional keyword arguments unique to `defparser`:
- :instaparse.abnf/case-insensitive true"
[name grammar & {:as opts}]
{:pre [(or (nil? (:input-format opts))
(keyword? (:input-format opts)))
(or (nil? (:output-format opts))
(keyword? (:output-format opts)))
(contains? #{true false nil} (:string-ci opts))
(contains? #{true false nil} (:no-slurp opts))]}
(if (string? grammar)
`(def ~name
(map->Parser
~(binding [abnf/*case-insensitive* (:instaparse.abnf/case-insensitive opts false)]
(let [macro-time-opts (select-keys opts [:input-format
:output-format
:string-ci
:no-slurp])
runtime-opts (dissoc opts :start)
macro-time-parser (apply parser grammar (apply concat macro-time-opts))
pre-processed-grammar (:grammar macro-time-parser)
grammar-producing-code
(->> pre-processed-grammar
(walk/postwalk
(fn [form]
(cond
(seq? form)
(list* 'list form)
Regexp terminals are handled differently in cljs
(= :regexp (:tag form))
`(merge (c/regexp ~(str (:regexp form)))
~(dissoc form :tag :regexp))
:else form))))
start-production
(or (:start opts) (:start-production macro-time-parser))]
`(parser ~grammar-producing-code
:start ~start-production
~@(apply concat runtime-opts))))))
`(def ~name (parser ~grammar ~@(apply concat opts))))))
(defn failure?
"Tests whether a parse result is a failure."
[result]
(or
(instance? gll/failure-type result)
(instance? gll/failure-type (meta result))))
(defn get-failure
"Extracts failure object from failed parse result."
[result]
(cond
(instance? gll/failure-type result)
result
(instance? gll/failure-type (meta result))
(meta result)
:else
nil))
(def ^:private standard-whitespace-parsers
{:standard (parser "whitespace = #'\\s+'")
:comma (parser "whitespace = #'[,\\s]+'")})
#?(:clj
(defn enable-tracing!
"Recompiles instaparse with tracing enabled.
This is called implicitly the first time you invoke a parser with
`:trace true` so usually you will not need to call this directly."
[]
(alter-var-root #'gll/TRACE (constantly true))
(alter-var-root #'gll/PROFILE (constantly true))
(require 'instaparse.gll :reload)))
#?(:clj
(defn disable-tracing!
"Recompiles instaparse with tracing disabled.
Call this to restore regular performance characteristics, eliminating
the small performance hit imposed by tracing."
[]
(alter-var-root #'gll/TRACE (constantly false))
(alter-var-root #'gll/PROFILE (constantly false))
(require 'instaparse.gll :reload)))
(defclone transform t/transform)
(defclone add-line-and-column-info-to-metadata lc/add-line-col-spans)
(defclone span viz/span)
#?(:clj (defclone visualize viz/tree-viz))
|
2180340627d7a562ae0c2f6f35d5b3d9b2f4836d1712a63f2563d2af3b884bb4 | McParen/croatoan | inch.lisp | (in-package :de.anvi.croatoan)
(defun extract-char (window &key y x position)
"Extract and return the single-byte complex char from the window.
If the position coordinates y (row) and x (column) are given, move the
cursor to the position first and then add the character.
The position can also be passed in form of a two-element list."
(when (and y x) (move window y x))
(when position (apply #'move window position))
(let* ((winptr (winptr window))
(chtype (ncurses:winch winptr)))
(chtype2xchar chtype)))
| null | https://raw.githubusercontent.com/McParen/croatoan/413e8855b78a2e408f90efc38e8485f880691684/src/inch.lisp | lisp | (in-package :de.anvi.croatoan)
(defun extract-char (window &key y x position)
"Extract and return the single-byte complex char from the window.
If the position coordinates y (row) and x (column) are given, move the
cursor to the position first and then add the character.
The position can also be passed in form of a two-element list."
(when (and y x) (move window y x))
(when position (apply #'move window position))
(let* ((winptr (winptr window))
(chtype (ncurses:winch winptr)))
(chtype2xchar chtype)))
| |
660fd1ed1cd51a2d7d2dd8b200af1e4d2484974d3dc07acf314da935ecb1041c | xtdb/xtdb-in-a-box | project.clj | (defproject xtdb-in-a-box "0.0.2"
:description "XTDB in a Box"
:dependencies [[org.clojure/clojure "1.10.1"]
;; required:
[com.xtdb/xtdb-core "1.19.0"]
[com.xtdb/xtdb-rocksdb "1.19.0"]
;; logging:
[org.clojure/tools.logging "1.1.0"]
[ch.qos.logback/logback-classic "1.2.3"]
[ch.qos.logback/logback-core "1.2.3"]
[org.slf4j/slf4j-api "1.7.30"]
;; optional:
[com.xtdb/xtdb-lucene "1.19.0"]
[com.xtdb/xtdb-http-server "1.19.0"]]
:repl-options {:init-ns xtdb-in-a-box.db}
XTDB SHA1 workaround for JDK 17 on MacOS :
:plugins [[lein-with-env-vars "0.2.0"]]
;; logging:
:jvm-opts ["-Dclojure.tools.logging.factory=clojure.tools.logging.impl/slf4j-factory"
the following option is required for JDK 16 and 17 :
"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED"]
the following implements the XTDB SHA1 workaround for JDK 17 on MacOS :
:hooks [leiningen.with-env-vars/auto-inject]
:env-vars {:XTDB_ENABLE_BYTEUTILS_SHA1 "true"})
| null | https://raw.githubusercontent.com/xtdb/xtdb-in-a-box/aca1b20a069245b5687654debc43c01df400fb9e/clj/project.clj | clojure | required:
logging:
optional:
logging: | (defproject xtdb-in-a-box "0.0.2"
:description "XTDB in a Box"
:dependencies [[org.clojure/clojure "1.10.1"]
[com.xtdb/xtdb-core "1.19.0"]
[com.xtdb/xtdb-rocksdb "1.19.0"]
[org.clojure/tools.logging "1.1.0"]
[ch.qos.logback/logback-classic "1.2.3"]
[ch.qos.logback/logback-core "1.2.3"]
[org.slf4j/slf4j-api "1.7.30"]
[com.xtdb/xtdb-lucene "1.19.0"]
[com.xtdb/xtdb-http-server "1.19.0"]]
:repl-options {:init-ns xtdb-in-a-box.db}
XTDB SHA1 workaround for JDK 17 on MacOS :
:plugins [[lein-with-env-vars "0.2.0"]]
:jvm-opts ["-Dclojure.tools.logging.factory=clojure.tools.logging.impl/slf4j-factory"
the following option is required for JDK 16 and 17 :
"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED"]
the following implements the XTDB SHA1 workaround for JDK 17 on MacOS :
:hooks [leiningen.with-env-vars/auto-inject]
:env-vars {:XTDB_ENABLE_BYTEUTILS_SHA1 "true"})
|
ef09fba75417a13fe90d2ca29be0a22b18880269cdb790c5b655200efbb5e79b | wavewave/hxournal | Type.hs | -----------------------------------------------------------------------------
-- |
-- Module : Application.HXournal.Type
Copyright : ( c ) 2011 , 2012
--
-- License : BSD3
Maintainer : < >
-- Stability : experimental
Portability : GHC
--
-----------------------------------------------------------------------------
module Application.HXournal.Type
( module Application.HXournal.Type.Event
, module Application.HXournal.Type.Enum
, module Application.HXournal.Type.Canvas
, module Application.HXournal.Type.XournalState
, module Application.HXournal.Type.Coroutine
) where
import Application.HXournal.Type.Event
import Application.HXournal.Type.Enum
import Application.HXournal.Type.Canvas
import Application.HXournal.Type.XournalState
import Application.HXournal.Type.Coroutine
| null | https://raw.githubusercontent.com/wavewave/hxournal/b8eb538a2d1a474cc74bc0b5f5c7f251dafe75b9/lib/Application/HXournal/Type.hs | haskell | ---------------------------------------------------------------------------
|
Module : Application.HXournal.Type
License : BSD3
Stability : experimental
--------------------------------------------------------------------------- | Copyright : ( c ) 2011 , 2012
Maintainer : < >
Portability : GHC
module Application.HXournal.Type
( module Application.HXournal.Type.Event
, module Application.HXournal.Type.Enum
, module Application.HXournal.Type.Canvas
, module Application.HXournal.Type.XournalState
, module Application.HXournal.Type.Coroutine
) where
import Application.HXournal.Type.Event
import Application.HXournal.Type.Enum
import Application.HXournal.Type.Canvas
import Application.HXournal.Type.XournalState
import Application.HXournal.Type.Coroutine
|
b81998285e129db0f2dc32ff77346e67bf4a7cecb839af4114c93ef26359db73 | artyom-poptsov/guile-ics | content-line-context.scm | ;;; content-line-context.scm -- Context for the content line reader.
Copyright ( C ) 2022 < >
;;
;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;; (at your option) any later version.
;;
;; The program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;;
You should have received a copy of the GNU General Public License
;; along with the program. If not, see </>.
;;; Commentary:
;; This module contains the iCalendar content line parser context, as well as
;; the required guards, actions and other procedures.
;;; Code:
(define-module (ics fsm content-line-context)
#:use-module (oop goops)
#:use-module (ice-9 textual-ports)
#:use-module (ics fsm context)
#:export (<content-line>
content-line?
content-line-name
content-line-parameters
content-line-parameters-set!
content-line-parameter
content-line-parameter-set!
content-line-value
content-line-value-set!
Content line types .
content-line-name=?
content-line-value=?
content-line-vcalendar-begin?
content-line-vcalendar-end?
content-line-component-begin?
content-line-component-end?
<content-line-context>
content-line-context?
content-line-context-eof?
content-line-context-buffer
content-line-context-buffer-set!
content-line-context-result
content-line-context-result-set!
FSM procedures .
content-line:valid-name-character?
content-line:safe-char?
content-line:qsafe-char?
content-line:value-char?
content-line:control?
content-line:store-escaped
content-line:create
content-line:store-name
content-line:store-value
content-line:store-value/unget-char
content-line:store-param-name
content-line:store-param-value
content-line:store-param-value/list
content-line:error-invalid-name
content-line:error-param-eof
content-line:error-invalid-content-line
;; Misc procedures.
context-buffer->string))
;;; Constants.
;; See <#section-3.6>.
(define %ics-token-begin "BEGIN")
(define %ics-token-end "END")
;; See <#section-3.4>.
(define %ics-token-vcalendar "VCALENDAR")
(define %content-line-error-key 'content-line-error)
;; Classes.
(define-class <content-line> ()
;; The name of the content line.
;;
;; <string> | #f
(name
#:init-value #f
#:init-keyword #:name
#:getter content-line-name)
;; The associative list of parameters.
;;
;; <alist>
(parameters
#:init-value '()
#:init-keyword #:parameters
#:getter content-line-parameters
#:setter content-line-parameters-set!)
;; The value of the content line.
;;
;; <string> | #f
(value
#:init-value #f
#:init-keyword #:value
#:getter content-line-value
#:setter content-line-value-set!))
(define (content-line? x)
"Check if X is a <content-line> instance."
(is-a? x <content-line>))
(define-class <content-line-context> (<char-context>)
;; The buffer to store the string that is being read.
;;
;; <string> | #f
(string-buffer
#:init-value #f
#:getter content-line-context-buffer
#:setter content-line-context-buffer-set!)
;; The result of the parser's work. When no data is read the slot contains #f.
;;
;; <content-line> | #f
(result
#:init-value #f
#:init-keyword #:content-line
#:getter content-line-context-result
#:setter content-line-context-result-set!))
(define (content-line-context? x)
"Check if X is a <content-line-context> instance."
(is-a? x <content-line-context>))
(define-method (content-line-context-eof? (context <content-line-context>))
"Check if a CONTEXT contains no result (that is, the iCalendar stream ended with
EOF.)"
(equal? (content-line-context-result context) #f))
(define-method (content-line-parameter-set! (content-line <content-line>)
(name <symbol>)
(value <top>))
"Set a CONTENT-LINE parameter."
(let ((parameters (content-line-parameters content-line)))
(content-line-parameters-set! content-line (acons name value parameters))))
(define-method (content-line-parameter content-line name)
"Return the value for a CONTENT-LINE parameter with the specified NAME."
(assoc-ref (content-line-parameters content-line) name))
;; Predicates (guards).
(define (content-line:control? ctx ch)
"All the controls except HTAB."
(let ((codepoint (char->integer ch)))
(or (and (>= codepoint #x00)
(<= codepoint #x08))
(and (>= codepoint #xA)
(<= codepoint #x1F))
(= codepoint #x7F))))
(define (content-line:safe-char? ctx ch)
(and (not (content-line:control? ctx ch))
(not (guard:double-quote? ctx ch))
(not (guard:semicolon? ctx ch))
(not (guard:colon? ctx ch))
(not (guard:comma? ctx ch))))
(define (content-line:qsafe-char? ctx ch)
(and (not (content-line:control? ctx ch))
(not (guard:double-quote? ctx ch))))
(define (content-line:value-char? ctx ch)
"Check if a CH is a valid character for a value."
(let ((codepoint (char->integer ch)))
(>= codepoint #x20)))
(define (content-line:valid-name-character? ctx ch)
"Check if a character CH is a valid content line name."
(or (guard:hyphen-minus? ctx ch)
(char-set-contains? char-set:letter+digit ch)))
;;; Actions.
(define (context-buffer->string ctx)
(list->string (reverse (context-buffer ctx))))
(define (content-line:store-escaped ctx ch)
(case ch
((#\n #\N)
(action:store ctx #\newline))
(else
(action:store ctx ch))))
(define (content-line:create ctx ch)
(content-line-context-result-set! ctx
(make <content-line>
#:name (context-buffer->string ctx)))
(context-buffer-clear! ctx)
ctx)
(define (content-line:store-value ctx ch)
(let* ((content-line (content-line-context-result ctx))
(current-value (content-line-value content-line))
(new-value (context-buffer->string ctx)))
(if current-value
(if (list? current-value)
(content-line-value-set! content-line
(append current-value
(list new-value)))
(content-line-value-set! content-line
(append (list current-value)
(list new-value))))
(content-line-value-set! content-line new-value))
(context-buffer-clear! ctx)
ctx))
(define (content-line:store-param-name ctx ch)
(content-line-context-buffer-set! ctx (context-buffer->string ctx))
(context-buffer-clear! ctx)
ctx)
(define (content-line:store-param-value ctx ch)
(let* ((content-line (content-line-context-result ctx))
(param-name (string->symbol (content-line-context-buffer ctx)))
(param-value (context-buffer->string ctx))
(param-current (content-line-parameter content-line param-name)))
(when param-current
(error "Duplicated parameter" param-name param-value))
(content-line-parameter-set! content-line param-name param-value)
(context-buffer-clear! ctx)
ctx))
(define (content-line:store-param-value/list ctx ch)
"Append a value to the list of parameter values for the parameter that is being
read."
(let* ((content-line (content-line-context-result ctx))
(param-name (string->symbol (content-line-context-buffer ctx)))
(param-value (context-buffer->string ctx))
(param-current (content-line-parameter content-line param-name)))
(if param-current
(content-line-parameter-set! content-line
param-name
(append param-current (list param-value)))
(content-line-parameter-set! content-line
param-name
(list param-value)))
(context-buffer-clear! ctx)
ctx))
(define (content-line:store-value/unget-char ctx ch)
"Return a character CH to the iCalendar stream port from the context CTX. Return
the context."
(content-line:store-value ctx ch)
(unget-char (char-context-port ctx) ch)
ctx)
;;; Errors.
(define (content-line:error-invalid-name ctx ch)
(let ((msg "Invalid name"))
(log-error "~a:~a:~a: ~a"
(char-context-port ctx)
(char-context-row ctx)
(char-context-col ctx)
msg)
(throw %content-line-error-key msg ctx ch)))
(define (content-line:error-param-eof ctx ch)
(let ((msg "Unexpected EOF during parameter read"))
(log-error "~a:~a:~a: ~a"
(char-context-port ctx)
(char-context-row ctx)
(char-context-col ctx)
msg)
(throw %content-line-error-key msg ctx ch)))
(define (content-line:error-invalid-content-line ctx ch)
(let ((msg "Invalid content line"))
(log-error "~a:~a:~a: ~a"
(char-context-port ctx)
(char-context-row ctx)
(char-context-col ctx)
msg)
(throw %content-line-error-key msg ctx ch)))
Content line predicates .
(define-method (content-line-name=? (content-line <content-line>)
(name <string>))
(string=? (content-line-name content-line) name))
(define-method (content-line-value=? (content-line <content-line>)
(value <string>))
(string=? (content-line-value content-line) value))
(define-method (content-line-component-begin? (content-line <content-line>))
(content-line-name=? content-line %ics-token-begin))
(define-method (content-line-component-end? (content-line <content-line>))
(content-line-name=? content-line %ics-token-end))
(define-method (content-line-vcalendar-begin? (content-line <content-line>))
(and (content-line-name=? content-line %ics-token-begin)
(content-line-value=? content-line %ics-token-vcalendar)))
(define-method (content-line-vcalendar-end? (content-line <content-line>))
(and (content-line-name=? content-line %ics-token-end)
(content-line-value=? content-line %ics-token-vcalendar)))
;;; content-line-context.scm ends here.
| null | https://raw.githubusercontent.com/artyom-poptsov/guile-ics/6e18975f9abd69d0cdb46502bf6b85d3441de62d/modules/ics/fsm/content-line-context.scm | scheme | content-line-context.scm -- Context for the content line reader.
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
The program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with the program. If not, see </>.
Commentary:
This module contains the iCalendar content line parser context, as well as
the required guards, actions and other procedures.
Code:
Misc procedures.
Constants.
See <#section-3.6>.
See <#section-3.4>.
Classes.
The name of the content line.
<string> | #f
The associative list of parameters.
<alist>
The value of the content line.
<string> | #f
The buffer to store the string that is being read.
<string> | #f
The result of the parser's work. When no data is read the slot contains #f.
<content-line> | #f
Predicates (guards).
Actions.
Errors.
content-line-context.scm ends here. |
Copyright ( C ) 2022 < >
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(define-module (ics fsm content-line-context)
#:use-module (oop goops)
#:use-module (ice-9 textual-ports)
#:use-module (ics fsm context)
#:export (<content-line>
content-line?
content-line-name
content-line-parameters
content-line-parameters-set!
content-line-parameter
content-line-parameter-set!
content-line-value
content-line-value-set!
Content line types .
content-line-name=?
content-line-value=?
content-line-vcalendar-begin?
content-line-vcalendar-end?
content-line-component-begin?
content-line-component-end?
<content-line-context>
content-line-context?
content-line-context-eof?
content-line-context-buffer
content-line-context-buffer-set!
content-line-context-result
content-line-context-result-set!
FSM procedures .
content-line:valid-name-character?
content-line:safe-char?
content-line:qsafe-char?
content-line:value-char?
content-line:control?
content-line:store-escaped
content-line:create
content-line:store-name
content-line:store-value
content-line:store-value/unget-char
content-line:store-param-name
content-line:store-param-value
content-line:store-param-value/list
content-line:error-invalid-name
content-line:error-param-eof
content-line:error-invalid-content-line
context-buffer->string))
(define %ics-token-begin "BEGIN")
(define %ics-token-end "END")
(define %ics-token-vcalendar "VCALENDAR")
(define %content-line-error-key 'content-line-error)
(define-class <content-line> ()
(name
#:init-value #f
#:init-keyword #:name
#:getter content-line-name)
(parameters
#:init-value '()
#:init-keyword #:parameters
#:getter content-line-parameters
#:setter content-line-parameters-set!)
(value
#:init-value #f
#:init-keyword #:value
#:getter content-line-value
#:setter content-line-value-set!))
(define (content-line? x)
"Check if X is a <content-line> instance."
(is-a? x <content-line>))
(define-class <content-line-context> (<char-context>)
(string-buffer
#:init-value #f
#:getter content-line-context-buffer
#:setter content-line-context-buffer-set!)
(result
#:init-value #f
#:init-keyword #:content-line
#:getter content-line-context-result
#:setter content-line-context-result-set!))
(define (content-line-context? x)
"Check if X is a <content-line-context> instance."
(is-a? x <content-line-context>))
(define-method (content-line-context-eof? (context <content-line-context>))
"Check if a CONTEXT contains no result (that is, the iCalendar stream ended with
EOF.)"
(equal? (content-line-context-result context) #f))
(define-method (content-line-parameter-set! (content-line <content-line>)
(name <symbol>)
(value <top>))
"Set a CONTENT-LINE parameter."
(let ((parameters (content-line-parameters content-line)))
(content-line-parameters-set! content-line (acons name value parameters))))
(define-method (content-line-parameter content-line name)
"Return the value for a CONTENT-LINE parameter with the specified NAME."
(assoc-ref (content-line-parameters content-line) name))
(define (content-line:control? ctx ch)
"All the controls except HTAB."
(let ((codepoint (char->integer ch)))
(or (and (>= codepoint #x00)
(<= codepoint #x08))
(and (>= codepoint #xA)
(<= codepoint #x1F))
(= codepoint #x7F))))
(define (content-line:safe-char? ctx ch)
(and (not (content-line:control? ctx ch))
(not (guard:double-quote? ctx ch))
(not (guard:semicolon? ctx ch))
(not (guard:colon? ctx ch))
(not (guard:comma? ctx ch))))
(define (content-line:qsafe-char? ctx ch)
(and (not (content-line:control? ctx ch))
(not (guard:double-quote? ctx ch))))
(define (content-line:value-char? ctx ch)
"Check if a CH is a valid character for a value."
(let ((codepoint (char->integer ch)))
(>= codepoint #x20)))
(define (content-line:valid-name-character? ctx ch)
"Check if a character CH is a valid content line name."
(or (guard:hyphen-minus? ctx ch)
(char-set-contains? char-set:letter+digit ch)))
(define (context-buffer->string ctx)
(list->string (reverse (context-buffer ctx))))
(define (content-line:store-escaped ctx ch)
(case ch
((#\n #\N)
(action:store ctx #\newline))
(else
(action:store ctx ch))))
(define (content-line:create ctx ch)
(content-line-context-result-set! ctx
(make <content-line>
#:name (context-buffer->string ctx)))
(context-buffer-clear! ctx)
ctx)
(define (content-line:store-value ctx ch)
(let* ((content-line (content-line-context-result ctx))
(current-value (content-line-value content-line))
(new-value (context-buffer->string ctx)))
(if current-value
(if (list? current-value)
(content-line-value-set! content-line
(append current-value
(list new-value)))
(content-line-value-set! content-line
(append (list current-value)
(list new-value))))
(content-line-value-set! content-line new-value))
(context-buffer-clear! ctx)
ctx))
(define (content-line:store-param-name ctx ch)
(content-line-context-buffer-set! ctx (context-buffer->string ctx))
(context-buffer-clear! ctx)
ctx)
(define (content-line:store-param-value ctx ch)
(let* ((content-line (content-line-context-result ctx))
(param-name (string->symbol (content-line-context-buffer ctx)))
(param-value (context-buffer->string ctx))
(param-current (content-line-parameter content-line param-name)))
(when param-current
(error "Duplicated parameter" param-name param-value))
(content-line-parameter-set! content-line param-name param-value)
(context-buffer-clear! ctx)
ctx))
(define (content-line:store-param-value/list ctx ch)
"Append a value to the list of parameter values for the parameter that is being
read."
(let* ((content-line (content-line-context-result ctx))
(param-name (string->symbol (content-line-context-buffer ctx)))
(param-value (context-buffer->string ctx))
(param-current (content-line-parameter content-line param-name)))
(if param-current
(content-line-parameter-set! content-line
param-name
(append param-current (list param-value)))
(content-line-parameter-set! content-line
param-name
(list param-value)))
(context-buffer-clear! ctx)
ctx))
(define (content-line:store-value/unget-char ctx ch)
"Return a character CH to the iCalendar stream port from the context CTX. Return
the context."
(content-line:store-value ctx ch)
(unget-char (char-context-port ctx) ch)
ctx)
(define (content-line:error-invalid-name ctx ch)
(let ((msg "Invalid name"))
(log-error "~a:~a:~a: ~a"
(char-context-port ctx)
(char-context-row ctx)
(char-context-col ctx)
msg)
(throw %content-line-error-key msg ctx ch)))
(define (content-line:error-param-eof ctx ch)
(let ((msg "Unexpected EOF during parameter read"))
(log-error "~a:~a:~a: ~a"
(char-context-port ctx)
(char-context-row ctx)
(char-context-col ctx)
msg)
(throw %content-line-error-key msg ctx ch)))
(define (content-line:error-invalid-content-line ctx ch)
(let ((msg "Invalid content line"))
(log-error "~a:~a:~a: ~a"
(char-context-port ctx)
(char-context-row ctx)
(char-context-col ctx)
msg)
(throw %content-line-error-key msg ctx ch)))
Content line predicates .
(define-method (content-line-name=? (content-line <content-line>)
(name <string>))
(string=? (content-line-name content-line) name))
(define-method (content-line-value=? (content-line <content-line>)
(value <string>))
(string=? (content-line-value content-line) value))
(define-method (content-line-component-begin? (content-line <content-line>))
(content-line-name=? content-line %ics-token-begin))
(define-method (content-line-component-end? (content-line <content-line>))
(content-line-name=? content-line %ics-token-end))
(define-method (content-line-vcalendar-begin? (content-line <content-line>))
(and (content-line-name=? content-line %ics-token-begin)
(content-line-value=? content-line %ics-token-vcalendar)))
(define-method (content-line-vcalendar-end? (content-line <content-line>))
(and (content-line-name=? content-line %ics-token-end)
(content-line-value=? content-line %ics-token-vcalendar)))
|
caab2d1976b9a7d1ed36d4bd57fe92c9754194adc375a094f6f24763722f2f4b | lispbuilder/lispbuilder | package.lisp | ;;; -*- lisp -*-
(in-package #:cl-user)
(defpackage #:lispbuilder-cal3d
(:use #:cl #:cffi)
(:nicknames #:cal3d)
(:documentation "The main package of `lispbuilder-cal3d'."))
| null | https://raw.githubusercontent.com/lispbuilder/lispbuilder/589b3c6d552bbec4b520f61388117d6c7b3de5ab/lispbuilder-cal3d/cal3d/package.lisp | lisp | -*- lisp -*- |
(in-package #:cl-user)
(defpackage #:lispbuilder-cal3d
(:use #:cl #:cffi)
(:nicknames #:cal3d)
(:documentation "The main package of `lispbuilder-cal3d'."))
|
806db3f56aa1e42d398a0b84a2bb747253bda63322aca91f921907954905ee0a | cnuernber/dtype-next | object_arrays_test.clj | (ns tech.v3.datatype.object-arrays-test
(:require [clojure.test :refer :all]
[tech.v3.datatype :as dtype]
[tech.v3.datatype.list]
[tech.v3.datatype.protocols :as dtype-proto]
[tech.v3.datatype.unary-op :as unary-op]
[tech.v3.datatype.binary-op :as binary-op])
(:import [java.util List UUID]))
(deftest boolean-array-test
(let [test-ary (dtype/make-container :boolean 5)]
(is (= [false false false false false]
(dtype/->vector test-ary)))
(is (= :boolean
(dtype/get-datatype test-ary)))
(dtype/set-value! test-ary 2 true)
(is (= [false false true false false]
(dtype/->vector test-ary)))
(is (= [false false true false false]
(-> (dtype/copy! test-ary (dtype/make-container :boolean 5))
(dtype/->vector))))))
(deftest string-array-test
(let [test-ary (dtype/make-container :string 5)]
(is (= [nil nil nil nil nil]
(dtype/->vector test-ary)))
(is (= :string (dtype/get-datatype test-ary)))
(dtype/set-value! test-ary 3 "hi")
(is (= [nil nil nil "hi" nil]
(dtype/->vector test-ary)))
(let [sub-buf (dtype-proto/sub-buffer test-ary 2 3)]
(is (= :string (dtype/get-datatype sub-buf)))
(dtype/set-value! sub-buf 0 "bye!")
(is (= [nil nil "bye!" "hi" nil]
(dtype/->vector test-ary)))
(is (= ["bye!" "hi" nil]
(dtype/->vector sub-buf)))))
(let [test-ary (dtype/make-container :string ["a" "b" "c"])
test-rdr (->> test-ary
(unary-op/reader
#(.concat ^String % "_str")
:string))
test-iter (->> test-ary
(unary-op/iterable
#(.concat % "_str")
:string))]
(is (= :string (dtype/get-datatype test-rdr)))
(is (= ["a_str" "b_str" "c_str"]
(vec test-rdr)))
(is (= :string (dtype/get-datatype test-iter)))
(is (= ["a_str" "b_str" "c_str"]
(vec test-iter))))
(let [test-ary (dtype/make-container :string ["a" "b" "c"])
test-rdr (binary-op/reader
#(str %1 "_" %2)
:string
test-ary test-ary)
test-iterable (binary-op/iterable
#(str %1 "_" %2)
:string
test-ary test-ary)]
(is (= :string (dtype/get-datatype test-rdr)))
(is (= :string (dtype/get-datatype test-iterable)))
(is (= ["a_a" "b_b" "c_c"]
(vec test-rdr)))
(is (= ["a_a" "b_b" "c_c"]
(vec test-iterable)))))
(deftest new-string-container
(is (= ["a_str" "b_str" "c_str"]
(->> (dtype/make-container :string ["a" "b" "c"])
(unary-op/reader
#(.concat % "_str")
:string)
(dtype/make-container :java-array :string)
vec))))
(deftest object-array-test
(let [test-ary (dtype/make-container Object 5)]
(is (= [nil nil nil nil nil]
(dtype/->vector test-ary)))
(is (= :object (dtype/get-datatype test-ary)))
(dtype/set-value! test-ary 3 "hi")
(is (= [nil nil nil "hi" nil]
(dtype/->vector test-ary)))
(let [sub-buf (dtype-proto/sub-buffer test-ary 2 3)]
(is (= :object (dtype/get-datatype sub-buf)))
(dtype/set-value! sub-buf 0 "bye!")
(is (= [nil nil "bye!" "hi" nil]
(dtype/->vector test-ary)))
(is (= ["bye!" "hi" nil]
(dtype/->vector sub-buf)))))
(let [test-ary (into-array Object (repeat 10 (set (range 10))))]
(is (= 10 (dtype/ecount test-ary)))))
(deftest generic-list-test
(let [^List data (dtype/make-container :list :keyword [:a :b :c :d :e])]
(.addAll data [:f :g :h :i])
(is (= [:a :b :c :d :e :f :g :h :i]
(vec (.toArray data))))))
(deftest decimal-test
(let [test-decimal (bigdec "12.34")
decimal-ary (dtype/make-container :java-array :decimal (repeat 5 test-decimal))
decimal-list (dtype/make-container :list :decimal (repeat 5 test-decimal))]
(is (= :decimal (dtype/get-datatype decimal-ary)))
(is (thrown? Throwable (dtype/set-value! decimal-ary 2 "hey")))
(is (= (vec (repeat 5 test-decimal))
(vec decimal-ary)))
(is (= :decimal (dtype/get-datatype decimal-list)))
(is (thrown? Throwable (dtype/set-value! decimal-list 2 "hey")))
(is (= (vec (repeat 5 test-decimal))
(vec decimal-list)))
(is (instance? List decimal-list))))
(deftest uuid-test
(let [test-uuid (UUID/randomUUID)
uuid-ary (dtype/make-container :java-array :uuid (repeat 5 test-uuid))
uuid-list (dtype/make-container :list :uuid (repeat 5 test-uuid))]
(is (= :uuid (dtype/get-datatype uuid-ary)))
(is (thrown? Throwable (dtype/set-value! uuid-ary 2 "hey")))
(is (= (vec (repeat 5 test-uuid))
(vec uuid-ary)))
(is (= :uuid (dtype/get-datatype uuid-list)))
(is (thrown? Throwable (dtype/set-value! uuid-list 2 "hey")))
(is (= (vec (repeat 5 test-uuid))
(vec uuid-list)))
(is (instance? List uuid-list))))
| null | https://raw.githubusercontent.com/cnuernber/dtype-next/3fe3b62669bf83f6273235ae91988e4fd68d3026/test/tech/v3/datatype/object_arrays_test.clj | clojure | (ns tech.v3.datatype.object-arrays-test
(:require [clojure.test :refer :all]
[tech.v3.datatype :as dtype]
[tech.v3.datatype.list]
[tech.v3.datatype.protocols :as dtype-proto]
[tech.v3.datatype.unary-op :as unary-op]
[tech.v3.datatype.binary-op :as binary-op])
(:import [java.util List UUID]))
(deftest boolean-array-test
(let [test-ary (dtype/make-container :boolean 5)]
(is (= [false false false false false]
(dtype/->vector test-ary)))
(is (= :boolean
(dtype/get-datatype test-ary)))
(dtype/set-value! test-ary 2 true)
(is (= [false false true false false]
(dtype/->vector test-ary)))
(is (= [false false true false false]
(-> (dtype/copy! test-ary (dtype/make-container :boolean 5))
(dtype/->vector))))))
(deftest string-array-test
(let [test-ary (dtype/make-container :string 5)]
(is (= [nil nil nil nil nil]
(dtype/->vector test-ary)))
(is (= :string (dtype/get-datatype test-ary)))
(dtype/set-value! test-ary 3 "hi")
(is (= [nil nil nil "hi" nil]
(dtype/->vector test-ary)))
(let [sub-buf (dtype-proto/sub-buffer test-ary 2 3)]
(is (= :string (dtype/get-datatype sub-buf)))
(dtype/set-value! sub-buf 0 "bye!")
(is (= [nil nil "bye!" "hi" nil]
(dtype/->vector test-ary)))
(is (= ["bye!" "hi" nil]
(dtype/->vector sub-buf)))))
(let [test-ary (dtype/make-container :string ["a" "b" "c"])
test-rdr (->> test-ary
(unary-op/reader
#(.concat ^String % "_str")
:string))
test-iter (->> test-ary
(unary-op/iterable
#(.concat % "_str")
:string))]
(is (= :string (dtype/get-datatype test-rdr)))
(is (= ["a_str" "b_str" "c_str"]
(vec test-rdr)))
(is (= :string (dtype/get-datatype test-iter)))
(is (= ["a_str" "b_str" "c_str"]
(vec test-iter))))
(let [test-ary (dtype/make-container :string ["a" "b" "c"])
test-rdr (binary-op/reader
#(str %1 "_" %2)
:string
test-ary test-ary)
test-iterable (binary-op/iterable
#(str %1 "_" %2)
:string
test-ary test-ary)]
(is (= :string (dtype/get-datatype test-rdr)))
(is (= :string (dtype/get-datatype test-iterable)))
(is (= ["a_a" "b_b" "c_c"]
(vec test-rdr)))
(is (= ["a_a" "b_b" "c_c"]
(vec test-iterable)))))
(deftest new-string-container
(is (= ["a_str" "b_str" "c_str"]
(->> (dtype/make-container :string ["a" "b" "c"])
(unary-op/reader
#(.concat % "_str")
:string)
(dtype/make-container :java-array :string)
vec))))
(deftest object-array-test
(let [test-ary (dtype/make-container Object 5)]
(is (= [nil nil nil nil nil]
(dtype/->vector test-ary)))
(is (= :object (dtype/get-datatype test-ary)))
(dtype/set-value! test-ary 3 "hi")
(is (= [nil nil nil "hi" nil]
(dtype/->vector test-ary)))
(let [sub-buf (dtype-proto/sub-buffer test-ary 2 3)]
(is (= :object (dtype/get-datatype sub-buf)))
(dtype/set-value! sub-buf 0 "bye!")
(is (= [nil nil "bye!" "hi" nil]
(dtype/->vector test-ary)))
(is (= ["bye!" "hi" nil]
(dtype/->vector sub-buf)))))
(let [test-ary (into-array Object (repeat 10 (set (range 10))))]
(is (= 10 (dtype/ecount test-ary)))))
(deftest generic-list-test
(let [^List data (dtype/make-container :list :keyword [:a :b :c :d :e])]
(.addAll data [:f :g :h :i])
(is (= [:a :b :c :d :e :f :g :h :i]
(vec (.toArray data))))))
(deftest decimal-test
(let [test-decimal (bigdec "12.34")
decimal-ary (dtype/make-container :java-array :decimal (repeat 5 test-decimal))
decimal-list (dtype/make-container :list :decimal (repeat 5 test-decimal))]
(is (= :decimal (dtype/get-datatype decimal-ary)))
(is (thrown? Throwable (dtype/set-value! decimal-ary 2 "hey")))
(is (= (vec (repeat 5 test-decimal))
(vec decimal-ary)))
(is (= :decimal (dtype/get-datatype decimal-list)))
(is (thrown? Throwable (dtype/set-value! decimal-list 2 "hey")))
(is (= (vec (repeat 5 test-decimal))
(vec decimal-list)))
(is (instance? List decimal-list))))
(deftest uuid-test
(let [test-uuid (UUID/randomUUID)
uuid-ary (dtype/make-container :java-array :uuid (repeat 5 test-uuid))
uuid-list (dtype/make-container :list :uuid (repeat 5 test-uuid))]
(is (= :uuid (dtype/get-datatype uuid-ary)))
(is (thrown? Throwable (dtype/set-value! uuid-ary 2 "hey")))
(is (= (vec (repeat 5 test-uuid))
(vec uuid-ary)))
(is (= :uuid (dtype/get-datatype uuid-list)))
(is (thrown? Throwable (dtype/set-value! uuid-list 2 "hey")))
(is (= (vec (repeat 5 test-uuid))
(vec uuid-list)))
(is (instance? List uuid-list))))
| |
1cec4c24aabf295bd27a26fc6eb24e9ad69df1eb493ca24bde97e3e280d91a3b | mirage/mirage-skeleton | config.ml | open Mirage
let main =
main "Unikernel.Main" ~packages:[ package "fmt" ] ~extra_deps:[ dep app_info ]
(console @-> job)
let () = register "app-info" [ main $ default_console ]
| null | https://raw.githubusercontent.com/mirage/mirage-skeleton/144d68992a284730c383eb3d39c409a061bc452e/tutorial/app_info/config.ml | ocaml | open Mirage
let main =
main "Unikernel.Main" ~packages:[ package "fmt" ] ~extra_deps:[ dep app_info ]
(console @-> job)
let () = register "app-info" [ main $ default_console ]
| |
9d8767cb914012b2dbc35100193d5006f63ffc965a5ed62868edfafec8f7bc73 | mpdairy/posh-todo | project.clj | (defproject posh-todo "0.1.0-SNAPSHOT"
:description "An example of a Todo using Posh"
:url "/"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/clojurescript "1.7.228"]
[org.clojure/core.match "0.3.0-alpha4"]
[datascript "0.15.0"]
[posh "0.5"]
[reagent "0.6.0-rc"]
[figwheel-sidecar "0.5.0-SNAPSHOT" :scope "test"]]
:plugins [[lein-cljsbuild "1.1.3"]]
:cljsbuild {
:builds [ {:id "posh-todo"
:source-paths ["src/"]
:figwheel false
:compiler {:main "posh-todo.core"
:asset-path "js"
:output-to "resources/public/js/main.js"
:output-dir "resources/public/js"} } ]
})
| null | https://raw.githubusercontent.com/mpdairy/posh-todo/e6c6596be0b022ea3301e971f9c5eaaa1ce87f0b/project.clj | clojure | (defproject posh-todo "0.1.0-SNAPSHOT"
:description "An example of a Todo using Posh"
:url "/"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/clojurescript "1.7.228"]
[org.clojure/core.match "0.3.0-alpha4"]
[datascript "0.15.0"]
[posh "0.5"]
[reagent "0.6.0-rc"]
[figwheel-sidecar "0.5.0-SNAPSHOT" :scope "test"]]
:plugins [[lein-cljsbuild "1.1.3"]]
:cljsbuild {
:builds [ {:id "posh-todo"
:source-paths ["src/"]
:figwheel false
:compiler {:main "posh-todo.core"
:asset-path "js"
:output-to "resources/public/js/main.js"
:output-dir "resources/public/js"} } ]
})
| |
94fdfc9b88b344ff201632b015c553fef06bf21e12d9ca2b9f000cdb8943307d | TrustInSoft/tis-interpreter | printSlice.mli | Modified by TrustInSoft
(**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
val print_fct_from_pdg :
Format.formatter ->
?ff:SlicingInternals.fct_slice -> PdgTypes.Pdg.t -> unit
val print_marked_ff : Format.formatter -> SlicingInternals.fct_slice -> unit
val print_original_glob : Format.formatter -> Cil_types.global -> unit
val print_fct_stmts :
Format.formatter ->
(SlicingTypes.sl_project * Cil_types.kernel_function) ->
unit
val build_dot_project : string -> string -> SlicingInternals.project -> unit
| null | https://raw.githubusercontent.com/TrustInSoft/tis-interpreter/33132ce4a825494ea48bf2dd6fd03a56b62cc5c3/src/plugins/slicing/printSlice.mli | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************ | Modified by TrustInSoft
This file is part of Frama - C.
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
val print_fct_from_pdg :
Format.formatter ->
?ff:SlicingInternals.fct_slice -> PdgTypes.Pdg.t -> unit
val print_marked_ff : Format.formatter -> SlicingInternals.fct_slice -> unit
val print_original_glob : Format.formatter -> Cil_types.global -> unit
val print_fct_stmts :
Format.formatter ->
(SlicingTypes.sl_project * Cil_types.kernel_function) ->
unit
val build_dot_project : string -> string -> SlicingInternals.project -> unit
|
f8591df98af34ed946f87ecc3bbedf77fea1b58c0f6f17ecd1aea14f3ad0bb4d | district0x/cljs-web3-next | db.cljs | (ns cljs-web3-next.db
"Functions on LevelDB.
A fast key-value storage library that provides an ordered mapping from string
keys to string values."
(:require [oops.core :refer [ocall oget oset! oapply+]]))
(defn get-db
"Gets leveldb object from web3-instance.
Parameter:
web3 - web3 instance"
[web3]
(oget web3 "db"))
(defn put-string!
"This method should be called, when we want to store a string in the local
leveldb database.
Parameters:
web3 - web3 instance
args:
db - The database (string) to store to.
key - The name (string) of the store.
value - The string value to store.
Returns true if successful, otherwise false."
[web3 & [db key value cb :as args]]
(oapply+ (get-db web3) "putString" args))
(defn get-string
"This method should be called, when we want to get string from the local
leveldb database.
Parameters:
db - The database (string) name to retrieve from.
key - The name (string) of the store.
Returns the stored value string."
[web3 & [db key :as args]]
(oapply+ (get-db web3) "getString" args))
(defn put-hex!
"This method should be called, when we want to store binary data in HEX form
in the local leveldb database.
Parameters:
db - The database (string) to store to.
key - The name (string) of the store.
value - The HEX string to store.
Returns true if successful, otherwise false."
[web3 & [db key value :as args]]
(oapply+ (get-db web3) "putHex" args))
(defn get-hex
"This method should be called, when we want to get a binary data in HEX form
from the local leveldb database.
Parameters:
db - The database (string) to store to.
key - The name (string) of the store.
Returns the stored HEX value."
[web3 & [db key :as args]]
(oapply+ (get-db web3) "getHex" args))
| null | https://raw.githubusercontent.com/district0x/cljs-web3-next/a473ac9c21d994ecb6abfb4899ef5474d5dfc669/src/cljs_web3_next/db.cljs | clojure | (ns cljs-web3-next.db
"Functions on LevelDB.
A fast key-value storage library that provides an ordered mapping from string
keys to string values."
(:require [oops.core :refer [ocall oget oset! oapply+]]))
(defn get-db
"Gets leveldb object from web3-instance.
Parameter:
web3 - web3 instance"
[web3]
(oget web3 "db"))
(defn put-string!
"This method should be called, when we want to store a string in the local
leveldb database.
Parameters:
web3 - web3 instance
args:
db - The database (string) to store to.
key - The name (string) of the store.
value - The string value to store.
Returns true if successful, otherwise false."
[web3 & [db key value cb :as args]]
(oapply+ (get-db web3) "putString" args))
(defn get-string
"This method should be called, when we want to get string from the local
leveldb database.
Parameters:
db - The database (string) name to retrieve from.
key - The name (string) of the store.
Returns the stored value string."
[web3 & [db key :as args]]
(oapply+ (get-db web3) "getString" args))
(defn put-hex!
"This method should be called, when we want to store binary data in HEX form
in the local leveldb database.
Parameters:
db - The database (string) to store to.
key - The name (string) of the store.
value - The HEX string to store.
Returns true if successful, otherwise false."
[web3 & [db key value :as args]]
(oapply+ (get-db web3) "putHex" args))
(defn get-hex
"This method should be called, when we want to get a binary data in HEX form
from the local leveldb database.
Parameters:
db - The database (string) to store to.
key - The name (string) of the store.
Returns the stored HEX value."
[web3 & [db key :as args]]
(oapply+ (get-db web3) "getHex" args))
| |
1f71439beaf9e5d41af719fcdc0e3ab42f770f6dfb5d5574c35a9f515acb801f | ocaml-multicore/tezos | sapling_services.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2019 - 2020 Nomadic Labs < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
open Alpha_context
let custom_root =
(RPC_path.(open_root / "context" / "sapling")
: RPC_context.t RPC_path.context)
type diff_query = {
offset_commitment : Int64.t option;
offset_nullifier : Int64.t option;
}
module S = struct
module Args = struct
type ('query_type, 'output_type) t = {
name : string;
description : string;
query : 'query_type RPC_query.t;
output : 'output_type Data_encoding.t;
f : context -> Sapling.Id.t -> 'query_type -> 'output_type tzresult Lwt.t;
}
let get_diff_query : diff_query RPC_query.t =
let open RPC_query in
query (fun offset_commitment offset_nullifier ->
{offset_commitment; offset_nullifier})
|+ opt_field
~descr:
"Commitments and ciphertexts are returned from the specified \
offset up to the most recent."
"offset_commitment"
RPC_arg.uint63
(fun {offset_commitment; _} -> offset_commitment)
|+ opt_field
~descr:
"Nullifiers are returned from the specified offset up to the most \
recent."
"offset_nullifier"
RPC_arg.uint63
(fun {offset_nullifier; _} -> offset_nullifier)
|> seal
let encoding =
let open Data_encoding in
merge_objs (obj1 (req "root" Sapling.root_encoding)) Sapling.diff_encoding
let get_diff =
{
name = "get_diff";
description =
"Returns the root and a diff of a state starting from an optional \
offset which is zero by default.";
query = get_diff_query;
output = encoding;
f =
(fun ctxt id {offset_commitment; offset_nullifier} ->
Sapling.get_diff ctxt id ?offset_commitment ?offset_nullifier ());
}
end
let make_service Args.{name; description; query; output; f} =
let path = RPC_path.(custom_root /: Sapling.rpc_arg / name) in
let service = RPC_service.get_service ~description ~query ~output path in
(service, fun ctxt id q () -> f ctxt id q)
let get_diff = make_service Args.get_diff
end
let register () =
let reg ~chunked (service, f) =
Services_registration.register1 ~chunked service f
in
reg ~chunked:false S.get_diff
let mk_call1 (service, _f) ctxt block id q =
RPC_context.make_call1 service ctxt block id q ()
let get_diff ctxt block id ?offset_commitment ?offset_nullifier () =
mk_call1 S.get_diff ctxt block id {offset_commitment; offset_nullifier}
| null | https://raw.githubusercontent.com/ocaml-multicore/tezos/e4fd21a1cb02d194b3162ab42d512b7c985ee8a9/src/proto_012_Psithaca/lib_protocol/sapling_services.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*************************************************************************** | Copyright ( c ) 2019 - 2020 Nomadic Labs < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
open Alpha_context
let custom_root =
(RPC_path.(open_root / "context" / "sapling")
: RPC_context.t RPC_path.context)
type diff_query = {
offset_commitment : Int64.t option;
offset_nullifier : Int64.t option;
}
module S = struct
module Args = struct
type ('query_type, 'output_type) t = {
name : string;
description : string;
query : 'query_type RPC_query.t;
output : 'output_type Data_encoding.t;
f : context -> Sapling.Id.t -> 'query_type -> 'output_type tzresult Lwt.t;
}
let get_diff_query : diff_query RPC_query.t =
let open RPC_query in
query (fun offset_commitment offset_nullifier ->
{offset_commitment; offset_nullifier})
|+ opt_field
~descr:
"Commitments and ciphertexts are returned from the specified \
offset up to the most recent."
"offset_commitment"
RPC_arg.uint63
(fun {offset_commitment; _} -> offset_commitment)
|+ opt_field
~descr:
"Nullifiers are returned from the specified offset up to the most \
recent."
"offset_nullifier"
RPC_arg.uint63
(fun {offset_nullifier; _} -> offset_nullifier)
|> seal
let encoding =
let open Data_encoding in
merge_objs (obj1 (req "root" Sapling.root_encoding)) Sapling.diff_encoding
let get_diff =
{
name = "get_diff";
description =
"Returns the root and a diff of a state starting from an optional \
offset which is zero by default.";
query = get_diff_query;
output = encoding;
f =
(fun ctxt id {offset_commitment; offset_nullifier} ->
Sapling.get_diff ctxt id ?offset_commitment ?offset_nullifier ());
}
end
let make_service Args.{name; description; query; output; f} =
let path = RPC_path.(custom_root /: Sapling.rpc_arg / name) in
let service = RPC_service.get_service ~description ~query ~output path in
(service, fun ctxt id q () -> f ctxt id q)
let get_diff = make_service Args.get_diff
end
let register () =
let reg ~chunked (service, f) =
Services_registration.register1 ~chunked service f
in
reg ~chunked:false S.get_diff
let mk_call1 (service, _f) ctxt block id q =
RPC_context.make_call1 service ctxt block id q ()
let get_diff ctxt block id ?offset_commitment ?offset_nullifier () =
mk_call1 S.get_diff ctxt block id {offset_commitment; offset_nullifier}
|
57909b0163c8a01b38d807b81d145745a33e64cdf227cb20cf657754264906c2 | appleby/Lisp-In-Small-Pieces | symbol-append.scm | ;;;(((((((((((((((((((((((((((((((( L i S P ))))))))))))))))))))))))))))))))
;;; This file is derived from the files that accompany the book:
LISP Implantation Semantique Programmation ( InterEditions , France )
or Lisp In Small Pieces ( Cambridge University Press ) .
By Christian Queinnec < >
;;; The original sources can be downloaded from the author's website at
;;; -systeme.lip6.fr/Christian.Queinnec/WWW/LiSP.html
;;; This file may have been altered from the original in order to work with
;;; modern schemes. The latest copy of these altered sources can be found at
;;; -In-Small-Pieces
;;; If you want to report a bug in this program, open a GitHub Issue at the
;;; repo mentioned above.
;;; Check the README file before using this file.
;;;(((((((((((((((((((((((((((((((( L i S P ))))))))))))))))))))))))))))))))
(define (symbol-append . args)
(string->symbol
(apply string-append
(map (lambda (s)
(cond ((string? s) s)
((symbol? s) (symbol->string s))
((number? s) (number->string s))
(else (error 'symbol-append 'bad-args args)) ) )
args ) ) ) )
| null | https://raw.githubusercontent.com/appleby/Lisp-In-Small-Pieces/af4139232bb7170f32470774a92d647e83254721/common/compat/symbol-append.scm | scheme | (((((((((((((((((((((((((((((((( L i S P ))))))))))))))))))))))))))))))))
This file is derived from the files that accompany the book:
The original sources can be downloaded from the author's website at
-systeme.lip6.fr/Christian.Queinnec/WWW/LiSP.html
This file may have been altered from the original in order to work with
modern schemes. The latest copy of these altered sources can be found at
-In-Small-Pieces
If you want to report a bug in this program, open a GitHub Issue at the
repo mentioned above.
Check the README file before using this file.
(((((((((((((((((((((((((((((((( L i S P )))))))))))))))))))))))))))))))) | LISP Implantation Semantique Programmation ( InterEditions , France )
or Lisp In Small Pieces ( Cambridge University Press ) .
By Christian Queinnec < >
(define (symbol-append . args)
(string->symbol
(apply string-append
(map (lambda (s)
(cond ((string? s) s)
((symbol? s) (symbol->string s))
((number? s) (number->string s))
(else (error 'symbol-append 'bad-args args)) ) )
args ) ) ) )
|
f9b7d028afef9903ae947b240955375c20d6144bff47731e20ab3bcebdff243e | dpiponi/Moodler | mock_modular.hs | do
(x0, y0) <- mouse
let (x, y) = quantise2 quantum (x0, y0)
root <- currentPlane
id10 <- new' "id"
id11 <- new' "id"
id12 <- new' "id"
id13 <- new' "id"
id14 <- new' "id"
id15 <- new' "id"
id16 <- new' "id"
id17 <- new' "id"
id18 <- new' "id"
id19 <- new' "id"
id20 <- new' "id"
id21 <- new' "id"
id22 <- new' "id"
id23 <- new' "id"
id24 <- new' "id"
id25 <- new' "id"
id26 <- new' "id"
id27 <- new' "id"
id28 <- new' "id"
id29 <- new' "id"
id30 <- new' "id"
id31 <- new' "id"
id32 <- new' "id"
id33 <- new' "id"
id34 <- new' "id"
id35 <- new' "id"
id36 <- new' "id"
id37 <- new' "id"
id38 <- new' "id"
id39 <- new' "id"
id40 <- new' "id"
id41 <- new' "id"
id42 <- new' "id"
id43 <- new' "id"
id44 <- new' "id"
id45 <- new' "id"
id46 <- new' "id"
id47 <- new' "id"
id48 <- new' "id"
id49 <- new' "id"
id7 <- new' "id"
id8 <- new' "id"
id9 <- new' "id"
input50 <- new' "input"
input51 <- new' "input"
input52 <- new' "input"
input53 <- new' "input"
input54 <- new' "input"
input55 <- new' "input"
input56 <- new' "input"
input57 <- new' "input"
container58 <- container' "panel_mock.png" (x+(0.0), y+(0.0)) (Inside root)
proxy158 <- container' "panel_mock_back.png" (-12.0,48.0) (Inside container58)
plugin59 <- plugin' (id10 ! "signal") (108.0,-96.0) (Outside proxy158)
setColour plugin59 "#sample"
plugin60 <- plugin' (id11 ! "signal") (156.0,-96.0) (Outside proxy158)
setColour plugin60 "#sample"
plugin61 <- plugin' (id7 ! "signal") (252.0,-96.0) (Outside proxy158)
setColour plugin61 "#sample"
plugin62 <- plugin' (id14 ! "signal") (204.0,-96.0) (Outside proxy158)
setColour plugin62 "#sample"
plugin63 <- plugin' (id35 ! "signal") (-276.0,96.0) (Outside proxy158)
setColour plugin63 "#control"
plugin64 <- plugin' (id19 ! "signal") (-228.0,96.0) (Outside proxy158)
setColour plugin64 "#control"
plugin65 <- plugin' (id33 ! "signal") (-180.0,96.0) (Outside proxy158)
setColour plugin65 "#control"
plugin66 <- plugin' (id18 ! "signal") (12.0,96.0) (Outside proxy158)
setColour plugin66 "#control"
plugin67 <- plugin' (id17 ! "signal") (-36.0,96.0) (Outside proxy158)
setColour plugin67 "#control"
plugin68 <- plugin' (id16 ! "signal") (-84.0,96.0) (Outside proxy158)
setColour plugin68 "#control"
plugin69 <- plugin' (id47 ! "signal") (-132.0,96.0) (Outside proxy158)
setColour plugin69 "#control"
plugin70 <- plugin' (id24 ! "signal") (204.0,96.0) (Outside proxy158)
setColour plugin70 "#control"
plugin71 <- plugin' (id23 ! "signal") (156.0,96.0) (Outside proxy158)
setColour plugin71 "#control"
plugin72 <- plugin' (id21 ! "signal") (108.0,96.0) (Outside proxy158)
setColour plugin72 "#control"
plugin73 <- plugin' (id20 ! "signal") (60.0,96.0) (Outside proxy158)
setColour plugin73 "#control"
plugin74 <- plugin' (id26 ! "signal") (252.0,96.0) (Outside proxy158)
setColour plugin74 "#control"
plugin75 <- plugin' (id15 ! "signal") (288.0,48.0) (Outside proxy158)
setColour plugin75 "#control"
plugout100 <- plugout' (id41 ! "result") (156.0,192.0) (Outside proxy158)
setColour plugout100 "#control"
plugout101 <- plugout' (id40 ! "result") (108.0,192.0) (Outside proxy158)
setColour plugout101 "#control"
plugout76 <- plugout' (id39 ! "result") (60.0,192.0) (Outside proxy158)
setColour plugout76 "#control"
plugout77 <- plugout' (id46 ! "result") (252.0,192.0) (Outside proxy158)
setColour plugout77 "#control"
plugout78 <- plugout' (id27 ! "result") (132.0,-48.0) (Outside proxy158)
setColour plugout78 "#control"
plugout79 <- plugout' (id28 ! "result") (228.0,-48.0) (Outside proxy158)
setColour plugout79 "#control"
plugout80 <- plugout' (id48 ! "result") (-312.0,72.0) (Outside proxy158)
setColour plugout80 "#control"
plugout81 <- plugout' (id49 ! "result") (-312.0,24.0) (Outside proxy158)
setColour plugout81 "#control"
plugout82 <- plugout' (id9 ! "result") (252.0,0.0) (Outside proxy158)
setColour plugout82 "#sample"
plugout83 <- plugout' (id8 ! "result") (204.0,0.0) (Outside proxy158)
setColour plugout83 "#sample"
plugout84 <- plugout' (id13 ! "result") (156.0,0.0) (Outside proxy158)
setColour plugout84 "#sample"
plugout85 <- plugout' (id12 ! "result") (108.0,0.0) (Outside proxy158)
setColour plugout85 "#sample"
plugout86 <- plugout' (id29 ! "result") (-252.0,144.0) (Outside proxy158)
setColour plugout86 "#control"
plugout87 <- plugout' (id38 ! "result") (-156.0,144.0) (Outside proxy158)
setColour plugout87 "#control"
plugout88 <- plugout' (id45 ! "result") (-60.0,144.0) (Outside proxy158)
setColour plugout88 "#control"
plugout89 <- plugout' (id22 ! "result") (36.0,144.0) (Outside proxy158)
setColour plugout89 "#control"
plugout90 <- plugout' (id25 ! "result") (132.0,144.0) (Outside proxy158)
setColour plugout90 "#control"
plugout91 <- plugout' (id31 ! "result") (228.0,144.0) (Outside proxy158)
setColour plugout91 "#control"
plugout92 <- plugout' (id34 ! "result") (-276.0,192.0) (Outside proxy158)
setColour plugout92 "#control"
plugout93 <- plugout' (id42 ! "result") (-228.0,192.0) (Outside proxy158)
setColour plugout93 "#control"
plugout94 <- plugout' (id32 ! "result") (-180.0,192.0) (Outside proxy158)
setColour plugout94 "#control"
plugout95 <- plugout' (id43 ! "result") (-132.0,192.0) (Outside proxy158)
setColour plugout95 "#control"
plugout96 <- plugout' (id30 ! "result") (-84.0,192.0) (Outside proxy158)
setColour plugout96 "#control"
plugout97 <- plugout' (id37 ! "result") (12.0,192.0) (Outside proxy158)
setColour plugout97 "#control"
plugout98 <- plugout' (id36 ! "result") (-36.0,192.0) (Outside proxy158)
setColour plugout98 "#control"
plugout99 <- plugout' (id44 ! "result") (204.0,192.0) (Outside proxy158)
setColour plugout99 "#control"
knob102 <- knob' (input57 ! "result") (x+(-240.0), y+(96.0)) (Outside container58)
knob103 <- knob' (input52 ! "result") (x+(-144.0), y+(96.0)) (Outside container58)
knob104 <- knob' (input53 ! "result") (x+(-48.0), y+(96.0)) (Outside container58)
knob105 <- knob' (input54 ! "result") (x+(48.0), y+(96.0)) (Outside container58)
knob106 <- knob' (input55 ! "result") (x+(144.0), y+(96.0)) (Outside container58)
knob107 <- knob' (input56 ! "result") (x+(240.0), y+(96.0)) (Outside container58)
knob108 <- knob' (input50 ! "result") (x+(144.0), y+(-96.0)) (Outside container58)
knob109 <- knob' (input51 ! "result") (x+(240.0), y+(-96.0)) (Outside container58)
plugin110 <- plugin' (id27 ! "signal") (x+(144.0), y+(-96.0)) (Outside container58)
setColour plugin110 "#control"
hide plugin110
plugin111 <- plugin' (id28 ! "signal") (x+(240.0), y+(-96.0)) (Outside container58)
setColour plugin111 "#control"
hide plugin111
plugin112 <- plugin' (id48 ! "signal") (x+(-300.0), y+(24.0)) (Outside container58)
setColour plugin112 "#control"
plugin113 <- plugin' (id49 ! "signal") (x+(-300.0), y+(-24.0)) (Outside container58)
setColour plugin113 "#control"
plugin114 <- plugin' (id34 ! "signal") (x+(-264.0), y+(144.0)) (Outside container58)
setColour plugin114 "#control"
plugin115 <- plugin' (id42 ! "signal") (x+(-216.0), y+(144.0)) (Outside container58)
setColour plugin115 "#control"
plugin116 <- plugin' (id32 ! "signal") (x+(-168.0), y+(144.0)) (Outside container58)
setColour plugin116 "#control"
plugin117 <- plugin' (id29 ! "signal") (x+(-240.0), y+(96.0)) (Outside container58)
setColour plugin117 "#control"
hide plugin117
plugin118 <- plugin' (id38 ! "signal") (x+(-144.0), y+(96.0)) (Outside container58)
setColour plugin118 "#control"
hide plugin118
plugin119 <- plugin' (id45 ! "signal") (x+(-48.0), y+(96.0)) (Outside container58)
setColour plugin119 "#control"
hide plugin119
plugin120 <- plugin' (id22 ! "signal") (x+(48.0), y+(96.0)) (Outside container58)
setColour plugin120 "#control"
hide plugin120
plugin121 <- plugin' (id25 ! "signal") (x+(144.0), y+(96.0)) (Outside container58)
setColour plugin121 "#control"
hide plugin121
plugin122 <- plugin' (id31 ! "signal") (x+(240.0), y+(96.0)) (Outside container58)
setColour plugin122 "#control"
hide plugin122
plugin123 <- plugin' (id43 ! "signal") (x+(-120.0), y+(144.0)) (Outside container58)
setColour plugin123 "#control"
plugin124 <- plugin' (id30 ! "signal") (x+(-72.0), y+(144.0)) (Outside container58)
setColour plugin124 "#control"
plugin125 <- plugin' (id37 ! "signal") (x+(24.0), y+(144.0)) (Outside container58)
setColour plugin125 "#control"
plugin126 <- plugin' (id36 ! "signal") (x+(-24.0), y+(144.0)) (Outside container58)
setColour plugin126 "#control"
plugin127 <- plugin' (id44 ! "signal") (x+(216.0), y+(144.0)) (Outside container58)
setColour plugin127 "#control"
plugin128 <- plugin' (id41 ! "signal") (x+(168.0), y+(144.0)) (Outside container58)
setColour plugin128 "#control"
plugin129 <- plugin' (id40 ! "signal") (x+(120.0), y+(144.0)) (Outside container58)
setColour plugin129 "#control"
plugin130 <- plugin' (id39 ! "signal") (x+(72.0), y+(144.0)) (Outside container58)
setColour plugin130 "#control"
plugin131 <- plugin' (id46 ! "signal") (x+(264.0), y+(144.0)) (Outside container58)
setColour plugin131 "#control"
plugin132 <- plugin' (id13 ! "signal") (x+(168.0), y+(-48.0)) (Outside container58)
setColour plugin132 "#sample"
plugin133 <- plugin' (id8 ! "signal") (x+(216.0), y+(-48.0)) (Outside container58)
setColour plugin133 "#sample"
plugin134 <- plugin' (id9 ! "signal") (x+(264.0), y+(-48.0)) (Outside container58)
setColour plugin134 "#sample"
plugin135 <- plugin' (id12 ! "signal") (x+(120.0), y+(-48.0)) (Outside container58)
setColour plugin135 "#sample"
plugout136 <- plugout' (id35 ! "result") (x+(-264.0), y+(48.0)) (Outside container58)
setColour plugout136 "#control"
plugout137 <- plugout' (id19 ! "result") (x+(-216.0), y+(48.0)) (Outside container58)
setColour plugout137 "#control"
plugout138 <- plugout' (id33 ! "result") (x+(-168.0), y+(48.0)) (Outside container58)
setColour plugout138 "#control"
plugout139 <- plugout' (id18 ! "result") (x+(24.0), y+(48.0)) (Outside container58)
setColour plugout139 "#control"
plugout140 <- plugout' (id17 ! "result") (x+(-24.0), y+(48.0)) (Outside container58)
setColour plugout140 "#control"
plugout141 <- plugout' (id16 ! "result") (x+(-72.0), y+(48.0)) (Outside container58)
setColour plugout141 "#control"
plugout142 <- plugout' (id47 ! "result") (x+(-120.0), y+(48.0)) (Outside container58)
setColour plugout142 "#control"
plugout143 <- plugout' (id24 ! "result") (x+(216.0), y+(48.0)) (Outside container58)
setColour plugout143 "#control"
plugout144 <- plugout' (id23 ! "result") (x+(168.0), y+(48.0)) (Outside container58)
setColour plugout144 "#control"
plugout145 <- plugout' (id21 ! "result") (x+(120.0), y+(48.0)) (Outside container58)
setColour plugout145 "#control"
plugout146 <- plugout' (id20 ! "result") (x+(72.0), y+(48.0)) (Outside container58)
setColour plugout146 "#control"
plugout147 <- plugout' (id26 ! "result") (x+(264.0), y+(48.0)) (Outside container58)
setColour plugout147 "#control"
plugout148 <- plugout' (id15 ! "result") (x+(300.0), y+(0.0)) (Outside container58)
setColour plugout148 "#control"
plugout149 <- plugout' (id11 ! "result") (x+(168.0), y+(-144.0)) (Outside container58)
setColour plugout149 "#sample"
plugout150 <- plugout' (id10 ! "result") (x+(120.0), y+(-144.0)) (Outside container58)
setColour plugout150 "#sample"
plugout151 <- plugout' (id7 ! "result") (x+(264.0), y+(-144.0)) (Outside container58)
setColour plugout151 "#sample"
plugout152 <- plugout' (id14 ! "result") (x+(216.0), y+(-144.0)) (Outside container58)
setColour plugout152 "#sample"
cable knob108 plugin110
cable knob109 plugin111
cable knob102 plugin117
cable knob103 plugin118
cable knob104 plugin119
cable knob105 plugin120
cable knob106 plugin121
cable knob107 plugin122
recompile
set knob102 (1.7860383e-2)
set knob103 (0.0)
set knob104 (0.0)
set knob105 (3.0e-2)
set knob106 (0.0)
set knob107 (0.0)
set knob108 (0.0)
set knob109 (0.0)
alias "in-1" id34
alias "in-10" id41
alias "in-11" id44
alias "in-12" id46
alias "in-2" id42
alias "in-25" id9
alias "in-26" id10
alias "in-27" id13
alias "in-28" id14
alias "in-3" id32
alias "in-4" id43
alias "in-5" id30
alias "in-6" id30
alias "in-7" id37
alias "in-8" id39
alias "in-9" id40
alias "knob-1" input57
alias "knob-2" input52
alias "knob-3" input53
alias "knob-4" input54
alias "knob-5" input55
alias "knob-6" input56
alias "knob-7" input50
alias "knob-8" input51
alias "out-13" id35
alias "out-14" id19
alias "out-15" id33
alias "out-16" id47
alias "out-17" id16
alias "out-18" id17
alias "out-19" id18
alias "out-20" id20
alias "out-21" id21
alias "out-22" id23
alias "out-23" id24
alias "out-24" id26
alias "out-29" id7
alias "out-30" id8
alias "out-31" id11
alias "out-32" id12
return ()
| null | https://raw.githubusercontent.com/dpiponi/Moodler/a0c984c36abae52668d00f25eb3749e97e8936d3/Moodler/scripts/mock_modular.hs | haskell | do
(x0, y0) <- mouse
let (x, y) = quantise2 quantum (x0, y0)
root <- currentPlane
id10 <- new' "id"
id11 <- new' "id"
id12 <- new' "id"
id13 <- new' "id"
id14 <- new' "id"
id15 <- new' "id"
id16 <- new' "id"
id17 <- new' "id"
id18 <- new' "id"
id19 <- new' "id"
id20 <- new' "id"
id21 <- new' "id"
id22 <- new' "id"
id23 <- new' "id"
id24 <- new' "id"
id25 <- new' "id"
id26 <- new' "id"
id27 <- new' "id"
id28 <- new' "id"
id29 <- new' "id"
id30 <- new' "id"
id31 <- new' "id"
id32 <- new' "id"
id33 <- new' "id"
id34 <- new' "id"
id35 <- new' "id"
id36 <- new' "id"
id37 <- new' "id"
id38 <- new' "id"
id39 <- new' "id"
id40 <- new' "id"
id41 <- new' "id"
id42 <- new' "id"
id43 <- new' "id"
id44 <- new' "id"
id45 <- new' "id"
id46 <- new' "id"
id47 <- new' "id"
id48 <- new' "id"
id49 <- new' "id"
id7 <- new' "id"
id8 <- new' "id"
id9 <- new' "id"
input50 <- new' "input"
input51 <- new' "input"
input52 <- new' "input"
input53 <- new' "input"
input54 <- new' "input"
input55 <- new' "input"
input56 <- new' "input"
input57 <- new' "input"
container58 <- container' "panel_mock.png" (x+(0.0), y+(0.0)) (Inside root)
proxy158 <- container' "panel_mock_back.png" (-12.0,48.0) (Inside container58)
plugin59 <- plugin' (id10 ! "signal") (108.0,-96.0) (Outside proxy158)
setColour plugin59 "#sample"
plugin60 <- plugin' (id11 ! "signal") (156.0,-96.0) (Outside proxy158)
setColour plugin60 "#sample"
plugin61 <- plugin' (id7 ! "signal") (252.0,-96.0) (Outside proxy158)
setColour plugin61 "#sample"
plugin62 <- plugin' (id14 ! "signal") (204.0,-96.0) (Outside proxy158)
setColour plugin62 "#sample"
plugin63 <- plugin' (id35 ! "signal") (-276.0,96.0) (Outside proxy158)
setColour plugin63 "#control"
plugin64 <- plugin' (id19 ! "signal") (-228.0,96.0) (Outside proxy158)
setColour plugin64 "#control"
plugin65 <- plugin' (id33 ! "signal") (-180.0,96.0) (Outside proxy158)
setColour plugin65 "#control"
plugin66 <- plugin' (id18 ! "signal") (12.0,96.0) (Outside proxy158)
setColour plugin66 "#control"
plugin67 <- plugin' (id17 ! "signal") (-36.0,96.0) (Outside proxy158)
setColour plugin67 "#control"
plugin68 <- plugin' (id16 ! "signal") (-84.0,96.0) (Outside proxy158)
setColour plugin68 "#control"
plugin69 <- plugin' (id47 ! "signal") (-132.0,96.0) (Outside proxy158)
setColour plugin69 "#control"
plugin70 <- plugin' (id24 ! "signal") (204.0,96.0) (Outside proxy158)
setColour plugin70 "#control"
plugin71 <- plugin' (id23 ! "signal") (156.0,96.0) (Outside proxy158)
setColour plugin71 "#control"
plugin72 <- plugin' (id21 ! "signal") (108.0,96.0) (Outside proxy158)
setColour plugin72 "#control"
plugin73 <- plugin' (id20 ! "signal") (60.0,96.0) (Outside proxy158)
setColour plugin73 "#control"
plugin74 <- plugin' (id26 ! "signal") (252.0,96.0) (Outside proxy158)
setColour plugin74 "#control"
plugin75 <- plugin' (id15 ! "signal") (288.0,48.0) (Outside proxy158)
setColour plugin75 "#control"
plugout100 <- plugout' (id41 ! "result") (156.0,192.0) (Outside proxy158)
setColour plugout100 "#control"
plugout101 <- plugout' (id40 ! "result") (108.0,192.0) (Outside proxy158)
setColour plugout101 "#control"
plugout76 <- plugout' (id39 ! "result") (60.0,192.0) (Outside proxy158)
setColour plugout76 "#control"
plugout77 <- plugout' (id46 ! "result") (252.0,192.0) (Outside proxy158)
setColour plugout77 "#control"
plugout78 <- plugout' (id27 ! "result") (132.0,-48.0) (Outside proxy158)
setColour plugout78 "#control"
plugout79 <- plugout' (id28 ! "result") (228.0,-48.0) (Outside proxy158)
setColour plugout79 "#control"
plugout80 <- plugout' (id48 ! "result") (-312.0,72.0) (Outside proxy158)
setColour plugout80 "#control"
plugout81 <- plugout' (id49 ! "result") (-312.0,24.0) (Outside proxy158)
setColour plugout81 "#control"
plugout82 <- plugout' (id9 ! "result") (252.0,0.0) (Outside proxy158)
setColour plugout82 "#sample"
plugout83 <- plugout' (id8 ! "result") (204.0,0.0) (Outside proxy158)
setColour plugout83 "#sample"
plugout84 <- plugout' (id13 ! "result") (156.0,0.0) (Outside proxy158)
setColour plugout84 "#sample"
plugout85 <- plugout' (id12 ! "result") (108.0,0.0) (Outside proxy158)
setColour plugout85 "#sample"
plugout86 <- plugout' (id29 ! "result") (-252.0,144.0) (Outside proxy158)
setColour plugout86 "#control"
plugout87 <- plugout' (id38 ! "result") (-156.0,144.0) (Outside proxy158)
setColour plugout87 "#control"
plugout88 <- plugout' (id45 ! "result") (-60.0,144.0) (Outside proxy158)
setColour plugout88 "#control"
plugout89 <- plugout' (id22 ! "result") (36.0,144.0) (Outside proxy158)
setColour plugout89 "#control"
plugout90 <- plugout' (id25 ! "result") (132.0,144.0) (Outside proxy158)
setColour plugout90 "#control"
plugout91 <- plugout' (id31 ! "result") (228.0,144.0) (Outside proxy158)
setColour plugout91 "#control"
plugout92 <- plugout' (id34 ! "result") (-276.0,192.0) (Outside proxy158)
setColour plugout92 "#control"
plugout93 <- plugout' (id42 ! "result") (-228.0,192.0) (Outside proxy158)
setColour plugout93 "#control"
plugout94 <- plugout' (id32 ! "result") (-180.0,192.0) (Outside proxy158)
setColour plugout94 "#control"
plugout95 <- plugout' (id43 ! "result") (-132.0,192.0) (Outside proxy158)
setColour plugout95 "#control"
plugout96 <- plugout' (id30 ! "result") (-84.0,192.0) (Outside proxy158)
setColour plugout96 "#control"
plugout97 <- plugout' (id37 ! "result") (12.0,192.0) (Outside proxy158)
setColour plugout97 "#control"
plugout98 <- plugout' (id36 ! "result") (-36.0,192.0) (Outside proxy158)
setColour plugout98 "#control"
plugout99 <- plugout' (id44 ! "result") (204.0,192.0) (Outside proxy158)
setColour plugout99 "#control"
knob102 <- knob' (input57 ! "result") (x+(-240.0), y+(96.0)) (Outside container58)
knob103 <- knob' (input52 ! "result") (x+(-144.0), y+(96.0)) (Outside container58)
knob104 <- knob' (input53 ! "result") (x+(-48.0), y+(96.0)) (Outside container58)
knob105 <- knob' (input54 ! "result") (x+(48.0), y+(96.0)) (Outside container58)
knob106 <- knob' (input55 ! "result") (x+(144.0), y+(96.0)) (Outside container58)
knob107 <- knob' (input56 ! "result") (x+(240.0), y+(96.0)) (Outside container58)
knob108 <- knob' (input50 ! "result") (x+(144.0), y+(-96.0)) (Outside container58)
knob109 <- knob' (input51 ! "result") (x+(240.0), y+(-96.0)) (Outside container58)
plugin110 <- plugin' (id27 ! "signal") (x+(144.0), y+(-96.0)) (Outside container58)
setColour plugin110 "#control"
hide plugin110
plugin111 <- plugin' (id28 ! "signal") (x+(240.0), y+(-96.0)) (Outside container58)
setColour plugin111 "#control"
hide plugin111
plugin112 <- plugin' (id48 ! "signal") (x+(-300.0), y+(24.0)) (Outside container58)
setColour plugin112 "#control"
plugin113 <- plugin' (id49 ! "signal") (x+(-300.0), y+(-24.0)) (Outside container58)
setColour plugin113 "#control"
plugin114 <- plugin' (id34 ! "signal") (x+(-264.0), y+(144.0)) (Outside container58)
setColour plugin114 "#control"
plugin115 <- plugin' (id42 ! "signal") (x+(-216.0), y+(144.0)) (Outside container58)
setColour plugin115 "#control"
plugin116 <- plugin' (id32 ! "signal") (x+(-168.0), y+(144.0)) (Outside container58)
setColour plugin116 "#control"
plugin117 <- plugin' (id29 ! "signal") (x+(-240.0), y+(96.0)) (Outside container58)
setColour plugin117 "#control"
hide plugin117
plugin118 <- plugin' (id38 ! "signal") (x+(-144.0), y+(96.0)) (Outside container58)
setColour plugin118 "#control"
hide plugin118
plugin119 <- plugin' (id45 ! "signal") (x+(-48.0), y+(96.0)) (Outside container58)
setColour plugin119 "#control"
hide plugin119
plugin120 <- plugin' (id22 ! "signal") (x+(48.0), y+(96.0)) (Outside container58)
setColour plugin120 "#control"
hide plugin120
plugin121 <- plugin' (id25 ! "signal") (x+(144.0), y+(96.0)) (Outside container58)
setColour plugin121 "#control"
hide plugin121
plugin122 <- plugin' (id31 ! "signal") (x+(240.0), y+(96.0)) (Outside container58)
setColour plugin122 "#control"
hide plugin122
plugin123 <- plugin' (id43 ! "signal") (x+(-120.0), y+(144.0)) (Outside container58)
setColour plugin123 "#control"
plugin124 <- plugin' (id30 ! "signal") (x+(-72.0), y+(144.0)) (Outside container58)
setColour plugin124 "#control"
plugin125 <- plugin' (id37 ! "signal") (x+(24.0), y+(144.0)) (Outside container58)
setColour plugin125 "#control"
plugin126 <- plugin' (id36 ! "signal") (x+(-24.0), y+(144.0)) (Outside container58)
setColour plugin126 "#control"
plugin127 <- plugin' (id44 ! "signal") (x+(216.0), y+(144.0)) (Outside container58)
setColour plugin127 "#control"
plugin128 <- plugin' (id41 ! "signal") (x+(168.0), y+(144.0)) (Outside container58)
setColour plugin128 "#control"
plugin129 <- plugin' (id40 ! "signal") (x+(120.0), y+(144.0)) (Outside container58)
setColour plugin129 "#control"
plugin130 <- plugin' (id39 ! "signal") (x+(72.0), y+(144.0)) (Outside container58)
setColour plugin130 "#control"
plugin131 <- plugin' (id46 ! "signal") (x+(264.0), y+(144.0)) (Outside container58)
setColour plugin131 "#control"
plugin132 <- plugin' (id13 ! "signal") (x+(168.0), y+(-48.0)) (Outside container58)
setColour plugin132 "#sample"
plugin133 <- plugin' (id8 ! "signal") (x+(216.0), y+(-48.0)) (Outside container58)
setColour plugin133 "#sample"
plugin134 <- plugin' (id9 ! "signal") (x+(264.0), y+(-48.0)) (Outside container58)
setColour plugin134 "#sample"
plugin135 <- plugin' (id12 ! "signal") (x+(120.0), y+(-48.0)) (Outside container58)
setColour plugin135 "#sample"
plugout136 <- plugout' (id35 ! "result") (x+(-264.0), y+(48.0)) (Outside container58)
setColour plugout136 "#control"
plugout137 <- plugout' (id19 ! "result") (x+(-216.0), y+(48.0)) (Outside container58)
setColour plugout137 "#control"
plugout138 <- plugout' (id33 ! "result") (x+(-168.0), y+(48.0)) (Outside container58)
setColour plugout138 "#control"
plugout139 <- plugout' (id18 ! "result") (x+(24.0), y+(48.0)) (Outside container58)
setColour plugout139 "#control"
plugout140 <- plugout' (id17 ! "result") (x+(-24.0), y+(48.0)) (Outside container58)
setColour plugout140 "#control"
plugout141 <- plugout' (id16 ! "result") (x+(-72.0), y+(48.0)) (Outside container58)
setColour plugout141 "#control"
plugout142 <- plugout' (id47 ! "result") (x+(-120.0), y+(48.0)) (Outside container58)
setColour plugout142 "#control"
plugout143 <- plugout' (id24 ! "result") (x+(216.0), y+(48.0)) (Outside container58)
setColour plugout143 "#control"
plugout144 <- plugout' (id23 ! "result") (x+(168.0), y+(48.0)) (Outside container58)
setColour plugout144 "#control"
plugout145 <- plugout' (id21 ! "result") (x+(120.0), y+(48.0)) (Outside container58)
setColour plugout145 "#control"
plugout146 <- plugout' (id20 ! "result") (x+(72.0), y+(48.0)) (Outside container58)
setColour plugout146 "#control"
plugout147 <- plugout' (id26 ! "result") (x+(264.0), y+(48.0)) (Outside container58)
setColour plugout147 "#control"
plugout148 <- plugout' (id15 ! "result") (x+(300.0), y+(0.0)) (Outside container58)
setColour plugout148 "#control"
plugout149 <- plugout' (id11 ! "result") (x+(168.0), y+(-144.0)) (Outside container58)
setColour plugout149 "#sample"
plugout150 <- plugout' (id10 ! "result") (x+(120.0), y+(-144.0)) (Outside container58)
setColour plugout150 "#sample"
plugout151 <- plugout' (id7 ! "result") (x+(264.0), y+(-144.0)) (Outside container58)
setColour plugout151 "#sample"
plugout152 <- plugout' (id14 ! "result") (x+(216.0), y+(-144.0)) (Outside container58)
setColour plugout152 "#sample"
cable knob108 plugin110
cable knob109 plugin111
cable knob102 plugin117
cable knob103 plugin118
cable knob104 plugin119
cable knob105 plugin120
cable knob106 plugin121
cable knob107 plugin122
recompile
set knob102 (1.7860383e-2)
set knob103 (0.0)
set knob104 (0.0)
set knob105 (3.0e-2)
set knob106 (0.0)
set knob107 (0.0)
set knob108 (0.0)
set knob109 (0.0)
alias "in-1" id34
alias "in-10" id41
alias "in-11" id44
alias "in-12" id46
alias "in-2" id42
alias "in-25" id9
alias "in-26" id10
alias "in-27" id13
alias "in-28" id14
alias "in-3" id32
alias "in-4" id43
alias "in-5" id30
alias "in-6" id30
alias "in-7" id37
alias "in-8" id39
alias "in-9" id40
alias "knob-1" input57
alias "knob-2" input52
alias "knob-3" input53
alias "knob-4" input54
alias "knob-5" input55
alias "knob-6" input56
alias "knob-7" input50
alias "knob-8" input51
alias "out-13" id35
alias "out-14" id19
alias "out-15" id33
alias "out-16" id47
alias "out-17" id16
alias "out-18" id17
alias "out-19" id18
alias "out-20" id20
alias "out-21" id21
alias "out-22" id23
alias "out-23" id24
alias "out-24" id26
alias "out-29" id7
alias "out-30" id8
alias "out-31" id11
alias "out-32" id12
return ()
| |
871f37bca64c68ae45932e8842786b46b9cca33185da95eeafaa28a80e4e5e40 | abella-prover/abella | regress.ml |
* Author : < >
* Copyright ( C ) 2022 Inria ( Institut National de Recherche
* en Informatique et en Automatique )
* See LICENSE for licensing details .
* Author: Kaustuv Chaudhuri <>
* Copyright (C) 2022 Inria (Institut National de Recherche
* en Informatique et en Automatique)
* See LICENSE for licensing details.
*)
(* Run "abella -a" on all examples *)
(* Recursively collect every ".thm" file under [dir], as a [Seq.t].
   Directory entries are visited in sorted order, so the resulting
   sequence is deterministic regardless of filesystem enumeration order.
   Note: classification ([Sys.is_directory] / suffix check) happens
   eagerly, in order, exactly as in a map-then-concatenate formulation. *)
let rec thmfiles dir =
  let entries = Sys.readdir dir in
  Array.fast_sort String.compare entries ;
  Array.fold_left
    (fun acc entry ->
       let path = Filename.concat dir entry in
       let here =
         if Sys.is_directory path then thmfiles path
         else if Filename.check_suffix path ".thm" then Seq.return path
         else Seq.empty
       in
       Seq.append acc here)
    Seq.empty entries
;;
(* Entry point.  When compiled to a native executable on a Unix system
   (i.e. not in the toplevel), generate a Makefile with one rule per
   example .thm file and replace the current process with [make -j] on
   that Makefile, so all examples are checked in parallel. *)
let () =
  if not !Sys.interactive && Sys.unix then begin
    (* Paths are relative to the dune build sandbox for this test. *)
    let files = thmfiles "../../../examples" in
    let buf = Buffer.create 19 in
    let open Format in
    let ff = formatter_of_buffer buf in
    fprintf ff ".PHONY: all\nall:\n\n" ;
    (* For each theorem file, emit a rule producing a .json witness whose
       name is the file path with '/' and '.' squashed to '_'. *)
    let rec loop fs =
      match fs () with
      | Seq.Nil -> ()
      | Seq.Cons (thmfile, fs) ->
          let jsonfile = String.map (function '/' | '.' -> '_' | c -> c) thmfile ^ ".json" in
          fprintf ff "%s: %s\n" jsonfile thmfile ;
          (* "$@@" renders as a literal "$@" (make's target variable). *)
          fprintf ff "\t../src/abella.exe -a $< -o $@@\n" ;
          fprintf ff "all: %s\n" jsonfile ;
          loop fs
    in
    loop files ;
    (* Write the generated rules to a temp Makefile, echo the command for
       the operator's benefit, then exec make (never returns on success). *)
    let (mkfilename, mkfilech) =
      Filename.open_temp_file ~mode:[Open_binary] "Makefile" "examples" in
    Buffer.output_buffer mkfilech buf ;
    close_out mkfilech ;
    Printf.printf "make -f %s -j\n" mkfilename ;
    Unix.execvp "make" [| "make" ; "-f" ; mkfilename ; "-j" |]
  end
;;
| null | https://raw.githubusercontent.com/abella-prover/abella/f1ce97ebbce5c8423cd096e9f4345e32bb9f781e/test/regress.ml | ocaml | Run "abella -a" on all examples |
* Author : < >
* Copyright ( C ) 2022 Inria ( Institut National de Recherche
* en Informatique et en Automatique )
* See LICENSE for licensing details .
* Author: Kaustuv Chaudhuri <>
* Copyright (C) 2022 Inria (Institut National de Recherche
* en Informatique et en Automatique)
* See LICENSE for licensing details.
*)
let rec thmfiles dir =
let process f =
let f = Filename.concat dir f in
if Sys.is_directory f then
thmfiles f
else if Filename.check_suffix f ".thm" then
Seq.return f
else
Seq.empty
in
let fs = Sys.readdir dir in
Array.fast_sort String.compare fs ;
Array.map process fs
|> Array.fold_left Seq.append Seq.empty
;;
let () =
if not !Sys.interactive && Sys.unix then begin
let files = thmfiles "../../../examples" in
let buf = Buffer.create 19 in
let open Format in
let ff = formatter_of_buffer buf in
fprintf ff ".PHONY: all\nall:\n\n" ;
let rec loop fs =
match fs () with
| Seq.Nil -> ()
| Seq.Cons (thmfile, fs) ->
let jsonfile = String.map (function '/' | '.' -> '_' | c -> c) thmfile ^ ".json" in
fprintf ff "%s: %s\n" jsonfile thmfile ;
fprintf ff "\t../src/abella.exe -a $< -o $@@\n" ;
fprintf ff "all: %s\n" jsonfile ;
loop fs
in
loop files ;
let (mkfilename, mkfilech) =
Filename.open_temp_file ~mode:[Open_binary] "Makefile" "examples" in
Buffer.output_buffer mkfilech buf ;
close_out mkfilech ;
Printf.printf "make -f %s -j\n" mkfilename ;
Unix.execvp "make" [| "make" ; "-f" ; mkfilename ; "-j" |]
end
;;
|
e90b062cf3aa8210b4602f0e4fa6c268f862b082e194304e59dfbe4a56720007 | Glue42/gateway-modules | domain.cljc | (ns gateway.domain
(:require [gateway.state.spec.state :as state]
[gateway.common.spec.messages :as msg]
[clojure.spec.alpha :as s]))
;; specs

;; Result of a domain operation: nil, or a pair of a (possibly nil) new
;; state and a (possibly nil) collection of outgoing messages.
(s/def ::operation-result (s/nilable (s/tuple (s/nilable ::state/state)
                                              (s/nilable (s/coll-of ::msg/outgoing-message)))))

(defprotocol Domain
  "Lifecycle and message-handling contract implemented by every gateway
  domain."
  (info [this]
    "Static description of the domain (contents are implementation
    specific).")
  (init [this state]
    "Installs the domain into `state`.  Result shape presumably matches
    ::operation-result -- TODO confirm against implementations.")
  (destroy [this state]
    "Tears down whatever `init` set up for this domain.")
  (handle-message [this state request]
    "Processes a single incoming `request` against `state`.")
  (state->messages [this state]
    "Generates the messages needed to replicate the state onto a remote node. Can be a noop"))
| null | https://raw.githubusercontent.com/Glue42/gateway-modules/be48a132134b5f9f41fd6a6067800da6be5e6eca/common/src/gateway/domain.cljc | clojure | specs | (ns gateway.domain
(:require [gateway.state.spec.state :as state]
[gateway.common.spec.messages :as msg]
[clojure.spec.alpha :as s]))
(s/def ::operation-result (s/nilable (s/tuple (s/nilable ::state/state)
(s/nilable (s/coll-of ::msg/outgoing-message)))))
(defprotocol Domain
(info [this])
(init [this state])
(destroy [this state])
(handle-message [this state request])
(state->messages [this state]
"Generates the messages needed to replicate the state onto a remote node. Can be a noop"))
|
04d157ef4b6540244a2d95b5d2d049b2e7ec08795fb74afdfd3d8fc7c851deb3 | smahood/re-frame-conj-2016 | step3.cljs | (ns show.steps.step3
(:require
[reagent.core :as reagent]
[re-frame.core :as re-frame]
[re-frisk.core :as re-frisk]
[cljs.spec :as s]
[show.slides.views :as views]))
(declare slides)
(defn slideshow
  "Root view: the first slide of the deck, framed in the half-width
  slideshow container."
  []
  (let [current (nth slides 0)]
    [:div.slideshow-half-width
     [:div.slide
      [current]]]))
(defn title-slide
  "Opening slide: talk title with the re-frame and ClojureScript logos,
  plus a footer crediting Clojure/conj 2016."
  []
  [:div.title-slide
   [:div.slide-body
    [:br]
    [:div "Building a "]
    [:div "Presentation with"]
    [:img.re-frame-logo
     {:src "img/re-frame-logo.png"
      :style {:margin-top "20px"}}]
    [:div "and"]
    [:img.cljs-logo
     {:src "img/cljs-logo.png"}]]
   [:div.slide-footer
    ;; Empty spans keep the centre footer text positioned between the
    ;; left and right footer slots.
    [:span ""]
    [:span "Clojure/conj 2016"]
    [:span ""]]])
;; The slide deck for this step: a single title slide.
(def slides
  [title-slide])

(defn mount-root
  "Renders the slideshow component into the #app DOM node."
  []
  (reagent/render
   [slideshow]
   (js/document.getElementById "app")))

;; Mount immediately on load (and on figwheel-style reload of this ns).
(mount-root)
| null | https://raw.githubusercontent.com/smahood/re-frame-conj-2016/51fe544a5f4e95f28c26995c11d64301ba9a2142/show/src/show/steps/step3.cljs | clojure | (ns show.steps.step3
(:require
[reagent.core :as reagent]
[re-frame.core :as re-frame]
[re-frisk.core :as re-frisk]
[cljs.spec :as s]
[show.slides.views :as views]))
(declare slides)
(defn slideshow []
[:div.slideshow-half-width
[:div.slide
[(nth slides 0)]]])
(defn title-slide []
[:div.title-slide
[:div.slide-body
[:br]
[:div "Building a "]
[:div "Presentation with"]
[:img.re-frame-logo
{:src "img/re-frame-logo.png"
:style {:margin-top "20px"}}]
[:div "and"]
[:img.cljs-logo
{:src "img/cljs-logo.png"}]]
[:div.slide-footer
[:span ""]
[:span "Clojure/conj 2016"]
[:span ""]]])
(def slides
[title-slide])
(defn mount-root []
(reagent/render
[slideshow]
(js/document.getElementById "app")))
(mount-root)
| |
a6862fa4c77e58fbbbfa6f687956971e7e93dc65ec370cf70b5659a5e83d6c00 | typedclojure/typedclojure | loop.clj | Copyright ( c ) , contributors .
;; The use and distribution terms for this software are covered by the
;; Eclipse Public License 1.0 (-1.0.php)
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns typed.cljc.checker.check.loop
(:require [clojure.core.typed.errors :as err]
[clojure.core.typed.util-vars :as vs]
[typed.clj.checker.parse-unparse :as prs]
[typed.cljc.checker.check.let :as let]
[typed.cljc.checker.check.recur-utils :as recur-u]
[typed.cljc.checker.check.utils :as cu]
[typed.cljc.checker.type-rep :as r]))
(defn parse-annotation
  "Parse the raw type annotation tsyn in the context of expr.
  The current env and the expression's namespace are bound dynamically so
  that error reporting and symbol resolution happen relative to expr."
  [tsyn {:keys [env] :as expr}]
  ;; The original wrapped this in a redundant (let [parsed-t ...] parsed-t);
  ;; returning the binding form directly is equivalent -- parse-type is
  ;; evaluated inside the dynamic extent either way.
  (binding [vs/*current-env* env
            prs/*parse-type-in-ns* (cu/expr-ns expr)]
    (prs/parse-type tsyn)))
(defn inline-annotations
  "Collects inline :clojure.core.typed/ann metadata annotations from the
  loop bindings of `expr`.  Returns nil when no binding carries an
  annotation; otherwise a non-empty seq of parsed types in binding order,
  with unannotated bindings defaulting to Any."
  [expr]
  {:pre [(= :loop (:op expr))]
   :post [(or (nil? %)
              (and (seq %)
                   (every? r/Type? %)))]}
  (let [;; cljs.analyzer :binding's don't have forms yet
        names (map (some-fn :form :name) (:bindings expr))
        _ (assert (every? symbol? names))
        ;; For each binding symbol, parse its ::t/ann metadata (if present)
        ;; into a type; nil where the binding is unannotated.
        maybe-anns (map (comp (fn [m]
                                ;(prn "meta" m)
                                (when-let [[_ tsyn] (find m :clojure.core.typed/ann)]
                                  (parse-annotation tsyn expr)))
                              meta)
                        names)
        normalize (when (some identity maybe-anns)
                    ;; annotate unannotated vars with Any
                    (seq (map (fn [t] (or t r/-any)) maybe-anns)))]
    normalize))
;; `recur-u/*loop-bnd-anns*` is populated in `typed.cljc.checker.check.special.loop`
(defn check-loop
  "Type-checks a `loop` expression.  Expected binding annotations come
  either from recur-u/*loop-bnd-anns* (populated by t/loop, see
  typed.cljc.checker.check.special.loop) or from inline metadata
  annotations on the bindings; providing both is an error."
  [check expr expected]
  (let [loop-bnd-anns recur-u/*loop-bnd-anns*
        inlines (inline-annotations expr)
        _ (when (and loop-bnd-anns inlines)
            (err/int-error "Cannot provide both an annotation with t/loop and inline loop"))
        ;; NOTE(review): the binding below looks like a leftover debug print
        ;; that lost its leading comment marker -- confirm whether it is
        ;; meant to execute.
        _ ( prn " inlines " inlines )
        anns (or loop-bnd-anns inlines)]
    ;; Clear the dynamic var so nested loops don't inherit these
    ;; annotations, then check the loop as a let with expected binding
    ;; types and :loop? set.
    (binding [recur-u/*loop-bnd-anns* nil]
      (let/check-let check expr expected
                     {:expected-bnds anns
                      :loop? true}))))
| null | https://raw.githubusercontent.com/typedclojure/typedclojure/45556897356f3c9cbc7b1b6b4df263086a9d5803/typed/clj.checker/src/typed/cljc/checker/check/loop.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
cljs.analyzer :binding's don't have forms yet
(prn "meta" m)
annotate unannotated vars with Any
`recur-u/*loop-bnd-anns*` is populated in `typed.cljc.checker.check.special.loop` | Copyright ( c ) , contributors .
(ns typed.cljc.checker.check.loop
(:require [clojure.core.typed.errors :as err]
[clojure.core.typed.util-vars :as vs]
[typed.clj.checker.parse-unparse :as prs]
[typed.cljc.checker.check.let :as let]
[typed.cljc.checker.check.recur-utils :as recur-u]
[typed.cljc.checker.check.utils :as cu]
[typed.cljc.checker.type-rep :as r]))
(defn parse-annotation
"Parse the raw type annotation tsyn in the context of expr"
[tsyn {:keys [env] :as expr}]
(let [parsed-t (binding [vs/*current-env* env
prs/*parse-type-in-ns* (cu/expr-ns expr)]
(prs/parse-type tsyn))]
parsed-t))
(defn inline-annotations [expr]
{:pre [(= :loop (:op expr))]
:post [(or (nil? %)
(and (seq %)
(every? r/Type? %)))]}
names (map (some-fn :form :name) (:bindings expr))
_ (assert (every? symbol? names))
maybe-anns (map (comp (fn [m]
(when-let [[_ tsyn] (find m :clojure.core.typed/ann)]
(parse-annotation tsyn expr)))
meta)
names)
normalize (when (some identity maybe-anns)
(seq (map (fn [t] (or t r/-any)) maybe-anns)))]
normalize))
(defn check-loop [check expr expected]
(let [loop-bnd-anns recur-u/*loop-bnd-anns*
inlines (inline-annotations expr)
_ (when (and loop-bnd-anns inlines)
(err/int-error "Cannot provide both an annotation with t/loop and inline loop"))
_ ( prn " inlines " inlines )
anns (or loop-bnd-anns inlines)]
(binding [recur-u/*loop-bnd-anns* nil]
(let/check-let check expr expected
{:expected-bnds anns
:loop? true}))))
|
3565a2c2a70db507f04097003bd90dd3c131cd42542d6fc398f0c1c5f3641b03 | purebred-mua/purebred | Event.hs | -- This file is part of purebred
Copyright ( C ) 2017 - 2021 and
--
-- purebred is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU Affero General Public License for more details.
--
You should have received a copy of the GNU Affero General Public License
-- along with this program. If not, see </>.
{- |
Event types and optics for Purebred.
-}
module Purebred.Types.Event
(
PurebredEvent(..)
, Generation
, firstGeneration
, nextGeneration
, UserMessage(..)
, umContext
, umSeverity
, MessageSeverity(..)
) where
import Control.Lens (Lens', lens)
import qualified Data.Text as T
import Purebred.Types.Error (Error)
import Purebred.Types.UI (Name)
-- | A serial number that can be used to match (or ignore as
-- irrelevant) asynchronous events to current application state.
--
Use the ' Eq ' and ' ' instances to compare generations . The
constructor is hidden ; use ' firstGeneration ' as the first
-- generation, and use 'nextGeneration' to monotonically increment
-- it.
--
newtype Generation = Generation Integer
deriving (Eq, Ord)
-- | The initial generation; the starting point for the counter.
firstGeneration :: Generation
firstGeneration = Generation 0

-- | The generation after the given one.  Backed by 'Integer', so the
-- counter never wraps around.
nextGeneration :: Generation -> Generation
nextGeneration (Generation n) = Generation (succ n)
-- | Purebred event type. In the future we can abstract this over
-- a custom event type to allow plugins to define their own events.
-- But I've YAGNI'd it for now because it will require an event
type parameter on ' AppState ' , which will be a noisy change .
--
data PurebredEvent
= NotifyNumThreads Int Generation
| NotifyNewMailArrived Int
| InputValidated (Maybe UserMessage) -- ^ Event used for real time validation
-- | How important a 'UserMessage' is: a hard 'Error', a 'Warning', or
-- purely informational text.
data MessageSeverity
  = Error Error
  | Warning T.Text
  | Info T.Text
  deriving (Eq, Show)

-- | UI feedback shown to the user.
-- Uses context and severity to control visibility and importance.
data UserMessage = UserMessage
  { _umContext:: Name               -- ^ UI element the message is scoped to
  , _umSeverity :: MessageSeverity  -- ^ importance of the message
  }
  deriving (Eq, Show)

-- | Lens onto the 'Name' a message is associated with.
umContext :: Lens' UserMessage Name
umContext = lens _umContext (\m x -> m { _umContext = x })

-- | Lens onto the message's severity.
umSeverity :: Lens' UserMessage MessageSeverity
umSeverity = lens _umSeverity (\m x -> m { _umSeverity = x })
| null | https://raw.githubusercontent.com/purebred-mua/purebred/f5ae68637d84fafde088df09420cea4e606b77ed/src/Purebred/Types/Event.hs | haskell | This file is part of purebred
purebred is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
along with this program. If not, see </>.
|
Event types and optics for Purebred.
| A serial number that can be used to match (or ignore as
irrelevant) asynchronous events to current application state.
generation, and use 'nextGeneration' to monotonically increment
it.
| Purebred event type. In the future we can abstract this over
a custom event type to allow plugins to define their own events.
But I've YAGNI'd it for now because it will require an event
^ Event used for real time validation
| UI feedback shown to the user.
Uses context and severity to control visibility and importance. | Copyright ( C ) 2017 - 2021 and
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU Affero General Public License
module Purebred.Types.Event
(
PurebredEvent(..)
, Generation
, firstGeneration
, nextGeneration
, UserMessage(..)
, umContext
, umSeverity
, MessageSeverity(..)
) where
import Control.Lens (Lens', lens)
import qualified Data.Text as T
import Purebred.Types.Error (Error)
import Purebred.Types.UI (Name)
Use the ' Eq ' and ' ' instances to compare generations . The
constructor is hidden ; use ' firstGeneration ' as the first
newtype Generation = Generation Integer
deriving (Eq, Ord)
firstGeneration :: Generation
firstGeneration = Generation 0
nextGeneration :: Generation -> Generation
nextGeneration (Generation n) = Generation (succ n)
type parameter on ' AppState ' , which will be a noisy change .
data PurebredEvent
= NotifyNumThreads Int Generation
| NotifyNewMailArrived Int
data MessageSeverity
= Error Error
| Warning T.Text
| Info T.Text
deriving (Eq, Show)
data UserMessage = UserMessage
{ _umContext:: Name
, _umSeverity :: MessageSeverity
}
deriving (Eq, Show)
umContext :: Lens' UserMessage Name
umContext = lens _umContext (\m x -> m { _umContext = x })
umSeverity :: Lens' UserMessage MessageSeverity
umSeverity = lens _umSeverity (\m x -> m { _umSeverity = x })
|
c52497a387fc8675718aefa58b023c06bd0911d95d18c0ea6adc37e002963da5 | fosskers/aura | L.hs | -- |
Module : Aura . Commands . L
Copyright : ( c ) , 2012 - 2021
-- License : GPL3
Maintainer : < >
--
-- Handle all @-L@ flags - those which involve the pacman log file.
module Aura.Commands.L
( viewLogFile
, searchLogFile
, logInfoOnPkg
) where
import Aura.Colour (dtot, red)
import Aura.Core (Env(..), report)
import Aura.IO
import Aura.Languages
import Aura.Settings
import Aura.Types (PkgName(..))
import Aura.Utils
import Prettyprinter
import RIO
import qualified RIO.NonEmpty as NEL
import qualified RIO.Text as T
import qualified RIO.Text.Partial as T
import System.Process.Typed (proc, runProcess)
---
-- | The contents of the pacman log file, as lines of text.
newtype Log = Log [Text]

-- | Everything @-Li@ reports about a single package.
data LogEntry = LogEntry
  { name :: !PkgName        -- ^ the package in question
  , firstInstall :: !Text   -- ^ time sliced from its first log appearance
  , upgrades :: !Word       -- ^ how many \" upgraded \" lines the log records
  , recent :: ![Text] }
-- | Pipes the pacman log file through a @less@ session.
viewLogFile :: RIO Env ()
viewLogFile = do
  -- `either id id` collapses the Either-wrapped log path to a plain path.
  pth <- asks (either id id . logPathOf . commonConfigOf . settings)
  void . runProcess $ proc "less" [pth]

-- | Print all lines in the log file which contain a given `Text`.
searchLogFile :: Settings -> Text -> IO ()
searchLogFile ss input = do
  let pth = either id id . logPathOf $ commonConfigOf ss
  -- Decode leniently: the log may contain bytes that are not valid UTF-8.
  logFile <- T.lines . decodeUtf8Lenient <$> readFileBinary pth
  traverse_ putTextLn $ searchLines input logFile
-- | The result of @-Li@: a per-package summary of log history, plus a
-- warning for any requested package that never appears in the log.
logInfoOnPkg :: NonEmpty PkgName -> RIO Env ()
logInfoOnPkg pkgs = do
  ss <- asks settings
  let pth = either id id . logPathOf $ commonConfigOf ss
  logFile <- Log . T.lines . decodeUtf8Lenient <$> readFileBinary pth
  -- Partition the requested packages into those absent from the log
  -- (bads) and those with a parsed entry (goods).
  let (bads, goods) = fmapEither (logLookup logFile) $ toList pkgs
  traverse_ (report red reportNotInLog_1) $ NEL.nonEmpty bads
  traverse_ (putTextLn . renderEntry ss) goods

-- | Find a package's history in the log, or hand the package name back
-- as a 'Left' when it never appears.
logLookup :: Log -> PkgName -> Either PkgName LogEntry
logLookup (Log lns) p = case matches of
  [] -> Left p
  (h:t) -> Right $
    LogEntry { name = p
             -- Drops the leading char and keeps 16 more -- assumes the
             -- pacman "[YYYY-MM-DD HH:MM]" timestamp prefix; TODO confirm.
             , firstInstall = T.take 16 $ T.tail h
             , upgrades = fromIntegral . length $ filter (T.isInfixOf " upgraded ") t
             -- Last five mentions, oldest first.
             , recent = reverse . take 5 $ reverse t }
  -- " name (" distinguishes the package from others sharing a prefix.
  where matches = filter (T.isInfixOf (" " <> pnName p <> " (")) lns

-- | Render one 'LogEntry' for display, colouring per the user's settings.
renderEntry :: Settings -> LogEntry -> Text
renderEntry ss (LogEntry (PkgName pn) fi us rs) =
  dtot . colourCheck ss $ entrify ss fields entries <> hardline <> recents <> hardline
  where fields = logLookUpFields $ langOf ss
        entries = map pretty [ pn, fi, T.pack (show us), "" ]
        recents = vsep $ map pretty rs
| null | https://raw.githubusercontent.com/fosskers/aura/08cd46eaa598094f7395455d66690d3d8c59e965/haskell/aura/exec/Aura/Commands/L.hs | haskell | |
License : GPL3
Handle all @-L@ flags - those which involve the pacman log file.
-
| Print all lines in the log file which contain a given `Text`.
| The result of @-Li@. | Module : Aura . Commands . L
Copyright : ( c ) , 2012 - 2021
Maintainer : < >
module Aura.Commands.L
( viewLogFile
, searchLogFile
, logInfoOnPkg
) where
import Aura.Colour (dtot, red)
import Aura.Core (Env(..), report)
import Aura.IO
import Aura.Languages
import Aura.Settings
import Aura.Types (PkgName(..))
import Aura.Utils
import Prettyprinter
import RIO
import qualified RIO.NonEmpty as NEL
import qualified RIO.Text as T
import qualified RIO.Text.Partial as T
import System.Process.Typed (proc, runProcess)
| The contents of the log file .
newtype Log = Log [Text]
data LogEntry = LogEntry
{ name :: !PkgName
, firstInstall :: !Text
, upgrades :: !Word
, recent :: ![Text] }
| Pipes the pacman log file through a @less@ session .
viewLogFile :: RIO Env ()
viewLogFile = do
pth <- asks (either id id . logPathOf . commonConfigOf . settings)
void . runProcess $ proc "less" [pth]
searchLogFile :: Settings -> Text -> IO ()
searchLogFile ss input = do
let pth = either id id . logPathOf $ commonConfigOf ss
logFile <- T.lines . decodeUtf8Lenient <$> readFileBinary pth
traverse_ putTextLn $ searchLines input logFile
logInfoOnPkg :: NonEmpty PkgName -> RIO Env ()
logInfoOnPkg pkgs = do
ss <- asks settings
let pth = either id id . logPathOf $ commonConfigOf ss
logFile <- Log . T.lines . decodeUtf8Lenient <$> readFileBinary pth
let (bads, goods) = fmapEither (logLookup logFile) $ toList pkgs
traverse_ (report red reportNotInLog_1) $ NEL.nonEmpty bads
traverse_ (putTextLn . renderEntry ss) goods
logLookup :: Log -> PkgName -> Either PkgName LogEntry
logLookup (Log lns) p = case matches of
[] -> Left p
(h:t) -> Right $
LogEntry { name = p
, firstInstall = T.take 16 $ T.tail h
, upgrades = fromIntegral . length $ filter (T.isInfixOf " upgraded ") t
, recent = reverse . take 5 $ reverse t }
where matches = filter (T.isInfixOf (" " <> pnName p <> " (")) lns
renderEntry :: Settings -> LogEntry -> Text
renderEntry ss (LogEntry (PkgName pn) fi us rs) =
dtot . colourCheck ss $ entrify ss fields entries <> hardline <> recents <> hardline
where fields = logLookUpFields $ langOf ss
entries = map pretty [ pn, fi, T.pack (show us), "" ]
recents = vsep $ map pretty rs
|
1d1324090f3ff5be30e398e47b3ce05d486ab236e4a8acb02f899a3ca6de9725 | samply/blaze | logical_operators.clj | (ns blaze.elm.compiler.logical-operators
"13. Logical Operators
Section numbers are according to
-logicalspecification.html."
(:require
[blaze.elm.compiler.core :as core]
[blaze.elm.compiler.macros :refer [defunop]]))
13.1 . And
;; Helper for an `and` where one operand compiled to nil (the unknown
;; value of three-valued logic).  nil AND false => false; nil AND
;; anything else => nil -- which is exactly what `when` + `false?` yield.
(defn- nil-and-expr [x]
  (reify core/Expression
    (-eval [_ context resource scope]
      (when (false? (core/-eval x context resource scope))
        false))
    (-form [_]
      (list 'and nil (core/-form x)))))

(defn- nil-and
  "Creates an and-expression where one operand is known to be nil."
  [x]
  ;; Constant-fold against the known-nil operand:
  ;;   nil AND true => nil, nil AND false => false, nil AND nil => nil.
  (condp identical? x
    true nil
    false false
    nil nil
    (nil-and-expr x)))

(defn- dynamic-and
  "Creates an and-expression where `a` is known to be dynamic and `b` could be
  static or dynamic."
  [a b]
  (condp identical? b
    true a
    false false
    nil (nil-and-expr a)
    (reify core/Expression
      (-eval [_ context resource scope]
        ;; Short-circuit on a false first operand.  Otherwise: true only
        ;; when both are true, false when either is false, else nil.
        (let [a (core/-eval a context resource scope)]
          (if (false? a)
            false
            (let [b (core/-eval b context resource scope)]
              (cond
                (false? b) false
                (and (true? a) (true? b)) true)))))
      (-form [_]
        (list 'and (core/-form a) (core/-form b))))))

;; Compiles ELM `And`, constant-folding whenever the first (and then the
;; second) compiled operand is statically true/false/nil.
(defmethod core/compile* :elm.compiler.type/and
  [context {[a b] :operand}]
  (let [a (core/compile* context a)]
    (condp identical? a
      true (core/compile* context b)
      false false
      nil (nil-and (core/compile* context b))
      (dynamic-and a (core/compile* context b)))))
13.2 Implies
;; `Implies` is expected to be rewritten away by the ELM normalizer, so
;; reaching this method at compile time indicates an un-normalized tree.
(defmethod core/compile* :elm.compiler.type/implies
  [_ _]
  (throw (Exception. "Unsupported Implies expression. Please normalize the ELM tree before compiling.")))
13.3 Not
;; Three-valued NOT: nil (unknown) stays nil; otherwise ordinary negation.
(defunop not [operand]
  (when (some? operand)
    (not operand)))
13.4 . Or
;; Helper for an `or` where one operand compiled to nil (unknown).
;; nil OR true => true; nil OR anything else => nil -- hence `when` + `true?`.
(defn- nil-or-expr [x]
  (reify core/Expression
    (-eval [_ context resource scope]
      (when (true? (core/-eval x context resource scope))
        true))
    (-form [_]
      (list 'or nil (core/-form x)))))

(defn- nil-or
  "Creates an or-expression where one operand is known to be nil."
  [x]
  ;; Constant-fold against the known-nil operand:
  ;;   nil OR true => true, nil OR false => nil, nil OR nil => nil.
  (condp identical? x
    true true
    false nil
    nil nil
    (nil-or-expr x)))

(defn- dynamic-or
  "Creates an or-expression where `a` is known to be dynamic and `b` could be
  static or dynamic."
  [a b]
  (condp identical? b
    true true
    false a
    nil (nil-or-expr a)
    (reify core/Expression
      (-eval [_ context resource scope]
        ;; Short-circuit on a true first operand.  Otherwise: true when
        ;; either is true, false only when both are false, else nil.
        (let [a (core/-eval a context resource scope)]
          (if (true? a)
            true
            (let [b (core/-eval b context resource scope)]
              (cond
                (true? b) true
                (and (false? a) (false? b)) false)))))
      (-form [_]
        (list 'or (core/-form a) (core/-form b))))))

;; Compiles ELM `Or`, constant-folding statically known operands.
(defmethod core/compile* :elm.compiler.type/or
  [context {[a b] :operand}]
  (let [a (core/compile* context a)]
    (condp identical? a
      true true
      false (core/compile* context b)
      nil (nil-or (core/compile* context b))
      (dynamic-or a (core/compile* context b)))))
13.5 Xor
(defn- dynamic-xor
  "Creates an xor-expression where `a` is known to be dynamic and `b` could be
  static or dynamic."
  [a b]
  (condp identical? b
    ;; XOR with a statically-true operand is logical negation of `a`
    ;; (nil stays nil).
    true
    (reify core/Expression
      (-eval [_ context resource scope]
        (let [a (core/-eval a context resource scope)]
          (when (some? a)
            (not a))))
      (-form [_]
        (list 'not (core/-form a))))
    ;; XOR with false is identity; XOR with nil (unknown) is always nil.
    false a
    nil nil
    ;; Fully dynamic case: nil if either side is nil, otherwise the
    ;; usual exclusive-or of the two booleans.
    (reify core/Expression
      (-eval [_ context resource scope]
        (when-some [a (core/-eval a context resource scope)]
          (when-some [b (core/-eval b context resource scope)]
            (if a (not b) b))))
      (-form [_]
        (list 'xor (core/-form a) (core/-form b))))))

;; Compiles ELM `Xor`, folding statically known operands.  A statically
;; true first operand compiles to a Not of the second.
(defmethod core/compile* :elm.compiler.type/xor
  [context {[a b] :operand}]
  (let [a (core/compile* context a)]
    (condp identical? a
      true (core/compile* context {:type "Not" :operand b})
      false (core/compile* context b)
      nil nil
      (dynamic-xor a (core/compile* context b)))))
| null | https://raw.githubusercontent.com/samply/blaze/f7d26bf3db0bc74b9195c59891261b1eb2c5accb/modules/cql/src/blaze/elm/compiler/logical_operators.clj | clojure | (ns blaze.elm.compiler.logical-operators
"13. Logical Operators
Section numbers are according to
-logicalspecification.html."
(:require
[blaze.elm.compiler.core :as core]
[blaze.elm.compiler.macros :refer [defunop]]))
13.1 . And
(defn- nil-and-expr [x]
(reify core/Expression
(-eval [_ context resource scope]
(when (false? (core/-eval x context resource scope))
false))
(-form [_]
(list 'and nil (core/-form x)))))
(defn- nil-and
"Creates an and-expression where one operand is known to be nil."
[x]
(condp identical? x
true nil
false false
nil nil
(nil-and-expr x)))
(defn- dynamic-and
"Creates an and-expression where `a` is known to be dynamic and `b` could be
static or dynamic."
[a b]
(condp identical? b
true a
false false
nil (nil-and-expr a)
(reify core/Expression
(-eval [_ context resource scope]
(let [a (core/-eval a context resource scope)]
(if (false? a)
false
(let [b (core/-eval b context resource scope)]
(cond
(false? b) false
(and (true? a) (true? b)) true)))))
(-form [_]
(list 'and (core/-form a) (core/-form b))))))
(defmethod core/compile* :elm.compiler.type/and
[context {[a b] :operand}]
(let [a (core/compile* context a)]
(condp identical? a
true (core/compile* context b)
false false
nil (nil-and (core/compile* context b))
(dynamic-and a (core/compile* context b)))))
13.2 Implies
(defmethod core/compile* :elm.compiler.type/implies
[_ _]
(throw (Exception. "Unsupported Implies expression. Please normalize the ELM tree before compiling.")))
13.3 Not
(defunop not [operand]
(when (some? operand)
(not operand)))
13.4 . Or
(defn- nil-or-expr [x]
(reify core/Expression
(-eval [_ context resource scope]
(when (true? (core/-eval x context resource scope))
true))
(-form [_]
(list 'or nil (core/-form x)))))
(defn- nil-or
"Creates an or-expression where one operand is known to be nil."
[x]
(condp identical? x
true true
false nil
nil nil
(nil-or-expr x)))
(defn- dynamic-or
"Creates an or-expression where `a` is known to be dynamic and `b` could be
static or dynamic."
[a b]
(condp identical? b
true true
false a
nil (nil-or-expr a)
(reify core/Expression
(-eval [_ context resource scope]
(let [a (core/-eval a context resource scope)]
(if (true? a)
true
(let [b (core/-eval b context resource scope)]
(cond
(true? b) true
(and (false? a) (false? b)) false)))))
(-form [_]
(list 'or (core/-form a) (core/-form b))))))
(defmethod core/compile* :elm.compiler.type/or
[context {[a b] :operand}]
(let [a (core/compile* context a)]
(condp identical? a
true true
false (core/compile* context b)
nil (nil-or (core/compile* context b))
(dynamic-or a (core/compile* context b)))))
13.5 Xor
(defn- dynamic-xor
  "Creates an xor-expression where `a` is known to be dynamic and `b` could be
  static or dynamic."
  [a b]
  (condp identical? b
    ;; xor with a statically-true operand is nil-preserving negation of `a`.
    true
    (reify core/Expression
      (-eval [_ context resource scope]
        (let [a (core/-eval a context resource scope)]
          (when (some? a)
            (not a))))
      (-form [_]
        (list 'not (core/-form a))))
    false a    ; xor with false is the identity
    nil nil    ; xor with nil (unknown) is always nil
    ;; both operands dynamic: nil if either evaluates to nil, otherwise the
    ;; ordinary exclusive-or of the two booleans.
    (reify core/Expression
      (-eval [_ context resource scope]
        (when-some [a (core/-eval a context resource scope)]
          (when-some [b (core/-eval b context resource scope)]
            (if a (not b) b))))
      (-form [_]
        (list 'xor (core/-form a) (core/-form b))))))
;; Compile the first operand eagerly; statically-known values reduce the
;; xor at compile time (true a -> Not b, false a -> b, nil a -> nil).
(defmethod core/compile* :elm.compiler.type/xor
  [context {[a b] :operand}]
  (let [compiled-a (core/compile* context a)]
    (cond
      (identical? true compiled-a) (core/compile* context {:type "Not" :operand b})
      (identical? false compiled-a) (core/compile* context b)
      (nil? compiled-a) nil
      :else (dynamic-xor compiled-a (core/compile* context b)))))
| |
2766059459da194e158bde0291ba564dbe593f73e42f8be0e415cecbc8404821 | marineschimel/ilqr_vae | variational.ml | open Base
open Owl
include Variational_typ
open Covariance
open Priors
open Dynamics
open Likelihoods
open Accessor.O
(* -------------------------------------
-- iLQR primitive
------------------------------------- *)
module ILQR (U : Prior_T) (D : Dynamics_T) (L : Likelihood_T) = struct
module G = Owl_parameters.Make (Generative_P.Make (U.P) (D.P) (L.P))
let linesearch = U.requires_linesearch || D.requires_linesearch || L.requires_linesearch
(* n : dimensionality of state space; m : input dimension *)
let solve
?(conv_threshold = 1E-4)
?(n_beg = 1)
?saving_iter
~u_init
~primal'
~n
~m
~n_steps
~prms
data
=
let open Generative_P in
let module M = struct
type theta = G.p
let primal' = primal'
let cost ~theta =
let cost_lik = L.neg_logp_t ~prms:theta.likelihood in
let cost_liks =
Array.init n_steps ~f:(fun k -> cost_lik ~data_t:(L.data_slice ~k data.o))
in
let cost_u = U.neg_logp_t ~prms:theta.prior in
fun ~k ~x ~u ->
let cost_lik =
if k < n_beg then AD.F 0. else cost_liks.(k - n_beg) ~k:(k - n_beg) ~z_t:x
in
let cost_u = cost_u ~k ~x ~u in
AD.Maths.(cost_u + cost_lik)
let m = m
let n = n
let rl_u =
Option.map U.neg_jac_t ~f:(fun neg_jac_t ~theta -> neg_jac_t ~prms:theta.prior)
let rl_x =
Option.map L.neg_jac_t ~f:(fun neg_jac_t ~theta ->
let neg_jac_t = neg_jac_t ~prms:theta.likelihood in
let neg_jac_ts =
Array.init n_steps ~f:(fun k -> neg_jac_t ~data_t:(L.data_slice ~k data.o))
in
fun ~k ~x ~u:_ ->
if k < n_beg
then AD.Mat.zeros 1 n
else (
let k = k - n_beg in
neg_jac_ts.(k) ~k ~z_t:x))
let rl_xx =
Option.map L.neg_hess_t ~f:(fun neg_hess_t ~theta ->
let neg_hess_t = neg_hess_t ~prms:theta.likelihood in
let neg_hess_ts =
Array.init n_steps ~f:(fun k -> neg_hess_t ~data_t:(L.data_slice ~k data.o))
in
fun ~k ~x ~u:_ ->
if k < n_beg
then AD.Mat.zeros n n
else (
let k = k - n_beg in
neg_hess_ts.(k) ~k ~z_t:x))
let rl_uu =
Option.map U.neg_hess_t ~f:(fun neg_hess_t ~theta -> neg_hess_t ~prms:theta.prior)
let rl_ux = Some (fun ~theta:_ ~k:_ ~x:_ ~u:_ -> AD.Mat.zeros m n)
let final_cost ~theta:_ ~k:_ ~x:_ = AD.F 0.
let fl_x =
let z = AD.Mat.zeros 1 n in
Some (fun ~theta:_ ~k:_ ~x:_ -> z)
let fl_xx =
let z = AD.Mat.zeros n n in
Some (fun ~theta:_ ~k:_ ~x:_ -> z)
let dyn ~theta = D.dyn ~theta:theta.dynamics
let dyn_x = Option.map D.dyn_x ~f:(fun d ~theta -> d ~theta:theta.dynamics)
let dyn_u = Option.map D.dyn_u ~f:(fun d ~theta -> d ~theta:theta.dynamics)
let running_loss = cost
let final_loss = final_cost
end
in
let n_steps = n_steps + n_beg - 1 in
let module IP =
Dilqr.Default.Make (struct
include M
let n_steps = n_steps + 1
end)
in
let nprev = ref 1E8 in
let stop_ilqr loss ~prms =
let x0, theta = AD.Mat.zeros 1 n, prms in
let cprev = ref 1E9 in
fun _k us ->
let c = loss ~theta x0 us in
let pct_change = Float.(abs ((c -. !cprev) /. !cprev)) in
cprev := c;
(* Stdio.printf "\n loss %f || Iter %i \n%!" c _k; *)
(if Float.(pct_change < conv_threshold) then nprev := Float.(of_int _k));
Float.(pct_change < conv_threshold || Int.(_k > 10))
in
let us =
match u_init with
| None -> List.init n_steps ~f:(fun _ -> AD.Mat.zeros 1 m)
| Some us ->
List.init n_steps ~f:(fun k -> AD.pack_arr (Mat.get_slice [ [ k ] ] us))
in
    (* u0  u1  u2 ...... uT
           x1  x2 ...... xT  xT+1
       u0  u1  u2 ...... uT
       x0 = 0  x1  x2 ...... xT  xT+1 *)
let tau =
IP.ilqr
~linesearch
~stop:(stop_ilqr IP.loss ~prms)
~us
~x0:(AD.Mat.zeros 1 n)
~theta:prms
()
in
let tau = AD.Maths.reshape tau [| n_steps + 1; -1 |] in
let _ =
match saving_iter with
| None -> ()
| Some file ->
Mat.save_txt ~out:file ~append:true (Mat.of_array [| !nprev |] 1 (-1))
in
AD.Maths.get_slice [ [ 0; -2 ]; [ n; -1 ] ] tau
end
(* -------------------------------------
-- VAE
------------------------------------- *)
module VAE
(U : Prior_T)
(D : Dynamics_T)
(L : Likelihood_T) (X : sig
val n : int (* state dimension *)
val m : int (* input dimension *)
val n_steps : int
val n_beg : int Option.t
val diag_time_cov : bool
end) =
struct
open X
module G = Owl_parameters.Make (Generative_P.Make (U.P) (D.P) (L.P))
module R = Owl_parameters.Make (Recognition_P.Make (U.P) (D.P) (L.P))
module P = Owl_parameters.Make (VAE_P.Make (U.P) (D.P) (L.P))
module Integrate = Dynamics.Integrate (D)
module Ilqr = ILQR (U) (D) (L)
open VAE_P
let n_beg = Option.value_map n_beg ~default:1 ~f:(fun i -> i)
  (* Parameters used by the recognition model: its own copy when untied,
     otherwise the shared generative parameters. *)
  let rec_gen prms =
    match prms.recognition.generative with
    | Some x -> x
    | None -> prms.generative
  (* Builds an initial VAE parameter set around the given generative
     parameters.  [tie] shares the generative model with the recognition
     model (stored as [None] and resolved by [rec_gen]); [sigma] sets the
     initial scale of the temporal posterior covariance. *)
  let init ?(tie = false) ?(sigma = 1.) gen (set : Owl_parameters.setter) =
    let recognition =
      Recognition_P.
        { generative = (if tie then None else Some gen)
        ; space_cov = Covariance.init ~pin_diag:true ~sigma2:1. set m
        ; time_cov =
            Covariance.init
              ~no_triangle:diag_time_cov
              ~pin_diag:false
              ~sigma2:Float.(square sigma)
              set
              (n_steps + n_beg - 1)
        }
    in
    { generative = gen; recognition }
  (* Draws one full trajectory from the generative model: inputs [u] from the
     prior, latents [z] by integrating the dynamics, observations [o] from
     the likelihood. *)
  let sample_generative ~prms =
    let open Generative_P in
    let u = U.sample ~prms:prms.prior ~n_steps ~m in
    let z = Integrate.integrate ~prms:prms.dynamics ~n ~u:(AD.expand0 u) |> AD.squeeze0 in
    let o = L.sample ~prms:prms.likelihood ~z in
    { u = Some u; z = Some z; o }
  (* NON-DIFFERENTIABLE *)
  (* Like [sample_generative] but with a single Gaussian impulse input at
     t = 0 and zero input afterwards, i.e. autonomous dynamics after the
     initial kick. *)
  let sample_generative_autonomous ~sigma ~prms =
    let open Generative_P in
    let u =
      let u0 = Mat.gaussian ~sigma 1 m in
      let u_rest = Mat.zeros (n_steps - 1) m in
      AD.pack_arr Mat.(u0 @= u_rest)
    in
    let z = Integrate.integrate ~prms:prms.dynamics ~n ~u:(AD.expand0 u) |> AD.squeeze0 in
    let o = L.sample ~prms:prms.likelihood ~z in
    { u = Some u; z = Some z; o }
  (* Log-likelihood of the observations under the generative model, given
     the latent trajectory stored in [data.z] (which must be present). *)
  let logp ~prms data =
    let prms = prms.generative in
    L.logp ~prms:prms.likelihood ~z:(Option.value_exn data.z) ~data:data.o
  (* Strips AD tape information from every generative parameter. *)
  let primal' = G.map ~f:(Owl_parameters.map AD.primal')

  (* Posterior mean of the inputs, obtained via iLQR-based MAP inference
     under the recognition model's generative parameters. *)
  let posterior_mean ?saving_iter ?conv_threshold ~u_init ~prms data =
    Ilqr.solve
      ?saving_iter
      ?conv_threshold
      ~n_beg
      ~u_init
      ~primal'
      ~n
      ~m
      ~n_steps
      ~prms:(rec_gen prms)
      data
  (* Draws input samples from the structured Gaussian posterior around
     [mu_u]: white noise is coloured on the right by the spatial Cholesky
     factor and on the left by the (transposed) temporal Cholesky factor —
     i.e. a separable space x time covariance.  Returns an array of shape
     (n_samples, n_steps + n_beg - 1, m). *)
  let sample_recognition ~prms =
    let prms = prms.recognition in
    let chol_space = Covariance.to_chol_factor prms.space_cov in
    let chol_time_t = Covariance.to_chol_factor prms.time_cov in
    fun ~mu_u n_samples ->
      let mu_u = AD.Maths.reshape mu_u [| 1; n_steps + n_beg - 1; m |] in
      let xi = AD.Mat.(gaussian Int.(n_samples * (n_steps + n_beg - 1)) m) in
      let z =
        (* colour spatially ... *)
        AD.Maths.(xi *@ chol_space)
        |> fun v ->
        AD.Maths.reshape v [| n_samples; n_steps + n_beg - 1; m |]
        |> fun v ->
        AD.Maths.transpose ~axis:[| 1; 0; 2 |] v
        |> fun v ->
        AD.Maths.reshape v [| n_steps + n_beg - 1; -1 |]
        |> fun v ->
        (* ... then temporally, and restore (samples, time, m) layout *)
        AD.Maths.(transpose chol_time_t *@ v)
        |> fun v ->
        AD.Maths.reshape v [| n_steps + n_beg - 1; n_samples; m |]
        |> fun v -> AD.Maths.transpose ~axis:[| 1; 0; 2 |] v
      in
      AD.Maths.(mu_u + z)
let predictions ?(pre = true) ~n_samples ~prms mu_u =
let u = sample_recognition ~prms ~mu_u n_samples in
let z = Integrate.integrate ~prms:prms.generative.dynamics ~n ~u in
let z = AD.Maths.get_slice [ []; [ n_beg - 1; -1 ]; [] ] z in
let u = AD.Maths.get_slice [ []; [ n_beg - 1; -1 ]; [] ] u in
let o =
Array.init n_samples ~f:(fun i ->
let z = AD.Maths.(reshape (get_slice [ [ i ] ] z) [| n_steps; n |]) in
let o =
(if pre then L.pre_sample else L.sample) ~prms:prms.generative.likelihood ~z
in
o
|> L.to_mat_list
|> Array.of_list
|> Array.map ~f:(fun (label, v) ->
label, AD.Maths.reshape v [| 1; AD.Mat.row_num v; AD.Mat.col_num v |]))
in
      (* for backward compatibility with Marine's previous convention, I need to transpose *)
let tr = AD.Maths.transpose ~axis:[| 1; 2; 0 |] in
let o =
let n_o = Array.length o.(0) in
Array.init n_o ~f:(fun i ->
let label, _ = o.(0).(i) in
( label
, o |> Array.map ~f:(fun a -> snd a.(i)) |> AD.Maths.concatenate ~axis:0 |> tr ))
in
tr u, tr z, o
  (* Monte-Carlo estimate of the expected log-likelihood term of the ELBO:
     integrates each input sample through the dynamics, drops the warm-up
     steps, and averages the log-likelihood over samples. *)
  let lik_term ~prms =
    let logp = logp ~prms in
    let dyn = Integrate.integrate ~prms:prms.generative.dynamics in
    fun samples data ->
      let n_samples = (AD.shape samples).(0) in
      let z = dyn ~n ~u:samples in
      (* discard the n_beg - 1 warm-up steps before scoring observations *)
      let z = AD.Maths.get_slice [ []; [ n_beg - 1; -1 ]; [] ] z in
      let data = { data with z = Some z } in
      AD.Maths.(logp data / F Float.(of_int n_samples))
let kl_term ~prms =
match U.kl_to_gaussian with
| `sampling_based ->
let logp = U.logp ~prms:prms.generative.prior ~n_steps in
let logq =
let c_space = Covariance.to_chol_factor prms.recognition.space_cov in
let c_time = Covariance.to_chol_factor prms.recognition.time_cov in
let m_ = AD.Mat.row_num c_space in
let m = Float.of_int m_ in
let t = Float.of_int (AD.Mat.row_num c_time) in
let cst = Float.(m * t * log Const.pi2) in
let log_det_term =
let d_space = Owl_parameters.extract prms.recognition.space_cov.d in
let d_time = Owl_parameters.extract prms.recognition.time_cov.d in
AD.Maths.(F 2. * ((F m * sum' (log d_time)) + (F t * sum' (log d_space))))
in
fun mu_u u ->
let u_s = AD.shape u in
assert (Array.length u_s = 3);
let n_samples = u_s.(0) in
let du = AD.Maths.(u - AD.expand0 mu_u) in
          (* quadratic term:
             assuming vec is stacking columns, du = vec(dU) and dU is T x N
             du^t ((S^t S)⊗(T^t T))^{-1} du
             = du^t ((S^{-1} S^{-t})⊗(T^{-1} T^{-t})) du
             = du^t (S^{-1}⊗T^{-1}) (S^{-t}⊗T^{-t}) du
             = || (S^{-t}⊗T^{-t}) du ||^2
             = || vec(T^{-t} dU S^{-1}) ||^2 *)
let quadratic_term =
(* K x T x N *)
du
|> AD.Maths.transpose ~axis:[| 1; 0; 2 |]
|> (fun v -> AD.Maths.reshape v [| n_steps + n_beg - 1; -1 |])
|> AD.Linalg.linsolve ~typ:`u ~trans:true c_time
|> (fun v -> AD.Maths.reshape v [| -1; m_ |])
|> AD.Maths.transpose
|> AD.Linalg.linsolve ~typ:`u ~trans:true c_space
|> AD.Maths.l2norm_sqr'
in
AD.Maths.(
F (-0.5)
* ((F Float.(of_int n_samples) * (F cst + log_det_term)) + quadratic_term))
in
fun mu_u u ->
let u_s = AD.shape u in
assert (Array.length u_s = 3);
let n_samples = u_s.(0) in
(* compute log q(u) - log p(u) *)
let logqu = logq mu_u u in
let logpu = logp u in
AD.Maths.((logqu - logpu) / F Float.(of_int n_samples))
| `direct f ->
fun mu_u _ ->
f
~prms:prms.generative.prior
~mu:mu_u
~space:prms.recognition.space_cov
~time:prms.recognition.time_cov
  (* Single-trial ELBO estimate.  [u_init] is either a `known posterior mean
     or a `guess used to warm-start iLQR inference; [beta] scales the KL
     term (beta-VAE style).  Returns the ELBO together with the unpacked
     posterior mean (for recycling as the next warm start). *)
  let elbo ?conv_threshold ~u_init ~n_samples ?(beta = 1.) ~prms =
    let lik_term = lik_term ~prms in
    let kl_term = kl_term ~prms in
    let sample_recognition = sample_recognition ~prms in
    fun data ->
      let mu_u =
        match u_init with
        | `known mu_u -> mu_u
        | `guess u_init -> posterior_mean ?conv_threshold ~u_init ~prms data
      in
      let samples = sample_recognition ~mu_u n_samples in
      let lik_term = lik_term samples data in
      let kl_term = kl_term mu_u samples in
      let elbo = AD.Maths.(lik_term - (F beta * kl_term)) in
      elbo, AD.(unpack_arr (primal' mu_u))
  (* Sum of per-trial ELBOs over a dataset (one [u_init] entry per trial). *)
  let elbo_all ~u_init ~n_samples ?beta ~prms data =
    Array.foldi data ~init:(AD.F 0.) ~f:(fun i accu data ->
        let elbo, _ = elbo ~u_init:u_init.(i) ~n_samples ?beta ~prms data in
        AD.Maths.(accu + elbo))
  (* Per-trial initialisation of the posterior mean: either a fixed, known
     value or an (optional) initial guess handed to the iLQR solver. *)
  type u_init =
    [ `known of AD.t option
    | `guess of Mat.mat option
    ]
let train
?(n_samples = fun _ -> 1)
?(mini_batch : int Option.t)
?max_iter
?conv_threshold
?(mu_u : u_init Array.t Option.t)
?(recycle_u = true)
?save_progress_to
?in_each_iteration
?eta
?reg
~init_prms
data
=
let n_samples_ = ref (n_samples 1) in
let n_trials = Array.length data in
(* make sure all workers have the same data *)
let data = C.broadcast data in
(* make sure all workers have different random seeds *)
C.self_init_rng ();
let module Packer = Owl_parameters.Packer () in
let handle = P.pack (module Packer) init_prms in
let theta, lbound, ubound = Packer.finalize () in
let theta = AD.unpack_arr theta in
let us_init =
match mu_u with
| Some z -> z
| None -> Array.create ~len:n_trials (`guess None)
in
let adam_loss theta gradient =
Stdlib.Gc.full_major ();
let theta = C.broadcast theta in
let data_batch =
match mini_batch with
| None -> data
| Some size ->
let ids =
C.broadcast' (fun () ->
let ids = Array.mapi data ~f:(fun i _ -> i) in
Array.permute ids;
Array.sub ids ~pos:0 ~len:size)
in
Array.map ids ~f:(Array.get data)
in
let count, loss, g =
Array.foldi
data_batch
~init:(0, 0., Arr.(zeros (shape theta)))
~f:(fun i (accu_count, accu_loss, accu_g) datai ->
if Int.(i % C.n_nodes = C.rank)
then (
try
let open AD in
let theta = make_reverse (Arr (Owl.Mat.copy theta)) (AD.tag ()) in
let prms = P.unpack handle theta in
let u_init =
match us_init.(i) with
| `guess z -> `guess z
| `known z -> `known (Option.value_exn z)
in
let elbo, mu_u =
elbo ?conv_threshold ~u_init ~n_samples:!n_samples_ ~prms datai
in
if recycle_u
then (
match u_init with
| `guess _ -> us_init.(i) <- `guess (Some mu_u)
| `known _ -> ());
let loss = AD.Maths.(neg elbo) in
(* normalize by the problem size *)
let loss =
AD.Maths.(
loss
/ F
Float.(
of_int
Int.(n_steps * L.size ~prms:init_prms.generative.likelihood)))
in
(* optionally add regularizer *)
let loss =
match reg with
| None -> loss
| Some r -> AD.Maths.(loss + r ~prms)
in
reverse_prop (F 1.) loss;
( accu_count + 1
, accu_loss +. unpack_flt loss
, Owl.Mat.(accu_g + unpack_arr (adjval theta)) )
with
| _ ->
Stdio.printf "Trial %i failed with some exception." i;
accu_count, accu_loss, accu_g)
else accu_count, accu_loss, accu_g)
in
let total_count = Mpi.reduce_int count Mpi.Int_sum 0 Mpi.comm_world in
let loss = Mpi.reduce_float loss Mpi.Float_sum 0 Mpi.comm_world in
Mpi.reduce_bigarray g gradient Mpi.Sum 0 Mpi.comm_world;
Mat.div_scalar_ gradient Float.(of_int total_count);
Float.(loss / of_int total_count)
in
let stop iter current_loss =
n_samples_ := n_samples iter;
Option.iter in_each_iteration ~f:(fun do_this ->
let prms = P.unpack handle (AD.pack_arr theta) in
let u_init =
Array.map us_init ~f:(function
| `known _ -> None
| `guess z -> z)
in
do_this ~u_init ~prms iter);
C.root_perform (fun () ->
Stdio.printf "\r[%05i]%!" iter;
Option.iter save_progress_to ~f:(fun (loss_every, prms_every, prefix) ->
let kk = Int.((iter - 1) / loss_every) in
if Int.((iter - 1) % prms_every) = 0
then (
let prefix = Printf.sprintf "%s_%i" prefix kk in
let prms = P.unpack handle (AD.pack_arr theta) in
Misc.save_bin ~out:(prefix ^ ".params.bin") prms;
P.save_to_files ~prefix ~prms);
if Int.((iter - 1) % loss_every) = 0
then (
Stdio.printf "\r[%05i] %.4f%!" iter current_loss;
let z = [| [| Float.of_int kk; current_loss |] |] in
Mat.(save_txt ~append:true (of_arrays z) ~out:(prefix ^ ".loss")))));
match max_iter with
| Some mi -> iter > mi
| None -> false
in
let _ = Adam.min ?eta ?lb:lbound ?ub:ubound ~stop adam_loss theta in
theta |> AD.pack_arr |> P.unpack handle
let recalibrate_uncertainty
?n_samples
?max_iter
?save_progress_to
?in_each_iteration
?eta
~prms
data
=
let n_trials = Array.length data in
assert (Int.(n_trials % C.n_nodes = 0));
(* make sure all workers have the same data *)
let data = C.broadcast data in
(* make sure all workers have different random seeds *)
C.self_init_rng ();
(* get posterior means once and for all *)
let mu_u =
Array.mapi data ~f:(fun i data_i ->
if Int.(i % C.n_nodes = C.rank)
then `known (Some (posterior_mean ~u_init:None ~prms data_i))
else `known None)
in
(* freeze all parameters except for the posterior uncertainty *)
let init_prms =
P.map ~f:Owl_parameters.pin prms
|> Accessor.map (VAE_P.A.recognition @> Recognition_P.A.space_cov) ~f:(fun _ ->
prms.recognition.space_cov)
|> Accessor.map (VAE_P.A.recognition @> Recognition_P.A.time_cov) ~f:(fun _ ->
prms.recognition.time_cov)
in
let recalibrated_prms =
train
?n_samples
?max_iter
~mu_u
?save_progress_to
?in_each_iteration
?eta
~init_prms
data
in
(* pop the uncertainty back in the original prms set *)
prms
|> Accessor.map (VAE_P.A.recognition @> Recognition_P.A.space_cov) ~f:(fun _ ->
recalibrated_prms.recognition.space_cov)
|> Accessor.map (VAE_P.A.recognition @> Recognition_P.A.time_cov) ~f:(fun _ ->
recalibrated_prms.recognition.time_cov)
  (* Finite-difference check of the ELBO gradient: compares the reverse-mode
     gradient with a forward-difference estimate at [n_points] coordinates
     (all of them, or a random subset) and writes (true, estimated) pairs to
     [file].  The PRNG is reseeded identically before every ELBO evaluation
     so the loss is a deterministic function of the parameters. *)
  let check_grad ~prms data n_points file =
    let seed = Random.int 31415 in
    let u_init = Array.map data ~f:(fun _ -> `guess None) in
    let elbo_all ~prms =
      let _ = Owl_stats_prng.init seed in
      elbo_all ~u_init ~n_samples:2 ~beta:1. ~prms
    in
    let module Packer = Owl_parameters.Packer () in
    let handle = P.pack (module Packer) prms in
    let theta, _, _ = Packer.finalize () in
    let theta = AD.unpack_arr theta in
    (* reference loss and reverse-mode gradient *)
    let loss, true_g =
      let theta = AD.make_reverse (Arr (Mat.copy theta)) (AD.tag ()) in
      let prms = P.unpack handle theta in
      let loss = elbo_all ~prms data in
      AD.reverse_prop (F 1.) loss;
      AD.unpack_flt loss, AD.(unpack_arr (adjval theta))
    in
    let dim = Mat.numel theta in
    let n_points =
      match n_points with
      | `all -> dim
      | `random k -> k
    in
    Array.init dim ~f:(fun i -> i)
    |> Stats.shuffle
    |> Array.sub ~pos:0 ~len:n_points
    |> Array.mapi ~f:(fun k id ->
           Stdio.printf "\rcheck grad: %05i / %05i (out of %i)%!" (k + 1) n_points dim;
           let true_g = Mat.get true_g 0 id in
           (* forward difference along coordinate [id] with a fixed step *)
           let est_g =
             let delta = 1E-6 in
             let theta' = Mat.copy theta in
             Mat.set theta' 0 id (Mat.get theta 0 id +. delta);
             let loss' =
               elbo_all ~prms:(P.unpack handle (Arr theta')) data |> AD.unpack_flt
             in
             Float.((loss' - loss) / delta)
           in
           [| true_g; est_g |])
    |> Mat.of_arrays
    |> Mat.save_txt ~out:file
    |> fun _ -> Stdio.print_endline ""
  (* Saves [u] and [z] (when present) as text matrices under the given
     prefix, and delegates observation saving to the likelihood module. *)
  let save_data ?prefix data =
    Option.iter data.u ~f:(fun u ->
        Mat.save_txt ~out:(Owl_parameters.with_prefix ?prefix "u") (AD.unpack_arr u));
    Option.iter data.z ~f:(fun z ->
        Mat.save_txt ~out:(Owl_parameters.with_prefix ?prefix "z") (AD.unpack_arr z));
    L.save_data ~prefix:(Owl_parameters.with_prefix ?prefix "o") data.o
end
| null | https://raw.githubusercontent.com/marineschimel/ilqr_vae/8491790c037ce004e095a5e3a7614c01c6663fc8/lib/variational.ml | ocaml | -------------------------------------
-- iLQR primitive
-------------------------------------
n : dimensionality of state space; m : input dimension
Stdio.printf "\n loss %f || Iter %i \n%!" c _k;
-------------------------------------
-- VAE
-------------------------------------
state dimension
input dimension
K x T x N
compute log q(u) - log p(u)
make sure all workers have the same data
make sure all workers have different random seeds
normalize by the problem size
optionally add regularizer
make sure all workers have the same data
make sure all workers have different random seeds
get posterior means once and for all
freeze all parameters except for the posterior uncertainty
pop the uncertainty back in the original prms set | open Base
open Owl
include Variational_typ
open Covariance
open Priors
open Dynamics
open Likelihoods
open Accessor.O
module ILQR (U : Prior_T) (D : Dynamics_T) (L : Likelihood_T) = struct
module G = Owl_parameters.Make (Generative_P.Make (U.P) (D.P) (L.P))
let linesearch = U.requires_linesearch || D.requires_linesearch || L.requires_linesearch
let solve
?(conv_threshold = 1E-4)
?(n_beg = 1)
?saving_iter
~u_init
~primal'
~n
~m
~n_steps
~prms
data
=
let open Generative_P in
let module M = struct
type theta = G.p
let primal' = primal'
let cost ~theta =
let cost_lik = L.neg_logp_t ~prms:theta.likelihood in
let cost_liks =
Array.init n_steps ~f:(fun k -> cost_lik ~data_t:(L.data_slice ~k data.o))
in
let cost_u = U.neg_logp_t ~prms:theta.prior in
fun ~k ~x ~u ->
let cost_lik =
if k < n_beg then AD.F 0. else cost_liks.(k - n_beg) ~k:(k - n_beg) ~z_t:x
in
let cost_u = cost_u ~k ~x ~u in
AD.Maths.(cost_u + cost_lik)
let m = m
let n = n
let rl_u =
Option.map U.neg_jac_t ~f:(fun neg_jac_t ~theta -> neg_jac_t ~prms:theta.prior)
let rl_x =
Option.map L.neg_jac_t ~f:(fun neg_jac_t ~theta ->
let neg_jac_t = neg_jac_t ~prms:theta.likelihood in
let neg_jac_ts =
Array.init n_steps ~f:(fun k -> neg_jac_t ~data_t:(L.data_slice ~k data.o))
in
fun ~k ~x ~u:_ ->
if k < n_beg
then AD.Mat.zeros 1 n
else (
let k = k - n_beg in
neg_jac_ts.(k) ~k ~z_t:x))
let rl_xx =
Option.map L.neg_hess_t ~f:(fun neg_hess_t ~theta ->
let neg_hess_t = neg_hess_t ~prms:theta.likelihood in
let neg_hess_ts =
Array.init n_steps ~f:(fun k -> neg_hess_t ~data_t:(L.data_slice ~k data.o))
in
fun ~k ~x ~u:_ ->
if k < n_beg
then AD.Mat.zeros n n
else (
let k = k - n_beg in
neg_hess_ts.(k) ~k ~z_t:x))
let rl_uu =
Option.map U.neg_hess_t ~f:(fun neg_hess_t ~theta -> neg_hess_t ~prms:theta.prior)
let rl_ux = Some (fun ~theta:_ ~k:_ ~x:_ ~u:_ -> AD.Mat.zeros m n)
let final_cost ~theta:_ ~k:_ ~x:_ = AD.F 0.
let fl_x =
let z = AD.Mat.zeros 1 n in
Some (fun ~theta:_ ~k:_ ~x:_ -> z)
let fl_xx =
let z = AD.Mat.zeros n n in
Some (fun ~theta:_ ~k:_ ~x:_ -> z)
let dyn ~theta = D.dyn ~theta:theta.dynamics
let dyn_x = Option.map D.dyn_x ~f:(fun d ~theta -> d ~theta:theta.dynamics)
let dyn_u = Option.map D.dyn_u ~f:(fun d ~theta -> d ~theta:theta.dynamics)
let running_loss = cost
let final_loss = final_cost
end
in
let n_steps = n_steps + n_beg - 1 in
let module IP =
Dilqr.Default.Make (struct
include M
let n_steps = n_steps + 1
end)
in
let nprev = ref 1E8 in
let stop_ilqr loss ~prms =
let x0, theta = AD.Mat.zeros 1 n, prms in
let cprev = ref 1E9 in
fun _k us ->
let c = loss ~theta x0 us in
let pct_change = Float.(abs ((c -. !cprev) /. !cprev)) in
cprev := c;
(if Float.(pct_change < conv_threshold) then nprev := Float.(of_int _k));
Float.(pct_change < conv_threshold || Int.(_k > 10))
in
let us =
match u_init with
| None -> List.init n_steps ~f:(fun _ -> AD.Mat.zeros 1 m)
| Some us ->
List.init n_steps ~f:(fun k -> AD.pack_arr (Mat.get_slice [ [ k ] ] us))
in
    (* u0  u1  u2 ...... uT
           x1  x2 ...... xT  xT+1
       u0  u1  u2 ...... uT
       x0 = 0  x1  x2 ...... xT  xT+1 *)
let tau =
IP.ilqr
~linesearch
~stop:(stop_ilqr IP.loss ~prms)
~us
~x0:(AD.Mat.zeros 1 n)
~theta:prms
()
in
let tau = AD.Maths.reshape tau [| n_steps + 1; -1 |] in
let _ =
match saving_iter with
| None -> ()
| Some file ->
Mat.save_txt ~out:file ~append:true (Mat.of_array [| !nprev |] 1 (-1))
in
AD.Maths.get_slice [ [ 0; -2 ]; [ n; -1 ] ] tau
end
module VAE
(U : Prior_T)
(D : Dynamics_T)
(L : Likelihood_T) (X : sig
val n_steps : int
val n_beg : int Option.t
val diag_time_cov : bool
end) =
struct
open X
module G = Owl_parameters.Make (Generative_P.Make (U.P) (D.P) (L.P))
module R = Owl_parameters.Make (Recognition_P.Make (U.P) (D.P) (L.P))
module P = Owl_parameters.Make (VAE_P.Make (U.P) (D.P) (L.P))
module Integrate = Dynamics.Integrate (D)
module Ilqr = ILQR (U) (D) (L)
open VAE_P
let n_beg = Option.value_map n_beg ~default:1 ~f:(fun i -> i)
let rec_gen prms =
match prms.recognition.generative with
| Some x -> x
| None -> prms.generative
let init ?(tie = false) ?(sigma = 1.) gen (set : Owl_parameters.setter) =
let recognition =
Recognition_P.
{ generative = (if tie then None else Some gen)
; space_cov = Covariance.init ~pin_diag:true ~sigma2:1. set m
; time_cov =
Covariance.init
~no_triangle:diag_time_cov
~pin_diag:false
~sigma2:Float.(square sigma)
set
(n_steps + n_beg - 1)
}
in
{ generative = gen; recognition }
let sample_generative ~prms =
let open Generative_P in
let u = U.sample ~prms:prms.prior ~n_steps ~m in
let z = Integrate.integrate ~prms:prms.dynamics ~n ~u:(AD.expand0 u) |> AD.squeeze0 in
let o = L.sample ~prms:prms.likelihood ~z in
{ u = Some u; z = Some z; o }
  (* NON-DIFFERENTIABLE *)
let sample_generative_autonomous ~sigma ~prms =
let open Generative_P in
let u =
let u0 = Mat.gaussian ~sigma 1 m in
let u_rest = Mat.zeros (n_steps - 1) m in
AD.pack_arr Mat.(u0 @= u_rest)
in
let z = Integrate.integrate ~prms:prms.dynamics ~n ~u:(AD.expand0 u) |> AD.squeeze0 in
let o = L.sample ~prms:prms.likelihood ~z in
{ u = Some u; z = Some z; o }
let logp ~prms data =
let prms = prms.generative in
L.logp ~prms:prms.likelihood ~z:(Option.value_exn data.z) ~data:data.o
let primal' = G.map ~f:(Owl_parameters.map AD.primal')
let posterior_mean ?saving_iter ?conv_threshold ~u_init ~prms data =
Ilqr.solve
?saving_iter
?conv_threshold
~n_beg
~u_init
~primal'
~n
~m
~n_steps
~prms:(rec_gen prms)
data
let sample_recognition ~prms =
let prms = prms.recognition in
let chol_space = Covariance.to_chol_factor prms.space_cov in
let chol_time_t = Covariance.to_chol_factor prms.time_cov in
fun ~mu_u n_samples ->
let mu_u = AD.Maths.reshape mu_u [| 1; n_steps + n_beg - 1; m |] in
let xi = AD.Mat.(gaussian Int.(n_samples * (n_steps + n_beg - 1)) m) in
let z =
AD.Maths.(xi *@ chol_space)
|> fun v ->
AD.Maths.reshape v [| n_samples; n_steps + n_beg - 1; m |]
|> fun v ->
AD.Maths.transpose ~axis:[| 1; 0; 2 |] v
|> fun v ->
AD.Maths.reshape v [| n_steps + n_beg - 1; -1 |]
|> fun v ->
AD.Maths.(transpose chol_time_t *@ v)
|> fun v ->
AD.Maths.reshape v [| n_steps + n_beg - 1; n_samples; m |]
|> fun v -> AD.Maths.transpose ~axis:[| 1; 0; 2 |] v
in
AD.Maths.(mu_u + z)
let predictions ?(pre = true) ~n_samples ~prms mu_u =
let u = sample_recognition ~prms ~mu_u n_samples in
let z = Integrate.integrate ~prms:prms.generative.dynamics ~n ~u in
let z = AD.Maths.get_slice [ []; [ n_beg - 1; -1 ]; [] ] z in
let u = AD.Maths.get_slice [ []; [ n_beg - 1; -1 ]; [] ] u in
let o =
Array.init n_samples ~f:(fun i ->
let z = AD.Maths.(reshape (get_slice [ [ i ] ] z) [| n_steps; n |]) in
let o =
(if pre then L.pre_sample else L.sample) ~prms:prms.generative.likelihood ~z
in
o
|> L.to_mat_list
|> Array.of_list
|> Array.map ~f:(fun (label, v) ->
label, AD.Maths.reshape v [| 1; AD.Mat.row_num v; AD.Mat.col_num v |]))
in
      (* for backward compatibility with Marine's previous convention, I need to transpose *)
let tr = AD.Maths.transpose ~axis:[| 1; 2; 0 |] in
let o =
let n_o = Array.length o.(0) in
Array.init n_o ~f:(fun i ->
let label, _ = o.(0).(i) in
( label
, o |> Array.map ~f:(fun a -> snd a.(i)) |> AD.Maths.concatenate ~axis:0 |> tr ))
in
tr u, tr z, o
let lik_term ~prms =
let logp = logp ~prms in
let dyn = Integrate.integrate ~prms:prms.generative.dynamics in
fun samples data ->
let n_samples = (AD.shape samples).(0) in
let z = dyn ~n ~u:samples in
let z = AD.Maths.get_slice [ []; [ n_beg - 1; -1 ]; [] ] z in
let data = { data with z = Some z } in
AD.Maths.(logp data / F Float.(of_int n_samples))
let kl_term ~prms =
match U.kl_to_gaussian with
| `sampling_based ->
let logp = U.logp ~prms:prms.generative.prior ~n_steps in
let logq =
let c_space = Covariance.to_chol_factor prms.recognition.space_cov in
let c_time = Covariance.to_chol_factor prms.recognition.time_cov in
let m_ = AD.Mat.row_num c_space in
let m = Float.of_int m_ in
let t = Float.of_int (AD.Mat.row_num c_time) in
let cst = Float.(m * t * log Const.pi2) in
let log_det_term =
let d_space = Owl_parameters.extract prms.recognition.space_cov.d in
let d_time = Owl_parameters.extract prms.recognition.time_cov.d in
AD.Maths.(F 2. * ((F m * sum' (log d_time)) + (F t * sum' (log d_space))))
in
fun mu_u u ->
let u_s = AD.shape u in
assert (Array.length u_s = 3);
let n_samples = u_s.(0) in
let du = AD.Maths.(u - AD.expand0 mu_u) in
          (* quadratic term:
             assuming vec is stacking columns, du = vec(dU) and dU is T x N
             du^t ((S^t S)⊗(T^t T))^{-1} du
             = du^t ((S^{-1} S^{-t})⊗(T^{-1} T^{-t})) du
             = du^t (S^{-1}⊗T^{-1}) (S^{-t}⊗T^{-t}) du
             = || (S^{-t}⊗T^{-t}) du ||^2
             = || vec(T^{-t} dU S^{-1}) ||^2 *)
let quadratic_term =
du
|> AD.Maths.transpose ~axis:[| 1; 0; 2 |]
|> (fun v -> AD.Maths.reshape v [| n_steps + n_beg - 1; -1 |])
|> AD.Linalg.linsolve ~typ:`u ~trans:true c_time
|> (fun v -> AD.Maths.reshape v [| -1; m_ |])
|> AD.Maths.transpose
|> AD.Linalg.linsolve ~typ:`u ~trans:true c_space
|> AD.Maths.l2norm_sqr'
in
AD.Maths.(
F (-0.5)
* ((F Float.(of_int n_samples) * (F cst + log_det_term)) + quadratic_term))
in
fun mu_u u ->
let u_s = AD.shape u in
assert (Array.length u_s = 3);
let n_samples = u_s.(0) in
let logqu = logq mu_u u in
let logpu = logp u in
AD.Maths.((logqu - logpu) / F Float.(of_int n_samples))
| `direct f ->
fun mu_u _ ->
f
~prms:prms.generative.prior
~mu:mu_u
~space:prms.recognition.space_cov
~time:prms.recognition.time_cov
let elbo ?conv_threshold ~u_init ~n_samples ?(beta = 1.) ~prms =
let lik_term = lik_term ~prms in
let kl_term = kl_term ~prms in
let sample_recognition = sample_recognition ~prms in
fun data ->
let mu_u =
match u_init with
| `known mu_u -> mu_u
| `guess u_init -> posterior_mean ?conv_threshold ~u_init ~prms data
in
let samples = sample_recognition ~mu_u n_samples in
let lik_term = lik_term samples data in
let kl_term = kl_term mu_u samples in
let elbo = AD.Maths.(lik_term - (F beta * kl_term)) in
elbo, AD.(unpack_arr (primal' mu_u))
let elbo_all ~u_init ~n_samples ?beta ~prms data =
Array.foldi data ~init:(AD.F 0.) ~f:(fun i accu data ->
let elbo, _ = elbo ~u_init:u_init.(i) ~n_samples ?beta ~prms data in
AD.Maths.(accu + elbo))
type u_init =
[ `known of AD.t option
| `guess of Mat.mat option
]
let train
?(n_samples = fun _ -> 1)
?(mini_batch : int Option.t)
?max_iter
?conv_threshold
?(mu_u : u_init Array.t Option.t)
?(recycle_u = true)
?save_progress_to
?in_each_iteration
?eta
?reg
~init_prms
data
=
let n_samples_ = ref (n_samples 1) in
let n_trials = Array.length data in
let data = C.broadcast data in
C.self_init_rng ();
let module Packer = Owl_parameters.Packer () in
let handle = P.pack (module Packer) init_prms in
let theta, lbound, ubound = Packer.finalize () in
let theta = AD.unpack_arr theta in
let us_init =
match mu_u with
| Some z -> z
| None -> Array.create ~len:n_trials (`guess None)
in
let adam_loss theta gradient =
Stdlib.Gc.full_major ();
let theta = C.broadcast theta in
let data_batch =
match mini_batch with
| None -> data
| Some size ->
let ids =
C.broadcast' (fun () ->
let ids = Array.mapi data ~f:(fun i _ -> i) in
Array.permute ids;
Array.sub ids ~pos:0 ~len:size)
in
Array.map ids ~f:(Array.get data)
in
let count, loss, g =
Array.foldi
data_batch
~init:(0, 0., Arr.(zeros (shape theta)))
~f:(fun i (accu_count, accu_loss, accu_g) datai ->
if Int.(i % C.n_nodes = C.rank)
then (
try
let open AD in
let theta = make_reverse (Arr (Owl.Mat.copy theta)) (AD.tag ()) in
let prms = P.unpack handle theta in
let u_init =
match us_init.(i) with
| `guess z -> `guess z
| `known z -> `known (Option.value_exn z)
in
let elbo, mu_u =
elbo ?conv_threshold ~u_init ~n_samples:!n_samples_ ~prms datai
in
if recycle_u
then (
match u_init with
| `guess _ -> us_init.(i) <- `guess (Some mu_u)
| `known _ -> ());
let loss = AD.Maths.(neg elbo) in
let loss =
AD.Maths.(
loss
/ F
Float.(
of_int
Int.(n_steps * L.size ~prms:init_prms.generative.likelihood)))
in
let loss =
match reg with
| None -> loss
| Some r -> AD.Maths.(loss + r ~prms)
in
reverse_prop (F 1.) loss;
( accu_count + 1
, accu_loss +. unpack_flt loss
, Owl.Mat.(accu_g + unpack_arr (adjval theta)) )
with
| _ ->
Stdio.printf "Trial %i failed with some exception." i;
accu_count, accu_loss, accu_g)
else accu_count, accu_loss, accu_g)
in
let total_count = Mpi.reduce_int count Mpi.Int_sum 0 Mpi.comm_world in
let loss = Mpi.reduce_float loss Mpi.Float_sum 0 Mpi.comm_world in
Mpi.reduce_bigarray g gradient Mpi.Sum 0 Mpi.comm_world;
Mat.div_scalar_ gradient Float.(of_int total_count);
Float.(loss / of_int total_count)
in
let stop iter current_loss =
n_samples_ := n_samples iter;
Option.iter in_each_iteration ~f:(fun do_this ->
let prms = P.unpack handle (AD.pack_arr theta) in
let u_init =
Array.map us_init ~f:(function
| `known _ -> None
| `guess z -> z)
in
do_this ~u_init ~prms iter);
C.root_perform (fun () ->
Stdio.printf "\r[%05i]%!" iter;
Option.iter save_progress_to ~f:(fun (loss_every, prms_every, prefix) ->
let kk = Int.((iter - 1) / loss_every) in
if Int.((iter - 1) % prms_every) = 0
then (
let prefix = Printf.sprintf "%s_%i" prefix kk in
let prms = P.unpack handle (AD.pack_arr theta) in
Misc.save_bin ~out:(prefix ^ ".params.bin") prms;
P.save_to_files ~prefix ~prms);
if Int.((iter - 1) % loss_every) = 0
then (
Stdio.printf "\r[%05i] %.4f%!" iter current_loss;
let z = [| [| Float.of_int kk; current_loss |] |] in
Mat.(save_txt ~append:true (of_arrays z) ~out:(prefix ^ ".loss")))));
match max_iter with
| Some mi -> iter > mi
| None -> false
in
let _ = Adam.min ?eta ?lb:lbound ?ub:ubound ~stop adam_loss theta in
theta |> AD.pack_arr |> P.unpack handle
let recalibrate_uncertainty
?n_samples
?max_iter
?save_progress_to
?in_each_iteration
?eta
~prms
data
=
let n_trials = Array.length data in
assert (Int.(n_trials % C.n_nodes = 0));
let data = C.broadcast data in
C.self_init_rng ();
let mu_u =
Array.mapi data ~f:(fun i data_i ->
if Int.(i % C.n_nodes = C.rank)
then `known (Some (posterior_mean ~u_init:None ~prms data_i))
else `known None)
in
let init_prms =
P.map ~f:Owl_parameters.pin prms
|> Accessor.map (VAE_P.A.recognition @> Recognition_P.A.space_cov) ~f:(fun _ ->
prms.recognition.space_cov)
|> Accessor.map (VAE_P.A.recognition @> Recognition_P.A.time_cov) ~f:(fun _ ->
prms.recognition.time_cov)
in
let recalibrated_prms =
train
?n_samples
?max_iter
~mu_u
?save_progress_to
?in_each_iteration
?eta
~init_prms
data
in
prms
|> Accessor.map (VAE_P.A.recognition @> Recognition_P.A.space_cov) ~f:(fun _ ->
recalibrated_prms.recognition.space_cov)
|> Accessor.map (VAE_P.A.recognition @> Recognition_P.A.time_cov) ~f:(fun _ ->
recalibrated_prms.recognition.time_cov)
let check_grad ~prms data n_points file =
let seed = Random.int 31415 in
let u_init = Array.map data ~f:(fun _ -> `guess None) in
let elbo_all ~prms =
let _ = Owl_stats_prng.init seed in
elbo_all ~u_init ~n_samples:2 ~beta:1. ~prms
in
let module Packer = Owl_parameters.Packer () in
let handle = P.pack (module Packer) prms in
let theta, _, _ = Packer.finalize () in
let theta = AD.unpack_arr theta in
let loss, true_g =
let theta = AD.make_reverse (Arr (Mat.copy theta)) (AD.tag ()) in
let prms = P.unpack handle theta in
let loss = elbo_all ~prms data in
AD.reverse_prop (F 1.) loss;
AD.unpack_flt loss, AD.(unpack_arr (adjval theta))
in
let dim = Mat.numel theta in
let n_points =
match n_points with
| `all -> dim
| `random k -> k
in
Array.init dim ~f:(fun i -> i)
|> Stats.shuffle
|> Array.sub ~pos:0 ~len:n_points
|> Array.mapi ~f:(fun k id ->
Stdio.printf "\rcheck grad: %05i / %05i (out of %i)%!" (k + 1) n_points dim;
let true_g = Mat.get true_g 0 id in
let est_g =
let delta = 1E-6 in
let theta' = Mat.copy theta in
Mat.set theta' 0 id (Mat.get theta 0 id +. delta);
let loss' =
elbo_all ~prms:(P.unpack handle (Arr theta')) data |> AD.unpack_flt
in
Float.((loss' - loss) / delta)
in
[| true_g; est_g |])
|> Mat.of_arrays
|> Mat.save_txt ~out:file
|> fun _ -> Stdio.print_endline ""
let save_data ?prefix data =
Option.iter data.u ~f:(fun u ->
Mat.save_txt ~out:(Owl_parameters.with_prefix ?prefix "u") (AD.unpack_arr u));
Option.iter data.z ~f:(fun z ->
Mat.save_txt ~out:(Owl_parameters.with_prefix ?prefix "z") (AD.unpack_arr z));
L.save_data ~prefix:(Owl_parameters.with_prefix ?prefix "o") data.o
end
|
9fb8cbe006d1bddd370c99b6d1afcec083398fbb244e2250c88817d28086123a | ananthakumaran/eopl | mutpair.clj | (ns eopl.core.mutpair
(:use eopl.core.define-datatype)
(:use eopl.core.vector-ref))
(define-datatype mutpair mutpair?
(a-pair
(left-loc reference?)
(right-loc reference?)))
(defn make-pair [left right]
(a-pair (newref left)
(newref right)))
(defn left [mp]
(cases mutpair mp
(a-pair (left-loc right-loc)
(de-ref left-loc))))
(defn right [mp]
(cases mutpair mp
(a-pair (left-loc right-loc)
(de-ref right-loc))))
(defn set-left [mp value]
(cases mutpair mp
(a-pair (left-loc right-loc)
(setref! left-loc
value))))
(defn set-right [mp value]
(cases mutpair mp
(a-pair (left-loc right-loc)
(setref! right-loc
value))))
| null | https://raw.githubusercontent.com/ananthakumaran/eopl/876d6c2e44865e2c89a05a683d99a289c71f1487/src/eopl/core/mutpair.clj | clojure | (ns eopl.core.mutpair
(:use eopl.core.define-datatype)
(:use eopl.core.vector-ref))
(define-datatype mutpair mutpair?
(a-pair
(left-loc reference?)
(right-loc reference?)))
(defn make-pair [left right]
(a-pair (newref left)
(newref right)))
(defn left [mp]
(cases mutpair mp
(a-pair (left-loc right-loc)
(de-ref left-loc))))
(defn right [mp]
(cases mutpair mp
(a-pair (left-loc right-loc)
(de-ref right-loc))))
(defn set-left [mp value]
(cases mutpair mp
(a-pair (left-loc right-loc)
(setref! left-loc
value))))
(defn set-right [mp value]
(cases mutpair mp
(a-pair (left-loc right-loc)
(setref! right-loc
value))))
| |
10a6a3cc2bbc9a207e09c9cd5fac827ac45f6cb5838cd543ed1901dce87a6a2c | Clojure2D/clojure2d-examples | aabb.clj | (ns rt4.the-next-week.ch05b.aabb
(:require [rt4.the-next-week.ch05b.interval :as interval]
[fastmath.vector :as v]
[fastmath.core :as m])
(:import [fastmath.vector Vec3]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(defprotocol AABBProto
(axis [aabb n])
(hit [aabb r ray-t]))
(defmacro ^:private check-axis
[i d o]
`(let [invd# (/ ~d)
m0# (double (if (neg? invd#) (:mx ~i) (:mn ~i)))
m1# (double (if (neg? invd#) (:mn ~i) (:mx ~i)))
t0# (* (- m0# ~o) invd#)
t1# (* (- m1# ~o) invd#)
ray-tmin# (if (> t0# ~'rt-min) t0# ~'rt-min)
ray-tmax# (if (< t1# ~'rt-max) t1# ~'rt-max)]
(> ray-tmax# ray-tmin#)))
(defrecord AABB [x y z]
AABBProto
(axis [_ n] (case (int n) 0 x 1 y 2 z))
(hit [_ r ray-t]
(let [^Vec3 direction (:direction r)
^Vec3 origin (:origin r)
^double rt-min (:mn ray-t)
^double rt-max (:mx ray-t)]
(and (check-axis x (.x direction) (.x origin))
(check-axis y (.y direction) (.y origin))
(check-axis z (.z direction) (.z origin))))))
(defn aabb
([^Vec3 a ^Vec3 b]
(aabb (interval/interval (min (.x a) (.x b)) (max (.x a) (.x b)))
(interval/interval (min (.y a) (.y b)) (max (.y a) (.y b)))
(interval/interval (min (.z a) (.z b)) (max (.z a) (.z b)))))
([x y z] (->AABB x y z)))
(defn merge-boxes
[box0 box1]
(aabb (interval/merge-intervals (:x box0) (:x box1))
(interval/merge-intervals (:y box0) (:y box1))
(interval/merge-intervals (:z box0) (:z box1))))
| null | https://raw.githubusercontent.com/Clojure2D/clojure2d-examples/ead92d6f17744b91070e6308157364ad4eab8a1b/src/rt4/the_next_week/ch05b/aabb.clj | clojure | (ns rt4.the-next-week.ch05b.aabb
(:require [rt4.the-next-week.ch05b.interval :as interval]
[fastmath.vector :as v]
[fastmath.core :as m])
(:import [fastmath.vector Vec3]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(defprotocol AABBProto
(axis [aabb n])
(hit [aabb r ray-t]))
(defmacro ^:private check-axis
[i d o]
`(let [invd# (/ ~d)
m0# (double (if (neg? invd#) (:mx ~i) (:mn ~i)))
m1# (double (if (neg? invd#) (:mn ~i) (:mx ~i)))
t0# (* (- m0# ~o) invd#)
t1# (* (- m1# ~o) invd#)
ray-tmin# (if (> t0# ~'rt-min) t0# ~'rt-min)
ray-tmax# (if (< t1# ~'rt-max) t1# ~'rt-max)]
(> ray-tmax# ray-tmin#)))
(defrecord AABB [x y z]
AABBProto
(axis [_ n] (case (int n) 0 x 1 y 2 z))
(hit [_ r ray-t]
(let [^Vec3 direction (:direction r)
^Vec3 origin (:origin r)
^double rt-min (:mn ray-t)
^double rt-max (:mx ray-t)]
(and (check-axis x (.x direction) (.x origin))
(check-axis y (.y direction) (.y origin))
(check-axis z (.z direction) (.z origin))))))
(defn aabb
([^Vec3 a ^Vec3 b]
(aabb (interval/interval (min (.x a) (.x b)) (max (.x a) (.x b)))
(interval/interval (min (.y a) (.y b)) (max (.y a) (.y b)))
(interval/interval (min (.z a) (.z b)) (max (.z a) (.z b)))))
([x y z] (->AABB x y z)))
(defn merge-boxes
[box0 box1]
(aabb (interval/merge-intervals (:x box0) (:x box1))
(interval/merge-intervals (:y box0) (:y box1))
(interval/merge-intervals (:z box0) (:z box1))))
| |
54aa5771cce12ed3763f03b77b047ac79ed23c4eea75892c7e90b653640ff4f2 | ocurrent/opam-repo-ci | packageOpt.ml | type t = {
pkg : OpamPackage.t;
urgent : ([`High | `Low] -> bool) option;
has_tests : bool;
}
let compare {pkg = pkg1; urgent = _; has_tests = _} {pkg = pkg2; urgent = _; has_tests = _} =
OpamPackage.compare pkg1 pkg2
let pp f {pkg; urgent = _; has_tests = _} =
Fmt.of_to_string OpamPackage.to_string f pkg
| null | https://raw.githubusercontent.com/ocurrent/opam-repo-ci/d60f72a41fff211ccc0c9b00b41a3bf849067b8f/lib/packageOpt.ml | ocaml | type t = {
pkg : OpamPackage.t;
urgent : ([`High | `Low] -> bool) option;
has_tests : bool;
}
let compare {pkg = pkg1; urgent = _; has_tests = _} {pkg = pkg2; urgent = _; has_tests = _} =
OpamPackage.compare pkg1 pkg2
let pp f {pkg; urgent = _; has_tests = _} =
Fmt.of_to_string OpamPackage.to_string f pkg
| |
1a6b6d58111b813d4d789b0e87b2b0f15939ccf42f268ab29a57f1930db79a1b | expede/rescue | Class.hs | {-# LANGUAGE FlexibleContexts #-}
# LANGUAGE FlexibleInstances #
{-# LANGUAGE LambdaCase #-}
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE ScopedTypeVariables #-}
# LANGUAGE TypeFamilies #
{-# LANGUAGE TypeOperators #-}
# LANGUAGE UndecidableInstances #
| The ' MonadRescue ' class , meant for retrieving the success / failure branches
module Control.Monad.Rescue.Class (MonadRescue (..)) where
import Data.WorldPeace
import Control.Exception
import qualified Control.Monad.Catch as Catch
import Control.Monad.Cont
import Control.Monad.Raise
import Control.Monad.Trans.Except
import Control.Monad.Trans.Identity
import Control.Monad.Trans.Maybe
import Control.Monad.Trans.Reader
import qualified Control.Monad.RWS.Lazy as Lazy
import qualified Control.Monad.RWS.Strict as Strict
import qualified Control.Monad.State.Lazy as Lazy
import qualified Control.Monad.State.Strict as Strict
import qualified Control.Monad.Writer.Lazy as Lazy
import qualified Control.Monad.Writer.Strict as Strict
-- $setup
--
-- >>> :set -XDataKinds
-- >>> :set -XFlexibleContexts
-- >>> :set -XTypeApplications
-- >>> :set -XLambdaCase
--
> > > import Control . Monad . Trans . Rescue
> > > import Data . Functor . Identity
-- >>> import Data.Proxy
> > > import Data . WorldPeace as OpenUnion
--
> > > data FooErr deriving Show
> > > data BarErr deriving Show
> > > data deriving Show
-- | Pull a potential error out of the surrounding context
NOTE that the target ` m ` may not even be aware of Raise / Rescue . It 's an escape to the " normal " world
class MonadRaise m => MonadRescue m where
-- | Attempt some action, exposing the success and error branches
--
-- ==== __Examples__
--
-- >>> :{
goesBoom : : Int - > Rescue ' [ FooErr , BarErr ] Int
-- goesBoom x =
if x > 50
-- then return x
else raise
-- :}
--
> > > runRescue . attempt $ goesBoom 42
-- Right (Left (Identity FooErr))
--
-- Where @Identity fooErr@ is the selection of the 'OpenUnion'.
-- In practice you would handle the 'OpenUnion' like so:
--
-- >>> let handleErr = catchesOpenUnion (show, show)
> > > let x = attempt ( goesBoom 42 ) > > = pure . either handleErr show
-- >>> runRescue x
Right " "
--
-- Where @Identity FooErr@ is the selection of the 'OpenUnion'.
attempt :: m a -> m (Either (ErrorCase m) a)
instance MonadRescue Maybe where
attempt Nothing = Just . Left $ openUnionLift ()
attempt (Just x) = Just $ Right x
instance MonadRescue [] where
attempt [] = [Left $ include ()]
attempt xs = Right <$> xs
instance MonadRescue (Either (OpenUnion errs)) where
attempt action = Right action
instance MonadRescue IO where
attempt action =
Catch.try action >>= \case
Left (err :: IOException) -> return . Left $ include err
Right val -> return $ Right val
instance
( MonadRescue m
, () `IsMember` Errors m
, Errors m `Contains` Errors m
)
=> MonadRescue (MaybeT m) where
attempt (MaybeT action) =
MaybeT $
attempt action >>= \case
Left errs -> return . Just . Left $ include errs
Right Nothing -> return . Just . Left $ include ()
Right (Just val) -> return . Just $ Right val
instance MonadRescue m => MonadRescue (IdentityT m) where
attempt (IdentityT action) = IdentityT $ attempt action
instance
( MonadRescue m
, Contains (Errors m) errs
)
=> MonadRescue (ExceptT (OpenUnion errs) m) where
attempt (ExceptT action) =
lift $
attempt action >>= \case
Left err -> return . Left $ include err
Right errOrVal -> return errOrVal
instance MonadRescue m => MonadRescue (ReaderT cfg m) where
attempt = mapReaderT attempt
instance (Monoid w, MonadRescue m) => MonadRescue (Lazy.WriterT w m) where
attempt = Lazy.mapWriterT runner2
instance (Monoid w, MonadRescue m) => MonadRescue (Strict.WriterT w m) where
attempt = Strict.mapWriterT runner2
instance MonadRescue m => MonadRescue (Lazy.StateT s m) where
attempt = Lazy.mapStateT runner2
instance MonadRescue m => MonadRescue (Strict.StateT s m) where
attempt = Strict.mapStateT runner2
instance (Monoid w, MonadRescue m) => MonadRescue (Lazy.RWST r w s m) where
attempt = Lazy.mapRWST runner3
instance (Monoid w, MonadRescue m) => MonadRescue (Strict.RWST r w s m) where
attempt = Strict.mapRWST runner3
instance MonadRescue m => MonadRescue (ContT r m) where
attempt = withContT $ \b_mr current -> b_mr =<< attempt (pure current)
runner2
:: ( MonadRescue m
, RaisesOnly m errs
)
=> m (a, w)
-> m (Either (OpenUnion errs) a, w)
runner2 inner = do
(a, w) <- inner
errOrVal <- attempt (pure a)
return (errOrVal, w)
runner3
:: ( MonadRescue m
, RaisesOnly m errs
)
=> m (a, b, c)
-> m (Either (OpenUnion errs) a, b, c)
runner3 inner = do
(a, s, w) <- inner
errOrVal <- attempt (pure a)
return (errOrVal, s, w)
| null | https://raw.githubusercontent.com/expede/rescue/5f460a4cd9a01ac84e669a50711375c8f8dcba75/library/Control/Monad/Rescue/Class.hs | haskell | # LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeOperators #
$setup
>>> :set -XDataKinds
>>> :set -XFlexibleContexts
>>> :set -XTypeApplications
>>> :set -XLambdaCase
>>> import Data.Proxy
| Pull a potential error out of the surrounding context
| Attempt some action, exposing the success and error branches
==== __Examples__
>>> :{
goesBoom x =
then return x
:}
Right (Left (Identity FooErr))
Where @Identity fooErr@ is the selection of the 'OpenUnion'.
In practice you would handle the 'OpenUnion' like so:
>>> let handleErr = catchesOpenUnion (show, show)
>>> runRescue x
Where @Identity FooErr@ is the selection of the 'OpenUnion'. | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
| The ' MonadRescue ' class , meant for retrieving the success / failure branches
module Control.Monad.Rescue.Class (MonadRescue (..)) where
import Data.WorldPeace
import Control.Exception
import qualified Control.Monad.Catch as Catch
import Control.Monad.Cont
import Control.Monad.Raise
import Control.Monad.Trans.Except
import Control.Monad.Trans.Identity
import Control.Monad.Trans.Maybe
import Control.Monad.Trans.Reader
import qualified Control.Monad.RWS.Lazy as Lazy
import qualified Control.Monad.RWS.Strict as Strict
import qualified Control.Monad.State.Lazy as Lazy
import qualified Control.Monad.State.Strict as Strict
import qualified Control.Monad.Writer.Lazy as Lazy
import qualified Control.Monad.Writer.Strict as Strict
> > > import Control . Monad . Trans . Rescue
> > > import Data . Functor . Identity
> > > import Data . WorldPeace as OpenUnion
> > > data FooErr deriving Show
> > > data BarErr deriving Show
> > > data deriving Show
NOTE that the target ` m ` may not even be aware of Raise / Rescue . It 's an escape to the " normal " world
class MonadRaise m => MonadRescue m where
goesBoom : : Int - > Rescue ' [ FooErr , BarErr ] Int
if x > 50
else raise
> > > runRescue . attempt $ goesBoom 42
> > > let x = attempt ( goesBoom 42 ) > > = pure . either handleErr show
Right " "
attempt :: m a -> m (Either (ErrorCase m) a)
instance MonadRescue Maybe where
attempt Nothing = Just . Left $ openUnionLift ()
attempt (Just x) = Just $ Right x
instance MonadRescue [] where
attempt [] = [Left $ include ()]
attempt xs = Right <$> xs
instance MonadRescue (Either (OpenUnion errs)) where
attempt action = Right action
instance MonadRescue IO where
attempt action =
Catch.try action >>= \case
Left (err :: IOException) -> return . Left $ include err
Right val -> return $ Right val
instance
( MonadRescue m
, () `IsMember` Errors m
, Errors m `Contains` Errors m
)
=> MonadRescue (MaybeT m) where
attempt (MaybeT action) =
MaybeT $
attempt action >>= \case
Left errs -> return . Just . Left $ include errs
Right Nothing -> return . Just . Left $ include ()
Right (Just val) -> return . Just $ Right val
instance MonadRescue m => MonadRescue (IdentityT m) where
attempt (IdentityT action) = IdentityT $ attempt action
instance
( MonadRescue m
, Contains (Errors m) errs
)
=> MonadRescue (ExceptT (OpenUnion errs) m) where
attempt (ExceptT action) =
lift $
attempt action >>= \case
Left err -> return . Left $ include err
Right errOrVal -> return errOrVal
instance MonadRescue m => MonadRescue (ReaderT cfg m) where
attempt = mapReaderT attempt
instance (Monoid w, MonadRescue m) => MonadRescue (Lazy.WriterT w m) where
attempt = Lazy.mapWriterT runner2
instance (Monoid w, MonadRescue m) => MonadRescue (Strict.WriterT w m) where
attempt = Strict.mapWriterT runner2
instance MonadRescue m => MonadRescue (Lazy.StateT s m) where
attempt = Lazy.mapStateT runner2
instance MonadRescue m => MonadRescue (Strict.StateT s m) where
attempt = Strict.mapStateT runner2
instance (Monoid w, MonadRescue m) => MonadRescue (Lazy.RWST r w s m) where
attempt = Lazy.mapRWST runner3
instance (Monoid w, MonadRescue m) => MonadRescue (Strict.RWST r w s m) where
attempt = Strict.mapRWST runner3
instance MonadRescue m => MonadRescue (ContT r m) where
attempt = withContT $ \b_mr current -> b_mr =<< attempt (pure current)
runner2
:: ( MonadRescue m
, RaisesOnly m errs
)
=> m (a, w)
-> m (Either (OpenUnion errs) a, w)
runner2 inner = do
(a, w) <- inner
errOrVal <- attempt (pure a)
return (errOrVal, w)
runner3
:: ( MonadRescue m
, RaisesOnly m errs
)
=> m (a, b, c)
-> m (Either (OpenUnion errs) a, b, c)
runner3 inner = do
(a, s, w) <- inner
errOrVal <- attempt (pure a)
return (errOrVal, s, w)
|
1745955464d0139bc5813a6f1a0865cc1d5648f8f2afed157308ed89554564e8 | basho/riak_cs | riak_cs_delete_fsm_sup.erl | %% ---------------------------------------------------------------------
%%
Copyright ( c ) 2007 - 2013 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% ---------------------------------------------------------------------
%% @doc Supervisor for `riak_cs_delete_fsm'
-module(riak_cs_delete_fsm_sup).
-behaviour(supervisor).
%% API
-export([start_delete_fsm/2]).
-export([start_link/0]).
%% Supervisor callbacks
-export([init/1]).
%% ===================================================================
%% API functions
%% ===================================================================
%% @doc API for starting the supervisor.
-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
%% @doc Start a `riak_cs_delete_fsm' child process.
-spec start_delete_fsm(node(), list()) ->
supervisor:startchild_ret().
start_delete_fsm(Node, Args) ->
supervisor:start_child({?MODULE, Node}, Args).
%% ===================================================================
%% Supervisor callbacks
%% ===================================================================
%% @doc Initialize this supervisor. This is a `simple_one_for_one',
%% whose child spec is for starting `riak_cs_delete_fsm' processes.
-spec init([]) -> {ok, {{supervisor:strategy(),
pos_integer(),
pos_integer()},
[supervisor:child_spec()]}}.
init([]) ->
RestartStrategy = simple_one_for_one,
MaxRestarts = 1000,
MaxSecondsBetweenRestarts = 3600,
SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts},
Restart = temporary,
Shutdown = 2000,
Type = worker,
DeleteFsmSpec = {undefined,
{riak_cs_delete_fsm, start_link, []},
Restart, Shutdown, Type, [riak_cs_delete_fsm]},
{ok, {SupFlags, [DeleteFsmSpec]}}.
| null | https://raw.githubusercontent.com/basho/riak_cs/c0c1012d1c9c691c74c8c5d9f69d388f5047bcd2/src/riak_cs_delete_fsm_sup.erl | erlang | ---------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
---------------------------------------------------------------------
@doc Supervisor for `riak_cs_delete_fsm'
API
Supervisor callbacks
===================================================================
API functions
===================================================================
@doc API for starting the supervisor.
@doc Start a `riak_cs_delete_fsm' child process.
===================================================================
Supervisor callbacks
===================================================================
@doc Initialize this supervisor. This is a `simple_one_for_one',
whose child spec is for starting `riak_cs_delete_fsm' processes. | Copyright ( c ) 2007 - 2013 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(riak_cs_delete_fsm_sup).
-behaviour(supervisor).
-export([start_delete_fsm/2]).
-export([start_link/0]).
-export([init/1]).
-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-spec start_delete_fsm(node(), list()) ->
supervisor:startchild_ret().
start_delete_fsm(Node, Args) ->
supervisor:start_child({?MODULE, Node}, Args).
-spec init([]) -> {ok, {{supervisor:strategy(),
pos_integer(),
pos_integer()},
[supervisor:child_spec()]}}.
init([]) ->
RestartStrategy = simple_one_for_one,
MaxRestarts = 1000,
MaxSecondsBetweenRestarts = 3600,
SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts},
Restart = temporary,
Shutdown = 2000,
Type = worker,
DeleteFsmSpec = {undefined,
{riak_cs_delete_fsm, start_link, []},
Restart, Shutdown, Type, [riak_cs_delete_fsm]},
{ok, {SupFlags, [DeleteFsmSpec]}}.
|
4939db219143ef4c032b457108b106976c8c37ed7b04f3118c6c5a51e5e4e938 | xapi-project/xen-api | xcp_service.ml |
* Copyright ( C ) Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
module StringSet = Set.Make (String)
(* Server configuration. We have built-in (hopefully) sensible defaults,
together with command-line arguments and a configuration file. They are
applied in order: (latest takes precedence) defaults < arguments < config
file *)
let default_service_name = Filename.basename Sys.argv.(0)
let config_file = ref (Printf.sprintf "/etc/%s.conf" default_service_name)
let config_dir = ref (Printf.sprintf "/etc/%s.conf.d" default_service_name)
let pidfile = ref (Printf.sprintf "/var/run/%s.pid" default_service_name)
let extra_search_path = ref []
let log_destination = ref "syslog:daemon"
let log_level = ref Syslog.Debug
let daemon = ref false
let have_daemonized () = Unix.getppid () = 1
let common_prefix = "org.xen.xapi."
let finally f g =
try
let result = f () in
g () ; result
with e -> g () ; raise e
type opt = string * Arg.spec * (unit -> string) * string
module D = Debug.Make (struct let name = default_service_name end)
open D
module Config_file = struct
open Arg
let apply v = function
| Unit f ->
f ()
| Bool f ->
f (bool_of_string v)
| Set b ->
b := bool_of_string v
| Clear b ->
b := not (bool_of_string v)
| String f ->
f v
| Set_string s ->
s := v
| Int f ->
f (int_of_string v)
| Set_int i ->
i := int_of_string v
| Float f ->
f (float_of_string v)
| Set_float f ->
f := float_of_string v
| _ ->
failwith "Unsupported type in config file"
(* Trim trailing whitespace from a line *)
let trim_trailing_ws line =
let re_ws = Re.compile (Re.Emacs.re "[ \t]+$") in
try
let ofs = fst (Re.Group.all_offset (Re.exec re_ws line)).(0) in
String.sub line 0 ofs
with Not_found -> line
let trim_comment line =
try
let i = String.index line '#' in
String.sub line 0 i
with Not_found -> line
let get_kv line =
let re =
Re.compile (Re.Emacs.re "\\([^=\\ \t]+\\)[\\ \t]*=[\\ \t]*\\(.*\\)")
in
let get (x, y) = String.sub line x (y - x) in
try
match Re.Group.all_offset (Re.exec re line) with
| [|_; key_ofs; v_ofs|] ->
First in array is always the full extent of all matches
Some (get key_ofs, get v_ofs)
| _ ->
None
with _ -> None
let strip_quotes (k, v) =
if String.length v < 2 then
(k, v)
else
let first = v.[0] and last = v.[String.length v - 1] in
if first = last && (first = '"' || first = '\'') then
(k, String.sub v 1 (String.length v - 2))
else
(k, v)
let parse_line line =
Strip comments
let stripped = line |> trim_comment |> trim_trailing_ws in
let lift f x = Some (f x) in
let ( >>= ) m f = match m with Some x -> f x | None -> None in
get_kv stripped >>= lift strip_quotes
let process_line data spec =
let spec = List.map (fun (a, b, _, _) -> (a, b)) spec in
match parse_line data with
| Some (key, v) ->
if List.mem_assoc key spec then apply v (List.assoc key spec)
| None ->
()
let parse filename spec =
(* Remove the unnecessary doc parameter *)
let ic = open_in filename in
finally
(fun () ->
try
while true do
let line = input_line ic in
process_line line spec
done
with End_of_file -> ()
)
(fun () -> close_in ic)
let dump spec =
List.iter
(fun (name, _, printer, description) ->
debug "%s = %s (%s)" name (printer ()) description
)
spec
end
let rec split_c c str =
try
let i = String.index str c in
String.sub str 0 i
:: split_c c (String.sub str (i + 1) (String.length str - i - 1))
with Not_found -> [str]
let setify =
let rec loop acc = function
| [] ->
acc
| x :: xs ->
(if List.mem x acc then loop acc else loop (x :: acc)) xs
in
loop []
let common_options =
[
( "use-switch"
, Arg.Bool (fun b -> Xcp_client.use_switch := b)
, (fun () -> string_of_bool !Xcp_client.use_switch)
, "true if the message switch is to be enabled"
)
; ( "switch-path"
, Arg.Set_string Xcp_client.switch_path
, (fun () -> !Xcp_client.switch_path)
, "Unix domain socket path on localhost where the message switch is \
listening"
)
; ( "search-path"
, Arg.String
(fun s -> extra_search_path := split_c ':' s @ !extra_search_path)
, (fun () -> String.concat ":" !extra_search_path)
, "Search path for resources"
)
; ( "pidfile"
, Arg.Set_string pidfile
, (fun () -> !pidfile)
, "Filename to write process PID"
)
; ( "log"
, Arg.Set_string log_destination
, (fun () -> !log_destination)
, "Where to write log messages"
)
; ( "daemon"
, Arg.Bool (fun x -> daemon := x)
, (fun () -> string_of_bool !daemon)
, "True if we are to daemonise"
)
; ( "disable-logging-for"
, Arg.String
(fun x ->
debug "Parsing [%s]" x ;
try
let modules = List.filter (fun x -> x <> "") (split_c ' ' x) in
List.iter Debug.disable modules
with e ->
error "Processing disabled-logging-for = %s: %s" x
(Printexc.to_string e)
)
, (fun () ->
String.concat " " (setify (List.map fst (Debug.disabled_modules ())))
)
, "A space-separated list of debug modules to suppress logging from"
)
; ( "loglevel"
, Arg.String
(fun x ->
debug "Parsing [%s]" x ;
try
log_level := Syslog.level_of_string x ;
Debug.set_level !log_level
with e ->
error "Processing loglevel = %s: %s" x (Printexc.to_string e)
)
, (fun () -> Syslog.string_of_level !log_level)
, "Log level"
)
; ( "inventory"
, Arg.Set_string Inventory.inventory_filename
, (fun () -> !Inventory.inventory_filename)
, "Location of the inventory file"
)
; ( "config"
, Arg.Set_string config_file
, (fun () -> !config_file)
, "Location of configuration file"
)
; ( "config-dir"
, Arg.Set_string config_dir
, (fun () -> !config_dir)
, "Location of directory containing configuration file fragments"
)
]
let loglevel () = !log_level
module Term = Cmdliner.Term
module Cmd = Cmdliner.Cmd
let rec list = function
| [] ->
Term.const []
| x :: xs ->
Term.app (Term.app (Term.const (fun x y -> x :: y)) x) (list xs)
let command_of ?(name = Sys.argv.(0)) ?(version = "unknown")
?(doc = "Please describe this command.") xs =
let term_of_option (key, arg, get_fn, doc) =
let default = get_fn () in
match arg with
| Arg.Unit f ->
let t = Cmdliner.Arg.(value & flag & info [key] ~doc) in
let make = function true -> f () | false -> () in
Term.(const make $ t)
| Arg.Bool f ->
let t =
Cmdliner.Arg.(
value & opt bool (bool_of_string default) & info [key] ~doc
)
in
Term.(const f $ t)
| Arg.Set b ->
let t =
Cmdliner.Arg.(
value & opt bool (bool_of_string default) & info [key] ~doc
)
in
let make v = b := v in
Term.(const make $ t)
| Arg.Clear b ->
let t =
Cmdliner.Arg.(
value & opt bool (bool_of_string default) & info [key] ~doc
)
in
let make v = b := not v in
Term.(const make $ t)
| Arg.String f ->
let t = Cmdliner.Arg.(value & opt string default & info [key] ~doc) in
Term.(const f $ t)
| Arg.Set_string s ->
let t = Cmdliner.Arg.(value & opt string default & info [key] ~doc) in
let make v = s := v in
Term.(const make $ t)
| Arg.Int f ->
let t =
Cmdliner.Arg.(
value & opt int (int_of_string default) & info [key] ~doc
)
in
Term.(const f $ t)
| Arg.Set_int s ->
let t =
Cmdliner.Arg.(
value & opt int (int_of_string default) & info [key] ~doc
)
in
let make v = s := v in
Term.(const make $ t)
| Arg.Float f ->
let t =
Cmdliner.Arg.(
value & opt float (float_of_string default) & info [key] ~doc
)
in
Term.(const f $ t)
| Arg.Set_float s ->
let t =
Cmdliner.Arg.(
value & opt float (float_of_string default) & info [key] ~doc
)
in
let make v = s := v in
Term.(const make $ t)
| _ ->
let t = Cmdliner.Arg.(value & opt string default & info [key] ~doc) in
let make v = Config_file.apply v arg in
Term.(const make $ t)
in
let terms = List.map term_of_option xs in
let _common_options = "COMMON OPTIONS" in
let man =
[
`S "DESCRIPTION"
; `P doc
; `S _common_options
; `P "These options are common to all services."
; `S "BUGS"
; `P "Check bug reports at -project/xcp-idl"
]
in
Cmd.v
(Cmd.info name ~version ~sdocs:_common_options ~man)
Term.(const (fun (_ : unit list) -> `Ok ()) $ list terms)
let arg_spec = List.map (fun (a, b, _, c) -> ("-" ^ a, b, c))
type res = {
name: string
; description: string
; essential: bool
; path: string ref
; perms: Unix.access_permission list
}
let default_resources = []
(* Return a usable path for [x].  Absolute and explicitly-relative paths are
   returned unchanged; bare names are resolved by searching $PATH and the
   configured extra search path.  If the search fails we log a warning and
   return [x] unchanged so the later [Unix.access] check produces a sensible
   error message. *)
let canonicalise x =
  if not (Filename.is_relative x) then
    x
  else (* Search the PATH and XCP_PATH for the executable *)
    (* [Sys.getenv] raises [Not_found] when the variable is unset; treat a
       missing $PATH as empty rather than crashing. *)
    let path_env = try Sys.getenv "PATH" with Not_found -> "" in
    let paths = split_c ':' path_env in
    let first_hit =
      List.fold_left
        (fun found path ->
          match found with
          | Some _hit ->
              found
          | None ->
              let possibility = Filename.concat path x in
              if Sys.file_exists possibility then Some possibility else None
        )
        None
        (paths @ !extra_search_path)
    in
    match first_hit with
    | None ->
        warn "Failed to find %s on $PATH ( = %s) or search_path option ( = %s)"
          x path_env
          (String.concat ":" !extra_search_path) ;
        x
    | Some hit ->
        info "Found '%s' at '%s'" x hit ;
        hit
(* Expose each resource as an [opt]: the setter canonicalises the supplied
   path before storing it in [f.path], and the printer reads it back. *)
let to_opt =
  List.map (fun f ->
      ( f.name
      , Arg.String (fun x -> f.path := canonicalise x)
      , (fun () -> !(f.path))
      , f.description
      )
  )
(* Apply the main config file (when it exists) and then every fragment found
   in [config_dir], in sorted order, to the spec [x].  Later values win, so
   fragments override the main file. *)
let read_config_file x =
  if Sys.file_exists !config_file then
    (* Will raise exception if config is mis-formatted. It's up to the caller to
       inspect and handle the failure. *)
    Config_file.parse !config_file x ;
  (* NB the [;] above terminates the [if]: the config.d fragments below are
     processed whether or not the main file exists. *)
  (try Sys.readdir !config_dir with _ -> [||])
  |> Array.to_list
  |> List.stable_sort compare
  |> List.iter (fun fragment ->
         let path = Filename.concat !config_dir fragment in
         Config_file.parse path x
     )
(* True when [x] begins with [prefix].  The empty prefix matches anything. *)
let startswith prefix x =
  let plen = String.length prefix in
  String.length x >= plen && String.equal (String.sub x 0 plen) prefix
(* Shared configuration pipeline used by both [configure] (Stdlib.Arg) and
   [configure2] (Cmdliner): build the full config_spec, parse the command
   line via [arg_parse_fn], apply the config file(s) on top, canonicalise
   resource paths, then verify that every essential resource is accessible
   (aborting with an actionable message when one is not). *)
let configure_common ~options ~resources arg_parse_fn =
  (* Register the Logs reporter to ensure we get log messages from libraries
     using Logs *)
  Debug.init_logs () ;
  let resources = default_resources @ resources in
  let config_spec = common_options @ options @ to_opt resources in
  (* It's very confusing if there are duplicate key names *)
  let keys = List.map (fun (k, _, _, _) -> k) config_spec in
  let rec check_for_duplicates seen_already = function
    | [] ->
        ()
    | x :: xs ->
        if List.mem x seen_already then
          warn
            "Duplicate configuration keys in Xcp_service.configure: %s in [ %s \
             ]"
            x (String.concat "; " keys) ;
        check_for_duplicates (x :: seen_already) xs
  in
  check_for_duplicates [] keys ;
  (* Command line first, then config file: the later (file) values win. *)
  arg_parse_fn config_spec ;
  read_config_file config_spec ;
  List.iter (fun r -> r.path := canonicalise !(r.path)) resources ;
  Config_file.dump config_spec ;
  (* Check the required binaries are all available *)
  List.iter
    (fun f ->
      try if f.essential then Unix.access !(f.path) f.perms
      with _ ->
        (* Reconstruct the user's command line minus any --<resource> flag so
           the error can show exactly how to supply a working path. *)
        let args =
          List.filter
            (fun x -> not (startswith ("--" ^ f.name) x))
            (Array.to_list Sys.argv)
        in
        let lines =
          [
            "Cannot access " ^ !(f.path)
          ; Printf.sprintf "Please either add to %s" !config_file
          ; Printf.sprintf " %s=<%s>" f.name f.description
          ; "or add a command-line argument"
          ; Printf.sprintf " %s --%s=<%s>" (String.concat " " args) f.name
              f.description
          ]
        in
        List.iter (fun x -> error "%s" x) lines ;
        failwith (String.concat "\n" lines)
    )
    resources ;
  (* Writes to closed sockets should yield EPIPE rather than kill us. *)
  Sys.set_signal Sys.sigpipe Sys.Signal_ignore
(* Run the common configuration pipeline, parsing the command line with
   Stdlib.Arg.  Any [Failure] (bad argument, missing essential resource)
   terminates the process with exit code 1. *)
let configure ?(options = []) ?(resources = []) () =
  let parse config_spec =
    let usage = Printf.sprintf "Usage: %s [-config filename]" Sys.argv.(0) in
    let on_anonymous _ = failwith "Invalid argument" in
    Arg.parse (Arg.align (arg_spec config_spec)) on_anonymous usage
  in
  try configure_common ~options ~resources parse with Failure _ -> exit 1
(* Cmdliner-based variant of [configure]: the same pipeline, but the command
   line is parsed by the command built in [command_of], and failures map
   onto Cmdliner's exit codes. *)
let configure2 ~name ~version ~doc ?(options = []) ?(resources = []) () =
  configure_common ~options ~resources @@ fun config_spec ->
  let cmd = command_of ~name ~version ~doc config_spec in
  match Cmd.eval_value ~catch:true cmd with
  | Ok (`Ok _) ->
      ()
  | Ok `Help | Ok `Version ->
      exit Cmd.Exit.ok
  | Error `Parse ->
      (* NOTE(review): mapping `Parse -> some_error and `Term -> cli_error
         looks swapped relative to Cmdliner's usual convention (cli_error is
         the code for command-line parse failures) — confirm intent. *)
      exit Cmd.Exit.some_error
  | Error `Term ->
      exit Cmd.Exit.cli_error
  | Error `Exn ->
      exit Cmd.Exit.internal_error
(* Minimal single-request HTTP/1.1 handler for RPC-over-HTTP on an already
   connected socket [s].  A POST carrying a content-length is decoded with
   [call_of_string], dispatched to [process], and the result serialised with
   [string_of_response]; any other method gets a 404.  The connection is not
   kept alive — the caller is expected to close [s] afterwards. *)
let http_handler call_of_string string_of_response process s =
  let ic = Unix.in_channel_of_descr s in
  let oc = Unix.out_channel_of_descr s in
  let module Request = Cohttp.Request.Make (Cohttp_posix_io.Buffered_IO) in
  let module Response = Cohttp.Response.Make (Cohttp_posix_io.Buffered_IO) in
  match Request.read ic with
  | `Eof ->
      debug "Failed to read HTTP request"
  | `Invalid x ->
      debug "Failed to read HTTP request. Got: '%s'" x
  | `Ok req -> (
    match (Cohttp.Request.meth req, Uri.path (Cohttp.Request.uri req)) with
    | `POST, _ -> (
        let headers = Cohttp.Request.headers req in
        match Cohttp.Header.get headers "content-length" with
        | None ->
            (* Without a content-length we cannot frame the body; give up. *)
            debug "Failed to read content-length"
        | Some content_length ->
            let content_length = int_of_string content_length in
            let request_txt = Bytes.make content_length '\000' in
            really_input ic request_txt 0 content_length ;
            let rpc_call =
              call_of_string (Bytes.unsafe_to_string request_txt)
            in
            debug "%s" (Rpc.string_of_call rpc_call) ;
            let rpc_response = process rpc_call in
            debug " %s" (Rpc.string_of_response rpc_response) ;
            let response_txt = string_of_response rpc_response in
            let content_length = String.length response_txt in
            let headers =
              Cohttp.Header.of_list
                [
                  ("user-agent", default_service_name)
                ; ("content-length", string_of_int content_length)
                ]
            in
            let response =
              Cohttp.Response.make ~version:`HTTP_1_1 ~status:`OK ~headers
                ~encoding:(Cohttp.Transfer.Fixed (Int64.of_int content_length))
                ()
            in
            Response.write
              (fun t -> Response.write_body t response_txt)
              response oc
      )
    | _, _ ->
        (* Anything other than POST is not part of the RPC protocol: reply
           404 with an empty body. *)
        let content_length = 0 in
        let headers =
          Cohttp.Header.of_list
            [
              ("user-agent", default_service_name)
            ; ("content-length", string_of_int content_length)
            ]
        in
        let response =
          Cohttp.Response.make ~version:`HTTP_1_1 ~status:`Not_found ~headers
            ~encoding:(Cohttp.Transfer.Fixed (Int64.of_int content_length))
            ()
        in
        Response.write (fun _t -> ()) response oc
  )
(* Discard an [int] result.  The explicit type constraint (rather than a bare
   [ignore]) makes the compiler complain if the producer's type changes. *)
let ign_int (t : int) = let _discarded = t in ()
(* Default connection handler: speak XML-RPC over HTTP on the connected
   socket [s], dispatching each decoded call to [rpc_fn]. *)
let default_raw_fn rpc_fn s =
  http_handler Xmlrpc.call_of_string Xmlrpc.string_of_response rpc_fn s
(* Create [dir] and any missing parents, like "mkdir -p"; every directory is
   created with mode [perm] and already-existing directories are accepted
   silently. *)
let mkdir_rec dir perm =
  let mkdir_one d =
    try Unix.mkdir d perm with Unix.Unix_error (Unix.EEXIST, _, _) -> ()
  in
  let rec make d =
    let parent = Filename.dirname d in
    if not (parent = "/" || parent = ".") then make parent ;
    mkdir_one d
  in
  make dir
(* The two transports a service can listen on: a bound Unix domain socket
   with a raw per-connection handler, or a named message-switch queue with
   an RPC handler. *)
type server =
  | Socket of Unix.file_descr * (Unix.file_descr -> unit)
  | Queue of string * (Rpc.call -> Rpc.response)
(* Start accepting connections on sockets before we daemonize *)
(* Bind and listen on a Unix domain socket at [path] (removing any stale
   socket file first and creating the parent directory), returning a
   [Socket] server that runs [fn] on each accepted connection.  On failure,
   log actionable hints; EACCES exits immediately, everything else is
   re-raised. *)
let make_socket_server path fn =
  try
    (try Unix.unlink path with Unix.Unix_error (Unix.ENOENT, _, _) -> ()) ;
    mkdir_rec (Filename.dirname path) 0o0755 ;
    let sock = Unix.socket Unix.PF_UNIX Unix.SOCK_STREAM 0 in
    Unix.bind sock (Unix.ADDR_UNIX path) ;
    Unix.listen sock 5 ;
    info "Listening on %s" path ;
    Socket (sock, fn)
  with e ->
    error "Failed to listen on Unix domain socket %s. Raw error was: %s" path
      (Printexc.to_string e) ;
    ( match e with
    | Unix.Unix_error (Unix.EACCES, _, _) ->
        error "Access was denied." ;
        error "Possible fixes include:" ;
        error "1. Run this program as root (recommended)" ;
        error
          "2. Make the permissions in the filesystem more permissive (my \
           effective uid is %d)"
          (Unix.geteuid ()) ;
        error "3. Adjust the sockets-path directive in %s" !config_file ;
        exit 1
    | _ ->
        ()
    ) ;
    raise e
(* Declare a message-switch server on queue [name]; the actual connection to
   the switch is made later, in [serve_forever]. *)
let make_queue_server name fn = Queue (name, fn)
(* TODO: connect to the message switch *)
(* Build a [server] for this service: a message-switch queue when the
   use-switch option is set, otherwise a Unix domain socket at [path] whose
   connections are handled by [raw_fn] (defaulting to XML-RPC over HTTP via
   [default_raw_fn]). *)
let make ~path ~queue_name ?raw_fn ~rpc_fn () =
  if !Xcp_client.use_switch then
    make_queue_server queue_name rpc_fn
  else
    let handler =
      match raw_fn with Some fn -> fn | None -> default_raw_fn rpc_fn
    in
    make_socket_server path handler
(* Run the accept/dispatch loop of a [server].  Never returns.
   - [Socket]: accept each connection and handle it on a fresh thread,
     closing the descriptor when the handler finishes (or raises).
   - [Queue]: serve JSON-RPC requests from the message switch; requests are
     serviced after [listen] returns, so this thread just sleeps forever. *)
let serve_forever = function
  | Socket (listening_sock, fn) ->
      while true do
        let this_connection, _ = Unix.accept listening_sock in
        let (_ : Thread.t) =
          Thread.create
            (fun () ->
              finally
                (fun () -> fn this_connection)
                (fun () -> Unix.close this_connection)
            )
            ()
        in
        ()
      done
  | Queue (queue_name, fn) ->
      let process x =
        Jsonrpc.string_of_response (fn (Jsonrpc.call_of_string x))
      in
      let _ =
        Message_switch_unix.Protocol_unix.Server.listen ~process
          ~switch:!Xcp_client.switch_path ~queue:queue_name ()
      in
      let rec forever () = Thread.delay 3600. ; forever () in
      forever ()
(* Write our PID plus a trailing newline to [filename], creating or
   truncating it with mode 0o640.  The descriptor is closed even on
   failure. *)
let pidfile_write filename =
  let fd =
    Unix.openfile filename [Unix.O_WRONLY; Unix.O_CREAT; Unix.O_TRUNC] 0o640
  in
  finally
    (fun () ->
      let pid = Unix.getpid () in
      let buf = string_of_int pid ^ "\n" |> Bytes.of_string in
      let len = Bytes.length buf in
      (* A single write of a few bytes to a regular file should never be
         partial; treat a short write as fatal. *)
      if Unix.write fd buf 0 len <> len then
        failwith "pidfile_write failed"
    )
    (fun () -> Unix.close fd)
(* Double-fork daemonisation recipe following W. Richard Stevens,
   "Advanced Programming in the UNIX Environment", Section 13.3. *)
(* Classic double-fork daemonisation: fork, become a session leader, fork
   again so the process can never reacquire a controlling terminal, then
   write the pidfile and redirect stdio to /dev/null.  [start_fn], when
   given, runs in the final (grandchild) process before stdio is redirected.
   NOTE(review): only the umask reset is guarded by [have_daemonized]; the
   fork sequence always runs — confirm this is intentional. *)
let daemonize ?start_fn () =
  if not (have_daemonized ()) then
    ign_int (Unix.umask 0) ;
  (* NB the [;] above terminates the [if]: everything below is
     unconditional. *)
  match Unix.fork () with
  | 0 -> (
      if Unix.setsid () == -1 then failwith "Unix.setsid failed" ;
      Sys.set_signal Sys.sighup Sys.Signal_ignore ;
      match Unix.fork () with
      | 0 ->
          Option.iter (fun fn -> fn ()) start_fn ;
          Unix.chdir "/" ;
          mkdir_rec (Filename.dirname !pidfile) 0o755 ;
          pidfile_write !pidfile ;
          let nullfd = Unix.openfile "/dev/null" [Unix.O_RDWR] 0 in
          Unix.dup2 nullfd Unix.stdin ;
          Unix.dup2 nullfd Unix.stdout ;
          Unix.dup2 nullfd Unix.stderr ;
          Unix.close nullfd
      | _ ->
          (* First child exits so the grandchild is reparented to init. *)
          exit 0
    )
  | _ ->
      (* Original process exits, returning control to the caller/shell. *)
      exit 0
(* Daemonise iff the [daemon] config flag is set; otherwise just run the
   optional [start_fn] in the current process. *)
let maybe_daemonize ?start_fn () =
  match !daemon with
  | true ->
      daemonize ?start_fn ()
  | false ->
      Option.iter (fun fn -> fn ()) start_fn
(* Build a Cmdliner command group named [name] from the subcommands produced
   by [cmdline_gen]; invoking it with no subcommand shows the help text in
   the pager.  [version] is a (major, minor, micro) triple. *)
let cli ~name ~doc ~version ~cmdline_gen =
  let default = Term.(ret (const (fun _ -> `Help (`Pager, None)) $ const ())) in
  let version =
    let maj, min, mic = version in
    Printf.sprintf "%d.%d.%d" maj min mic
  in
  let info = Cmd.info name ~version ~doc in
  let cmds = List.map (fun (t, i) -> Cmd.v i t) (cmdline_gen ()) in
  Cmd.group ~default info cmds
(* Evaluate a command group built by [cli], running the selected handler.
   Help/version requests fall through silently; errors are logged and mapped
   onto Cmdliner's conventional exit codes. *)
let eval_cmdline cmdline =
  match Cmd.eval_value cmdline with
  | Ok (`Ok f) ->
      f ()
  | Ok _ ->
      (* `Help or `Version: Cmdliner has already printed the output. *)
      ()
  | Error (`Parse | `Term) ->
      error "Error when parsing command line" ;
      exit Cmd.Exit.cli_error
  | Error `Exn ->
      error "Error: uncaught exception" ;
      exit Cmd.Exit.internal_error
| null | https://raw.githubusercontent.com/xapi-project/xen-api/8d17b53bc236e23aa1ec455137773f7367665d83/ocaml/xapi-idl/lib/xcp_service.ml | ocaml | Server configuration. We have built-in (hopefully) sensible defaults,
together with command-line arguments and a configuration file. They are
applied in order: (latest takes precedence) defaults < arguments < config
file
Trim trailing whitespace from a line
Remove the unnecessary doc parameter
Search the PATH and XCP_PATH for the executable
Will raise exception if config is mis-formatted. It's up to the caller to
inspect and handle the failure.
Register the Logs reporter to ensure we get log messages from libraries
using Logs
It's very confusing if there are duplicate key names
Check the required binaries are all available
Start accepting connections on sockets before we daemonize
TODO: connect to the message switch |
* Copyright ( C ) Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
module StringSet = Set.Make (String)
let default_service_name = Filename.basename Sys.argv.(0)
let config_file = ref (Printf.sprintf "/etc/%s.conf" default_service_name)
let config_dir = ref (Printf.sprintf "/etc/%s.conf.d" default_service_name)
let pidfile = ref (Printf.sprintf "/var/run/%s.pid" default_service_name)
let extra_search_path = ref []
let log_destination = ref "syslog:daemon"
let log_level = ref Syslog.Debug
let daemon = ref false
let have_daemonized () = Unix.getppid () = 1
let common_prefix = "org.xen.xapi."
let finally f g =
try
let result = f () in
g () ; result
with e -> g () ; raise e
type opt = string * Arg.spec * (unit -> string) * string
module D = Debug.Make (struct let name = default_service_name end)
open D
module Config_file = struct
open Arg
let apply v = function
| Unit f ->
f ()
| Bool f ->
f (bool_of_string v)
| Set b ->
b := bool_of_string v
| Clear b ->
b := not (bool_of_string v)
| String f ->
f v
| Set_string s ->
s := v
| Int f ->
f (int_of_string v)
| Set_int i ->
i := int_of_string v
| Float f ->
f (float_of_string v)
| Set_float f ->
f := float_of_string v
| _ ->
failwith "Unsupported type in config file"
let trim_trailing_ws line =
let re_ws = Re.compile (Re.Emacs.re "[ \t]+$") in
try
let ofs = fst (Re.Group.all_offset (Re.exec re_ws line)).(0) in
String.sub line 0 ofs
with Not_found -> line
let trim_comment line =
try
let i = String.index line '#' in
String.sub line 0 i
with Not_found -> line
let get_kv line =
let re =
Re.compile (Re.Emacs.re "\\([^=\\ \t]+\\)[\\ \t]*=[\\ \t]*\\(.*\\)")
in
let get (x, y) = String.sub line x (y - x) in
try
match Re.Group.all_offset (Re.exec re line) with
| [|_; key_ofs; v_ofs|] ->
First in array is always the full extent of all matches
Some (get key_ofs, get v_ofs)
| _ ->
None
with _ -> None
let strip_quotes (k, v) =
if String.length v < 2 then
(k, v)
else
let first = v.[0] and last = v.[String.length v - 1] in
if first = last && (first = '"' || first = '\'') then
(k, String.sub v 1 (String.length v - 2))
else
(k, v)
let parse_line line =
Strip comments
let stripped = line |> trim_comment |> trim_trailing_ws in
let lift f x = Some (f x) in
let ( >>= ) m f = match m with Some x -> f x | None -> None in
get_kv stripped >>= lift strip_quotes
let process_line data spec =
let spec = List.map (fun (a, b, _, _) -> (a, b)) spec in
match parse_line data with
| Some (key, v) ->
if List.mem_assoc key spec then apply v (List.assoc key spec)
| None ->
()
let parse filename spec =
let ic = open_in filename in
finally
(fun () ->
try
while true do
let line = input_line ic in
process_line line spec
done
with End_of_file -> ()
)
(fun () -> close_in ic)
let dump spec =
List.iter
(fun (name, _, printer, description) ->
debug "%s = %s (%s)" name (printer ()) description
)
spec
end
let rec split_c c str =
try
let i = String.index str c in
String.sub str 0 i
:: split_c c (String.sub str (i + 1) (String.length str - i - 1))
with Not_found -> [str]
let setify =
let rec loop acc = function
| [] ->
acc
| x :: xs ->
(if List.mem x acc then loop acc else loop (x :: acc)) xs
in
loop []
let common_options =
[
( "use-switch"
, Arg.Bool (fun b -> Xcp_client.use_switch := b)
, (fun () -> string_of_bool !Xcp_client.use_switch)
, "true if the message switch is to be enabled"
)
; ( "switch-path"
, Arg.Set_string Xcp_client.switch_path
, (fun () -> !Xcp_client.switch_path)
, "Unix domain socket path on localhost where the message switch is \
listening"
)
; ( "search-path"
, Arg.String
(fun s -> extra_search_path := split_c ':' s @ !extra_search_path)
, (fun () -> String.concat ":" !extra_search_path)
, "Search path for resources"
)
; ( "pidfile"
, Arg.Set_string pidfile
, (fun () -> !pidfile)
, "Filename to write process PID"
)
; ( "log"
, Arg.Set_string log_destination
, (fun () -> !log_destination)
, "Where to write log messages"
)
; ( "daemon"
, Arg.Bool (fun x -> daemon := x)
, (fun () -> string_of_bool !daemon)
, "True if we are to daemonise"
)
; ( "disable-logging-for"
, Arg.String
(fun x ->
debug "Parsing [%s]" x ;
try
let modules = List.filter (fun x -> x <> "") (split_c ' ' x) in
List.iter Debug.disable modules
with e ->
error "Processing disabled-logging-for = %s: %s" x
(Printexc.to_string e)
)
, (fun () ->
String.concat " " (setify (List.map fst (Debug.disabled_modules ())))
)
, "A space-separated list of debug modules to suppress logging from"
)
; ( "loglevel"
, Arg.String
(fun x ->
debug "Parsing [%s]" x ;
try
log_level := Syslog.level_of_string x ;
Debug.set_level !log_level
with e ->
error "Processing loglevel = %s: %s" x (Printexc.to_string e)
)
, (fun () -> Syslog.string_of_level !log_level)
, "Log level"
)
; ( "inventory"
, Arg.Set_string Inventory.inventory_filename
, (fun () -> !Inventory.inventory_filename)
, "Location of the inventory file"
)
; ( "config"
, Arg.Set_string config_file
, (fun () -> !config_file)
, "Location of configuration file"
)
; ( "config-dir"
, Arg.Set_string config_dir
, (fun () -> !config_dir)
, "Location of directory containing configuration file fragments"
)
]
let loglevel () = !log_level
module Term = Cmdliner.Term
module Cmd = Cmdliner.Cmd
let rec list = function
| [] ->
Term.const []
| x :: xs ->
Term.app (Term.app (Term.const (fun x y -> x :: y)) x) (list xs)
let command_of ?(name = Sys.argv.(0)) ?(version = "unknown")
?(doc = "Please describe this command.") xs =
let term_of_option (key, arg, get_fn, doc) =
let default = get_fn () in
match arg with
| Arg.Unit f ->
let t = Cmdliner.Arg.(value & flag & info [key] ~doc) in
let make = function true -> f () | false -> () in
Term.(const make $ t)
| Arg.Bool f ->
let t =
Cmdliner.Arg.(
value & opt bool (bool_of_string default) & info [key] ~doc
)
in
Term.(const f $ t)
| Arg.Set b ->
let t =
Cmdliner.Arg.(
value & opt bool (bool_of_string default) & info [key] ~doc
)
in
let make v = b := v in
Term.(const make $ t)
| Arg.Clear b ->
let t =
Cmdliner.Arg.(
value & opt bool (bool_of_string default) & info [key] ~doc
)
in
let make v = b := not v in
Term.(const make $ t)
| Arg.String f ->
let t = Cmdliner.Arg.(value & opt string default & info [key] ~doc) in
Term.(const f $ t)
| Arg.Set_string s ->
let t = Cmdliner.Arg.(value & opt string default & info [key] ~doc) in
let make v = s := v in
Term.(const make $ t)
| Arg.Int f ->
let t =
Cmdliner.Arg.(
value & opt int (int_of_string default) & info [key] ~doc
)
in
Term.(const f $ t)
| Arg.Set_int s ->
let t =
Cmdliner.Arg.(
value & opt int (int_of_string default) & info [key] ~doc
)
in
let make v = s := v in
Term.(const make $ t)
| Arg.Float f ->
let t =
Cmdliner.Arg.(
value & opt float (float_of_string default) & info [key] ~doc
)
in
Term.(const f $ t)
| Arg.Set_float s ->
let t =
Cmdliner.Arg.(
value & opt float (float_of_string default) & info [key] ~doc
)
in
let make v = s := v in
Term.(const make $ t)
| _ ->
let t = Cmdliner.Arg.(value & opt string default & info [key] ~doc) in
let make v = Config_file.apply v arg in
Term.(const make $ t)
in
let terms = List.map term_of_option xs in
let _common_options = "COMMON OPTIONS" in
let man =
[
`S "DESCRIPTION"
; `P doc
; `S _common_options
; `P "These options are common to all services."
; `S "BUGS"
; `P "Check bug reports at -project/xcp-idl"
]
in
Cmd.v
(Cmd.info name ~version ~sdocs:_common_options ~man)
Term.(const (fun (_ : unit list) -> `Ok ()) $ list terms)
let arg_spec = List.map (fun (a, b, _, c) -> ("-" ^ a, b, c))
type res = {
name: string
; description: string
; essential: bool
; path: string ref
; perms: Unix.access_permission list
}
let default_resources = []
let canonicalise x =
if not (Filename.is_relative x) then
x
let paths = split_c ':' (Sys.getenv "PATH") in
let first_hit =
List.fold_left
(fun found path ->
match found with
| Some _hit ->
found
| None ->
let possibility = Filename.concat path x in
if Sys.file_exists possibility then Some possibility else None
)
None
(paths @ !extra_search_path)
in
match first_hit with
| None ->
warn "Failed to find %s on $PATH ( = %s) or search_path option ( = %s)"
x (Sys.getenv "PATH")
(String.concat ":" !extra_search_path) ;
x
| Some hit ->
info "Found '%s' at '%s'" x hit ;
hit
let to_opt =
List.map (fun f ->
( f.name
, Arg.String (fun x -> f.path := canonicalise x)
, (fun () -> !(f.path))
, f.description
)
)
let read_config_file x =
if Sys.file_exists !config_file then
Config_file.parse !config_file x ;
(try Sys.readdir !config_dir with _ -> [||])
|> Array.to_list
|> List.stable_sort compare
|> List.iter (fun fragment ->
let path = Filename.concat !config_dir fragment in
Config_file.parse path x
)
let startswith prefix x =
let prefix' = String.length prefix and x' = String.length x in
prefix' <= x' && String.sub x 0 prefix' = prefix
let configure_common ~options ~resources arg_parse_fn =
Debug.init_logs () ;
let resources = default_resources @ resources in
let config_spec = common_options @ options @ to_opt resources in
let keys = List.map (fun (k, _, _, _) -> k) config_spec in
let rec check_for_duplicates seen_already = function
| [] ->
()
| x :: xs ->
if List.mem x seen_already then
warn
"Duplicate configuration keys in Xcp_service.configure: %s in [ %s \
]"
x (String.concat "; " keys) ;
check_for_duplicates (x :: seen_already) xs
in
check_for_duplicates [] keys ;
arg_parse_fn config_spec ;
read_config_file config_spec ;
List.iter (fun r -> r.path := canonicalise !(r.path)) resources ;
Config_file.dump config_spec ;
List.iter
(fun f ->
try if f.essential then Unix.access !(f.path) f.perms
with _ ->
let args =
List.filter
(fun x -> not (startswith ("--" ^ f.name) x))
(Array.to_list Sys.argv)
in
let lines =
[
"Cannot access " ^ !(f.path)
; Printf.sprintf "Please either add to %s" !config_file
; Printf.sprintf " %s=<%s>" f.name f.description
; "or add a command-line argument"
; Printf.sprintf " %s --%s=<%s>" (String.concat " " args) f.name
f.description
]
in
List.iter (fun x -> error "%s" x) lines ;
failwith (String.concat "\n" lines)
)
resources ;
Sys.set_signal Sys.sigpipe Sys.Signal_ignore
let configure ?(options = []) ?(resources = []) () =
try
configure_common ~options ~resources (fun config_spec ->
Arg.parse
(Arg.align (arg_spec config_spec))
(fun _ -> failwith "Invalid argument")
(Printf.sprintf "Usage: %s [-config filename]" Sys.argv.(0))
)
with Failure _ -> exit 1
let configure2 ~name ~version ~doc ?(options = []) ?(resources = []) () =
configure_common ~options ~resources @@ fun config_spec ->
let cmd = command_of ~name ~version ~doc config_spec in
match Cmd.eval_value ~catch:true cmd with
| Ok (`Ok _) ->
()
| Ok `Help | Ok `Version ->
exit Cmd.Exit.ok
| Error `Parse ->
exit Cmd.Exit.some_error
| Error `Term ->
exit Cmd.Exit.cli_error
| Error `Exn ->
exit Cmd.Exit.internal_error
let http_handler call_of_string string_of_response process s =
let ic = Unix.in_channel_of_descr s in
let oc = Unix.out_channel_of_descr s in
let module Request = Cohttp.Request.Make (Cohttp_posix_io.Buffered_IO) in
let module Response = Cohttp.Response.Make (Cohttp_posix_io.Buffered_IO) in
match Request.read ic with
| `Eof ->
debug "Failed to read HTTP request"
| `Invalid x ->
debug "Failed to read HTTP request. Got: '%s'" x
| `Ok req -> (
match (Cohttp.Request.meth req, Uri.path (Cohttp.Request.uri req)) with
| `POST, _ -> (
let headers = Cohttp.Request.headers req in
match Cohttp.Header.get headers "content-length" with
| None ->
debug "Failed to read content-length"
| Some content_length ->
let content_length = int_of_string content_length in
let request_txt = Bytes.make content_length '\000' in
really_input ic request_txt 0 content_length ;
let rpc_call =
call_of_string (Bytes.unsafe_to_string request_txt)
in
debug "%s" (Rpc.string_of_call rpc_call) ;
let rpc_response = process rpc_call in
debug " %s" (Rpc.string_of_response rpc_response) ;
let response_txt = string_of_response rpc_response in
let content_length = String.length response_txt in
let headers =
Cohttp.Header.of_list
[
("user-agent", default_service_name)
; ("content-length", string_of_int content_length)
]
in
let response =
Cohttp.Response.make ~version:`HTTP_1_1 ~status:`OK ~headers
~encoding:(Cohttp.Transfer.Fixed (Int64.of_int content_length))
()
in
Response.write
(fun t -> Response.write_body t response_txt)
response oc
)
| _, _ ->
let content_length = 0 in
let headers =
Cohttp.Header.of_list
[
("user-agent", default_service_name)
; ("content-length", string_of_int content_length)
]
in
let response =
Cohttp.Response.make ~version:`HTTP_1_1 ~status:`Not_found ~headers
~encoding:(Cohttp.Transfer.Fixed (Int64.of_int content_length))
()
in
Response.write (fun _t -> ()) response oc
)
let ign_int (t : int) = ignore t
let default_raw_fn rpc_fn s =
http_handler Xmlrpc.call_of_string Xmlrpc.string_of_response rpc_fn s
let mkdir_rec dir perm =
let rec p_mkdir dir =
let p_name = Filename.dirname dir in
if p_name <> "/" && p_name <> "." then p_mkdir p_name ;
try Unix.mkdir dir perm with Unix.Unix_error (Unix.EEXIST, _, _) -> ()
in
p_mkdir dir
type server =
| Socket of Unix.file_descr * (Unix.file_descr -> unit)
| Queue of string * (Rpc.call -> Rpc.response)
let make_socket_server path fn =
try
(try Unix.unlink path with Unix.Unix_error (Unix.ENOENT, _, _) -> ()) ;
mkdir_rec (Filename.dirname path) 0o0755 ;
let sock = Unix.socket Unix.PF_UNIX Unix.SOCK_STREAM 0 in
Unix.bind sock (Unix.ADDR_UNIX path) ;
Unix.listen sock 5 ;
info "Listening on %s" path ;
Socket (sock, fn)
with e ->
error "Failed to listen on Unix domain socket %s. Raw error was: %s" path
(Printexc.to_string e) ;
( match e with
| Unix.Unix_error (Unix.EACCES, _, _) ->
error "Access was denied." ;
error "Possible fixes include:" ;
error "1. Run this program as root (recommended)" ;
error
"2. Make the permissions in the filesystem more permissive (my \
effective uid is %d)"
(Unix.geteuid ()) ;
error "3. Adjust the sockets-path directive in %s" !config_file ;
exit 1
| _ ->
()
) ;
raise e
let make_queue_server name fn = Queue (name, fn)
let make ~path ~queue_name ?raw_fn ~rpc_fn () =
if !Xcp_client.use_switch then
make_queue_server queue_name rpc_fn
else
make_socket_server path
(match raw_fn with Some x -> x | None -> default_raw_fn rpc_fn)
let serve_forever = function
| Socket (listening_sock, fn) ->
while true do
let this_connection, _ = Unix.accept listening_sock in
let (_ : Thread.t) =
Thread.create
(fun () ->
finally
(fun () -> fn this_connection)
(fun () -> Unix.close this_connection)
)
()
in
()
done
| Queue (queue_name, fn) ->
let process x =
Jsonrpc.string_of_response (fn (Jsonrpc.call_of_string x))
in
let _ =
Message_switch_unix.Protocol_unix.Server.listen ~process
~switch:!Xcp_client.switch_path ~queue:queue_name ()
in
let rec forever () = Thread.delay 3600. ; forever () in
forever ()
let pidfile_write filename =
let fd =
Unix.openfile filename [Unix.O_WRONLY; Unix.O_CREAT; Unix.O_TRUNC] 0o640
in
finally
(fun () ->
let pid = Unix.getpid () in
let buf = string_of_int pid ^ "\n" |> Bytes.of_string in
let len = Bytes.length buf in
if Unix.write fd buf 0 len <> len then
failwith "pidfile_write failed"
)
(fun () -> Unix.close fd)
, Advanced Programming in the UNIX Environment ,
Section 13.3
Section 13.3 *)
let daemonize ?start_fn () =
if not (have_daemonized ()) then
ign_int (Unix.umask 0) ;
match Unix.fork () with
| 0 -> (
if Unix.setsid () == -1 then failwith "Unix.setsid failed" ;
Sys.set_signal Sys.sighup Sys.Signal_ignore ;
match Unix.fork () with
| 0 ->
Option.iter (fun fn -> fn ()) start_fn ;
Unix.chdir "/" ;
mkdir_rec (Filename.dirname !pidfile) 0o755 ;
pidfile_write !pidfile ;
let nullfd = Unix.openfile "/dev/null" [Unix.O_RDWR] 0 in
Unix.dup2 nullfd Unix.stdin ;
Unix.dup2 nullfd Unix.stdout ;
Unix.dup2 nullfd Unix.stderr ;
Unix.close nullfd
| _ ->
exit 0
)
| _ ->
exit 0
let maybe_daemonize ?start_fn () =
if !daemon then
daemonize ?start_fn ()
else
Option.iter (fun fn -> fn ()) start_fn
let cli ~name ~doc ~version ~cmdline_gen =
let default = Term.(ret (const (fun _ -> `Help (`Pager, None)) $ const ())) in
let version =
let maj, min, mic = version in
Printf.sprintf "%d.%d.%d" maj min mic
in
let info = Cmd.info name ~version ~doc in
let cmds = List.map (fun (t, i) -> Cmd.v i t) (cmdline_gen ()) in
Cmd.group ~default info cmds
let eval_cmdline cmdline =
match Cmd.eval_value cmdline with
| Ok (`Ok f) ->
f ()
| Ok _ ->
()
| Error (`Parse | `Term) ->
error "Error when parsing command line" ;
exit Cmd.Exit.cli_error
| Error `Exn ->
error "Error: uncaught exception" ;
exit Cmd.Exit.internal_error
|
78422ebbcbdb6b270d6b3f35b391777833a781b6fe55268c26c204202b1693be | adinapoli/clj3D | fenvs.clj | (ns clj3D.test.fenvs
(:use
[clj3D.fenvs]
[clojure.test]:reload)
(:import [com.jme3.math Vector3f]))
;; jvector is private in clj3D.fenvs, so resolve its var directly in order
;; to exercise it from these tests.
(def jvector (ns-resolve 'clj3D.fenvs 'jvector))
(deftest curry-test
  ;; Currying a binary function yields a unary function that closes over
  ;; the first argument.
  (let [sum2 (fn [a b] (+ a b))]
    (is (function? (curry sum2 1)))
    (is (= 3 ((curry sum2 1) 2)) "Simple currying ((sum 1) 2)")))
(deftest chr-and-ord
  ;; chr (int -> char) and ord (char -> int) are inverses; any other
  ;; argument type raises ClassCastException.
  (is (= \c (chr 99)))
  (is (= 99 (ord \c)))
  ;; round trips
  (is (= \c (chr (ord \c))))
  (is (= 99 (ord (chr 99))))
  ;; invalid argument types
  (is (thrown? ClassCastException (chr \c)))
  (is (thrown? ClassCastException (chr 3.41)))
  (is (thrown? ClassCastException (ord 9.9)))
  (is (thrown? ClassCastException (ord "a"))))
(deftest cat-test
  ;; NOTE(review): six of these assertions were originally written as
  ;; (is (= expected) actual) -- the actual value sat in `is`'s message
  ;; slot and (= expected) is trivially true, so they asserted nothing.
  ;; The parentheses are fixed below; the expected values are the original
  ;; author's and should be confirmed against cat's actual semantics.
  (is (= [] (cat [])))
  (is (= [1 2 3] (cat [1 2 3])))
  (is (= [1 2 3 4 5 6] (cat [1 2 3] [4 5 6])))
  (is (= [1 2 3 4] (cat [1 2] '(3 4))))
  (is (= [1 2 3 4 5 6] (cat [1 2] '(3 4) [5 6])))
  (is (= [1 2 3 4 8 8] (cat [1 2] '(3 4) '(8 8))))
  (is (= [1 2 3 4 8 8 9 8] (cat [1 2] '(3 4) [8 8] '(9 8))))
  (is (= [1 2 3 4] (cat [[1 2] [3 4]])))
  (is (= ["a" "very" "nested" "list"] (cat (cat [[["a" "very"] ["nested" "list"]]])))))
(deftest id-test
  ;; id is the identity function for any value.
  (is (= 4 (id 4)))
  (is (= true (id true)))
  (is (= [1 2] (id [1 2]))))
(deftest k-test
  ;; k is the constant combinator: (k x) ignores its argument and returns x,
  ;; usable curried or with both arguments at once.
  (is (function? (k 1)))
  (is (= 4 (k 4 2)))
  (is (= (k 2 3) ((k 2) 3)))
  ;; tt maps anything -- even nil and false -- to true.
  (is (true? (tt false)))
  (is (true? (tt nil))))
(deftest distl-test
  ;; distl distributes an element onto the LEFT of every sub-collection.
  (is (= [[9 0 0] [9 1 1] [9 2 2]] (distl 9 [[0 0] [1 1] [2 2]])))
  (is (= [[9 1] [9 2 2] [9 3]] (distl 9 [[1] [2 2] [3]]))))
(deftest distr-test
  ;; distr distributes an element onto the RIGHT of every sub-collection.
  (is (= [[0 0 9] [1 1 9] [2 2 9]] (distr 9 [[0 0] [1 1] [2 2]])))
  (is (= [[1 9] [2 2 9] [3 9]] (distr 9 [[1] [2 2] [3]]))))
(deftest insl-test
  ;; insl folds from the left: ((1 - 2) - 3) = -4.
  (is (= -4 (insl (fn [a b] (- a b)) [1 2 3])))
  (is (= 0 (insl (fn [a b] (- a b)) [1 2 -1]))))
(deftest insr-test
  ;; insr folds from the right: (1 - (2 - 3)) = 2.
  (is (= 2 (insr (fn [a b] (- a b)) [1 2 3])))
  (is (= -2 (insr (fn [a b] (- a b)) [1 2 -1]))))
(deftest aa-test
  ;; aa ("apply-all") lifts a function over a collection, either curried or
  ;; applied directly.
  (let [square (fn [x] (* x x))]
    (is (function? (aa square)))
    (is (= [1 4 9] ((aa square) [1 2 3])))
    (is (= [1 4 9] (aa square [1 2 3])))))
(deftest jvector-test
  ;; jvector builds a JMonkeyEngine Vector3f.  From the assertions below it
  ;; appears that (jvector idxs vals) sets the 1-based components listed in
  ;; idxs to the corresponding vals, with unset components defaulting to
  ;; 0.0 -- TODO confirm against the clj3D.fenvs implementation.
  (is (= 3.0 (.getX ^Vector3f (jvector 1 3.0))))
  (is (= 3.0 (.getX ^Vector3f (jvector [1] [3]))))
  (let [v1 (jvector [1 3] [4 2])]
    (is (= 4.0 (.getX ^Vector3f v1)))
    (is (= 0.0 (.getY ^Vector3f v1)))
    (is (= 2.0 (.getZ ^Vector3f v1))))
  (let [v1 (jvector [1 2 3] [4 8 2])]
    (is (= 4.0 (.getX ^Vector3f v1)))
    (is (= 8.0 (.getY ^Vector3f v1)))
    (is (= 2.0 (.getZ ^Vector3f v1)))))
(deftest n-test
  ;; (n k x) builds a sequence of k copies of x.
  (is (= '(10 10 10) (n 3 10))))
(deftest nn-test
  ;; (nn k coll) appears to concatenate k copies of coll -- only exercised
  ;; here with a single-element coll, so verify against the implementation.
  (is (= '(10 10 10) (nn 3 [10]))))
(deftest vectsum-test
  ;; Component-wise sum of an arbitrary number of vectors.
  (is (= [11 13 15] (vectsum [10 11 12] [0 1 2] [1 1 1])))
  (is (= [0 0] (vectsum [0 1] [1 0] [-1 -1])))
  (is (= [8 20 7] (vectsum [0 1 1] [2 2 2] [4 5 2] [2 12 2]))))
(deftest vectdiff-test
  ;; Component-wise difference: the first vector minus all the rest.
  (is (= [9 9 9] (vectdiff [10 11 12] [0 1 2] [1 1 1]))))
(deftest meanpoint-test
  ;; Component-wise arithmetic mean of the given points, as doubles.
  (is (= [2.0 1.0 1.5] (meanpoint [3 0 0] [1 2 3])))
  (is (= [1.0 1.0 1.0] (meanpoint [0 0 0] [1 1 1] [2 2 2]))))
(deftest div-test
  ;; Chained floating-point division, applied left to right.
  (is (= 5.0 (div 20 2 2)))
  (is (= 1.0 (div 10 5 2)))
  (is (= 1.0 (div 10 2 5))))
(:use
[clj3D.fenvs]
[clojure.test]:reload)
(:import [com.jme3.math Vector3f]))
(def jvector (ns-resolve 'clj3D.fenvs 'jvector))
(deftest curry-test
(let [sum2 #(+ %1 %2)]
(is (= 3 ((curry sum2 1) 2)) "Simple currying ((sum 1) 2)")
(is (function? (curry sum2 1)))))
(deftest chr-and-ord
(is (= \c (chr 99)))
(is (thrown? ClassCastException (chr \c)))
(is (thrown? ClassCastException (chr 3.41)))
(is (= 99 (ord \c)))
(is (thrown? ClassCastException (ord 9.9)))
(is (thrown? ClassCastException (ord "a")))
(is (= \c (chr (ord \c))))
(is (= 99 (ord (chr 99)))))
(deftest cat-test
(is (= [] (cat [])))
(is (= [1 2 3]) (cat [1 2 3]))
(is (= [1 2 3 4 5 6]) (cat [1 2 3] [4 5 6]))
(is (= [1 2 3 4]) (cat [1 2] '(3 4)))
(is (= [1 2 3 4 5 6]) (cat [1 2] '(3 4) [5 6]))
(is (= [1 2 3 4 8 8]) (cat [1 2] '(3 4) '(8 8)))
(is (= [1 2 3 4 8 8 9 8]) (cat [1 2] '(3 4) [8 8] '(9 8)))
(is (= [1 2 3 4] (cat [[1 2] [3 4]])))
(is (= ["a" "very" "nested" "list"] (cat (cat [[["a" "very"] ["nested" "list"]]])))))
(deftest id-test
(is (= true (id true)))
(is (= 4 (id 4)))
(is (= [1 2] (id [1 2]))))
(deftest k-test
(is (function? (k 1)))
(is (= 4 (k 4 2)))
(is (= ((k 2) 3) (k 2 3)))
(is (true? (tt nil)))
(is (true? (tt false))))
(deftest distl-test
(is (= [[9 1] [9 2 2] [9 3]] (distl 9 [[1] [2 2] [3]])))
(is (= [[9 0 0] [9 1 1] [9 2 2]] (distl 9 [[0 0] [1 1] [2 2]]))))
(deftest distr-test
(is (= [[1 9] [2 2 9] [3 9]] (distr 9 [[1] [2 2] [3]])))
(is (= [[0 0 9] [1 1 9] [2 2 9]] (distr 9 [[0 0] [1 1] [2 2]]))))
(deftest insl-test
(is (= -4 (insl #(- %1 %2) [1 2 3])))
(is (= 0 (insl #(- %1 %2) [1 2 -1]))))
(deftest insr-test
(is (= 2 (insr #(- %1 %2) [1 2 3])))
(is (= -2 (insr #(- %1 %2) [1 2 -1]))))
(deftest aa-test
(is (function? (aa #(* %1 %1))))
(is (= [1 4 9] ((aa #(* %1 %1)) [1 2 3])))
(is (= [1 4 9] (aa #(* %1 %1) [1 2 3]))))
(deftest jvector-test
(is (= 3.0 (.getX ^Vector3f (jvector 1 3.0))))
(is (= 3.0 (.getX ^Vector3f (jvector [1] [3]))))
(let [v1 (jvector [1 3] [4 2])]
(is (= 4.0 (.getX ^Vector3f v1)))
(is (= 0.0 (.getY ^Vector3f v1)))
(is (= 2.0 (.getZ ^Vector3f v1))))
(let [v1 (jvector [1 2 3] [4 8 2])]
(is (= 4.0 (.getX ^Vector3f v1)))
(is (= 8.0 (.getY ^Vector3f v1)))
(is (= 2.0 (.getZ ^Vector3f v1)))))
(deftest n-test
(is (= '(10 10 10) (n 3 10))))
(deftest nn-test
(is (= '(10 10 10) (nn 3 [10]))))
(deftest vectsum-test
(is (= [8 20 7] (vectsum [0 1 1] [2 2 2] [4 5 2] [2 12 2])))
(is (= [0 0] (vectsum [0 1] [1 0] [-1 -1])))
(is (= [11 13 15] (vectsum [10 11 12] [0 1 2] [1 1 1]))))
(deftest vectdiff-test
(is (= [9 9 9] (vectdiff [10 11 12] [0 1 2] [1 1 1]))))
(deftest meanpoint-test
(is (= [1.0 1.0 1.0] (meanpoint [0 0 0] [1 1 1] [2 2 2])))
(is (= [2.0 1.0 1.5] (meanpoint [3 0 0] [1 2 3]))))
(deftest div-test
(is (= 1.0 (div 10 5 2)))
(is (= 1.0 (div 10 2 5)))
(is (= 5.0 (div 20 2 2)))) |
db789b5f247883df5d16436bf6b9b4a6a4598c5bc7df21e666909097ba5e91ab | zenspider/schemers | exercise.1.30.scm | #lang racket/base
;;; Exercise 1.30:
;; The `sum' procedure above generates a linear recursion. The
;; procedure can be rewritten so that the sum is performed
;; iteratively. Show how to do this by filling in the missing
;; expressions in the following definition:
(define (sum f a b n)
(define (iterate a result)
(if (> a b) result
(iterate (n a) (+ result (f a)))))
(iterate a 0))
(define (inc n) (+ n 1))
(define (identity n) n)
(sum identity 1 10 inc)
(= (/ (+ (* 10 10) 10) 2) (sum identity 1 10 inc))
| null | https://raw.githubusercontent.com/zenspider/schemers/2939ca553ac79013a4c3aaaec812c1bad3933b16/sicp/ch_1/exercise.1.30.scm | scheme | Exercise 1.30:
The `sum' procedure above generates a linear recursion. The
procedure can be rewritten so that the sum is performed
iteratively. Show how to do this by filling in the missing
expressions in the following definition: | #lang racket/base
(define (sum f a b n)
(define (iterate a result)
(if (> a b) result
(iterate (n a) (+ result (f a)))))
(iterate a 0))
(define (inc n) (+ n 1))
(define (identity n) n)
(sum identity 1 10 inc)
(= (/ (+ (* 10 10) 10) 2) (sum identity 1 10 inc))
|
4437c2ab1c6bcda6ce9ca3920c60e4c1a9a98f2a664e0b930fa794483faaf612 | 2600hz-archive/whistle | rabbit_event.erl | The contents of this file are subject to the Mozilla Public License
%% Version 1.1 (the "License"); you may not use this file except in
%% compliance with the License. You may obtain a copy of the License
%% at /
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and
%% limitations under the License.
%%
The Original Code is RabbitMQ .
%%
The Initial Developer of the Original Code is VMware , Inc.
Copyright ( c ) 2007 - 2011 VMware , Inc. All rights reserved .
%%
-module(rabbit_event).
-include("rabbit.hrl").
-export([start_link/0]).
-export([init_stats_timer/0, ensure_stats_timer/2, stop_stats_timer/1]).
-export([reset_stats_timer/1]).
-export([stats_level/1, if_enabled/2]).
-export([notify/2, notify_if/3]).
%%----------------------------------------------------------------------------
-record(state, {level, timer}).
%%----------------------------------------------------------------------------
-ifdef(use_specs).
-export_type([event_type/0, event_props/0, event_timestamp/0, event/0]).
-type(event_type() :: atom()).
-type(event_props() :: term()).
-type(event_timestamp() ::
{non_neg_integer(), non_neg_integer(), non_neg_integer()}).
-type(event() :: #event {
type :: event_type(),
props :: event_props(),
timestamp :: event_timestamp()
}).
-type(level() :: 'none' | 'coarse' | 'fine').
-opaque(state() :: #state {
level :: level(),
timer :: atom()
}).
-type(timer_fun() :: fun (() -> 'ok')).
-spec(start_link/0 :: () -> rabbit_types:ok_pid_or_error()).
-spec(init_stats_timer/0 :: () -> state()).
-spec(ensure_stats_timer/2 :: (state(), timer_fun()) -> state()).
-spec(stop_stats_timer/1 :: (state()) -> state()).
-spec(reset_stats_timer/1 :: (state()) -> state()).
-spec(stats_level/1 :: (state()) -> level()).
-spec(if_enabled/2 :: (state(), timer_fun()) -> 'ok').
-spec(notify/2 :: (event_type(), event_props()) -> 'ok').
-spec(notify_if/3 :: (boolean(), event_type(), event_props()) -> 'ok').
-endif.
%%----------------------------------------------------------------------------
start_link() ->
gen_event:start_link({local, ?MODULE}).
%% The idea is, for each stat-emitting object:
%%
%% On startup:
%% Timer = init_stats_timer()
%% notify(created event)
if_enabled(internal_emit_stats ) - so we immediately send something
%%
%% On wakeup:
%% ensure_stats_timer(Timer, emit_stats)
( Note we ca n't emit stats immediately , the timer may have fired 1ms ago . )
%%
%% emit_stats:
if_enabled(internal_emit_stats )
%% reset_stats_timer(Timer) - just bookkeeping
%%
%% Pre-hibernation:
if_enabled(internal_emit_stats )
%% stop_stats_timer(Timer)
%%
%% internal_emit_stats:
%% notify(stats)
init_stats_timer() ->
{ok, StatsLevel} = application:get_env(rabbit, collect_statistics),
#state{level = StatsLevel, timer = undefined}.
ensure_stats_timer(State = #state{level = none}, _Fun) ->
State;
ensure_stats_timer(State = #state{timer = undefined}, Fun) ->
{ok, TRef} = timer:apply_after(?STATS_INTERVAL,
erlang, apply, [Fun, []]),
State#state{timer = TRef};
ensure_stats_timer(State, _Fun) ->
State.
stop_stats_timer(State = #state{level = none}) ->
State;
stop_stats_timer(State = #state{timer = undefined}) ->
State;
stop_stats_timer(State = #state{timer = TRef}) ->
{ok, cancel} = timer:cancel(TRef),
State#state{timer = undefined}.
reset_stats_timer(State) ->
State#state{timer = undefined}.
stats_level(#state{level = Level}) ->
Level.
if_enabled(#state{level = none}, _Fun) ->
ok;
if_enabled(_State, Fun) ->
Fun(),
ok.
notify_if(true, Type, Props) -> notify(Type, Props);
notify_if(false, _Type, _Props) -> ok.
notify(Type, Props) ->
%% TODO: switch to os:timestamp() when we drop support for
Erlang / OTP < R13B01
gen_event:notify(rabbit_event, #event{type = Type,
props = Props,
timestamp = now()}).
| null | https://raw.githubusercontent.com/2600hz-archive/whistle/1a256604f0d037fac409ad5a55b6b17e545dcbf9/lib/rabbitmq_server-2.4.1/src/rabbit_event.erl | erlang | Version 1.1 (the "License"); you may not use this file except in
compliance with the License. You may obtain a copy of the License
at /
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and
limitations under the License.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
The idea is, for each stat-emitting object:
On startup:
Timer = init_stats_timer()
notify(created event)
On wakeup:
ensure_stats_timer(Timer, emit_stats)
emit_stats:
reset_stats_timer(Timer) - just bookkeeping
Pre-hibernation:
stop_stats_timer(Timer)
internal_emit_stats:
notify(stats)
TODO: switch to os:timestamp() when we drop support for | The contents of this file are subject to the Mozilla Public License
Software distributed under the License is distributed on an " AS IS "
The Original Code is RabbitMQ .
The Initial Developer of the Original Code is VMware , Inc.
Copyright ( c ) 2007 - 2011 VMware , Inc. All rights reserved .
-module(rabbit_event).
-include("rabbit.hrl").
-export([start_link/0]).
-export([init_stats_timer/0, ensure_stats_timer/2, stop_stats_timer/1]).
-export([reset_stats_timer/1]).
-export([stats_level/1, if_enabled/2]).
-export([notify/2, notify_if/3]).
-record(state, {level, timer}).
-ifdef(use_specs).
-export_type([event_type/0, event_props/0, event_timestamp/0, event/0]).
-type(event_type() :: atom()).
-type(event_props() :: term()).
-type(event_timestamp() ::
{non_neg_integer(), non_neg_integer(), non_neg_integer()}).
-type(event() :: #event {
type :: event_type(),
props :: event_props(),
timestamp :: event_timestamp()
}).
-type(level() :: 'none' | 'coarse' | 'fine').
-opaque(state() :: #state {
level :: level(),
timer :: atom()
}).
-type(timer_fun() :: fun (() -> 'ok')).
-spec(start_link/0 :: () -> rabbit_types:ok_pid_or_error()).
-spec(init_stats_timer/0 :: () -> state()).
-spec(ensure_stats_timer/2 :: (state(), timer_fun()) -> state()).
-spec(stop_stats_timer/1 :: (state()) -> state()).
-spec(reset_stats_timer/1 :: (state()) -> state()).
-spec(stats_level/1 :: (state()) -> level()).
-spec(if_enabled/2 :: (state(), timer_fun()) -> 'ok').
-spec(notify/2 :: (event_type(), event_props()) -> 'ok').
-spec(notify_if/3 :: (boolean(), event_type(), event_props()) -> 'ok').
-endif.
start_link() ->
gen_event:start_link({local, ?MODULE}).
if_enabled(internal_emit_stats ) - so we immediately send something
( Note we ca n't emit stats immediately , the timer may have fired 1ms ago . )
if_enabled(internal_emit_stats )
if_enabled(internal_emit_stats )
init_stats_timer() ->
{ok, StatsLevel} = application:get_env(rabbit, collect_statistics),
#state{level = StatsLevel, timer = undefined}.
ensure_stats_timer(State = #state{level = none}, _Fun) ->
State;
ensure_stats_timer(State = #state{timer = undefined}, Fun) ->
{ok, TRef} = timer:apply_after(?STATS_INTERVAL,
erlang, apply, [Fun, []]),
State#state{timer = TRef};
ensure_stats_timer(State, _Fun) ->
State.
stop_stats_timer(State = #state{level = none}) ->
State;
stop_stats_timer(State = #state{timer = undefined}) ->
State;
stop_stats_timer(State = #state{timer = TRef}) ->
{ok, cancel} = timer:cancel(TRef),
State#state{timer = undefined}.
reset_stats_timer(State) ->
State#state{timer = undefined}.
stats_level(#state{level = Level}) ->
Level.
if_enabled(#state{level = none}, _Fun) ->
ok;
if_enabled(_State, Fun) ->
Fun(),
ok.
notify_if(true, Type, Props) -> notify(Type, Props);
notify_if(false, _Type, _Props) -> ok.
notify(Type, Props) ->
Erlang / OTP < R13B01
gen_event:notify(rabbit_event, #event{type = Type,
props = Props,
timestamp = now()}).
|
a9c26d4166b14520d64f7c7e12d0c07e1e2968d9e2a6368d70e8d2fafcb43d25 | dyzsr/ocaml-selectml | t172-pushenvacc1.ml | TEST
include tool - ocaml - lib
flags = " -w -a "
ocaml_script_as_argument = " true "
* setup - ocaml - build - env
* *
include tool-ocaml-lib
flags = "-w -a"
ocaml_script_as_argument = "true"
* setup-ocaml-build-env
** ocaml
*)
open Lib;;
let x = 5 in
let f _ = x + x in
if f 0 <> 10 then raise Not_found
;;
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7
9 BRANCH 16
11 ENVACC1
12 PUSHENVACC1
13 ADDINT
14 RETURN 1
16 CONSTINT 5
18
19 CLOSURE 1 , 11
22 PUSHCONSTINT 10
24 PUSHCONST0
25 PUSHACC2
26 APPLY1
27 NEQ
28 BRANCHIFNOT 35
30 GETGLOBAL Not_found
32 MAKEBLOCK1 0
34 RAISE
35 POP 2
37 ATOM0
38
40 STOP
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 BRANCH 16
11 ENVACC1
12 PUSHENVACC1
13 ADDINT
14 RETURN 1
16 CONSTINT 5
18 PUSHACC0
19 CLOSURE 1, 11
22 PUSHCONSTINT 10
24 PUSHCONST0
25 PUSHACC2
26 APPLY1
27 NEQ
28 BRANCHIFNOT 35
30 GETGLOBAL Not_found
32 MAKEBLOCK1 0
34 RAISE
35 POP 2
37 ATOM0
38 SETGLOBAL T172-pushenvacc1
40 STOP
**)
| null | https://raw.githubusercontent.com/dyzsr/ocaml-selectml/875544110abb3350e9fb5ec9bbadffa332c270d2/testsuite/tests/tool-ocaml/t172-pushenvacc1.ml | ocaml | TEST
include tool - ocaml - lib
flags = " -w -a "
ocaml_script_as_argument = " true "
* setup - ocaml - build - env
* *
include tool-ocaml-lib
flags = "-w -a"
ocaml_script_as_argument = "true"
* setup-ocaml-build-env
** ocaml
*)
open Lib;;
let x = 5 in
let f _ = x + x in
if f 0 <> 10 then raise Not_found
;;
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7
9 BRANCH 16
11 ENVACC1
12 PUSHENVACC1
13 ADDINT
14 RETURN 1
16 CONSTINT 5
18
19 CLOSURE 1 , 11
22 PUSHCONSTINT 10
24 PUSHCONST0
25 PUSHACC2
26 APPLY1
27 NEQ
28 BRANCHIFNOT 35
30 GETGLOBAL Not_found
32 MAKEBLOCK1 0
34 RAISE
35 POP 2
37 ATOM0
38
40 STOP
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 BRANCH 16
11 ENVACC1
12 PUSHENVACC1
13 ADDINT
14 RETURN 1
16 CONSTINT 5
18 PUSHACC0
19 CLOSURE 1, 11
22 PUSHCONSTINT 10
24 PUSHCONST0
25 PUSHACC2
26 APPLY1
27 NEQ
28 BRANCHIFNOT 35
30 GETGLOBAL Not_found
32 MAKEBLOCK1 0
34 RAISE
35 POP 2
37 ATOM0
38 SETGLOBAL T172-pushenvacc1
40 STOP
**)
| |
f13bb8d8ef7e5f0d6897b2d47ebc8918d5336ad043a31e93ac1ec6e4569b7904 | dvdt/xvsy | datasets.clj | (ns xvsy.datasets
(:require [korma.core]
[schema.core :as s]
[xvsy.goog-bq :as goog-bq]
[xvsy.ggsql :refer [defdataset]]))
(korma.db/defdb bq (goog-bq/goog-bq {}))
(defdataset bq-natality "natality"
(korma.core/table "publicdata:samples.natality")
(korma.core/database bq)
(assoc :dataset "samples")
(assoc :cols {"alcohol_use" {:factor true :type s/Bool}
"apgar_1min" {:factor false :type s/Int}
"apgar_5min" {:factor false :type s/Int}
"born_alive_alive" {:factor false :type s/Int}
"born_alive_dead" {:factor false :type s/Int}
"born_dead" {:factor true :type s/Int}
"child_race" {:factor true :type s/Int}
"cigarettes_per_day" {:factor false :type s/Int}
"cigarette_use" {:factor true :type s/Bool}
"day" {:factor true :type s/Int}
"drinks_per_week" {:factor false :type s/Int}
"ever_born" {:factor false :type s/Int}
"father_age" {:factor false :type s/Int}
"father_race" {:factor true :type s/Int}
"gestation_weeks" {:factor false :type s/Int}
"is_male" {:factor true :type s/Bool}
"lmp" {:factor true :type s/Str}
"month" {:factor true :type s/Int}
"mother_age" {:factor false :type s/Int}
"mother_birth_state" {:factor true :type s/Str}
"mother_married" {:factor true :type s/Bool}
"mother_race" {:factor true :type s/Int}
"mother_residence_state" {:factor true :type s/Str}
"plurality" {:factor false :type s/Int}
"record_weight" {:factor false :type s/Int}
"source_year" {:factor true :type s/Int}
"state" {:factor true :type s/Str}
"wday" {:factor true :type s/Int}
"weight_gain_pounds" {:factor false :type s/Int}
"weight_pounds" {:factor false :type s/Num}
"year" {:factor true :type s/Int}}))
| null | https://raw.githubusercontent.com/dvdt/xvsy/ff29b96affc6723bb9c66da1011f31900af679dd/src/clj/xvsy/datasets.clj | clojure | (ns xvsy.datasets
(:require [korma.core]
[schema.core :as s]
[xvsy.goog-bq :as goog-bq]
[xvsy.ggsql :refer [defdataset]]))
(korma.db/defdb bq (goog-bq/goog-bq {}))
(defdataset bq-natality "natality"
(korma.core/table "publicdata:samples.natality")
(korma.core/database bq)
(assoc :dataset "samples")
(assoc :cols {"alcohol_use" {:factor true :type s/Bool}
"apgar_1min" {:factor false :type s/Int}
"apgar_5min" {:factor false :type s/Int}
"born_alive_alive" {:factor false :type s/Int}
"born_alive_dead" {:factor false :type s/Int}
"born_dead" {:factor true :type s/Int}
"child_race" {:factor true :type s/Int}
"cigarettes_per_day" {:factor false :type s/Int}
"cigarette_use" {:factor true :type s/Bool}
"day" {:factor true :type s/Int}
"drinks_per_week" {:factor false :type s/Int}
"ever_born" {:factor false :type s/Int}
"father_age" {:factor false :type s/Int}
"father_race" {:factor true :type s/Int}
"gestation_weeks" {:factor false :type s/Int}
"is_male" {:factor true :type s/Bool}
"lmp" {:factor true :type s/Str}
"month" {:factor true :type s/Int}
"mother_age" {:factor false :type s/Int}
"mother_birth_state" {:factor true :type s/Str}
"mother_married" {:factor true :type s/Bool}
"mother_race" {:factor true :type s/Int}
"mother_residence_state" {:factor true :type s/Str}
"plurality" {:factor false :type s/Int}
"record_weight" {:factor false :type s/Int}
"source_year" {:factor true :type s/Int}
"state" {:factor true :type s/Str}
"wday" {:factor true :type s/Int}
"weight_gain_pounds" {:factor false :type s/Int}
"weight_pounds" {:factor false :type s/Num}
"year" {:factor true :type s/Int}}))
| |
1158e86ea32f6234d2e81f2acbc6d487e81d0563f6db1ddfc16ecb34a3389052 | ijvcms/chuanqi_dev | task_comply.erl | %%%-------------------------------------------------------------------
@author yubing
( C ) 2016 , < COMPANY >
%%% @doc
%%%
%%% @end
Created : 05 . 一月 2016 10:20
%%%-------------------------------------------------------------------
-module(task_comply).
-include("cache.hrl").
-include("config.hrl").
-include("proto.hrl").
-include("record.hrl").
-include("common.hrl").
-include("button_tips_config.hrl").
-export([update_player_task_info/3,
update_player_tasksort_kill_scene/3,
update_player_tasksort_up_lv/1,
update_player_tasksort_hava_goods/2,
update_player_tasksort_kill_monster/2,
update_accpet__player_task/2,
update_player_tasksort_past_fb/2,
update_player_tasksort_collect_item/2,
update_player_task_info_tool/4
]).
更新玩家的任务信息 标准版
update_player_task_info(PlayerPid, TaskSortId, Num) when is_pid(PlayerPid) ->
gen_server2:apply_async(PlayerPid, {?MODULE, update_player_task_info, [TaskSortId, Num]});
更新玩家的任务信息 标准版
update_player_task_info(State, TaskSortId, Num) ->
PlayerId = State#player_state.player_id,
TaskList = player_task_dict:get_value_from_list_by_tasksortid(TaskSortId),
F = fun(X, List) ->
Num1 = X#db_player_task.nownum + Num,
TaskConf = task_config:get(X#db_player_task.taskid_id),
X1 =
case TaskConf#task_conf.need_num =< Num1 of
true ->
X#db_player_task{
isfinish = 1,
nownum = TaskConf#task_conf.need_num
};
_ ->
X#db_player_task{
nownum = Num1
}
end,
%% 存库
player_task_cache:update(PlayerId, X1),
player_task_dict:update_task_list(X1),
if
TaskConf#task_conf.type_id =:= ?TASKTYPEID2 ->
send_active_task(State, X, X1, TaskConf),
List;
true ->
[X1 | List]
end
end,
TaskList1 = lists:foldl(F, [], TaskList),
main_task_lib:ref_task_navigate_list(State, TaskList1),
{ok, State}.
更新玩家的任务信息 标准版
update_player_task_info_tool(PlayerPid, TaskSortId, Tool, Num) when is_pid(PlayerPid) ->
gen_server2:apply_async(PlayerPid, {?MODULE, update_player_task_info_tool, [TaskSortId, Tool, Num]});
更新玩家的任务信息 标准版
update_player_task_info_tool(State, TaskSortId, Tool, Num) ->
PlayerId = State#player_state.player_id,
TaskList = player_task_dict:get_value_from_list_by_tasksortid(TaskSortId, Tool),
F = fun(X, List) ->
Num1 = X#db_player_task.nownum + Num,
TaskConf = task_config:get(X#db_player_task.taskid_id),
X1 =
case TaskConf#task_conf.need_num =< Num1 of
true ->
X#db_player_task{
isfinish = 1,
nownum = TaskConf#task_conf.need_num
};
_ ->
X#db_player_task{
nownum = Num1
}
end,
%% 存库
player_task_cache:update(PlayerId, X1),
player_task_dict:update_task_list(X1),
if
TaskConf#task_conf.type_id =:= ?TASKTYPEID2 ->
send_active_task(State, X, X1, TaskConf),
List;
true ->
[X1 | List]
end
end,
TaskList1 = lists:foldl(F, [], TaskList),
main_task_lib:ref_task_navigate_list(State, TaskList1),
{ok, State}.
更新玩家的场景击杀任务信息
update_player_tasksort_kill_scene(PlayerPid, Num, SceneId) when is_pid(PlayerPid) ->
gen_server2:apply_async(PlayerPid, {?MODULE, update_player_tasksort_kill_scene, [Num, SceneId]});
更新玩家的场景击杀任务信息
update_player_tasksort_kill_scene(State, Num, SceneId) ->
PlayerId = State#player_state.player_id,
TaskList = player_task_dict:get_value_from_list_by_tasksortid(?TASKSORT_KILL_SCENE, SceneId),
F = fun(X, List) ->
Num1 = X#db_player_task.nownum + Num,
TaskConf = task_config:get(X#db_player_task.taskid_id),
X1 =
case TaskConf#task_conf.need_num =< Num1 of
true ->
X#db_player_task{
isfinish = 1,
nownum = TaskConf#task_conf.need_num
};
_ ->
X#db_player_task{
nownum = Num1
}
end,
%% 存库
player_task_cache:update(PlayerId, X1),
player_task_dict:update_task_list(X1),
if
TaskConf#task_conf.type_id =:= ?TASKTYPEID2 ->
send_active_task(State, X, X1, TaskConf),
List;
true ->
[X1 | List]
end
end,
TaskList1 = lists:foldl(F, [], TaskList),
main_task_lib:ref_task_navigate_list(State, TaskList1),
{ok, State}.
更新玩家升级任务信息
update_player_tasksort_up_lv(PlayerPid) when is_pid(PlayerPid) ->
gen_server2:apply_async(PlayerPid, {?MODULE, update_player_tasksort_up_lv, []});
更新玩家升级任务信息
update_player_tasksort_up_lv(State) ->
Base = State#player_state.db_player_base,
PlayerId = State#player_state.player_id,
TaskList = player_task_dict:get_value_from_list_by_tasksortid(?TASKSORT_UP_LV),
Num1 = Base#db_player_base.lv,
F = fun(X, List) ->
TaskConf = task_config:get(X#db_player_task.taskid_id),
X1 =
case TaskConf#task_conf.need_num =< Num1 of
true ->
X#db_player_task{
isfinish = 1,
nownum = TaskConf#task_conf.need_num
};
_ ->
X#db_player_task{
nownum = Num1
}
end,
%% 存库
player_task_cache:update(PlayerId, X1),
player_task_dict:update_task_list(X1),
if
TaskConf#task_conf.type_id =:= ?TASKTYPEID2 ->
send_active_task(State, X, X1, TaskConf),
List;
true ->
[X1 | List]
end
end,
TaskList1 = lists:foldl(F, [], TaskList),
NewState1 = main_task_lib:ref_task_navigate_list(State),
main_task_lib:ref_task_navigate_list(NewState1, TaskList1),
开服活动刷新
active_service_lib:ref_button_tips(NewState1, ?ACTIVE_SERVICE_TYPE_LV).%%
%% 更新玩家获取道具信息
update_player_tasksort_hava_goods(PlayerPid, GoodsId) when is_pid(PlayerPid) ->
gen_server2:apply_async(PlayerPid, {?MODULE, update_player_tasksort_hava_goods, [GoodsId]});
%% 更新玩家获取道具信息
update_player_tasksort_hava_goods(State, GoodsId) ->
PlayerId = State#player_state.player_id,
TaskList = player_task_dict:get_value_from_list_by_tasksortid(?TASKSORT_HAVE_ITEM, GoodsId),
F = fun(X, List) ->
TaskConf = task_config:get(X#db_player_task.taskid_id),
Num1 = goods_lib:get_goods_num(GoodsId),
X1 =
case TaskConf#task_conf.need_num =< Num1 of
true ->
X#db_player_task{
isfinish = 1,
nownum = TaskConf#task_conf.need_num
};
_ ->
X#db_player_task{
nownum = Num1
}
end,
%% 存库
player_task_cache:update(PlayerId, X1),
player_task_dict:update_task_list(X1),
if
TaskConf#task_conf.type_id =:= ?TASKTYPEID2 ->
send_active_task(State, X, X1, TaskConf),
List;
true ->
[X1 | List]
end
end,
TaskList1 = lists:foldl(F, [], TaskList),
main_task_lib:ref_task_navigate_list(State, TaskList1),
{ok, State}.
%% 更新玩家的怪物击杀任务信息
update_player_tasksort_kill_monster(PlayerPid, MonsterId) when is_pid(PlayerPid) ->
gen_server2:apply_async(PlayerPid, {?MODULE, update_player_tasksort_kill_monster, [MonsterId]});
更新玩家升级任务信息
update_player_tasksort_kill_monster(State, MonsterId) ->
PlayerId = State#player_state.player_id,
TaskList = player_task_dict:get_value_from_list_by_tasksortid(?TASKSORT_MONSTER, MonsterId),
F = fun(X, List) ->
TaskConf = task_config:get(X#db_player_task.taskid_id),
Num1 = X#db_player_task.nownum + 1,
X1 =
case TaskConf#task_conf.need_num =< Num1 of
true ->
X#db_player_task{
isfinish = 1,
nownum = TaskConf#task_conf.need_num
};
_ ->
X#db_player_task{
nownum = Num1
}
end,
%% 存库
player_task_cache:update(PlayerId, X1),
player_task_dict:update_task_list(X1),
if
TaskConf#task_conf.type_id =:= ?TASKTYPEID2 ->
send_active_task(State, X, X1, TaskConf),
List;
true ->
[X1 | List]
end
end,
TaskList1 = lists:foldl(F, [], TaskList),
main_task_lib:ref_task_navigate_list(State, TaskList1),
%% 更新任务信息
update_player_tasksort_collect_item(State, MonsterId),
{ok, State}.
%% 通关副本信息 纪录
update_player_tasksort_past_fb(PlayerPid, SceneId) when is_pid(PlayerPid) ->
gen_server2:apply_async(PlayerPid, {?MODULE, update_player_tasksort_past_fb, [SceneId]});
更新玩家升级任务信息
update_player_tasksort_past_fb(State, SceneId) ->
?INFO("SceneId ", [SceneId]),
PlayerId = State#player_state.player_id,
TaskList = player_task_dict:get_value_from_list_by_tasksortid(?TASKSORT_PAST_FB, SceneId),
F = fun(X, List) ->
TaskConf = task_config:get(X#db_player_task.taskid_id),
Num1 = X#db_player_task.nownum + 1,
X1 =
case TaskConf#task_conf.need_num =< Num1 of
true ->
X#db_player_task{
isfinish = 1,
nownum = TaskConf#task_conf.need_num
};
_ ->
X#db_player_task{
nownum = Num1
}
end,
%% 存库
player_task_cache:update(PlayerId, X1),
player_task_dict:update_task_list(X1),
if
TaskConf#task_conf.type_id =:= ?TASKTYPEID2 ->
send_active_task(State, X, X1, TaskConf),
List;
true ->
[X1 | List]
end
end,
TaskList1 = lists:foldl(F, [], TaskList),
main_task_lib:ref_task_navigate_list(State, TaskList1),
{ok, State}.
%% 击杀怪物获取道具 任务信息
update_player_tasksort_collect_item(State, MonsterId) ->
PlayerId = State#player_state.player_id,
TaskList = player_task_dict:get_value_from_list_by_tasksortid(?TASKSORT_COLLECT_ITEM),
F = fun(X, List) ->
TaskConf = task_config:get(X#db_player_task.taskid_id),
case lists:member(MonsterId, TaskConf#task_conf.monsterid_arr) of
true ->
%% 判断随机数
RandNum = random:uniform(100),
if
RandNum =< TaskConf#task_conf.tool ->
Num1 = X#db_player_task.nownum + 1,
X1 =
case TaskConf#task_conf.need_num =< Num1 of
true ->
X#db_player_task{
isfinish = 1,
nownum = TaskConf#task_conf.need_num
};
_ ->
X#db_player_task{
nownum = Num1
}
end,
%% 存库
player_task_cache:update(PlayerId, X1),
player_task_dict:update_task_list(X1),
if
TaskConf#task_conf.type_id =:= ?TASKTYPEID2 ->
send_active_task(State, X, X1, TaskConf),
List;
true ->
[X1 | List]
end;
true ->
List
end;
_ ->
List
end
end,
TaskList1 = lists:foldl(F, [], TaskList),
main_task_lib:ref_task_navigate_list(State, TaskList1).
* * * * * * * * * * * * * * * * * * * * * * * *
%% 接取任务时检测
update_accpet__player_task(PlayerState, TaskId) ->
TaskConf = task_config:get(TaskId),
if
TaskConf#task_conf.sort_id =:= ?TASKSORT_DIALOG ->
#db_player_task{
player_id = PlayerState#player_state.player_id,
taskid_id = TaskConf#task_conf.id,
nownum = 1,
isfinish = 1
};
TaskConf#task_conf.sort_id =:= ?TASKSORT_UP_LV ->
{NowNum, IsFinish} = is_ok_up_lv(PlayerState, TaskConf),
#db_player_task{
player_id = PlayerState#player_state.player_id,
taskid_id = TaskConf#task_conf.id,
nownum = NowNum,
isfinish = IsFinish
};
TaskConf#task_conf.sort_id =:= ?TASKSORT_HAVE_ITEM ->
{NowNum, IsFinish} = is_ok_have_item(TaskConf),
#db_player_task{
player_id = PlayerState#player_state.player_id,
taskid_id = TaskConf#task_conf.id,
nownum = NowNum,
isfinish = IsFinish
};
true ->
#db_player_task{
player_id = PlayerState#player_state.player_id,
taskid_id = TaskConf#task_conf.id,
nownum = 0,
isfinish = 0
}
end.
%% 判断升级信息
is_ok_up_lv(PlayerState, TaskConf) ->
Base = PlayerState#player_state.db_player_base,
case Base#db_player_base.lv >= TaskConf#task_conf.need_num of
true ->
{Base#db_player_base.lv, 1};
_ ->
{Base#db_player_base.lv, 0}
end.
%% 判断物品获取
is_ok_have_item(TaskConf) ->
Num1 = goods_lib:get_goods_num(TaskConf#task_conf.tool),
case TaskConf#task_conf.need_num =< Num1 of
true ->
{Num1, 1};
_ ->
{Num1, 0}
end.
发送日常活跃任务信息纪录
send_active_task(PlayerState, OldTaskInfo, NewTaskInfo, TaskConf) ->
?INFO("sss ~p ~p", [OldTaskInfo, NewTaskInfo]),
case NewTaskInfo#db_player_task.isfinish =:= 1 of
true ->
Data =
case TaskConf#task_conf.need_num =:= 1 of
true ->
#proto_navigate_task_info{
task_id = TaskConf#task_conf.id,
state = 10
};
_ ->
#proto_navigate_task_info{
task_id = TaskConf#task_conf.id,
state = 2
}
end,
net_send:send_to_client(PlayerState#player_state.socket, 26008, #rep_record_task{record_task_info = Data}),
%% 刷新活跃任务领取红点
button_tips_lib:ref_button_tips(PlayerState, ?BTN_DAILY_TARGET);
_ ->
case OldTaskInfo#db_player_task.nownum =:= 0 of
true ->
Data = #proto_navigate_task_info{
task_id = TaskConf#task_conf.id,
state = 1
},
net_send:send_to_client(PlayerState#player_state.socket, 26008, #rep_record_task{record_task_info = Data});
_ ->
skip
end
end. | null | https://raw.githubusercontent.com/ijvcms/chuanqi_dev/7742184bded15f25be761c4f2d78834249d78097/server/trunk/server/src/business/task/task_comply.erl | erlang | -------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
存库
存库
存库
存库
更新玩家获取道具信息
更新玩家获取道具信息
存库
更新玩家的怪物击杀任务信息
存库
更新任务信息
通关副本信息 纪录
存库
击杀怪物获取道具 任务信息
判断随机数
存库
接取任务时检测
判断升级信息
判断物品获取
刷新活跃任务领取红点 | @author yubing
( C ) 2016 , < COMPANY >
Created : 05 . 一月 2016 10:20
-module(task_comply).
-include("cache.hrl").
-include("config.hrl").
-include("proto.hrl").
-include("record.hrl").
-include("common.hrl").
-include("button_tips_config.hrl").
-export([update_player_task_info/3,
update_player_tasksort_kill_scene/3,
update_player_tasksort_up_lv/1,
update_player_tasksort_hava_goods/2,
update_player_tasksort_kill_monster/2,
update_accpet__player_task/2,
update_player_tasksort_past_fb/2,
update_player_tasksort_collect_item/2,
update_player_task_info_tool/4
]).
更新玩家的任务信息 标准版
update_player_task_info(PlayerPid, TaskSortId, Num) when is_pid(PlayerPid) ->
gen_server2:apply_async(PlayerPid, {?MODULE, update_player_task_info, [TaskSortId, Num]});
更新玩家的任务信息 标准版
update_player_task_info(State, TaskSortId, Num) ->
PlayerId = State#player_state.player_id,
TaskList = player_task_dict:get_value_from_list_by_tasksortid(TaskSortId),
F = fun(X, List) ->
Num1 = X#db_player_task.nownum + Num,
TaskConf = task_config:get(X#db_player_task.taskid_id),
X1 =
case TaskConf#task_conf.need_num =< Num1 of
true ->
X#db_player_task{
isfinish = 1,
nownum = TaskConf#task_conf.need_num
};
_ ->
X#db_player_task{
nownum = Num1
}
end,
player_task_cache:update(PlayerId, X1),
player_task_dict:update_task_list(X1),
if
TaskConf#task_conf.type_id =:= ?TASKTYPEID2 ->
send_active_task(State, X, X1, TaskConf),
List;
true ->
[X1 | List]
end
end,
TaskList1 = lists:foldl(F, [], TaskList),
main_task_lib:ref_task_navigate_list(State, TaskList1),
{ok, State}.
更新玩家的任务信息 标准版
update_player_task_info_tool(PlayerPid, TaskSortId, Tool, Num) when is_pid(PlayerPid) ->
gen_server2:apply_async(PlayerPid, {?MODULE, update_player_task_info_tool, [TaskSortId, Tool, Num]});
更新玩家的任务信息 标准版
update_player_task_info_tool(State, TaskSortId, Tool, Num) ->
PlayerId = State#player_state.player_id,
TaskList = player_task_dict:get_value_from_list_by_tasksortid(TaskSortId, Tool),
F = fun(X, List) ->
Num1 = X#db_player_task.nownum + Num,
TaskConf = task_config:get(X#db_player_task.taskid_id),
X1 =
case TaskConf#task_conf.need_num =< Num1 of
true ->
X#db_player_task{
isfinish = 1,
nownum = TaskConf#task_conf.need_num
};
_ ->
X#db_player_task{
nownum = Num1
}
end,
player_task_cache:update(PlayerId, X1),
player_task_dict:update_task_list(X1),
if
TaskConf#task_conf.type_id =:= ?TASKTYPEID2 ->
send_active_task(State, X, X1, TaskConf),
List;
true ->
[X1 | List]
end
end,
TaskList1 = lists:foldl(F, [], TaskList),
main_task_lib:ref_task_navigate_list(State, TaskList1),
{ok, State}.
更新玩家的场景击杀任务信息
update_player_tasksort_kill_scene(PlayerPid, Num, SceneId) when is_pid(PlayerPid) ->
gen_server2:apply_async(PlayerPid, {?MODULE, update_player_tasksort_kill_scene, [Num, SceneId]});
更新玩家的场景击杀任务信息
update_player_tasksort_kill_scene(State, Num, SceneId) ->
PlayerId = State#player_state.player_id,
TaskList = player_task_dict:get_value_from_list_by_tasksortid(?TASKSORT_KILL_SCENE, SceneId),
F = fun(X, List) ->
Num1 = X#db_player_task.nownum + Num,
TaskConf = task_config:get(X#db_player_task.taskid_id),
X1 =
case TaskConf#task_conf.need_num =< Num1 of
true ->
X#db_player_task{
isfinish = 1,
nownum = TaskConf#task_conf.need_num
};
_ ->
X#db_player_task{
nownum = Num1
}
end,
player_task_cache:update(PlayerId, X1),
player_task_dict:update_task_list(X1),
if
TaskConf#task_conf.type_id =:= ?TASKTYPEID2 ->
send_active_task(State, X, X1, TaskConf),
List;
true ->
[X1 | List]
end
end,
TaskList1 = lists:foldl(F, [], TaskList),
main_task_lib:ref_task_navigate_list(State, TaskList1),
{ok, State}.
更新玩家升级任务信息
update_player_tasksort_up_lv(PlayerPid) when is_pid(PlayerPid) ->
gen_server2:apply_async(PlayerPid, {?MODULE, update_player_tasksort_up_lv, []});
更新玩家升级任务信息
update_player_tasksort_up_lv(State) ->
Base = State#player_state.db_player_base,
PlayerId = State#player_state.player_id,
TaskList = player_task_dict:get_value_from_list_by_tasksortid(?TASKSORT_UP_LV),
Num1 = Base#db_player_base.lv,
F = fun(X, List) ->
TaskConf = task_config:get(X#db_player_task.taskid_id),
X1 =
case TaskConf#task_conf.need_num =< Num1 of
true ->
X#db_player_task{
isfinish = 1,
nownum = TaskConf#task_conf.need_num
};
_ ->
X#db_player_task{
nownum = Num1
}
end,
player_task_cache:update(PlayerId, X1),
player_task_dict:update_task_list(X1),
if
TaskConf#task_conf.type_id =:= ?TASKTYPEID2 ->
send_active_task(State, X, X1, TaskConf),
List;
true ->
[X1 | List]
end
end,
TaskList1 = lists:foldl(F, [], TaskList),
NewState1 = main_task_lib:ref_task_navigate_list(State),
main_task_lib:ref_task_navigate_list(NewState1, TaskList1),
开服活动刷新
update_player_tasksort_hava_goods(PlayerPid, GoodsId) when is_pid(PlayerPid) ->
gen_server2:apply_async(PlayerPid, {?MODULE, update_player_tasksort_hava_goods, [GoodsId]});
update_player_tasksort_hava_goods(State, GoodsId) ->
PlayerId = State#player_state.player_id,
TaskList = player_task_dict:get_value_from_list_by_tasksortid(?TASKSORT_HAVE_ITEM, GoodsId),
F = fun(X, List) ->
TaskConf = task_config:get(X#db_player_task.taskid_id),
Num1 = goods_lib:get_goods_num(GoodsId),
X1 =
case TaskConf#task_conf.need_num =< Num1 of
true ->
X#db_player_task{
isfinish = 1,
nownum = TaskConf#task_conf.need_num
};
_ ->
X#db_player_task{
nownum = Num1
}
end,
player_task_cache:update(PlayerId, X1),
player_task_dict:update_task_list(X1),
if
TaskConf#task_conf.type_id =:= ?TASKTYPEID2 ->
send_active_task(State, X, X1, TaskConf),
List;
true ->
[X1 | List]
end
end,
TaskList1 = lists:foldl(F, [], TaskList),
main_task_lib:ref_task_navigate_list(State, TaskList1),
{ok, State}.
update_player_tasksort_kill_monster(PlayerPid, MonsterId) when is_pid(PlayerPid) ->
gen_server2:apply_async(PlayerPid, {?MODULE, update_player_tasksort_kill_monster, [MonsterId]});
更新玩家升级任务信息
update_player_tasksort_kill_monster(State, MonsterId) ->
PlayerId = State#player_state.player_id,
TaskList = player_task_dict:get_value_from_list_by_tasksortid(?TASKSORT_MONSTER, MonsterId),
F = fun(X, List) ->
TaskConf = task_config:get(X#db_player_task.taskid_id),
Num1 = X#db_player_task.nownum + 1,
X1 =
case TaskConf#task_conf.need_num =< Num1 of
true ->
X#db_player_task{
isfinish = 1,
nownum = TaskConf#task_conf.need_num
};
_ ->
X#db_player_task{
nownum = Num1
}
end,
player_task_cache:update(PlayerId, X1),
player_task_dict:update_task_list(X1),
if
TaskConf#task_conf.type_id =:= ?TASKTYPEID2 ->
send_active_task(State, X, X1, TaskConf),
List;
true ->
[X1 | List]
end
end,
TaskList1 = lists:foldl(F, [], TaskList),
main_task_lib:ref_task_navigate_list(State, TaskList1),
update_player_tasksort_collect_item(State, MonsterId),
{ok, State}.
update_player_tasksort_past_fb(PlayerPid, SceneId) when is_pid(PlayerPid) ->
gen_server2:apply_async(PlayerPid, {?MODULE, update_player_tasksort_past_fb, [SceneId]});
更新玩家升级任务信息
update_player_tasksort_past_fb(State, SceneId) ->
?INFO("SceneId ", [SceneId]),
PlayerId = State#player_state.player_id,
TaskList = player_task_dict:get_value_from_list_by_tasksortid(?TASKSORT_PAST_FB, SceneId),
F = fun(X, List) ->
TaskConf = task_config:get(X#db_player_task.taskid_id),
Num1 = X#db_player_task.nownum + 1,
X1 =
case TaskConf#task_conf.need_num =< Num1 of
true ->
X#db_player_task{
isfinish = 1,
nownum = TaskConf#task_conf.need_num
};
_ ->
X#db_player_task{
nownum = Num1
}
end,
player_task_cache:update(PlayerId, X1),
player_task_dict:update_task_list(X1),
if
TaskConf#task_conf.type_id =:= ?TASKTYPEID2 ->
send_active_task(State, X, X1, TaskConf),
List;
true ->
[X1 | List]
end
end,
TaskList1 = lists:foldl(F, [], TaskList),
main_task_lib:ref_task_navigate_list(State, TaskList1),
{ok, State}.
update_player_tasksort_collect_item(State, MonsterId) ->
PlayerId = State#player_state.player_id,
TaskList = player_task_dict:get_value_from_list_by_tasksortid(?TASKSORT_COLLECT_ITEM),
F = fun(X, List) ->
TaskConf = task_config:get(X#db_player_task.taskid_id),
case lists:member(MonsterId, TaskConf#task_conf.monsterid_arr) of
true ->
RandNum = random:uniform(100),
if
RandNum =< TaskConf#task_conf.tool ->
Num1 = X#db_player_task.nownum + 1,
X1 =
case TaskConf#task_conf.need_num =< Num1 of
true ->
X#db_player_task{
isfinish = 1,
nownum = TaskConf#task_conf.need_num
};
_ ->
X#db_player_task{
nownum = Num1
}
end,
player_task_cache:update(PlayerId, X1),
player_task_dict:update_task_list(X1),
if
TaskConf#task_conf.type_id =:= ?TASKTYPEID2 ->
send_active_task(State, X, X1, TaskConf),
List;
true ->
[X1 | List]
end;
true ->
List
end;
_ ->
List
end
end,
TaskList1 = lists:foldl(F, [], TaskList),
main_task_lib:ref_task_navigate_list(State, TaskList1).
* * * * * * * * * * * * * * * * * * * * * * * *
update_accpet__player_task(PlayerState, TaskId) ->
TaskConf = task_config:get(TaskId),
if
TaskConf#task_conf.sort_id =:= ?TASKSORT_DIALOG ->
#db_player_task{
player_id = PlayerState#player_state.player_id,
taskid_id = TaskConf#task_conf.id,
nownum = 1,
isfinish = 1
};
TaskConf#task_conf.sort_id =:= ?TASKSORT_UP_LV ->
{NowNum, IsFinish} = is_ok_up_lv(PlayerState, TaskConf),
#db_player_task{
player_id = PlayerState#player_state.player_id,
taskid_id = TaskConf#task_conf.id,
nownum = NowNum,
isfinish = IsFinish
};
TaskConf#task_conf.sort_id =:= ?TASKSORT_HAVE_ITEM ->
{NowNum, IsFinish} = is_ok_have_item(TaskConf),
#db_player_task{
player_id = PlayerState#player_state.player_id,
taskid_id = TaskConf#task_conf.id,
nownum = NowNum,
isfinish = IsFinish
};
true ->
#db_player_task{
player_id = PlayerState#player_state.player_id,
taskid_id = TaskConf#task_conf.id,
nownum = 0,
isfinish = 0
}
end.
is_ok_up_lv(PlayerState, TaskConf) ->
Base = PlayerState#player_state.db_player_base,
case Base#db_player_base.lv >= TaskConf#task_conf.need_num of
true ->
{Base#db_player_base.lv, 1};
_ ->
{Base#db_player_base.lv, 0}
end.
is_ok_have_item(TaskConf) ->
Num1 = goods_lib:get_goods_num(TaskConf#task_conf.tool),
case TaskConf#task_conf.need_num =< Num1 of
true ->
{Num1, 1};
_ ->
{Num1, 0}
end.
发送日常活跃任务信息纪录
send_active_task(PlayerState, OldTaskInfo, NewTaskInfo, TaskConf) ->
?INFO("sss ~p ~p", [OldTaskInfo, NewTaskInfo]),
case NewTaskInfo#db_player_task.isfinish =:= 1 of
true ->
Data =
case TaskConf#task_conf.need_num =:= 1 of
true ->
#proto_navigate_task_info{
task_id = TaskConf#task_conf.id,
state = 10
};
_ ->
#proto_navigate_task_info{
task_id = TaskConf#task_conf.id,
state = 2
}
end,
net_send:send_to_client(PlayerState#player_state.socket, 26008, #rep_record_task{record_task_info = Data}),
button_tips_lib:ref_button_tips(PlayerState, ?BTN_DAILY_TARGET);
_ ->
case OldTaskInfo#db_player_task.nownum =:= 0 of
true ->
Data = #proto_navigate_task_info{
task_id = TaskConf#task_conf.id,
state = 1
},
net_send:send_to_client(PlayerState#player_state.socket, 26008, #rep_record_task{record_task_info = Data});
_ ->
skip
end
end. |
38f590ee4b389f769c50e0ea35ee05a0c3e6303d902776f822b5bed41af16490 | amnh/poy5 | randomTree.ml | A program to generate newick trees , not random , but balanced binary trees , of
* a certain length
* a certain length *)
let size = ref 8
let branch_length_variation = ref 0.
let branch_length = ref 0.1
let is_random = ref false
let () = Random.self_init ()
let asgn r verifier msg x =
if verifier x then r := x
else failwith msg
let generate_length () =
!branch_length +.
(if !branch_length_variation = 0. then 0.0
else
(Random.float (2.0 *. !branch_length_variation)) -.
!branch_length_variation)
let dbg = false
let split_array len arr =
match arr with
| [||] | [|_|] -> assert false
| [|a; b|] -> [|a|], [|b|]
| arr ->
let pos =
if !is_random then
1 + (Random.int (len - 2))
else len / 2
in
if dbg then Printf.printf "%d - %d\n%!" len pos;
Array.sub arr 0 pos, Array.sub arr pos (len - pos)
let output_ancestors = ref false
let ancestor =
let cnt = ref 0 in
fun () ->
incr cnt;
"A" ^ string_of_int !cnt
let rec generator is_root arr =
match arr with
| [||] -> assert false
| [|a|] ->
Printf.sprintf "%s:%f" a (generate_length ())
| _ ->
let len = Array.length arr
and len1 = generate_length () in
let f, s = split_array len arr in
if is_root then
if !output_ancestors then
Printf.sprintf "(%s, %s)%s" (generator false f)
(generator false s) (ancestor ())
else
Printf.sprintf "(%s, %s)" (generator false f)
(generator false s)
else
if !output_ancestors then
Printf.sprintf "(%s, %s)%s:%f" (generator false f)
(generator false s) (ancestor ()) len1
else
Printf.sprintf "(%s, %s):%f" (generator false f)
(generator false s) len1
let create_array size =
Array.init size (fun x -> "T" ^ string_of_int x)
let () =
let params = [
("-random", (Arg.Unit (fun () -> is_random := true)),
"Do not generate a balanced tree but a true random tree. By default the program produces a balanced tree.");
("-size",
(Arg.Int (asgn size (fun x -> x > 1) "-size must be greater than 1")),
"The number of terminals in the tree");
("-bl", (Arg.Float (asgn branch_length (fun x -> x >= 0.0)
"-bl must be greater than or equal to 0.0")),
"The length of the branches of the tree");
("-ancestors", (Arg.Unit (fun () -> output_ancestors := true)),
"Assign lables to the ancestral vertices");
("-blv", (Arg.Float (asgn branch_length_variation (fun x -> x >= 0.0)
"The -blv must be greater than or equal to 0.0")),
"The variation of the branch length") ]
in
let usage = "randomTree [OPTIONS]\nA program to generate random trees." in
Arg.parse params (fun _ -> ()) usage;
print_endline (generator true (create_array !size))
| null | https://raw.githubusercontent.com/amnh/poy5/da563a2339d3fa9c0110ae86cc35fad576f728ab/src/utils/randomTree.ml | ocaml | A program to generate newick trees , not random , but balanced binary trees , of
* a certain length
* a certain length *)
let size = ref 8
let branch_length_variation = ref 0.
let branch_length = ref 0.1
let is_random = ref false
let () = Random.self_init ()
let asgn r verifier msg x =
if verifier x then r := x
else failwith msg
let generate_length () =
!branch_length +.
(if !branch_length_variation = 0. then 0.0
else
(Random.float (2.0 *. !branch_length_variation)) -.
!branch_length_variation)
let dbg = false
let split_array len arr =
match arr with
| [||] | [|_|] -> assert false
| [|a; b|] -> [|a|], [|b|]
| arr ->
let pos =
if !is_random then
1 + (Random.int (len - 2))
else len / 2
in
if dbg then Printf.printf "%d - %d\n%!" len pos;
Array.sub arr 0 pos, Array.sub arr pos (len - pos)
let output_ancestors = ref false
let ancestor =
let cnt = ref 0 in
fun () ->
incr cnt;
"A" ^ string_of_int !cnt
let rec generator is_root arr =
match arr with
| [||] -> assert false
| [|a|] ->
Printf.sprintf "%s:%f" a (generate_length ())
| _ ->
let len = Array.length arr
and len1 = generate_length () in
let f, s = split_array len arr in
if is_root then
if !output_ancestors then
Printf.sprintf "(%s, %s)%s" (generator false f)
(generator false s) (ancestor ())
else
Printf.sprintf "(%s, %s)" (generator false f)
(generator false s)
else
if !output_ancestors then
Printf.sprintf "(%s, %s)%s:%f" (generator false f)
(generator false s) (ancestor ()) len1
else
Printf.sprintf "(%s, %s):%f" (generator false f)
(generator false s) len1
let create_array size =
Array.init size (fun x -> "T" ^ string_of_int x)
let () =
let params = [
("-random", (Arg.Unit (fun () -> is_random := true)),
"Do not generate a balanced tree but a true random tree. By default the program produces a balanced tree.");
("-size",
(Arg.Int (asgn size (fun x -> x > 1) "-size must be greater than 1")),
"The number of terminals in the tree");
("-bl", (Arg.Float (asgn branch_length (fun x -> x >= 0.0)
"-bl must be greater than or equal to 0.0")),
"The length of the branches of the tree");
("-ancestors", (Arg.Unit (fun () -> output_ancestors := true)),
"Assign lables to the ancestral vertices");
("-blv", (Arg.Float (asgn branch_length_variation (fun x -> x >= 0.0)
"The -blv must be greater than or equal to 0.0")),
"The variation of the branch length") ]
in
let usage = "randomTree [OPTIONS]\nA program to generate random trees." in
Arg.parse params (fun _ -> ()) usage;
print_endline (generator true (create_array !size))
| |
c7ab2b373b87cd6e478b9a720dfad9b7dcec9240e546e70e6f2193bf74e4d900 | guriguri/cauca | rest_test.clj | (ns cauca.component.rest-test
(:use [clojure test]
)
(:require [cauca.factory :as f]
[cauca.component.rest :as rest]
[cauca.log :as log]
[cauca.config :as config]
)
)
(defn request [method resource web-app & params]
(web-app {:request-method method :uri resource :query-params (first params)}))
(deftest test-routes
(log/configure-logback "/cauca-logback.xml")
(config/config-yaml "/cauca-context.yaml")
(is (= 200 (:status (request :get "/" rest/main-routes))))
(is (= 200 (:status (request :get "/api/courtauction/691438" rest/main-routes))))
(is (= 200 (:status (request :get "/api/courtauction" rest/main-routes {:page 1 :pageSize 10}))))) | null | https://raw.githubusercontent.com/guriguri/cauca/38ba3ee7200d2369a1a4f7ae58e286bd09dd16f3/test/clj/cauca/component/rest_test.clj | clojure | (ns cauca.component.rest-test
(:use [clojure test]
)
(:require [cauca.factory :as f]
[cauca.component.rest :as rest]
[cauca.log :as log]
[cauca.config :as config]
)
)
(defn request [method resource web-app & params]
(web-app {:request-method method :uri resource :query-params (first params)}))
(deftest test-routes
(log/configure-logback "/cauca-logback.xml")
(config/config-yaml "/cauca-context.yaml")
(is (= 200 (:status (request :get "/" rest/main-routes))))
(is (= 200 (:status (request :get "/api/courtauction/691438" rest/main-routes))))
(is (= 200 (:status (request :get "/api/courtauction" rest/main-routes {:page 1 :pageSize 10}))))) | |
cfce32465a498a60c8c41ac33b69e8bc87fcd2ee8af4a4be24f714128bdb3fdd | janestreet/base | test_int64.ml | open! Import
open! Int64
let%expect_test "hash coherence" =
check_int_hash_coherence [%here] (module Int64);
[%expect {| |}]
;;
let numbers =
[ 0x0000_0000_0000_1020L
; 0x0000_0000_0011_2233L
; 0x0000_0000_1122_3344L
; 0x0000_0011_2233_4455L
; 0x0000_1122_3344_5566L
; 0x0011_2233_4455_6677L
; 0x1122_3344_5566_7788L
]
;;
let test = test_conversion ~to_string:Int64.Hex.to_string_hum
let%expect_test "bswap16" =
List.iter numbers ~f:(test bswap16);
[%expect
{|
0x1020 --> 0x2010
0x11_2233 --> 0x3322
0x1122_3344 --> 0x4433
0x11_2233_4455 --> 0x5544
0x1122_3344_5566 --> 0x6655
0x11_2233_4455_6677 --> 0x7766
0x1122_3344_5566_7788 --> 0x8877 |}]
;;
let%expect_test "bswap32" =
List.iter numbers ~f:(test bswap32);
[%expect
{|
0x1020 --> 0x2010_0000
0x11_2233 --> 0x3322_1100
0x1122_3344 --> 0x4433_2211
0x11_2233_4455 --> 0x5544_3322
0x1122_3344_5566 --> 0x6655_4433
0x11_2233_4455_6677 --> 0x7766_5544
0x1122_3344_5566_7788 --> 0x8877_6655 |}]
;;
let%expect_test "bswap48" =
List.iter numbers ~f:(test bswap48);
[%expect
{|
0x1020 --> 0x2010_0000_0000
0x11_2233 --> 0x3322_1100_0000
0x1122_3344 --> 0x4433_2211_0000
0x11_2233_4455 --> 0x5544_3322_1100
0x1122_3344_5566 --> 0x6655_4433_2211
0x11_2233_4455_6677 --> 0x7766_5544_3322
0x1122_3344_5566_7788 --> 0x8877_6655_4433 |}]
;;
let%expect_test "bswap64" =
List.iter numbers ~f:(test bswap64);
[%expect
{|
0x1020 --> 0x2010_0000_0000_0000
0x11_2233 --> 0x3322_1100_0000_0000
0x1122_3344 --> 0x4433_2211_0000_0000
0x11_2233_4455 --> 0x5544_3322_1100_0000
0x1122_3344_5566 --> 0x6655_4433_2211_0000
0x11_2233_4455_6677 --> 0x7766_5544_3322_1100
0x1122_3344_5566_7788 --> -0x7788_99aa_bbcc_ddef |}]
;;
| null | https://raw.githubusercontent.com/janestreet/base/221b085f3fcd77597f8245b4d73de3970b238e71/test/test_int64.ml | ocaml | open! Import
open! Int64
let%expect_test "hash coherence" =
check_int_hash_coherence [%here] (module Int64);
[%expect {| |}]
;;
let numbers =
[ 0x0000_0000_0000_1020L
; 0x0000_0000_0011_2233L
; 0x0000_0000_1122_3344L
; 0x0000_0011_2233_4455L
; 0x0000_1122_3344_5566L
; 0x0011_2233_4455_6677L
; 0x1122_3344_5566_7788L
]
;;
let test = test_conversion ~to_string:Int64.Hex.to_string_hum
let%expect_test "bswap16" =
List.iter numbers ~f:(test bswap16);
[%expect
{|
0x1020 --> 0x2010
0x11_2233 --> 0x3322
0x1122_3344 --> 0x4433
0x11_2233_4455 --> 0x5544
0x1122_3344_5566 --> 0x6655
0x11_2233_4455_6677 --> 0x7766
0x1122_3344_5566_7788 --> 0x8877 |}]
;;
let%expect_test "bswap32" =
List.iter numbers ~f:(test bswap32);
[%expect
{|
0x1020 --> 0x2010_0000
0x11_2233 --> 0x3322_1100
0x1122_3344 --> 0x4433_2211
0x11_2233_4455 --> 0x5544_3322
0x1122_3344_5566 --> 0x6655_4433
0x11_2233_4455_6677 --> 0x7766_5544
0x1122_3344_5566_7788 --> 0x8877_6655 |}]
;;
let%expect_test "bswap48" =
List.iter numbers ~f:(test bswap48);
[%expect
{|
0x1020 --> 0x2010_0000_0000
0x11_2233 --> 0x3322_1100_0000
0x1122_3344 --> 0x4433_2211_0000
0x11_2233_4455 --> 0x5544_3322_1100
0x1122_3344_5566 --> 0x6655_4433_2211
0x11_2233_4455_6677 --> 0x7766_5544_3322
0x1122_3344_5566_7788 --> 0x8877_6655_4433 |}]
;;
let%expect_test "bswap64" =
List.iter numbers ~f:(test bswap64);
[%expect
{|
0x1020 --> 0x2010_0000_0000_0000
0x11_2233 --> 0x3322_1100_0000_0000
0x1122_3344 --> 0x4433_2211_0000_0000
0x11_2233_4455 --> 0x5544_3322_1100_0000
0x1122_3344_5566 --> 0x6655_4433_2211_0000
0x11_2233_4455_6677 --> 0x7766_5544_3322_1100
0x1122_3344_5566_7788 --> -0x7788_99aa_bbcc_ddef |}]
;;
| |
66c70eab2944a0c4e83b4228428611ba5c70af74d0e4b01e5b75b1f70fe9eb6c | sysbio-bioinf/avatar | mo_select.clj | Copyright ( c ) . All rights reserved .
; The use and distribution terms for this software are covered by the
Eclipse Public License 2.0 ( -v20.html )
; which can be found in the file LICENSE at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
(ns avatar.algorithms.mo-select
(:require
[avatar.algorithms.score :as score]
[avatar.algorithms.common :as c]
[avatar.algorithms.greedy-setcover :as gs]
[avatar.util :as u]
[clojure.data.int-map :as im]
[com.climate.claypoole :as cp]
[avatar.algorithms.progress-tracker :as progress]
[clojure.tools.logging :as log]
[clojure.pprint :as pp]
[clojure.set :as set]
[avatar.data.gene-data :as gd])
(:import
(org.uma.jmetal.solution BinarySolution Solution)
(org.uma.jmetal.operator.impl.crossover SinglePointCrossover)
(org.uma.jmetal.operator.impl.mutation BitFlipMutation)
(org.uma.jmetal.operator.impl.selection TournamentSelection)
(org.uma.jmetal.algorithm.multiobjective.nsgaii NSGAIIBuilder NSGAIIBuilder$NSGAIIVariant)
(org.uma.jmetal.algorithm.multiobjective.spea2 SPEA2Builder)
(org.uma.jmetal.util SolutionListUtils)
(org.uma.jmetal.util.evaluator SolutionListEvaluator)
(org.uma.jmetal.algorithm Algorithm)
(org.uma.jmetal.util.binarySet BinarySet)
(org.uma.jmetal.algorithm.multiobjective.gwasfga GWASFGA)
(org.uma.jmetal.util.comparator RankingAndCrowdingDistanceComparator)
(org.uma.jmetal.qualityindicator.impl.hypervolume PISAHypervolume)
(org.uma.jmetal.util.front.imp ArrayFront)
(org.uma.jmetal.problem BinaryProblem Problem)
(org.uma.jmetal.solution.impl DefaultBinarySolution)
(org.uma.jmetal.util.pseudorandom JMetalRandom)
(java.lang.reflect Array)
(clojure.lang IFn$OLL)
(java.util List)))
(defn inc-int-array-entry
^ints [^ints a, ^long pos]
(let [pos (unchecked-int pos),
v (Array/getInt a, pos)]
(Array/setInt a, pos, (unchecked-inc-int v))
a))
(defn dec-int-array-entry
^ints [^ints a, ^long pos]
(let [pos (unchecked-int pos),
v (Array/getInt a, pos)]
(Array/setInt a, pos, (unchecked-dec-int v))
a))
(defn add-sample-covers
[sample-cover-count-array, sample-set]
(reduce
(fn [^ints cover-count-array, ^long sample]
(inc-int-array-entry cover-count-array, sample))
sample-cover-count-array
sample-set))
(defn count-covering-genes
^ints [^long sample-array-size, ^BinarySet selected-genes, sample-set-vector]
(loop [pos (.nextSetBit selected-genes 0), sample-cover-count-array (make-array Integer/TYPE sample-array-size)]
(if (neg? pos)
; done iterating through all selected genes => return array
sample-cover-count-array
(let [sample-set (nth sample-set-vector pos)
cover-count-array (add-sample-covers sample-cover-count-array, sample-set)]
(recur (.nextSetBit selected-genes (unchecked-inc pos)), cover-count-array)))))
(defn redundant-gene?
[^ints sample-cover-count-array, sample-set]
(reduce
(fn [redundant?, sample]
(if (< (aget sample-cover-count-array sample) 2)
not redundant , at least one sample is covered less than twice ( short circuiting via reduced )
(reduced false)
redundant?))
true
sample-set))
(defn delete-sample-covers
[sample-cover-count-array, sample-set]
(reduce
(fn [^ints sample-cover-count-array, sample]
(dec-int-array-entry sample-cover-count-array, sample))
sample-cover-count-array
sample-set))
(defn remove-redundant-genes
[^long sample-array-size, forward?, sample-set-vector, ^BinarySet selected-genes]
"Remove redundant genes (i.e. genes that do not cover additional samples)
by checking from genes with most covered samples to genes with least covered samples when forward? = true
and vice versa for forward? = false."
(let [sample-cover-count-array (count-covering-genes sample-array-size, selected-genes, sample-set-vector)
^IFn$OLL next-set-bit (if forward?
(fn forward ^long [^BinarySet bitset, ^long current-index]
(.nextSetBit bitset (inc current-index)))
(fn backward ^long [^BinarySet bitset, ^long current-index]
(.previousSetBit bitset (dec current-index)))),
first-set-bit (if forward?
(.nextSetBit selected-genes 0)
(.previousSetBit selected-genes (dec (.size selected-genes))))]
genes are sorted by sample set size decreasingly ( redundant genes with larger sample sets are removed first on forward ? = true and last on forward ? = false )
(loop [pos first-set-bit, sample-cover-count-array sample-cover-count-array, selected-genes selected-genes]
(if (neg? pos)
; done iterating through all selected genes => return potentially altered selected-genes
selected-genes
; detect whether current gene is redundant
(let [sample-set (nth sample-set-vector pos)]
(if (redundant-gene? sample-cover-count-array, sample-set)
; remove gene
(recur
(.invokePrim next-set-bit selected-genes pos),
(delete-sample-covers sample-cover-count-array, sample-set),
; remove gene
(doto selected-genes
(.clear pos)))
; keep gene
(recur
(.invokePrim next-set-bit selected-genes pos),
sample-cover-count-array,
selected-genes)))))))
(definterface IEvaluation
(evaluate [solution]))
(defn median-sample-count
[sample-set-vector ^BinarySet selected-genes]
(let [vals (loop [pos (.nextSetBit selected-genes 0), vals (transient [])]
(if (neg? pos)
(persistent! vals)
(recur
(.nextSetBit selected-genes (unchecked-inc pos))
(conj! vals (count (nth sample-set-vector pos))))))
n (count vals)
vals (vec (sort vals))]
(if (even? n)
(/ (+ (nth vals (quot n 2)) (nth vals (dec (quot n 2)))) 2.0)
(nth vals (quot n 2)))))
(deftype EvaluationMeasures [^long covered-count, ^long overlap-count, ^long gene-count])
(defn evaluate-gene-selection
[sample-set-vector, ^BinarySet gene-set]
(loop [pos (.nextSetBit gene-set 0), covered (im/dense-int-set), overlap-count 0, gene-count 0]
(if (neg? pos)
; calculate total result
(let [covered-count (count covered)]
(EvaluationMeasures. covered-count, overlap-count, gene-count))
; get sample set for gene at position `pos`
(let [sample-set (nth sample-set-vector pos)]
(recur
; next set bit
(.nextSetBit gene-set (unchecked-inc pos)),
; update covered set
(im/union covered, sample-set),
; update overlap
(unchecked-add overlap-count (count sample-set))
; update gene count
(unchecked-inc gene-count))))))
(deftype Evaluation [^long sample-array-size, sample-set-vector, objectives-fn]
IEvaluation
(evaluate [_, solution]
(let [^BinarySet selected-genes (.getVariableValue ^BinarySolution solution 0),
remove-redundancy+evaluate (fn remove-redundancy+evaluate [forward?, ^BinarySet gene-set]
(let [gene-set (->> (.clone gene-set)
(remove-redundant-genes sample-array-size, forward?, sample-set-vector))]
{:gene-set gene-set,
:objectives (objectives-fn (evaluate-gene-selection sample-set-vector, gene-set))}))
{forward-set :gene-set, forward-objectives :objectives} (remove-redundancy+evaluate true, selected-genes),
{backward-set :gene-set, backward-objectives :objectives} (remove-redundancy+evaluate false, selected-genes)
[gene-set, objectives] (if (< (second forward-objectives) (second backward-objectives))
[forward-set, forward-objectives]
[backward-set, backward-objectives])]
(u/reduce-indexed
(fn [solution, index, objective-value]
(doto ^BinarySolution solution
(.setObjective index, objective-value)))
(doto ^BinarySolution solution
(.setVariableValue 0, gene-set))
objectives))))
(defn coverage-overlap-objective
[gene-list]
(let [sample-array-size (c/determine-max-sample-count gene-list),
max-coverage (double (score/max-coverage :alterations, gene-list)),
max-overlap (double (score/overlap max-coverage, (score/cover-count-sum :alterations, gene-list)))]
(Evaluation. sample-array-size, (mapv :alterations gene-list),
(fn [^EvaluationMeasures measures]
; jMetal tries to minimize all objectives
[; coverage
(- 1.0 (/ (.covered-count measures) max-coverage))
; overlap
(/ (unchecked-subtract (.overlap-count measures) (.covered-count measures)) max-overlap)]))))
(defn coverage-gene-count-objective
[gene-list]
(let [sample-array-size (c/determine-max-sample-count gene-list),
max-coverage (double (score/max-coverage :alterations, gene-list))
max-gene-count (double (count gene-list))]
(Evaluation. sample-array-size, (mapv :alterations gene-list),
(fn [^EvaluationMeasures measures]
; jMetal tries to minimize all objectives
[; coverage
(- 1.0 (/ (.covered-count measures) max-coverage))
; gene count
(/ (.gene-count measures) max-gene-count)]))))
(defn create-random-solution
[^BinaryProblem problem, ^double selection-probability, ^long n]
(let [solution (DefaultBinarySolution. problem)
^BinarySet variable (.getVariableValue solution, 0)
prng (JMetalRandom/getInstance)]
(loop [i 0]
(when (< i n)
(.set variable i (< (.nextDouble prng) selection-probability))
(recur (unchecked-inc i))))
solution))
(defn create-selection-problem
[{:keys [selection-probability, second-objective] :as parameter-map}, gene-list]
(let [n (count gene-list),
selection-probability (double (or selection-probability 0.5)),
evaluation-fn (case second-objective
:overlap (coverage-overlap-objective gene-list)
:gene-count (coverage-gene-count-objective gene-list))]
(reify BinaryProblem
(getNumberOfVariables [_]
1)
(getNumberOfObjectives [_]
2
; TEST ONLY:
#_3)
(getNumberOfConstraints [_]
0)
(getName [_]
"Gene Selection Problem")
(createSolution [this]
(create-random-solution this, selection-probability, n))
(getNumberOfBits [_, index]
(when-not (== index 0)
(u/illegal-argument "This problem has only a single variable! (Index = %s)" index))
n)
(getTotalNumberOfBits [_] n)
(evaluate [_, solution]
(.evaluate ^IEvaluation evaluation-fn solution)))))
(defn extract-selected-genes
[entity-list, ^BinarySolution solution]
(let [^BinarySet selected-bits (.getVariableValue solution 0)]
(loop [pos (.nextSetBit selected-bits 0), selection (transient [])]
(if (neg? pos)
(persistent! selection)
(recur
(.nextSetBit selected-bits (unchecked-inc pos)),
(conj! selection (nth entity-list pos)))))))
(defn partition-into-parts
[^long part-count, coll]
(let [v (vec coll),
n (count coll)
part-size (quot n part-count),
larger-parts (mod n part-count)]
(loop [part-index 0, start-offset 0, part-list (transient [])]
(if (< part-index part-count)
(let [part-size (cond-> part-size (< part-index larger-parts) unchecked-inc),
end-offset (+ start-offset part-size),
part (subvec v, start-offset, end-offset)]
(recur
(unchecked-inc part-index),
end-offset,
(conj! part-list part)))
(persistent! part-list)))))
(defn solution-list-evaluator
^SolutionListEvaluator [thread-count, progress-tracker]
(let [thread-count (or thread-count 1)
evaluate (fn [^Problem problem, ^Solution solution]
(.evaluate problem solution))
report-progress (if progress-tracker
(fn [^List solution-list]
(progress/update progress-tracker, (.size solution-list)))
(fn [_]
#_nothing_to_do))]
(if (> thread-count 1)
(let [pool (cp/threadpool thread-count)]
(reify SolutionListEvaluator
(evaluate [_, solution-list, problem]
(let [partitions (partition-into-parts thread-count, solution-list)]
(doall
(cp/pmap pool,
(fn [solution-list-partition]
(u/for-each-indexed!
(fn [_, solution]
(evaluate problem, solution))
solution-list-partition)),
partitions)))
(report-progress solution-list)
solution-list)
(shutdown [_] (cp/shutdown pool))))
; single threaded
(reify SolutionListEvaluator
(evaluate [_, solution-list, problem]
(u/for-each-indexed!
(fn [_, solution]
(evaluate problem, solution))
solution-list)
(report-progress solution-list)
solution-list)
(shutdown [_] #_nothing-to-do)))))
(defn create-algorithm
[problem, {:keys [algorithm, crossover-probability, mutation-probability, tournament-size, iteration-count, population-size, thread-count, seed, progress-tracker]}]
HUXCrossover causes strange convergence to single individual populations
(let [crossover (SinglePointCrossover. (or crossover-probability 0.9)),
mutation (BitFlipMutation. (or mutation-probability 0.2)),
proper NSGA - II uses BinaryTournament and RankingAndCrowdingDistanceComparator
selection (TournamentSelection. (RankingAndCrowdingDistanceComparator.), (or tournament-size 2)),
population-size (or population-size 100),
iteration-count (or iteration-count 100),
max-evaluations (* population-size iteration-count),
evaluator (solution-list-evaluator thread-count, progress-tracker)]
; if there is a given seed, initialize the random singleton
(when seed
(.setSeed (JMetalRandom/getInstance) (long seed)))
{:algorithm (case (or algorithm :NSGAII)
:NSGAII (-> (NSGAIIBuilder. problem, crossover, mutation)
(.setMaxEvaluations max-evaluations)
(.setPopulationSize population-size)
(.setSelectionOperator selection)
(.setSolutionListEvaluator evaluator)
(.setVariant NSGAIIBuilder$NSGAIIVariant/NSGAII)
.build)
:SteadyStateNSGAII (-> (NSGAIIBuilder. problem, crossover, mutation)
(.setMaxEvaluations max-evaluations)
(.setPopulationSize population-size)
(.setSelectionOperator selection)
(.setSolutionListEvaluator evaluator)
(.setVariant NSGAIIBuilder$NSGAIIVariant/SteadyStateNSGAII)
.build)
NSGA - III behaves differently to the other algorithms here . Does not seem to adhere to iteration - count and population - size ( jMetal 5.3 )
;:NSGAIII (-> (NSGAIIIBuilder. problem)
; (.setMaxIterations iteration-count)
; (.setPopulationSize population-size)
; (.setCrossoverOperator crossover)
; (.setMutationOperator mutation)
; (.setSelectionOperator selection)
( .setSolutionListEvaluator evaluator )
; .build)
:SPEA2 (-> (SPEA2Builder. problem, crossover, mutation)
(.setMaxIterations iteration-count)
(.setPopulationSize population-size)
(.setSelectionOperator selection)
(.setSolutionListEvaluator evaluator)
.build)
:GWASFGA (GWASFGA. problem, population-size, iteration-count, crossover, mutation, selection, evaluator))
:evaluator evaluator}))
(defn transform-pareto-front
[second-objective, gene-id->gene-map, sample-count, input-gene-list, pareto-front]
(let [sample-count (double sample-count)]
(vec
(sort-by
(juxt :coverage second-objective)
(remove
(fn [{:keys [gene-count]}]
(zero? gene-count))
(mapv
(fn [solution]
(let [input-selection (extract-selected-genes input-gene-list, solution),
result-selection (mapv (fn [{:keys [gene-id]}] (get gene-id->gene-map gene-id)) input-selection),
; in case the selection is only performed for a single sample group
; calculate the objectives only for this group (input-selection from input-gene-list)
coverage (score/total-coverage :alterations, input-selection),
overlap (score/total-overlap :alterations, input-selection)]
{:selection result-selection,
:gene-count (count result-selection),
:coverage coverage,
:overlap overlap,
; for less confusion, use relative coverage and mean overlap
( in contrast to previously objectives relative to maximal number of coverable samples )
:relative-coverage (/ coverage sample-count),
:relative-overlap (/ overlap sample-count)}))
pareto-front))))))
(defn hypervolume
[solution-list]
(-> (PISAHypervolume. (ArrayFront. 1, 2))
(.evaluate solution-list)))
(defn adjusted-hypervolume
"Renormalize solution objectives relative to a greedy solution."
[second-objective, gene-list, solution-list]
(let [greedy-solution (gs/greedy-setcover gene-list),
max-coverage (double (score/total-coverage :alterations, greedy-solution)),
second-objective-fn (case second-objective
:overlap (fn [selected-genes] (score/total-overlap :alterations, selected-genes)),
:gene-count (fn [selected-genes] (count selected-genes))),
max-second-objective (double (second-objective-fn greedy-solution)),
renormalized-solution-list (mapv
(fn [^BinarySolution solution]
(let [selected-genes (extract-selected-genes gene-list, solution)
coverage (score/total-coverage :alterations, selected-genes),
second-objective (second-objective-fn selected-genes)]
(doto (.copy solution)
(.setObjective 0, (- 1.0 (/ coverage max-coverage)))
(.setObjective 1, (/ second-objective max-second-objective)))))
(vec solution-list))]
(hypervolume renormalized-solution-list)))
(defn sort-by-coverage
[gene-list]
(->> gene-list
(map
(fn [index, gene]
(assoc gene
::index index
::coverage (-> gene :alterations count)))
(range))
(sort-by ::coverage >)
vec))
(defn multi-objective-selection
[{:keys [select-only-for-sample-group, minimum-alteration-ratio, alteration-type, second-objective, seed] :as parameter-map}, sample-group-map, gene-list]
(when seed
(.setSeed (JMetalRandom/getInstance) (long seed)))
(let [gene-id->gene-map (zipmap (mapv :gene-id gene-list) gene-list),
input-gene-list (->> gene-list
(c/select-alterations-in-gene-list alteration-type)
sort-by-coverage
(c/maybe-remove-sample-alterations-of-other-groups select-only-for-sample-group, sample-group-map)
(gd/remove-genes-with-less-alterations
(long
(Math/ceil
(* minimum-alteration-ratio (gd/sample-group-size sample-group-map, select-only-for-sample-group))))))]
(if (seq input-gene-list)
(let [sample-count (gd/sample-count-in-group select-only-for-sample-group, sample-group-map)
problem (create-selection-problem parameter-map, input-gene-list),
{:keys [algorithm, evaluator]} (create-algorithm problem, parameter-map),
start-time (System/currentTimeMillis)
_ (.run ^Algorithm algorithm),
stop-time (System/currentTimeMillis)
_ (.shutdown ^SolutionListEvaluator evaluator)
pareto-front (SolutionListUtils/getNondominatedSolutions (.getResult ^Algorithm algorithm))]
{:hypervolume (adjusted-hypervolume second-objective, input-gene-list, pareto-front)
:pareto-front (transform-pareto-front second-objective, gene-id->gene-map, sample-count, input-gene-list, pareto-front),
:runtime (- stop-time start-time),
:parameters (-> parameter-map
(set/rename-keys {:select-only-for-sample-group :sample-group})
(dissoc :show-progress? :progress-tracker :thread-count))})
{:hypervolume 0.0
:pareto-front [],
:runtime 0,
:parameters (-> parameter-map
(set/rename-keys {:select-only-for-sample-group :sample-group})
(dissoc :show-progress? :progress-tracker :thread-count))})))
(defn optimize-selection
[parameter-map, sample-group-map, gene-list]
(log/debugf "multi-objective gene selection parameters:\n%s" (with-out-str (pp/pprint parameter-map)))
(let [{:keys [hypervolume] :as result-map} (u/timing "multi-objective gene selection"
(multi-objective-selection parameter-map, sample-group-map, gene-list))]
(log/debugf "multi-objective gene selection: hypervolume = %s" hypervolume)
result-map))
(defn add-fixed-genes
[fixed-genes, pareto-front]
(mapv
(fn [solution]
(update-in solution [:selection] #(into fixed-genes %)))
pareto-front))
(defn add-plot-data
[gene-sample-data, pareto-front]
(mapv
(fn [{:keys [selection] :as solution-data}]
(assoc solution-data
:plot-data (assoc gene-sample-data :gene-list selection)))
pareto-front))
(defn multi-objective-gene-selection
[{:keys [gene-list, sample-group-map] :as gene-sample-data}, {:keys [exclude-fixed-rows?] :as setup-data}]
(let [result-map (optimize-selection
setup-data,
sample-group-map,
(cond-> gene-list exclude-fixed-rows? (->> (remove :order-fixed?) vec)))]
(update-in result-map [:pareto-front]
(fn [pareto-front]
(cond->> (distinct pareto-front)
exclude-fixed-rows? (add-fixed-genes (filterv :order-fixed? gene-list))
true (add-plot-data gene-sample-data)))))) | null | https://raw.githubusercontent.com/sysbio-bioinf/avatar/cbf9968485f96fb61725aaa7381dba53624d6189/src/clojure/avatar/algorithms/mo_select.clj | clojure | The use and distribution terms for this software are covered by the
which can be found in the file LICENSE at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
done iterating through all selected genes => return array
done iterating through all selected genes => return potentially altered selected-genes
detect whether current gene is redundant
remove gene
remove gene
keep gene
calculate total result
get sample set for gene at position `pos`
next set bit
update covered set
update overlap
update gene count
jMetal tries to minimize all objectives
coverage
overlap
jMetal tries to minimize all objectives
coverage
gene count
TEST ONLY:
single threaded
if there is a given seed, initialize the random singleton
:NSGAIII (-> (NSGAIIIBuilder. problem)
(.setMaxIterations iteration-count)
(.setPopulationSize population-size)
(.setCrossoverOperator crossover)
(.setMutationOperator mutation)
(.setSelectionOperator selection)
.build)
in case the selection is only performed for a single sample group
calculate the objectives only for this group (input-selection from input-gene-list)
for less confusion, use relative coverage and mean overlap | Copyright ( c ) . All rights reserved .
Eclipse Public License 2.0 ( -v20.html )
(ns avatar.algorithms.mo-select
(:require
[avatar.algorithms.score :as score]
[avatar.algorithms.common :as c]
[avatar.algorithms.greedy-setcover :as gs]
[avatar.util :as u]
[clojure.data.int-map :as im]
[com.climate.claypoole :as cp]
[avatar.algorithms.progress-tracker :as progress]
[clojure.tools.logging :as log]
[clojure.pprint :as pp]
[clojure.set :as set]
[avatar.data.gene-data :as gd])
(:import
(org.uma.jmetal.solution BinarySolution Solution)
(org.uma.jmetal.operator.impl.crossover SinglePointCrossover)
(org.uma.jmetal.operator.impl.mutation BitFlipMutation)
(org.uma.jmetal.operator.impl.selection TournamentSelection)
(org.uma.jmetal.algorithm.multiobjective.nsgaii NSGAIIBuilder NSGAIIBuilder$NSGAIIVariant)
(org.uma.jmetal.algorithm.multiobjective.spea2 SPEA2Builder)
(org.uma.jmetal.util SolutionListUtils)
(org.uma.jmetal.util.evaluator SolutionListEvaluator)
(org.uma.jmetal.algorithm Algorithm)
(org.uma.jmetal.util.binarySet BinarySet)
(org.uma.jmetal.algorithm.multiobjective.gwasfga GWASFGA)
(org.uma.jmetal.util.comparator RankingAndCrowdingDistanceComparator)
(org.uma.jmetal.qualityindicator.impl.hypervolume PISAHypervolume)
(org.uma.jmetal.util.front.imp ArrayFront)
(org.uma.jmetal.problem BinaryProblem Problem)
(org.uma.jmetal.solution.impl DefaultBinarySolution)
(org.uma.jmetal.util.pseudorandom JMetalRandom)
(java.lang.reflect Array)
(clojure.lang IFn$OLL)
(java.util List)))
(defn inc-int-array-entry
^ints [^ints a, ^long pos]
(let [pos (unchecked-int pos),
v (Array/getInt a, pos)]
(Array/setInt a, pos, (unchecked-inc-int v))
a))
(defn dec-int-array-entry
^ints [^ints a, ^long pos]
(let [pos (unchecked-int pos),
v (Array/getInt a, pos)]
(Array/setInt a, pos, (unchecked-dec-int v))
a))
(defn add-sample-covers
[sample-cover-count-array, sample-set]
(reduce
(fn [^ints cover-count-array, ^long sample]
(inc-int-array-entry cover-count-array, sample))
sample-cover-count-array
sample-set))
(defn count-covering-genes
^ints [^long sample-array-size, ^BinarySet selected-genes, sample-set-vector]
(loop [pos (.nextSetBit selected-genes 0), sample-cover-count-array (make-array Integer/TYPE sample-array-size)]
(if (neg? pos)
sample-cover-count-array
(let [sample-set (nth sample-set-vector pos)
cover-count-array (add-sample-covers sample-cover-count-array, sample-set)]
(recur (.nextSetBit selected-genes (unchecked-inc pos)), cover-count-array)))))
(defn redundant-gene?
[^ints sample-cover-count-array, sample-set]
(reduce
(fn [redundant?, sample]
(if (< (aget sample-cover-count-array sample) 2)
not redundant , at least one sample is covered less than twice ( short circuiting via reduced )
(reduced false)
redundant?))
true
sample-set))
(defn delete-sample-covers
[sample-cover-count-array, sample-set]
(reduce
(fn [^ints sample-cover-count-array, sample]
(dec-int-array-entry sample-cover-count-array, sample))
sample-cover-count-array
sample-set))
(defn remove-redundant-genes
[^long sample-array-size, forward?, sample-set-vector, ^BinarySet selected-genes]
"Remove redundant genes (i.e. genes that do not cover additional samples)
by checking from genes with most covered samples to genes with least covered samples when forward? = true
and vice versa for forward? = false."
(let [sample-cover-count-array (count-covering-genes sample-array-size, selected-genes, sample-set-vector)
^IFn$OLL next-set-bit (if forward?
(fn forward ^long [^BinarySet bitset, ^long current-index]
(.nextSetBit bitset (inc current-index)))
(fn backward ^long [^BinarySet bitset, ^long current-index]
(.previousSetBit bitset (dec current-index)))),
first-set-bit (if forward?
(.nextSetBit selected-genes 0)
(.previousSetBit selected-genes (dec (.size selected-genes))))]
genes are sorted by sample set size decreasingly ( redundant genes with larger sample sets are removed first on forward ? = true and last on forward ? = false )
(loop [pos first-set-bit, sample-cover-count-array sample-cover-count-array, selected-genes selected-genes]
(if (neg? pos)
selected-genes
(let [sample-set (nth sample-set-vector pos)]
(if (redundant-gene? sample-cover-count-array, sample-set)
(recur
(.invokePrim next-set-bit selected-genes pos),
(delete-sample-covers sample-cover-count-array, sample-set),
(doto selected-genes
(.clear pos)))
(recur
(.invokePrim next-set-bit selected-genes pos),
sample-cover-count-array,
selected-genes)))))))
(definterface IEvaluation
(evaluate [solution]))
(defn median-sample-count
[sample-set-vector ^BinarySet selected-genes]
(let [vals (loop [pos (.nextSetBit selected-genes 0), vals (transient [])]
(if (neg? pos)
(persistent! vals)
(recur
(.nextSetBit selected-genes (unchecked-inc pos))
(conj! vals (count (nth sample-set-vector pos))))))
n (count vals)
vals (vec (sort vals))]
(if (even? n)
(/ (+ (nth vals (quot n 2)) (nth vals (dec (quot n 2)))) 2.0)
(nth vals (quot n 2)))))
(deftype EvaluationMeasures [^long covered-count, ^long overlap-count, ^long gene-count])
(defn evaluate-gene-selection
[sample-set-vector, ^BinarySet gene-set]
(loop [pos (.nextSetBit gene-set 0), covered (im/dense-int-set), overlap-count 0, gene-count 0]
(if (neg? pos)
(let [covered-count (count covered)]
(EvaluationMeasures. covered-count, overlap-count, gene-count))
(let [sample-set (nth sample-set-vector pos)]
(recur
(.nextSetBit gene-set (unchecked-inc pos)),
(im/union covered, sample-set),
(unchecked-add overlap-count (count sample-set))
(unchecked-inc gene-count))))))
(deftype Evaluation [^long sample-array-size, sample-set-vector, objectives-fn]
IEvaluation
(evaluate [_, solution]
(let [^BinarySet selected-genes (.getVariableValue ^BinarySolution solution 0),
remove-redundancy+evaluate (fn remove-redundancy+evaluate [forward?, ^BinarySet gene-set]
(let [gene-set (->> (.clone gene-set)
(remove-redundant-genes sample-array-size, forward?, sample-set-vector))]
{:gene-set gene-set,
:objectives (objectives-fn (evaluate-gene-selection sample-set-vector, gene-set))}))
{forward-set :gene-set, forward-objectives :objectives} (remove-redundancy+evaluate true, selected-genes),
{backward-set :gene-set, backward-objectives :objectives} (remove-redundancy+evaluate false, selected-genes)
[gene-set, objectives] (if (< (second forward-objectives) (second backward-objectives))
[forward-set, forward-objectives]
[backward-set, backward-objectives])]
(u/reduce-indexed
(fn [solution, index, objective-value]
(doto ^BinarySolution solution
(.setObjective index, objective-value)))
(doto ^BinarySolution solution
(.setVariableValue 0, gene-set))
objectives))))
(defn coverage-overlap-objective
[gene-list]
(let [sample-array-size (c/determine-max-sample-count gene-list),
max-coverage (double (score/max-coverage :alterations, gene-list)),
max-overlap (double (score/overlap max-coverage, (score/cover-count-sum :alterations, gene-list)))]
(Evaluation. sample-array-size, (mapv :alterations gene-list),
(fn [^EvaluationMeasures measures]
(- 1.0 (/ (.covered-count measures) max-coverage))
(/ (unchecked-subtract (.overlap-count measures) (.covered-count measures)) max-overlap)]))))
(defn coverage-gene-count-objective
[gene-list]
(let [sample-array-size (c/determine-max-sample-count gene-list),
max-coverage (double (score/max-coverage :alterations, gene-list))
max-gene-count (double (count gene-list))]
(Evaluation. sample-array-size, (mapv :alterations gene-list),
(fn [^EvaluationMeasures measures]
(- 1.0 (/ (.covered-count measures) max-coverage))
(/ (.gene-count measures) max-gene-count)]))))
(defn create-random-solution
[^BinaryProblem problem, ^double selection-probability, ^long n]
(let [solution (DefaultBinarySolution. problem)
^BinarySet variable (.getVariableValue solution, 0)
prng (JMetalRandom/getInstance)]
(loop [i 0]
(when (< i n)
(.set variable i (< (.nextDouble prng) selection-probability))
(recur (unchecked-inc i))))
solution))
(defn create-selection-problem
[{:keys [selection-probability, second-objective] :as parameter-map}, gene-list]
(let [n (count gene-list),
selection-probability (double (or selection-probability 0.5)),
evaluation-fn (case second-objective
:overlap (coverage-overlap-objective gene-list)
:gene-count (coverage-gene-count-objective gene-list))]
(reify BinaryProblem
(getNumberOfVariables [_]
1)
(getNumberOfObjectives [_]
2
#_3)
(getNumberOfConstraints [_]
0)
(getName [_]
"Gene Selection Problem")
(createSolution [this]
(create-random-solution this, selection-probability, n))
(getNumberOfBits [_, index]
(when-not (== index 0)
(u/illegal-argument "This problem has only a single variable! (Index = %s)" index))
n)
(getTotalNumberOfBits [_] n)
(evaluate [_, solution]
(.evaluate ^IEvaluation evaluation-fn solution)))))
(defn extract-selected-genes
[entity-list, ^BinarySolution solution]
(let [^BinarySet selected-bits (.getVariableValue solution 0)]
(loop [pos (.nextSetBit selected-bits 0), selection (transient [])]
(if (neg? pos)
(persistent! selection)
(recur
(.nextSetBit selected-bits (unchecked-inc pos)),
(conj! selection (nth entity-list pos)))))))
(defn partition-into-parts
[^long part-count, coll]
(let [v (vec coll),
n (count coll)
part-size (quot n part-count),
larger-parts (mod n part-count)]
(loop [part-index 0, start-offset 0, part-list (transient [])]
(if (< part-index part-count)
(let [part-size (cond-> part-size (< part-index larger-parts) unchecked-inc),
end-offset (+ start-offset part-size),
part (subvec v, start-offset, end-offset)]
(recur
(unchecked-inc part-index),
end-offset,
(conj! part-list part)))
(persistent! part-list)))))
(defn solution-list-evaluator
^SolutionListEvaluator [thread-count, progress-tracker]
(let [thread-count (or thread-count 1)
evaluate (fn [^Problem problem, ^Solution solution]
(.evaluate problem solution))
report-progress (if progress-tracker
(fn [^List solution-list]
(progress/update progress-tracker, (.size solution-list)))
(fn [_]
#_nothing_to_do))]
(if (> thread-count 1)
(let [pool (cp/threadpool thread-count)]
(reify SolutionListEvaluator
(evaluate [_, solution-list, problem]
(let [partitions (partition-into-parts thread-count, solution-list)]
(doall
(cp/pmap pool,
(fn [solution-list-partition]
(u/for-each-indexed!
(fn [_, solution]
(evaluate problem, solution))
solution-list-partition)),
partitions)))
(report-progress solution-list)
solution-list)
(shutdown [_] (cp/shutdown pool))))
(reify SolutionListEvaluator
(evaluate [_, solution-list, problem]
(u/for-each-indexed!
(fn [_, solution]
(evaluate problem, solution))
solution-list)
(report-progress solution-list)
solution-list)
(shutdown [_] #_nothing-to-do)))))
(defn create-algorithm
[problem, {:keys [algorithm, crossover-probability, mutation-probability, tournament-size, iteration-count, population-size, thread-count, seed, progress-tracker]}]
HUXCrossover causes strange convergence to single individual populations
(let [crossover (SinglePointCrossover. (or crossover-probability 0.9)),
mutation (BitFlipMutation. (or mutation-probability 0.2)),
proper NSGA - II uses BinaryTournament and RankingAndCrowdingDistanceComparator
selection (TournamentSelection. (RankingAndCrowdingDistanceComparator.), (or tournament-size 2)),
population-size (or population-size 100),
iteration-count (or iteration-count 100),
max-evaluations (* population-size iteration-count),
evaluator (solution-list-evaluator thread-count, progress-tracker)]
(when seed
(.setSeed (JMetalRandom/getInstance) (long seed)))
{:algorithm (case (or algorithm :NSGAII)
:NSGAII (-> (NSGAIIBuilder. problem, crossover, mutation)
(.setMaxEvaluations max-evaluations)
(.setPopulationSize population-size)
(.setSelectionOperator selection)
(.setSolutionListEvaluator evaluator)
(.setVariant NSGAIIBuilder$NSGAIIVariant/NSGAII)
.build)
:SteadyStateNSGAII (-> (NSGAIIBuilder. problem, crossover, mutation)
(.setMaxEvaluations max-evaluations)
(.setPopulationSize population-size)
(.setSelectionOperator selection)
(.setSolutionListEvaluator evaluator)
(.setVariant NSGAIIBuilder$NSGAIIVariant/SteadyStateNSGAII)
.build)
NSGA - III behaves differently to the other algorithms here . Does not seem to adhere to iteration - count and population - size ( jMetal 5.3 )
( .setSolutionListEvaluator evaluator )
:SPEA2 (-> (SPEA2Builder. problem, crossover, mutation)
(.setMaxIterations iteration-count)
(.setPopulationSize population-size)
(.setSelectionOperator selection)
(.setSolutionListEvaluator evaluator)
.build)
:GWASFGA (GWASFGA. problem, population-size, iteration-count, crossover, mutation, selection, evaluator))
:evaluator evaluator}))
(defn transform-pareto-front
[second-objective, gene-id->gene-map, sample-count, input-gene-list, pareto-front]
(let [sample-count (double sample-count)]
(vec
(sort-by
(juxt :coverage second-objective)
(remove
(fn [{:keys [gene-count]}]
(zero? gene-count))
(mapv
(fn [solution]
(let [input-selection (extract-selected-genes input-gene-list, solution),
result-selection (mapv (fn [{:keys [gene-id]}] (get gene-id->gene-map gene-id)) input-selection),
coverage (score/total-coverage :alterations, input-selection),
overlap (score/total-overlap :alterations, input-selection)]
{:selection result-selection,
:gene-count (count result-selection),
:coverage coverage,
:overlap overlap,
( in contrast to previously objectives relative to maximal number of coverable samples )
:relative-coverage (/ coverage sample-count),
:relative-overlap (/ overlap sample-count)}))
pareto-front))))))
(defn hypervolume
[solution-list]
(-> (PISAHypervolume. (ArrayFront. 1, 2))
(.evaluate solution-list)))
(defn adjusted-hypervolume
"Renormalize solution objectives relative to a greedy solution."
[second-objective, gene-list, solution-list]
(let [greedy-solution (gs/greedy-setcover gene-list),
max-coverage (double (score/total-coverage :alterations, greedy-solution)),
second-objective-fn (case second-objective
:overlap (fn [selected-genes] (score/total-overlap :alterations, selected-genes)),
:gene-count (fn [selected-genes] (count selected-genes))),
max-second-objective (double (second-objective-fn greedy-solution)),
renormalized-solution-list (mapv
(fn [^BinarySolution solution]
(let [selected-genes (extract-selected-genes gene-list, solution)
coverage (score/total-coverage :alterations, selected-genes),
second-objective (second-objective-fn selected-genes)]
(doto (.copy solution)
(.setObjective 0, (- 1.0 (/ coverage max-coverage)))
(.setObjective 1, (/ second-objective max-second-objective)))))
(vec solution-list))]
(hypervolume renormalized-solution-list)))
(defn sort-by-coverage
[gene-list]
(->> gene-list
(map
(fn [index, gene]
(assoc gene
::index index
::coverage (-> gene :alterations count)))
(range))
(sort-by ::coverage >)
vec))
(defn multi-objective-selection
[{:keys [select-only-for-sample-group, minimum-alteration-ratio, alteration-type, second-objective, seed] :as parameter-map}, sample-group-map, gene-list]
(when seed
(.setSeed (JMetalRandom/getInstance) (long seed)))
(let [gene-id->gene-map (zipmap (mapv :gene-id gene-list) gene-list),
input-gene-list (->> gene-list
(c/select-alterations-in-gene-list alteration-type)
sort-by-coverage
(c/maybe-remove-sample-alterations-of-other-groups select-only-for-sample-group, sample-group-map)
(gd/remove-genes-with-less-alterations
(long
(Math/ceil
(* minimum-alteration-ratio (gd/sample-group-size sample-group-map, select-only-for-sample-group))))))]
(if (seq input-gene-list)
(let [sample-count (gd/sample-count-in-group select-only-for-sample-group, sample-group-map)
problem (create-selection-problem parameter-map, input-gene-list),
{:keys [algorithm, evaluator]} (create-algorithm problem, parameter-map),
start-time (System/currentTimeMillis)
_ (.run ^Algorithm algorithm),
stop-time (System/currentTimeMillis)
_ (.shutdown ^SolutionListEvaluator evaluator)
pareto-front (SolutionListUtils/getNondominatedSolutions (.getResult ^Algorithm algorithm))]
{:hypervolume (adjusted-hypervolume second-objective, input-gene-list, pareto-front)
:pareto-front (transform-pareto-front second-objective, gene-id->gene-map, sample-count, input-gene-list, pareto-front),
:runtime (- stop-time start-time),
:parameters (-> parameter-map
(set/rename-keys {:select-only-for-sample-group :sample-group})
(dissoc :show-progress? :progress-tracker :thread-count))})
{:hypervolume 0.0
:pareto-front [],
:runtime 0,
:parameters (-> parameter-map
(set/rename-keys {:select-only-for-sample-group :sample-group})
(dissoc :show-progress? :progress-tracker :thread-count))})))
(defn optimize-selection
[parameter-map, sample-group-map, gene-list]
(log/debugf "multi-objective gene selection parameters:\n%s" (with-out-str (pp/pprint parameter-map)))
(let [{:keys [hypervolume] :as result-map} (u/timing "multi-objective gene selection"
(multi-objective-selection parameter-map, sample-group-map, gene-list))]
(log/debugf "multi-objective gene selection: hypervolume = %s" hypervolume)
result-map))
(defn add-fixed-genes
[fixed-genes, pareto-front]
(mapv
(fn [solution]
(update-in solution [:selection] #(into fixed-genes %)))
pareto-front))
(defn add-plot-data
[gene-sample-data, pareto-front]
(mapv
(fn [{:keys [selection] :as solution-data}]
(assoc solution-data
:plot-data (assoc gene-sample-data :gene-list selection)))
pareto-front))
(defn multi-objective-gene-selection
[{:keys [gene-list, sample-group-map] :as gene-sample-data}, {:keys [exclude-fixed-rows?] :as setup-data}]
(let [result-map (optimize-selection
setup-data,
sample-group-map,
(cond-> gene-list exclude-fixed-rows? (->> (remove :order-fixed?) vec)))]
(update-in result-map [:pareto-front]
(fn [pareto-front]
(cond->> (distinct pareto-front)
exclude-fixed-rows? (add-fixed-genes (filterv :order-fixed? gene-list))
true (add-plot-data gene-sample-data)))))) |
4f875c8e732bf62f063c7eeb0d81cc75e195c64045ced0a918cc36267b08d6ee | PEZ/rich4clojure | problem_117.clj | (ns rich4clojure.hard.problem-117
(:require [hyperfiddle.rcf :refer [tests]]))
;; = For Science! =
;; By 4Clojure user: amcnamara
;; Difficulty: Hard
;; Tags: [game]
;;
;; A mad scientist with tenure has created an experiment
;; tracking mice in a maze. Several mazes have been
;; randomly generated, and you've been tasked with writing
;; a program to determine the mazes in which it's possible
;; for the mouse to reach the cheesy endpoint. Write a
;; function which accepts a maze in the form of a
;; collection of rows, each row is a string where:
;; * spaces represent areas where the mouse can walk
;; freely
;; * hashes (#) represent walls where the mouse can not
;; walk
;; * M represents the mouse's starting point
;; * C represents the cheese which the mouse must reach
;; The mouse is not allowed to travel diagonally in the
;; maze (only up/down/left/right), nor can he escape the
;; edge of the maze. Your function must return true iff
;; the maze is solvable by the mouse.
(def __ :tests-will-fail)
(comment
)
(tests
true := (__ ["M C"])
false := (__ ["M # C"])
true := (__ ["#######"
"# #"
"# # #"
"#M # C#"
"#######"])
false := (__ ["########"
"#M # #"
"# # #"
"# # # #"
"# # #"
"# # #"
"# # # #"
"# # #"
"# # C#"
"########"])
false := (__ ["M "
" "
" "
" "
" ##"
" #C"])
true := (__ ["C######"
" # "
" # # "
" # #M"
" # "])
true := (__ ["C# # # #"
" "
"# # # # "
" "
" # # # #"
" "
"# # # #M"]))
;; Share your solution, and/or check how others did it:
;; | null | https://raw.githubusercontent.com/PEZ/rich4clojure/2ccfac041840e9b1550f0a69b9becbdb03f9525b/src/rich4clojure/hard/problem_117.clj | clojure | = For Science! =
By 4Clojure user: amcnamara
Difficulty: Hard
Tags: [game]
A mad scientist with tenure has created an experiment
tracking mice in a maze. Several mazes have been
randomly generated, and you've been tasked with writing
a program to determine the mazes in which it's possible
for the mouse to reach the cheesy endpoint. Write a
function which accepts a maze in the form of a
collection of rows, each row is a string where:
* spaces represent areas where the mouse can walk
freely
* hashes (#) represent walls where the mouse can not
walk
* M represents the mouse's starting point
* C represents the cheese which the mouse must reach
The mouse is not allowed to travel diagonally in the
maze (only up/down/left/right), nor can he escape the
edge of the maze. Your function must return true iff
the maze is solvable by the mouse.
Share your solution, and/or check how others did it:
| (ns rich4clojure.hard.problem-117
(:require [hyperfiddle.rcf :refer [tests]]))
(def __ :tests-will-fail)
(comment
)
(tests
true := (__ ["M C"])
false := (__ ["M # C"])
true := (__ ["#######"
"# #"
"# # #"
"#M # C#"
"#######"])
false := (__ ["########"
"#M # #"
"# # #"
"# # # #"
"# # #"
"# # #"
"# # # #"
"# # #"
"# # C#"
"########"])
false := (__ ["M "
" "
" "
" "
" ##"
" #C"])
true := (__ ["C######"
" # "
" # # "
" # #M"
" # "])
true := (__ ["C# # # #"
" "
"# # # # "
" "
" # # # #"
" "
"# # # #M"]))
|
3cbd9659376aae20a7f1b45171e8258eb2673f5ac783d716d940fd74ea5c100e | klutometis/clrs | section.scm | (require-extension
syntax-case
foof-loop)
(require '../2.1/section)
(require '../srfi/srfi-70)
(module
section-8.4
(bucket-sort)
(import* section-2.1
insertion-sort)
(import* srfi-70
exact-floor)
(include "../8.4/bucket-sort.scm"))
| null | https://raw.githubusercontent.com/klutometis/clrs/f85a8f0036f0946c9e64dde3259a19acc62b74a1/8.4/section.scm | scheme | (require-extension
syntax-case
foof-loop)
(require '../2.1/section)
(require '../srfi/srfi-70)
(module
section-8.4
(bucket-sort)
(import* section-2.1
insertion-sort)
(import* srfi-70
exact-floor)
(include "../8.4/bucket-sort.scm"))
| |
515eae6b1b5b167a505a645999dbc584b7d9e31f50440bcc3ada4d76b8b4d0ef | DSiSc/why3 | close_epsilon.mli | (********************************************************************)
(* *)
The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2018 -- Inria - CNRS - Paris - Sud University
(* *)
(* This software is distributed under the terms of the GNU Lesser *)
General Public License version 2.1 , with the special exception
(* on linking described in file LICENSE. *)
(* *)
(********************************************************************)
* The aim of this translation is to obtain terms where all epsilon
abstractions are closed
abstractions are closed *)
(** We do this by applying the following rewriting rule:
eps x.P(x) => eps F.(P(F@y_1@...@y_n)) where y_1...y_n are
the free variables in P and @ is the higher-order application symbol. *)
open Term
type lambda_match =
| Flam of vsymbol list * trigger * term
| Tlam of vsymbol list * trigger * term
| LNone
val destruct_lambda : term -> lambda_match
val is_lambda : term -> bool
| null | https://raw.githubusercontent.com/DSiSc/why3/8ba9c2287224b53075adc51544bc377bc8ea5c75/src/transform/close_epsilon.mli | ocaml | ******************************************************************
This software is distributed under the terms of the GNU Lesser
on linking described in file LICENSE.
******************************************************************
* We do this by applying the following rewriting rule:
eps x.P(x) => eps F.(P(F@y_1@...@y_n)) where y_1...y_n are
the free variables in P and @ is the higher-order application symbol. | The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2018 -- Inria - CNRS - Paris - Sud University
General Public License version 2.1 , with the special exception
* The aim of this translation is to obtain terms where all epsilon
abstractions are closed
abstractions are closed *)
open Term
type lambda_match =
| Flam of vsymbol list * trigger * term
| Tlam of vsymbol list * trigger * term
| LNone
val destruct_lambda : term -> lambda_match
val is_lambda : term -> bool
|
11aef9e7d68da7afb4c08f2cfc052cf9e107f39cbadb6f3323274de3e91d1523 | GlideAngle/flare-timing | Distance.hs | module Flight.Gap.Validity.Distance (DistanceValidity(..)) where
import GHC.Generics (Generic)
import Data.Typeable (Typeable, typeOf)
import "newtype" Control.Newtype (Newtype(..))
import Data.Via.Scientific
(DecimalPlaces(..), deriveDecimalPlaces, deriveJsonViaSci, deriveShowViaSci)
newtype DistanceValidity = DistanceValidity Rational
deriving (Eq, Ord, Typeable, Generic)
instance Newtype DistanceValidity Rational where
pack = DistanceValidity
unpack (DistanceValidity a) = a
deriveDecimalPlaces (DecimalPlaces 8) ''DistanceValidity
deriveJsonViaSci ''DistanceValidity
deriveShowViaSci ''DistanceValidity
| null | https://raw.githubusercontent.com/GlideAngle/flare-timing/27bd34c1943496987382091441a1c2516c169263/lang-haskell/gap-valid/library/Flight/Gap/Validity/Distance.hs | haskell | module Flight.Gap.Validity.Distance (DistanceValidity(..)) where
import GHC.Generics (Generic)
import Data.Typeable (Typeable, typeOf)
import "newtype" Control.Newtype (Newtype(..))
import Data.Via.Scientific
(DecimalPlaces(..), deriveDecimalPlaces, deriveJsonViaSci, deriveShowViaSci)
newtype DistanceValidity = DistanceValidity Rational
deriving (Eq, Ord, Typeable, Generic)
instance Newtype DistanceValidity Rational where
pack = DistanceValidity
unpack (DistanceValidity a) = a
deriveDecimalPlaces (DecimalPlaces 8) ''DistanceValidity
deriveJsonViaSci ''DistanceValidity
deriveShowViaSci ''DistanceValidity
| |
dbc1d955ac3c73734ebd03d7ee2d708d1cc7cae8178260c34ea759bb4f26781a | scicloj/tablecloth | columns_test.clj | (ns tablecloth.api.columns-test
(:require [tablecloth.api :as api]
[tech.v3.datatype :as dtype]
[midje.sweet :refer [tabular fact =>]]))
;;
(def dss (api/dataset {:idx [1 1 1 2 2 2 3 3 3]
:a ["a" "b" "c" "a" "b" "c" "a" "b" "c"]
"z" 1
:b [1 2 3 2 3 4 3 2 1]
:c [3 1 2 4 2 1 3 2 4]}))
(fact "reorder-columns"
(tabular (fact (-> dss
(api/reorder-columns ?order)
(api/column-names))
=>
?expected)
?order ?expected
[:ids :b :a :c] [:b :a :c :idx "z"]
[:idx :b :a "z" :c] [:idx :b :a "z" :c]
[:idx :b :a :C] [:idx :b :a "z" :c]
[:c :A :b :e :z :idx] [:c :b :idx :a "z"]
string? ["z" :idx :a :b :c]
#".*[az]$" [:a "z" :idx :b :c])
(fact [:b :a :c :idx "z"]
=>
(-> dss
(api/reorder-columns :b :a [:c :ids])
(api/column-names))))
(fact "add-or-replace"
(tabular (fact (-> {:x [1 2]}
(api/dataset)
(api/add-or-replace-column :y ?v)
:y
(dtype/get-datatype))
=>
?expected)
?expected ?v
:int64 1
:float64 1.0
:string "abc"))
(fact "add"
(tabular (fact (-> {:x [1 2]}
(api/dataset)
(api/add-column :y ?v)
:y
(dtype/get-datatype))
=>
?expected)
?expected ?v
:int64 1
:float64 1.0
:string "abc"))
| null | https://raw.githubusercontent.com/scicloj/tablecloth/e5f53bcfb5aab20a1807cecc3782cfba2b58476b/test/tablecloth/api/columns_test.clj | clojure | (ns tablecloth.api.columns-test
(:require [tablecloth.api :as api]
[tech.v3.datatype :as dtype]
[midje.sweet :refer [tabular fact =>]]))
(def dss (api/dataset {:idx [1 1 1 2 2 2 3 3 3]
:a ["a" "b" "c" "a" "b" "c" "a" "b" "c"]
"z" 1
:b [1 2 3 2 3 4 3 2 1]
:c [3 1 2 4 2 1 3 2 4]}))
(fact "reorder-columns"
(tabular (fact (-> dss
(api/reorder-columns ?order)
(api/column-names))
=>
?expected)
?order ?expected
[:ids :b :a :c] [:b :a :c :idx "z"]
[:idx :b :a "z" :c] [:idx :b :a "z" :c]
[:idx :b :a :C] [:idx :b :a "z" :c]
[:c :A :b :e :z :idx] [:c :b :idx :a "z"]
string? ["z" :idx :a :b :c]
#".*[az]$" [:a "z" :idx :b :c])
(fact [:b :a :c :idx "z"]
=>
(-> dss
(api/reorder-columns :b :a [:c :ids])
(api/column-names))))
(fact "add-or-replace"
(tabular (fact (-> {:x [1 2]}
(api/dataset)
(api/add-or-replace-column :y ?v)
:y
(dtype/get-datatype))
=>
?expected)
?expected ?v
:int64 1
:float64 1.0
:string "abc"))
(fact "add"
(tabular (fact (-> {:x [1 2]}
(api/dataset)
(api/add-column :y ?v)
:y
(dtype/get-datatype))
=>
?expected)
?expected ?v
:int64 1
:float64 1.0
:string "abc"))
| |
e9e49ab92c8e2bf527d145ccb55cfbe895661af163edebf38c30906990258d82 | amar47shah/cis-194 | AParser.hs | CIS 194 HW 10
due Monday , 1 April
due Monday, 1 April
-}
module AParser (Parser, runParser, satisfy, char, posInt) where
import Control.Applicative
import Data.Char
-- A parser for a value of type a is a function which takes a String
-- represnting the input to be parsed, and succeeds or fails; if it
-- succeeds, it returns the parsed value along with the remainder of
-- the input.
newtype Parser a = Parser { runParser :: String -> Maybe (a, String) }
For example , ' satisfy ' takes a predicate on , and constructs a
parser which succeeds only if it sees a that satisfies the
predicate ( which it then returns ) . If it encounters a that
-- does not satisfy the predicate (or an empty input), it fails.
satisfy :: (Char -> Bool) -> Parser Char
satisfy p = Parser f
where
f [] = Nothing -- fail on the empty input
f (x:xs) -- check if x satisfies the predicate
-- if so, return x along with the remainder
-- of the input (that is, xs)
| p x = Just (x, xs)
| otherwise = Nothing -- otherwise, fail
-- Using satisfy, we can define the parser 'char c' which expects to
-- see exactly the character c, and fails otherwise.
char :: Char -> Parser Char
char c = satisfy (== c)
For example :
* > runParser ( satisfy isUpper ) " ABC "
Just ( ' A',"BC " )
* > runParser ( satisfy isUpper ) " abc "
Nothing
* > runParser ( char ' x ' ) " xyz "
Just ( ' x',"yz " )
*Parser> runParser (satisfy isUpper) "ABC"
Just ('A',"BC")
*Parser> runParser (satisfy isUpper) "abc"
Nothing
*Parser> runParser (char 'x') "xyz"
Just ('x',"yz")
-}
-- For convenience, we've also provided a parser for positive
-- integers.
posInt :: Parser Integer
posInt = Parser f
where
f xs
| null ns = Nothing
| otherwise = Just (read ns, rest)
where (ns, rest) = span isDigit xs
------------------------------------------------------------
-- Your code goes below here
------------------------------------------------------------
--------------------------------------------------------------------------------
Exercise 1
first :: (a -> b) -> (a, c) -> (b, c)
first f (x, y) = (f x, y)
--------------------------------------------------------------------------------
Exercise 2
instance Functor Parser where
fmap f p = Parser $ \s -> runParser p s >>= Just . first f
instance Applicative Parser where
pure x = Parser $ Just . (,) x
p1 <*> p2 = Parser $
\s -> runParser p1 s >>= \(f, s') -> runParser p2 s' >>= Just . first f
--------------------------------------------------------------------------------
Exercise 3
abParser :: Parser (Char, Char)
abParser = (,) <$> char 'a' <*> char 'b'
abParser_ :: Parser ()
abParser_ = ignore abParser
intPair :: Parser [Integer]
intPair = (:) <$> posInt <*> (const (:[]) <$> char ' ' <*> posInt)
--------------------------------------------------------------------------------
Exercise 4
instance Alternative Parser where
empty = Parser $ const empty
p1 <|> p2 = Parser $ (<|>) <$> runParser p1 <*> runParser p2
--------------------------------------------------------------------------------
ignore :: Parser a -> Parser ()
ignore = (const () <$>)
upperChar :: Parser Char
upperChar = satisfy isUpper
Exercise 5
intOrUppercase :: Parser ()
intOrUppercase = ignore posInt <|> ignore upperChar
| null | https://raw.githubusercontent.com/amar47shah/cis-194/2b35ce18df176b47502950030af26e14d866b3e4/2013-byorgey/solutions/AParser.hs | haskell | A parser for a value of type a is a function which takes a String
represnting the input to be parsed, and succeeds or fails; if it
succeeds, it returns the parsed value along with the remainder of
the input.
does not satisfy the predicate (or an empty input), it fails.
fail on the empty input
check if x satisfies the predicate
if so, return x along with the remainder
of the input (that is, xs)
otherwise, fail
Using satisfy, we can define the parser 'char c' which expects to
see exactly the character c, and fails otherwise.
For convenience, we've also provided a parser for positive
integers.
----------------------------------------------------------
Your code goes below here
----------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------ | CIS 194 HW 10
due Monday , 1 April
due Monday, 1 April
-}
module AParser (Parser, runParser, satisfy, char, posInt) where
import Control.Applicative
import Data.Char
newtype Parser a = Parser { runParser :: String -> Maybe (a, String) }
For example , ' satisfy ' takes a predicate on , and constructs a
parser which succeeds only if it sees a that satisfies the
predicate ( which it then returns ) . If it encounters a that
satisfy :: (Char -> Bool) -> Parser Char
satisfy p = Parser f
where
| p x = Just (x, xs)
char :: Char -> Parser Char
char c = satisfy (== c)
For example :
* > runParser ( satisfy isUpper ) " ABC "
Just ( ' A',"BC " )
* > runParser ( satisfy isUpper ) " abc "
Nothing
* > runParser ( char ' x ' ) " xyz "
Just ( ' x',"yz " )
*Parser> runParser (satisfy isUpper) "ABC"
Just ('A',"BC")
*Parser> runParser (satisfy isUpper) "abc"
Nothing
*Parser> runParser (char 'x') "xyz"
Just ('x',"yz")
-}
posInt :: Parser Integer
posInt = Parser f
where
f xs
| null ns = Nothing
| otherwise = Just (read ns, rest)
where (ns, rest) = span isDigit xs
Exercise 1
first :: (a -> b) -> (a, c) -> (b, c)
first f (x, y) = (f x, y)
Exercise 2
instance Functor Parser where
fmap f p = Parser $ \s -> runParser p s >>= Just . first f
instance Applicative Parser where
pure x = Parser $ Just . (,) x
p1 <*> p2 = Parser $
\s -> runParser p1 s >>= \(f, s') -> runParser p2 s' >>= Just . first f
Exercise 3
abParser :: Parser (Char, Char)
abParser = (,) <$> char 'a' <*> char 'b'
abParser_ :: Parser ()
abParser_ = ignore abParser
intPair :: Parser [Integer]
intPair = (:) <$> posInt <*> (const (:[]) <$> char ' ' <*> posInt)
Exercise 4
instance Alternative Parser where
empty = Parser $ const empty
p1 <|> p2 = Parser $ (<|>) <$> runParser p1 <*> runParser p2
ignore :: Parser a -> Parser ()
ignore = (const () <$>)
upperChar :: Parser Char
upperChar = satisfy isUpper
Exercise 5
intOrUppercase :: Parser ()
intOrUppercase = ignore posInt <|> ignore upperChar
|
9211eba91b1967bf75c7bfe5bd93fbecd91d065e4d2033ff074c8adc8f1f5b25 | sarabander/p2pu-sicp | 2.17.scm |
(define (last-pair lst)
(let ((butfirst (cdr lst)))
(if (null? butfirst)
lst
(last-pair butfirst))))
(last-pair '(k l m n)) ; '(n)
(last-pair '()) ; error
| null | https://raw.githubusercontent.com/sarabander/p2pu-sicp/fbc49b67dac717da1487629fb2d7a7d86dfdbe32/2.2/2.17.scm | scheme | '(n)
error |
(define (last-pair lst)
(let ((butfirst (cdr lst)))
(if (null? butfirst)
lst
(last-pair butfirst))))
|
aae88468195a013c3bda74f53ef9165f4133b55f9770624279f5bb27f238056b | alanzplus/EOPL | subst.rkt | #lang eopl
(define subst
(lambda (new_sym old_sym slst)
(if (null? slst)
'()
(cons
(subst-in-s-exp new_sym old_sym (car slst))
(subst new_sym old_sym (cdr slst))))))
(define subst-in-s-exp
(lambda (new_sym old_sym sexp)
(if (symbol? sexp)
(if (eqv? sexp old_sym) new_sym sexp)
(subst new_sym old_sym sexp))))
(provide subst)
| null | https://raw.githubusercontent.com/alanzplus/EOPL/d7b06392d26d93df851d0ca66d9edc681a06693c/EOPL/ch1/subst.rkt | racket | #lang eopl
(define subst
(lambda (new_sym old_sym slst)
(if (null? slst)
'()
(cons
(subst-in-s-exp new_sym old_sym (car slst))
(subst new_sym old_sym (cdr slst))))))
(define subst-in-s-exp
(lambda (new_sym old_sym sexp)
(if (symbol? sexp)
(if (eqv? sexp old_sym) new_sym sexp)
(subst new_sym old_sym sexp))))
(provide subst)
| |
bd80ca41ad7bc127a63b4278b0371e0626cd4b4d061f5f63405b5e3a886a0d9f | lambdamikel/DLMAPS | hooks9.lisp | -*- Mode : Lisp ; Syntax : Ansi - Common - Lisp ; Package : THEMATIC - SUBSTRATE ; Base : 10 -*-
(in-package :THEMATIC-SUBSTRATE)
;;;
;;;
;;;
(defmethod register-bindings :before ((substrate substrate) (query query) (answer-pattern list)
(new-bindings list))
(with-critical-section
(let ((tuple (construct-result-tuple query)))
(when tuple
(with-slots (bindings-queue new-abox-assertions
abox-assertions-to-add
last-queue-item
add-rule-consequences-p
tuple-at-a-time-p) query
(when (is-rule-p query)
(push tuple new-abox-assertions)
(unless tuple-at-a-time-p
(push tuple abox-assertions-to-add)))
(if (not last-queue-item)
(progn
(setf bindings-queue (list tuple))
(setf last-queue-item (last bindings-queue)))
(progn
(setf (cdr last-queue-item)
(list tuple))
(setf last-queue-item
(cdr last-queue-item))))))))
(wait-for-request-or-abort query))
(defmethod register-bindings ((substrate substrate) (query query) (answer-pattern list) (new-bindings list))
'done)
;;;
;;;
;;;
(defmethod querying-started ((substrate substrate) (query query))
t)
(defmethod querying-started :before ((substrate substrate) (query query))
t)
;;;
;;;
;;;
(defmethod querying-ended ((substrate substrate) (query query)))
(defmethod querying-ended :before ((substrate substrate) (query query))
(with-critical-section
(unless (bottom-up-component-query-p query)
(if (is-rule-p query)
(progn
(setf *active-rules* (delete query *active-rules*))
(pushnew query *processed-rules*))
(progn
(setf *active-queries* (delete query *active-queries*))
(pushnew query *processed-queries*))))
etwas Platz schaffen fuer den Garbage Collector !
(with-slots (bottom-up-component-query-p
result-bindings-hash
process
env-setup-fn) query
(unless bottom-up-component-query-p
(setf result-bindings-hash nil)
(dolist (buq (bottom-up-evaluation-plan query))
(setf (slot-value buq 'result-bindings-hash) nil)))
(setf (slot-value (parser query) 'query-hash) nil)
(setf env-setup-fn nil
process nil))))
;;;
;;;
;;;
(defmethod querying-ended :before ((substrate substrate) (query nrql-query))
(setf (slot-value query 'phase-two-started-p) nil))
(defmethod querying-ended ((substrate racer-dummy-substrate) (query nrql-query))
(with-slots (added-premise-axioms substrate) query
wichtig ! das muss hier stattfinden ! in
last - tuple - delivered ! nicht aendern !
(with-critical-section
(when added-premise-axioms
(forget-statement (tbox substrate)
(abox substrate)
added-premise-axioms)
;;; notwendig!
(substrate-needs-reset substrate)))))
;;;
;;;
;;;
(defmethod last-tuple-has-been-delivered ((query query))
t)
(defmethod last-tuple-has-been-delivered ((query nrql-query))
(with-slots (abox-assertions-to-add added-premise-axioms substrate
tuple-at-a-time-p
add-rule-consequences-p) query
(when add-rule-consequences-p
(add-chosen-sets-of-rule-consequences query))
(setf added-premise-axioms nil)))
(defmethod add-chosen-sets-of-rule-consequences ((query nrql-query) &rest args)
(declare (ignore args))
(with-slots (abox-assertions-to-add substrate) query
(dolist (pat abox-assertions-to-add)
(add-abox-assertions substrate pat))
(prog1
abox-assertions-to-add
(setf abox-assertions-to-add nil))))
(defmethod choose-current-set-of-rule-consequences ((query nrql-query) &rest args)
(declare (ignore args))
(with-slots (current-bindings abox-assertions-to-add) query
(unless (member current-bindings
'(:exhausted :timeout
:denied-due-to-deadlock-prevention
:warning-kb-has-changed
:warning-expensive-phase-two-starts))
(push current-bindings abox-assertions-to-add))))
;;;
;;;
;;;
(defmethod add-abox-assertions ((substrate dl-prover-substrate) (assertions list))
(let ((added nil))
(dolist (assertion assertions)
(if (eq assertion :undefined)
(list :undefined)
(ecase (to-keyword (first assertion))
(:instance
(when (=> *dont-add-abox-duplicates-p*
(not (member (list (second assertion) (third assertion))
(dl-prover-all-concept-assertions-for-individual substrate (second assertion))
:test #'equal)))
(push assertion added)
(apply #'dl-prover-add-concept-assertion substrate (rest assertion))))
(:related
(when (=> *dont-add-abox-duplicates-p*
(not (member (list (list (second assertion)
(third assertion))
(fourth assertion))
(dl-prover-all-role-assertions-for-individual-in-domain substrate
(second assertion))
:test #'equal)))
(push assertion added)
(apply #'dl-prover-add-role-assertion substrate (rest assertion))))
(:constrained
(when (=> *dont-add-abox-duplicates-p*
(not (member (third assertion)
(dl-prover-retrieve-individual-attribute-fillers substrate
(second assertion)
(fourth assertion))
:test #'same-abox-individual-p)))
(push assertion added)
(apply #'dl-prover-add-attribute-assertion substrate (rest assertion))))
(:constraints
(dolist (constraint (rest assertion))
(when (=> *dont-add-abox-duplicates-p*
(not (member constraint (dl-prover-all-constraints substrate)
:test #'equal)))
(push assertion added)
(dl-prover-add-constraint-assertion substrate constraint))))
;;;
;;;
;;;
(:forget-concept-assertion
(dl-prover-forget-concept-assertion substrate
(abox substrate)
(second assertion) (third assertion)))
(:forget-role-assertion
(dl-prover-forget-role-assertion substrate
(abox substrate)
(second assertion) (third assertion) (fourth assertion)))
(:forget-constrained-assertion
(dl-prover-forget-constrained-assertion substrate
(abox substrate)
(second assertion) (third assertion) (fourth assertion)))
(:forget-constraint
(dl-prover-forget-constraint substrate
(abox substrate)
(second assertion))))))
added))
| null | https://raw.githubusercontent.com/lambdamikel/DLMAPS/7f8dbb9432069d41e6a7d9c13dc5b25602ad35dc/src/query/hooks9.lisp | lisp | Syntax : Ansi - Common - Lisp ; Package : THEMATIC - SUBSTRATE ; Base : 10 -*-
notwendig!
|
(in-package :THEMATIC-SUBSTRATE)
(defmethod register-bindings :before ((substrate substrate) (query query) (answer-pattern list)
(new-bindings list))
(with-critical-section
(let ((tuple (construct-result-tuple query)))
(when tuple
(with-slots (bindings-queue new-abox-assertions
abox-assertions-to-add
last-queue-item
add-rule-consequences-p
tuple-at-a-time-p) query
(when (is-rule-p query)
(push tuple new-abox-assertions)
(unless tuple-at-a-time-p
(push tuple abox-assertions-to-add)))
(if (not last-queue-item)
(progn
(setf bindings-queue (list tuple))
(setf last-queue-item (last bindings-queue)))
(progn
(setf (cdr last-queue-item)
(list tuple))
(setf last-queue-item
(cdr last-queue-item))))))))
(wait-for-request-or-abort query))
(defmethod register-bindings ((substrate substrate) (query query) (answer-pattern list) (new-bindings list))
'done)
(defmethod querying-started ((substrate substrate) (query query))
t)
(defmethod querying-started :before ((substrate substrate) (query query))
t)
(defmethod querying-ended ((substrate substrate) (query query)))
(defmethod querying-ended :before ((substrate substrate) (query query))
(with-critical-section
(unless (bottom-up-component-query-p query)
(if (is-rule-p query)
(progn
(setf *active-rules* (delete query *active-rules*))
(pushnew query *processed-rules*))
(progn
(setf *active-queries* (delete query *active-queries*))
(pushnew query *processed-queries*))))
etwas Platz schaffen fuer den Garbage Collector !
(with-slots (bottom-up-component-query-p
result-bindings-hash
process
env-setup-fn) query
(unless bottom-up-component-query-p
(setf result-bindings-hash nil)
(dolist (buq (bottom-up-evaluation-plan query))
(setf (slot-value buq 'result-bindings-hash) nil)))
(setf (slot-value (parser query) 'query-hash) nil)
(setf env-setup-fn nil
process nil))))
(defmethod querying-ended :before ((substrate substrate) (query nrql-query))
(setf (slot-value query 'phase-two-started-p) nil))
(defmethod querying-ended ((substrate racer-dummy-substrate) (query nrql-query))
(with-slots (added-premise-axioms substrate) query
wichtig ! das muss hier stattfinden ! in
last - tuple - delivered ! nicht aendern !
(with-critical-section
(when added-premise-axioms
(forget-statement (tbox substrate)
(abox substrate)
added-premise-axioms)
(substrate-needs-reset substrate)))))
(defmethod last-tuple-has-been-delivered ((query query))
t)
(defmethod last-tuple-has-been-delivered ((query nrql-query))
(with-slots (abox-assertions-to-add added-premise-axioms substrate
tuple-at-a-time-p
add-rule-consequences-p) query
(when add-rule-consequences-p
(add-chosen-sets-of-rule-consequences query))
(setf added-premise-axioms nil)))
(defmethod add-chosen-sets-of-rule-consequences ((query nrql-query) &rest args)
(declare (ignore args))
(with-slots (abox-assertions-to-add substrate) query
(dolist (pat abox-assertions-to-add)
(add-abox-assertions substrate pat))
(prog1
abox-assertions-to-add
(setf abox-assertions-to-add nil))))
(defmethod choose-current-set-of-rule-consequences ((query nrql-query) &rest args)
(declare (ignore args))
(with-slots (current-bindings abox-assertions-to-add) query
(unless (member current-bindings
'(:exhausted :timeout
:denied-due-to-deadlock-prevention
:warning-kb-has-changed
:warning-expensive-phase-two-starts))
(push current-bindings abox-assertions-to-add))))
(defmethod add-abox-assertions ((substrate dl-prover-substrate) (assertions list))
(let ((added nil))
(dolist (assertion assertions)
(if (eq assertion :undefined)
(list :undefined)
(ecase (to-keyword (first assertion))
(:instance
(when (=> *dont-add-abox-duplicates-p*
(not (member (list (second assertion) (third assertion))
(dl-prover-all-concept-assertions-for-individual substrate (second assertion))
:test #'equal)))
(push assertion added)
(apply #'dl-prover-add-concept-assertion substrate (rest assertion))))
(:related
(when (=> *dont-add-abox-duplicates-p*
(not (member (list (list (second assertion)
(third assertion))
(fourth assertion))
(dl-prover-all-role-assertions-for-individual-in-domain substrate
(second assertion))
:test #'equal)))
(push assertion added)
(apply #'dl-prover-add-role-assertion substrate (rest assertion))))
(:constrained
(when (=> *dont-add-abox-duplicates-p*
(not (member (third assertion)
(dl-prover-retrieve-individual-attribute-fillers substrate
(second assertion)
(fourth assertion))
:test #'same-abox-individual-p)))
(push assertion added)
(apply #'dl-prover-add-attribute-assertion substrate (rest assertion))))
(:constraints
(dolist (constraint (rest assertion))
(when (=> *dont-add-abox-duplicates-p*
(not (member constraint (dl-prover-all-constraints substrate)
:test #'equal)))
(push assertion added)
(dl-prover-add-constraint-assertion substrate constraint))))
(:forget-concept-assertion
(dl-prover-forget-concept-assertion substrate
(abox substrate)
(second assertion) (third assertion)))
(:forget-role-assertion
(dl-prover-forget-role-assertion substrate
(abox substrate)
(second assertion) (third assertion) (fourth assertion)))
(:forget-constrained-assertion
(dl-prover-forget-constrained-assertion substrate
(abox substrate)
(second assertion) (third assertion) (fourth assertion)))
(:forget-constraint
(dl-prover-forget-constraint substrate
(abox substrate)
(second assertion))))))
added))
|
7015ad21e894cf72606addc07598b4ea33dbb077d8201e69a44612446402fb53 | vbedegi/re-alm | core_test.clj | (ns re-alm.core-test
(:require [clojure.test :refer :all]
[re-alm.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| null | https://raw.githubusercontent.com/vbedegi/re-alm/73fdb86b2cb92bec16865be44b101361e7e84115/test/re_alm/core_test.clj | clojure | (ns re-alm.core-test
(:require [clojure.test :refer :all]
[re-alm.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| |
a85c4c84955592bf45ba74204e20b973a6516a24b4e03779e589c6415be884e8 | onetom/clj-figwheel-main-devcards | core_test.cljs | (ns app.core-test
(:require [app.core]))
| null | https://raw.githubusercontent.com/onetom/clj-figwheel-main-devcards/5ee1d8c5ee3d9399c03855deaa42adaaa1e40154/test/app/core_test.cljs | clojure | (ns app.core-test
(:require [app.core]))
| |
7b1b6dd9bd1a5f7933b93df946517a018d0a9234bff6e23090f62677b34a5a88 | simmone/racket-simple-xlsx | rels.rkt | #lang racket
(require simple-xml)
(require "../xlsx/xlsx.rkt")
(provide (contract-out
[rels (-> list?)]
[write-rels (->* () (path-string?) void?)]
))
(define (rels)
'("Relationships"
("xmlns" . "")
("Relationship"
("Id" . "rId3")
("Type" . "-properties")
("Target" . "docProps/app.xml"))
("Relationship"
("Id". "rId2")
("Type" . "-properties")
("Target" . "docProps/core.xml"))
("Relationship"
("Id" . "rId1")
("Type" . "")
("Target" . "xl/workbook.xml"))))
(define (write-rels [output_dir #f])
(let ([dir (if output_dir output_dir (build-path (XLSX-xlsx_dir (*XLSX*)) "_rels"))])
(make-directory* dir)
(with-output-to-file (build-path dir ".rels")
#:exists 'replace
(lambda ()
(printf "~a" (lists->xml (rels)))))))
| null | https://raw.githubusercontent.com/simmone/racket-simple-xlsx/e0ac3190b6700b0ee1dd80ed91a8f4318533d012/simple-xlsx/_rels/rels.rkt | racket | #lang racket
(require simple-xml)
(require "../xlsx/xlsx.rkt")
(provide (contract-out
[rels (-> list?)]
[write-rels (->* () (path-string?) void?)]
))
(define (rels)
'("Relationships"
("xmlns" . "")
("Relationship"
("Id" . "rId3")
("Type" . "-properties")
("Target" . "docProps/app.xml"))
("Relationship"
("Id". "rId2")
("Type" . "-properties")
("Target" . "docProps/core.xml"))
("Relationship"
("Id" . "rId1")
("Type" . "")
("Target" . "xl/workbook.xml"))))
(define (write-rels [output_dir #f])
(let ([dir (if output_dir output_dir (build-path (XLSX-xlsx_dir (*XLSX*)) "_rels"))])
(make-directory* dir)
(with-output-to-file (build-path dir ".rels")
#:exists 'replace
(lambda ()
(printf "~a" (lists->xml (rels)))))))
| |
907e92dbdd58c8d44caaa2ae9076218a023e3d120f1d9e17fe6ac0ccb08161f8 | cronokirby/haze | Bencoding.hs | |
Description : Functions related to the type
Contains the base data type representing
data , as well as encoders and decoders to marshall
this data from byte strings .
ByteString is used as the base target , since Bencoding
is n't guaranteed to be in utf-8 text , however the strings
contained inside the format are .
Description: Functions related to the Bencoding type
Contains the base data type representing Bencoding
data, as well as encoders and decoders to marshall
this data from byte strings.
ByteString is used as the base target, since Bencoding
isn't guaranteed to be in utf-8 text, however the strings
contained inside the format are.
-}
module Haze.Bencoding
( Bencoding(..)
, Encoder(..)
, encodeBen
, encode
, DecodeError(..)
, Decoder(..)
, decodeBen
, decode
)
where
import Relude
import qualified Data.ByteString.Char8 as BS
import qualified Data.Attoparsec.ByteString.Char8
as AP
import qualified Data.HashMap.Strict as HM
import Data.Int ( Int64 )
| Represents data .
strings can be arbitrary byte sequences , and are n't
always guaranteed to be valid text .
The show instance just shows the raw data structure ,
the encode the structure as a sequence of bytes ,
look to the other encoding functions instead .
Bencoded strings can be arbitrary byte sequences, and aren't
always guaranteed to be valid text.
The show instance just shows the raw data structure,
the encode the structure as a sequence of bytes,
look to the other encoding functions instead.
-}
data Bencoding
= BString !ByteString
| BInt !Int64
| BList ![Bencoding]
| BMap !(HM.HashMap ByteString Bencoding)
deriving (Eq, Show)
| Represents the encoding of some type into Bencoding
newtype Encoder a = Encoder
{ runEncoder :: a -> Bencoding
}
{- | Encode Bencoding as itself.
This is useful for combining with 'encode'.
-}
encodeBen :: Encoder Bencoding
encodeBen = Encoder id
-- | Encode a thing as a bytestring.
encode :: Encoder a -> a -> ByteString
encode encoder = encodeBS . runEncoder encoder
where
encodeBS :: Bencoding -> ByteString
encodeBS (BString t ) = show (BS.length t) <> ":" <> t
encodeBS (BInt i ) = "i" <> show i <> "e"
encodeBS (BList bs) = "l" <> foldMap encodeBS bs <> "e"
encodeBS (BMap mp) = "d" <> foldMap encodeKeyPair (toSorted mp) <> "e"
toSorted :: Ord k => HM.HashMap k v -> [(k, v)]
toSorted = sortWith fst . HM.toList
encodeKeyPair :: (ByteString, Bencoding) -> ByteString
encodeKeyPair (k, v) = encodeBS (BString k) <> encodeBS v
| Represents decoding errors for a bytestring
newtype DecodeError = DecodeError Text deriving (Eq, Show)
| Represents the decoding of some Bencoding structure into a type
newtype Decoder a = Decoder
{ runDecoder :: Bencoding -> a
}
{- | Decode Bencoding as itself.
This is useful for combining with 'decode'.
-}
decodeBen :: Decoder Bencoding
decodeBen = Decoder id
-- | Decode a bytestring into something
decode :: Decoder a -> ByteString -> Either DecodeError a
decode (Decoder d) = fmap d . makeDecoderError . AP.parseOnly parse
where
makeDecoderError = either (Left . DecodeError . toText) Right
parse :: AP.Parser Bencoding
parse = parseInt <|> (BString <$> parseString) <|> parseList <|> parseMap
parseInt :: AP.Parser Bencoding
parseInt = do
_ <- AP.char 'i'
int <- signedInt
_ <- AP.char 'e'
return (BInt int)
where
signedInt :: AP.Parser Int64
signedInt = (negate <$> (AP.char '-' *> AP.decimal)) <|> AP.decimal
parseString :: AP.Parser ByteString
parseString = do
len <- AP.decimal
_ <- AP.char ':'
AP.take len
parseList :: AP.Parser Bencoding
parseList = do
_ <- AP.char 'l'
xs <- AP.many' parse
_ <- AP.char 'e'
return (BList xs)
parseMap :: AP.Parser Bencoding
parseMap = do
_ <- AP.char 'd'
pairs <- AP.many' parsePair
_ <- AP.char 'e'
return . BMap . HM.fromList $ pairs
parsePair :: AP.Parser (ByteString, Bencoding)
parsePair = do
k <- parseString
v <- parse
return (k, v)
| null | https://raw.githubusercontent.com/cronokirby/haze/3cfbc9de8d923a541429f4a5cb1eb4151d5aea08/src/Haze/Bencoding.hs | haskell | | Encode Bencoding as itself.
This is useful for combining with 'encode'.
| Encode a thing as a bytestring.
| Decode Bencoding as itself.
This is useful for combining with 'decode'.
| Decode a bytestring into something | |
Description : Functions related to the type
Contains the base data type representing
data , as well as encoders and decoders to marshall
this data from byte strings .
ByteString is used as the base target , since Bencoding
is n't guaranteed to be in utf-8 text , however the strings
contained inside the format are .
Description: Functions related to the Bencoding type
Contains the base data type representing Bencoding
data, as well as encoders and decoders to marshall
this data from byte strings.
ByteString is used as the base target, since Bencoding
isn't guaranteed to be in utf-8 text, however the strings
contained inside the format are.
-}
module Haze.Bencoding
( Bencoding(..)
, Encoder(..)
, encodeBen
, encode
, DecodeError(..)
, Decoder(..)
, decodeBen
, decode
)
where
import Relude
import qualified Data.ByteString.Char8 as BS
import qualified Data.Attoparsec.ByteString.Char8
as AP
import qualified Data.HashMap.Strict as HM
import Data.Int ( Int64 )
| Represents data .
strings can be arbitrary byte sequences , and are n't
always guaranteed to be valid text .
The show instance just shows the raw data structure ,
the encode the structure as a sequence of bytes ,
look to the other encoding functions instead .
Bencoded strings can be arbitrary byte sequences, and aren't
always guaranteed to be valid text.
The show instance just shows the raw data structure,
the encode the structure as a sequence of bytes,
look to the other encoding functions instead.
-}
data Bencoding
= BString !ByteString
| BInt !Int64
| BList ![Bencoding]
| BMap !(HM.HashMap ByteString Bencoding)
deriving (Eq, Show)
| Represents the encoding of some type into Bencoding
newtype Encoder a = Encoder
{ runEncoder :: a -> Bencoding
}
encodeBen :: Encoder Bencoding
encodeBen = Encoder id
encode :: Encoder a -> a -> ByteString
encode encoder = encodeBS . runEncoder encoder
where
encodeBS :: Bencoding -> ByteString
encodeBS (BString t ) = show (BS.length t) <> ":" <> t
encodeBS (BInt i ) = "i" <> show i <> "e"
encodeBS (BList bs) = "l" <> foldMap encodeBS bs <> "e"
encodeBS (BMap mp) = "d" <> foldMap encodeKeyPair (toSorted mp) <> "e"
toSorted :: Ord k => HM.HashMap k v -> [(k, v)]
toSorted = sortWith fst . HM.toList
encodeKeyPair :: (ByteString, Bencoding) -> ByteString
encodeKeyPair (k, v) = encodeBS (BString k) <> encodeBS v
| Represents decoding errors for a bytestring
newtype DecodeError = DecodeError Text deriving (Eq, Show)
| Represents the decoding of some Bencoding structure into a type
newtype Decoder a = Decoder
{ runDecoder :: Bencoding -> a
}
decodeBen :: Decoder Bencoding
decodeBen = Decoder id
decode :: Decoder a -> ByteString -> Either DecodeError a
decode (Decoder d) = fmap d . makeDecoderError . AP.parseOnly parse
where
makeDecoderError = either (Left . DecodeError . toText) Right
parse :: AP.Parser Bencoding
parse = parseInt <|> (BString <$> parseString) <|> parseList <|> parseMap
parseInt :: AP.Parser Bencoding
parseInt = do
_ <- AP.char 'i'
int <- signedInt
_ <- AP.char 'e'
return (BInt int)
where
signedInt :: AP.Parser Int64
signedInt = (negate <$> (AP.char '-' *> AP.decimal)) <|> AP.decimal
parseString :: AP.Parser ByteString
parseString = do
len <- AP.decimal
_ <- AP.char ':'
AP.take len
parseList :: AP.Parser Bencoding
parseList = do
_ <- AP.char 'l'
xs <- AP.many' parse
_ <- AP.char 'e'
return (BList xs)
parseMap :: AP.Parser Bencoding
parseMap = do
_ <- AP.char 'd'
pairs <- AP.many' parsePair
_ <- AP.char 'e'
return . BMap . HM.fromList $ pairs
parsePair :: AP.Parser (ByteString, Bencoding)
parsePair = do
k <- parseString
v <- parse
return (k, v)
|
9b4289f3eab80246baa3144256514d70764e4c7d6600acd2a2e158eb4c64a8ca | samedhi/firemore | firebase_test.cljs | (ns firemore.firebase-test
(:require
[firemore.firebase :as sut]
[cljs.test :as t :include-macros true]))
(t/deftest fundamentals-test
(t/is (some? @sut/FB))
(t/is (some? (sut/db @sut/FB)))
(t/is (some? (sut/auth @sut/FB))))
| null | https://raw.githubusercontent.com/samedhi/firemore/8a1829efbe9cbed367f1d49d0e92e01c5aa20ad5/test/firemore/firebase_test.cljs | clojure | (ns firemore.firebase-test
(:require
[firemore.firebase :as sut]
[cljs.test :as t :include-macros true]))
(t/deftest fundamentals-test
(t/is (some? @sut/FB))
(t/is (some? (sut/db @sut/FB)))
(t/is (some? (sut/auth @sut/FB))))
| |
3875f496560a33527862d194b6b8bba46418250dff73c6290b24950d0027bddd | mihaimaruseac/io-manager | SimpleEchoExample.hs | |
Module : $ Header$
Description : Simple Example of using the io - manager library
Copyright : ( c ) : BSD3
Maintainer :
Stability : stable
Portability : portable
A simple test module for IOManager which will echo the contents of the
input files to the output files , paired as specified in standard input :
each line of stdin is made of two words : « input output » meaning that the
contents of « input » should be echoed to « output » . If input is « @stdin » then
the content to be echoed is from the standard input . If output is « @stdout »
then content should be written to standard ouput instead of a file . Same is
true for the case where « @stderr » is in the ouput part .
Assume that the stdin description is valid : no « @stdout»/«@stderr » on the
input part and no « @stdin » on the output part . Also , no file appears both
in the input and output part .
Module : $Header$
Description : Simple Example of using the io-manager library
Copyright : (c) Mihai Maruseac
License : BSD3
Maintainer :
Stability : stable
Portability : portable
A simple test module for IOManager which will echo the contents of the
input files to the output files, paired as specified in standard input:
each line of stdin is made of two words: «input output» meaning that the
contents of «input» should be echoed to «output». If input is «@stdin» then
the content to be echoed is from the standard input. If output is «@stdout»
then content should be written to standard ouput instead of a file. Same is
true for the case where «@stderr» is in the ouput part.
Assume that the stdin description is valid: no «@stdout»/«@stderr» on the
input part and no «@stdin» on the output part. Also, no file appears both
in the input and output part.
-}
module Main where
import Training.MM.IOManager
-- | The main function simply tells which is the function implemented by the
-- student.
main :: IO ()
main = wrapIO solve
-- | Solution of the problem. Add here your implementation.
solve :: Input -> Output -> Output
solve i = convert (map words . lines . getStdIn $ i) i
-- | Convert input representation to output representation.
convert :: [[String]] -> Input -> Output -> Output
convert [] _ o = o
convert ([fi, fo]:fs) i o = convert fs i $ link fi fo i o
convert _ _ _ = error "Invalid input"
| Link input and output and stream the contents between the two .
link :: String -> String -> Input -> Output -> Output
link "@stdin" fo i o = write fo (getStdIn i) o
link fi fo i o = write fo (getInputFile i fi) o
-- | Output the contents.
write :: String -> String -> Output -> Output
write "@stdout" text o = writeStdOut o text
write "@stderr" text o = writeStdErr o text
write fi text o = writeOutputFile o fi text
| null | https://raw.githubusercontent.com/mihaimaruseac/io-manager/590462cd87a22d0eff2b12824cd3c4a775f206ce/simple-echo-example/SimpleEchoExample.hs | haskell | | The main function simply tells which is the function implemented by the
student.
| Solution of the problem. Add here your implementation.
| Convert input representation to output representation.
| Output the contents. | |
Module : $ Header$
Description : Simple Example of using the io - manager library
Copyright : ( c ) : BSD3
Maintainer :
Stability : stable
Portability : portable
A simple test module for IOManager which will echo the contents of the
input files to the output files , paired as specified in standard input :
each line of stdin is made of two words : « input output » meaning that the
contents of « input » should be echoed to « output » . If input is « @stdin » then
the content to be echoed is from the standard input . If output is « @stdout »
then content should be written to standard ouput instead of a file . Same is
true for the case where « @stderr » is in the ouput part .
Assume that the stdin description is valid : no « @stdout»/«@stderr » on the
input part and no « @stdin » on the output part . Also , no file appears both
in the input and output part .
Module : $Header$
Description : Simple Example of using the io-manager library
Copyright : (c) Mihai Maruseac
License : BSD3
Maintainer :
Stability : stable
Portability : portable
A simple test module for IOManager which will echo the contents of the
input files to the output files, paired as specified in standard input:
each line of stdin is made of two words: «input output» meaning that the
contents of «input» should be echoed to «output». If input is «@stdin» then
the content to be echoed is from the standard input. If output is «@stdout»
then content should be written to standard ouput instead of a file. Same is
true for the case where «@stderr» is in the ouput part.
Assume that the stdin description is valid: no «@stdout»/«@stderr» on the
input part and no «@stdin» on the output part. Also, no file appears both
in the input and output part.
-}
module Main where
import Training.MM.IOManager
main :: IO ()
main = wrapIO solve
solve :: Input -> Output -> Output
solve i = convert (map words . lines . getStdIn $ i) i
convert :: [[String]] -> Input -> Output -> Output
convert [] _ o = o
convert ([fi, fo]:fs) i o = convert fs i $ link fi fo i o
convert _ _ _ = error "Invalid input"
| Link input and output and stream the contents between the two .
link :: String -> String -> Input -> Output -> Output
link "@stdin" fo i o = write fo (getStdIn i) o
link fi fo i o = write fo (getInputFile i fi) o
write :: String -> String -> Output -> Output
write "@stdout" text o = writeStdOut o text
write "@stderr" text o = writeStdErr o text
write fi text o = writeOutputFile o fi text
|
4951de74ca183fc77bcd8bc375a85861c38145e0975a51962a8ffe24ac699ecf | VisionsGlobalEmpowerment/webchange | running_time_limited.clj | (ns webchange.templates.library.running-time-limited
(:require
[webchange.templates.core :as core]
[webchange.templates.utils.common :as common]
[webchange.templates.utils.dialog :as dialog]))
(def available-times (mapv (fn [v] {:text (str v " sec") :value v}) (range 30 70 10)))
(def available-speed
[{:text "Fast" :value 11}
{:text "Medium" :value 5}
{:text "Slow" :value 1}])
(def available-scale
[{:text "1" :value 1}
{:text "0.8" :value 0.8}
{:text "0.5" :value 0.5}])
(def template-options
[{:type "note"
:text "Character will run into boxes filled with letters. They must run through the correct letter to count as a correct answer."}
{:type "group"
:label "Add a Correct Response"
:children [{:type "letter-lookup"
:key :correct-letter
:label "Correct Letter"
:placeholder "Choose"}]}
{:type "group"
:label "Add Incorrect Responses"
:children [{:type "letter-lookup"
:key :incorrect-letter-1
:label "Incorrect Letter"
:placeholder "Choose"}
{:type "letter-lookup"
:key :incorrect-letter-2
:label "Incorrect Letter"
:placeholder "Choose"}
{:type "letter-lookup"
:key :incorrect-letter-3
:label "Incorrect Letter"
:placeholder "Choose"}]}
{:type "group"
:label "Game Settings"
:children [{:type "lookup"
:key :time
:label "Game Duration"
:options available-times}
{:type "lookup"
:key :speed
:label "Character Speed"
:placeholder "Choose"
:options available-speed}
{:type "lookup"
:key :font-scale
:label "Font Scale"
:placeholder "Choose"
:options available-scale}]}])
(def m {:id 34
:name "Running (time limited)"
:tags ["Independent Practice"]
:description "Users move a character around a race track filled with answer images. Before time runs out, users must steer the character to as many correct answer options as possible while avoiding incorrect answer images."
:props {:game-changer? true}
:version 2
:options {:time {:label "Time in seconds"
:type "lookup"
:description "Time in seconds"
:options available-times}
:correct-letter {:label "Correct Letter"
:type "string"}
:incorrect-letter-1 {:label "Incorrect letter 1"
:type "string"}
:incorrect-letter-2 {:label "Incorrect letter 2"
:type "string"}
:incorrect-letter-3 {:label "Incorrect letter 3"
:type "string"}}
:actions {:change-time {:title "Change time"
:options {:time {:type "lookup"
:options available-times}}}
:change-speed {:title "Change speed"
:default-props "change-speed"
:options {:speed {:type "lookup"
:options (mapv (fn [v] {:name (str v) :value v}) (range 1 11))}}}
:template-options {:title "Template Options"
:options template-options}}})
(def concept-var "current-concept")
(def t {:assets [{:url "/raw/img/running-with-letters/bg_01.jpg" :type "image"}
{:url "/raw/img/running-with-letters/bg_02.png" :type "image"}
{:url "/raw/img/vera.png" :type "image"}
{:url "/raw/img/running-with-letters/box.png" :type "image"}]
:objects {:background {:type "carousel"
:x 0
:y 0
:width 1920
:height 1080
:speed 0
:first "/raw/img/running-with-letters/bg_02.png"
:last "/raw/img/running-with-letters/bg_02.png"
:next "/raw/img/running-with-letters/bg_02.png"}
:frame {:type "rectangle"
:x 676
:y 64
:fill 0xFFFFFF
:width 568
:height 152
:border-radius 24}
:timer {:type "timer"
:transition "timer"
:x 1126
:y 88
:show-minutes true
:show-progress true
:size 104
:time 60
:font-size 24
:thickness 12
:font-weight "normal"
:font-family "Roboto"
:progress-color 0xff9000
:color 0x010101
:filters [{:name "brightness" :value 0}]
:actions {:end {:on "end" :type "action" :id "finish-game"}}}
:target-group {:type "group"
:x 676
:y 64
:children ["letter-background" "letter-target"
; "box-target-background" "box-target"
"counter-background" "counter"]}
:letter-background {:type "rectangle"
:x 40
:y 24
:fill 0xFF9000
:width 104
:height 104
:border-radius 52
:filters [{:name "brightness" :value 0}]
:transition "letter-target-background"}
:letter-target {:type "text"
:x 56
:y 35
:width 72
:height 88
:transition "letter-target"
:align "center"
:fill 0xFFFFFF
:font-family "Lexend Deca"
:font-size 72
:text " "
:vertical-align "middle"}
:box-target-background {:type "rectangle"
:x 168
:y 24
:fill 0xECECEC
:width 104
:height 104
:border-radius 52}
:box-target {:type "image",
:x 184,
:y 40,
:transition "box-target"
:width 72,
:height 72,
:src ""}
:counter-background {:type "rectangle"
:x 232
:y 24
:fill 0xECECEC
:width 104
:height 104
:border-radius 52
:filters [{:name "brightness" :value 0}
{:name "glow" :outer-strength 0 :color 0xffd700}]
:transition "counter-background"}
:counter {:type "counter"
:transition "counter"
:x 284
:y 48,
:font-family "Roboto"
:font-size 48
:color 0x000000}
:line-1 {:type "transparent"
:x 0
:y 610
:width 1920
:height 150
:actions {:click {:id "go-line-check" :on "click" :type "action" :params {:line "box1"}}
:pointerdown {:id "go-line-check" :on "pointerdown" :type "action" :params {:line "box1"}}
:pointerover {:id "go-line-check" :on "pointerover" :type "action" :params {:line "box1"}}}}
:line-2 {:type "transparent"
:x 0
:y 780
:width 1920
:height 170
:actions {:click {:id "go-line-check" :on "click" :type "action" :params {:line "box2"}}
:pointerdown {:id "go-line-check" :on "pointerdown" :type "action" :params {:line "box2"}}
:pointerover {:id "go-line-check" :on "pointerover" :type "action" :params {:line "box2"}}}}
:line-3 {:type "transparent"
:x 0
:y 950
:width 1920
:height 180
:actions {:click {:id "go-line-check" :on "click" :type "action" :params {:line "box3"}}
:pointerdown {:id "go-line-check" :on "pointerdown" :type "action" :params {:line "box3"}}
:pointerover {:id "go-line-check" :on "pointerover" :type "action" :params {:line "box3"}}}}
:mari {:type "animation"
:x 1365
:y 311
:width 473
:height 511
:scene-name "mari"
:transition "mari"
:anim "idle"
:anim-offset {:x 0 :y -150}
:name "mari"
:scale-x 0.5
:scale-y 0.5
:speed 0.5
:start true
:editable? {:select true :drag true :show-in-tree? true}}
:vera-group {:type "group"
:x 500
:y 865
:transition "vera-group"
:visible false
:children ["vera" "vera-collision-test"]}
:emit-group {:type "group"}
:vera {:type "animation"
:x 0
:y -55
:width 727
:height 1091
:scene-name "vera"
:transition "vera"
:anim "run"
:meshes true
:name "vera-90"
:scale-x 0.4
:scale-y 0.4
:skin "default"
:speed 1
:start true}
:vera-stopped {:type "image",
:x 300
:y 370
:width 727
:scale {:x 0.75 :y 0.75}
:height 1091
:src "/raw/img/vera.png"}
:vera-collision-test {:type "transparent"
:x 150
:y -55
:width 10
:height 10
:transition "vera-collision-test"
:collidable? true
:actions {:collide {:on "collide-enter"
:collision-type "bounds"
:test ["#^target-letter-.*"]
:type "action"
:id "check-box"
:pick-event-param ["custom-data" "transition-name"]}}}}
:scene-objects [["background"]
["frame"]
["emit-group"]
["vera-stopped" "vera-group" "mari"]
["target-group" "timer" "line-1" "line-2" "line-3"]]
:actions {:dialog-1-welcome {:type "sequence-data"
:editor-type "dialog"
:available-activities ["highlight-target-letter", "highlight-timer" "highlight-counter"]
:concept-var concept-var
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence" :phrase-text "New action" :audio nil}]}]
:phrase "welcome"
:phrase-description "Welcome dialog"
:dialog-track "1 Welcome"
:skippable true}
:dialog-2-intro-concept {:type "sequence-data"
:editor-type "dialog"
:available-activities ["highlight-target-letter", "highlight-timer" "highlight-counter"]
:concept-var concept-var
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence" :phrase-text "New action" :audio nil}]}]
:phrase "concept"
:phrase-description "Introduce concept"
:dialog-track "2 Introduce"
:tags ["instruction"]}
:dialog-3-intro-timer {:type "sequence-data"
:editor-type "dialog"
:available-activities ["highlight-target-letter", "highlight-timer" "highlight-counter"]
:concept-var concept-var
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence" :phrase-text "New action" :audio nil}]}]
:phrase "timer"
:phrase-description "Introduce timer"
:dialog-track "2 Introduce"
:tags ["instruction"]}
:dialog-4-ready-go {:type "sequence-data"
:editor-type "dialog"
:available-activities ["highlight-target-letter", "highlight-timer" "highlight-counter"]
:concept-var concept-var
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence" :phrase-text "New action" :audio nil}]}]
:phrase "ready-go"
:phrase-description "Ready-Go"
:dialog-track "3 Start"}
:highlight-target-letter {:type "transition"
:transition-id "letter-target-background"
:return-immediately true
:from {:brightness 0 :hue 0},
:to {:brightness 1 :yoyo true :duration 0.5 :repeat 5}
}
:highlight-counter {:type "transition"
:transition-id "counter-background"
:return-immediately true
:from {:brightness 0 :glow 0}
:to {:brightness 0.1 :glow 10 :yoyo true :duration 0.5 :repeat 5}
}
:highlight-timer {:type "transition"
:transition-id "timer"
:return-immediately true
:from {:brightness 0 :hue 0},
:to {:brightness 1 :yoyo true :duration 0.5 :repeat 5}
}
:dialog-5-starting-noise {:type "sequence-data"
:editor-type "dialog"
:concept-var concept-var
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence" :phrase-text "New action" :audio nil}]}]
:phrase "noise"
:phrase-description "Starting noise"
:dialog-track "3 Start"}
:dialog-6-correct {:type "sequence-data"
:editor-type "dialog"
:available-activities ["highlight-target-letter", "highlight-timer" "highlight-counter"]
:concept-var concept-var
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence" :phrase-text "New action" :audio nil}]}]
:phrase "correct"
:phrase-description "Correct dialog"
:dialog-track "4 Options"}
:dialog-7-wrong {:type "sequence-data"
:editor-type "dialog"
:available-activities ["highlight-target-letter", "highlight-timer" "highlight-counter"]
:concept-var concept-var
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence" :phrase-text "New action" :audio nil}]}]
:phrase "wrong"
:phrase-description "Wrong dialog"
:dialog-track "4 Options"}
:go-line-check {:type "test-value"
:fail "go-line"
:from-var [{:var-name "current-line" :action-property "value1"}]
:from-params [{:param-property "line" :action-property "value2"}]}
:go-line {:type "sequence-data"
:data [{:type "set-variable"
:var-name "current-line"
:from-params [{:param-property "line" :action-property "var-value"}]}
{:type "case"
:options {:box1 {:id "go-to-box1-line" :type "action"}
:box2 {:id "go-to-box2-line" :type "action"}
:box3 {:id "go-to-box3-line" :type "action"}}
:from-var [{:var-name "current-line" :action-property "value"}]}]}
:go-to-box1-line {:type "transition"
:to {:y 685 :duration 0.5}
:transition-id "vera-group"}
:go-to-box2-line {:type "transition"
:to {:y 865 :duration 0.5}
:transition-id "vera-group"}
:go-to-box3-line {:type "transition"
:to {:y 1040 :duration 0.5}
:transition-id "vera-group"}
:init-vars {:type "parallel"
:data [{:type "set-variable" :var-name "game-finished" :var-value false}
{:type "set-variable" :var-name "current-line" :var-value "box2"}]}
:init-concept {:type "parallel"
:data []}
:check-box {:type "test-var-scalar"
:var-name "game-finished"
:value false
:success {:type "test-value"
:from-var [{:action-property "value1" :var-name concept-var}]
:from-params [{:action-property "value2" :param-property "custom-data"}]
:success "pick-correct"
:fail "pick-wrong"}}
:pick-correct {:type "sequence-data"
:data [{:id "dialog-6-correct" :type "action" :return-immediately true}
{:type "set-attribute" :attr-name "visible" :attr-value false
:from-params [{:action-property "target" :param-property "transition-name"}]}
{:type "counter-inc" :target "counter"}
{:data [{:data [{:id "run_jump" :type "animation" :target "vera" :loop false}
{:id "run" :loop true :type "add-animation" :target "vera"}]
:return-immediately true
:type "sequence-data"}]
:type "parallel"}]}
:pick-wrong {:type "sequence-data"
:data [{:id "dialog-7-wrong" :type "action"}]}
:welcome {:type "sequence-data"
:data [{:type "action" :id "dialog-1-welcome"}]}
:intro {:type "sequence-data"
:data [{:type "action" :id "dialog-2-intro-concept"}
{:type "action" :id "dialog-3-intro-timer"}]}
:start {:type "sequence-data"
:data [{:type "action" :id "dialog-4-ready-go"}
{:type "action" :id "dialog-5-starting-noise"}]}
:emit-objects {:type "sequence-data"
:data [{:type "action" :id "shuffle-boxes"}
{:type "parallel"
:data [{:type "action" :id "emit-object-line-1"}
{:type "action" :id "emit-object-line-2"}
{:type "action" :id "emit-object-line-3"}]}
{:type "empty" :from-var [{:var-name "emit-duration" :action-property "duration"}]}
{:type "test-var-scalar"
:var-name "game-finished"
:value false
:success "emit-objects"
:fail "finish-activity"}]}
:shuffle-boxes {:type "sequence-data"
:data [{:from ["item-1" "item-2" "item-3" "item-4" "item-5" "item-6" "item-7" "item-8"]
:type "vars-var-provider"
:unique true
:from-var [{:var-key "concept-name"
:var-name concept-var
:action-property "exclude-property-values"}]
:shuffled true
:variables ["pair-concept-1" "pair-concept-2"]}
{:from [concept-var "pair-concept-1" "pair-concept-2"]
:type "vars-var-provider"
:shuffled true
:variables ["box1" "box2" "box3"]}]}
:emit-object-line-1 {:type "test-random"
:chance 0.7
:success {:type "create-object"
:target "emit-group"
:root-object "target-letter"
:return-immediately true
:on-emit {:type "action" :id "move-emitted-letter"}
:data {:target-letter {:type "group"
:x 2100
:y 685
:custom-data ""
:collidable? true
:children ["target-letter-box"
"target-letter-text"]}
:target-letter-box {:type "image"
:x -95
:y -135
:src "/raw/img/running-with-letters/box.png"
}
:target-letter-text {:type "text"
:x 0
:y -60
:align "center"
:vertical-align "middle"
:fill 0x000000
:font-family "Lexend Deca"
:font-size 120
:text ""}}
:from-var [{:var-name "box1" :action-property "data.target-letter.custom-data"}
{:var-name "box1" :action-property "data.target-letter-text.text"}]}}
:emit-object-line-2 {:type "test-random"
:chance 0.7
:success {:type "create-object"
:target "emit-group"
:root-object "target-letter"
:return-immediately true
:on-emit {:type "action" :id "move-emitted-letter"}
:data {:target-letter {:type "group"
:x 2200
:y 865
:custom-data ""
:children ["target-letter-box"
"target-letter-text"]}
:target-letter-box {:type "image"
:x -95
:y -135
:src "/raw/img/running-with-letters/box.png"
}
:target-letter-text {:type "text"
:x 0
:y -60
:align "center"
:vertical-align "middle"
:fill 0x000000
:font-family "Lexend Deca"
:font-size 120
:text ""}}
:from-var [{:var-name "box2" :action-property "data.target-letter.custom-data"}
{:var-name "box2" :action-property "data.target-letter-text.text"}]}}
:emit-object-line-3 {:type "test-random"
:chance 0.7
:success {:type "create-object"
:target "emit-group"
:root-object "target-letter"
:return-immediately true
:on-emit {:type "action" :id "move-emitted-letter"}
:data {:target-letter {:type "group"
:x 2300
:y 1040
:custom-data ""
:children ["target-letter-box"
"target-letter-text"]}
:target-letter-box {:type "image"
:x -95
:y -135
:src "/raw/img/running-with-letters/box.png"
}
:target-letter-text {:type "text"
:x 0
:y -60
:align "center"
:vertical-align "middle"
:fill 0x000000
:font-family "Lexend Deca"
:font-size 120
:text ""}}
:from-var [{:var-name "box3" :action-property "data.target-letter.custom-data"}
{:var-name "box3" :action-property "data.target-letter-text.text"}]}}
:move-emitted-letter {:type "transition"
:from-params [{:param-property "transition", :action-property "transition-id"}]
:from-var [{:var-name "move-letter-to" :action-property "to"}]}
:dialog-tap-instructions (-> (dialog/default "Tap instructions")
(assoc :concept-var concept-var))
:start-running {:type "sequence-data"
:data [{:type "set-attribute" :target "vera-stopped" :attr-name "visible" :attr-value false}
{:type "set-attribute" :target "vera-group" :attr-name "visible" :attr-value true}
{:type "set-attribute" :target "background" :attr-name "speed"
:from-var [{:var-name "background-speed" :action-property "attr-value"}]}
{:type "set-attribute" :target "vera" :attr-name "speed"
:from-var [{:var-name "animation-speed" :action-property "attr-value"}]}]}
:stop-running {:type "sequence-data"
:data [{:type "set-attribute" :target "emit-group" :attr-name "visible" :attr-value false}
{:type "set-attribute" :target "background" :attr-name "speed" :attr-value 0}
{:type "set-attribute" :target "vera-group" :attr-name "visible" :attr-value false}
{:type "set-attribute" :target "vera-stopped" :attr-name "visible" :attr-value true}]}
:start-scene {:type "sequence"
:data ["start-activity"
"init-concept"
"init-incorrect"
"welcome"
"intro"
"start"
"init-vars"
"start-running"
"start-timer"
"emit-objects"]}
:finish-game {:type "sequence-data"
:data [{:type "action" :id "stop-running"}
{:type "set-variable" :var-name "game-finished" :var-value true}]}
:start-timer {:type "timer-start" :target "timer"}
:stay-on-line {:type "empty" :duration 100}
:stop-scene {:type "sequence" :data ["stop-activity"]}
:start-activity {:type "start-activity"}
:stop-activity {:type "stop-activity"}
:finish-activity {:type "sequence-data"
:data [{:type "action" :id "finish-activity-dialog"}
{:type "finish-activity"}]}
:finish-activity-dialog (-> (dialog/default "Finish activity dialog")
(assoc :concept-var concept-var)
(assoc :available-activities ["highlight-target-letter", "highlight-timer", "highlight-counter"]))
:wait-for-box-animations {:type "empty" :duration 100}
:init-incorrect {:type "parallel"
:data []}}
:triggers {:stop {:on "back" :action "stop-scene"} :start {:on "start" :action "start-scene"}}
:metadata {:autostart true}})
(defn map-between-ranges [value min-a max-a min-b max-b]
(let [a-size (- max-a min-a)
b-size (- max-b min-b)
proportion (/ (- value min-a) a-size)]
(float (+ (* proportion b-size) min-b))))
(defn set-speed [data speed]
(let [s (map-between-ranges (if (string? speed)
(Integer/parseInt speed)
speed)
1 10 2 10)]
(-> data
(update-in [:actions :init-vars :data] concat
[{:type "set-variable" :var-name "background-speed" :var-value s}
{:type "set-variable" :var-name "animation-speed" :var-value (/ s 4)}
{:type "set-variable" :var-name "emit-duration" :var-value (/ 12000 s)}
{:type "set-variable" :var-name "move-letter-to" :var-value {:x -700 :duration (/ 40 s)}}])
(assoc-in [:metadata :saved-props :change-speed] {:speed speed}))))
(defn- init-correct
[t {:keys [correct-letter]}]
(assoc-in t [:actions :init-concept :data]
[{:type "set-variable"
:var-name concept-var
:var-value correct-letter}
{:type "set-attribute"
:target "letter-target"
:attr-value correct-letter
:attr-name "text"}]))
(defn- init-incorrect
[t {:keys [incorrect-letter-1 incorrect-letter-2 incorrect-letter-3]}]
(let [actions (->> [incorrect-letter-1
incorrect-letter-2
incorrect-letter-3]
(remove empty?)
(repeat 4)
(apply concat)
(take 8)
(map-indexed (fn [idx letter]
{:type "set-variable" :var-name (str "item-" (inc idx)) :var-value letter})))]
(assoc-in t [:actions :init-incorrect :data] actions)))
(defn- init-font-size
[t {:keys [font-scale] :or {font-scale 1}}]
(let [scale (if (string? font-scale)
(Float/parseFloat font-scale)
font-scale)]
(-> t
(assoc-in [:objects :letter-target :font-size] (* scale 72))
(assoc-in [:actions :emit-object-line-1 :success :data :target-letter-text :font-size] (* scale 120))
(assoc-in [:actions :emit-object-line-2 :success :data :target-letter-text :font-size] (* scale 120))
(assoc-in [:actions :emit-object-line-3 :success :data :target-letter-text :font-size] (* scale 120)))))
(defn f
[args]
(-> (common/init-metadata m t args)
(init-correct args)
(init-incorrect args)
(init-font-size args)
(assoc-in [:objects :timer :time] (:time args))
(set-speed 5)
(assoc-in [:metadata :saved-props :template-options] (assoc args :speed 5))
(common/add-available-action "highlight-target-letter" "Highlight letter")
(common/add-available-action "highlight-timer" "Highlight timer")
(common/add-available-action "highlight-counter" "Highlight counter")))
(defn change-speed [data speed]
(-> data
(update-in [:actions :init-vars :data] #(vec (drop-last 4 %)))
(set-speed speed)
(assoc-in [:metadata :saved-props :template-options :speed] speed)))
(defn- template-options
[activity-data args]
(-> activity-data
(init-correct args)
(init-incorrect args)
(init-font-size args)
(assoc-in [:objects :timer :time] (:time args))
(set-speed (:speed args))
(assoc-in [:metadata :saved-props :template-options] args)))
(defn fu
[old-data {:keys [action-name] :as args}]
(case (keyword action-name)
:change-time (assoc-in old-data [:objects :timer :time] (:time args))
:change-speed (change-speed old-data (:speed args))
:template-options (template-options old-data args)))
(core/register-template
m f fu)
| null | https://raw.githubusercontent.com/VisionsGlobalEmpowerment/webchange/4be2784b170befe245d60e32271b88e6fae31c13/src/clj/webchange/templates/library/running_time_limited.clj | clojure | "box-target-background" "box-target" | (ns webchange.templates.library.running-time-limited
(:require
[webchange.templates.core :as core]
[webchange.templates.utils.common :as common]
[webchange.templates.utils.dialog :as dialog]))
(def available-times (mapv (fn [v] {:text (str v " sec") :value v}) (range 30 70 10)))
(def available-speed
[{:text "Fast" :value 11}
{:text "Medium" :value 5}
{:text "Slow" :value 1}])
(def available-scale
[{:text "1" :value 1}
{:text "0.8" :value 0.8}
{:text "0.5" :value 0.5}])
(def template-options
[{:type "note"
:text "Character will run into boxes filled with letters. They must run through the correct letter to count as a correct answer."}
{:type "group"
:label "Add a Correct Response"
:children [{:type "letter-lookup"
:key :correct-letter
:label "Correct Letter"
:placeholder "Choose"}]}
{:type "group"
:label "Add Incorrect Responses"
:children [{:type "letter-lookup"
:key :incorrect-letter-1
:label "Incorrect Letter"
:placeholder "Choose"}
{:type "letter-lookup"
:key :incorrect-letter-2
:label "Incorrect Letter"
:placeholder "Choose"}
{:type "letter-lookup"
:key :incorrect-letter-3
:label "Incorrect Letter"
:placeholder "Choose"}]}
{:type "group"
:label "Game Settings"
:children [{:type "lookup"
:key :time
:label "Game Duration"
:options available-times}
{:type "lookup"
:key :speed
:label "Character Speed"
:placeholder "Choose"
:options available-speed}
{:type "lookup"
:key :font-scale
:label "Font Scale"
:placeholder "Choose"
:options available-scale}]}])
(def m {:id 34
:name "Running (time limited)"
:tags ["Independent Practice"]
:description "Users move a character around a race track filled with answer images. Before time runs out, users must steer the character to as many correct answer options as possible while avoiding incorrect answer images."
:props {:game-changer? true}
:version 2
:options {:time {:label "Time in seconds"
:type "lookup"
:description "Time in seconds"
:options available-times}
:correct-letter {:label "Correct Letter"
:type "string"}
:incorrect-letter-1 {:label "Incorrect letter 1"
:type "string"}
:incorrect-letter-2 {:label "Incorrect letter 2"
:type "string"}
:incorrect-letter-3 {:label "Incorrect letter 3"
:type "string"}}
:actions {:change-time {:title "Change time"
:options {:time {:type "lookup"
:options available-times}}}
:change-speed {:title "Change speed"
:default-props "change-speed"
:options {:speed {:type "lookup"
:options (mapv (fn [v] {:name (str v) :value v}) (range 1 11))}}}
:template-options {:title "Template Options"
:options template-options}}})
(def concept-var "current-concept")
(def t {:assets [{:url "/raw/img/running-with-letters/bg_01.jpg" :type "image"}
{:url "/raw/img/running-with-letters/bg_02.png" :type "image"}
{:url "/raw/img/vera.png" :type "image"}
{:url "/raw/img/running-with-letters/box.png" :type "image"}]
:objects {:background {:type "carousel"
:x 0
:y 0
:width 1920
:height 1080
:speed 0
:first "/raw/img/running-with-letters/bg_02.png"
:last "/raw/img/running-with-letters/bg_02.png"
:next "/raw/img/running-with-letters/bg_02.png"}
:frame {:type "rectangle"
:x 676
:y 64
:fill 0xFFFFFF
:width 568
:height 152
:border-radius 24}
:timer {:type "timer"
:transition "timer"
:x 1126
:y 88
:show-minutes true
:show-progress true
:size 104
:time 60
:font-size 24
:thickness 12
:font-weight "normal"
:font-family "Roboto"
:progress-color 0xff9000
:color 0x010101
:filters [{:name "brightness" :value 0}]
:actions {:end {:on "end" :type "action" :id "finish-game"}}}
:target-group {:type "group"
:x 676
:y 64
:children ["letter-background" "letter-target"
"counter-background" "counter"]}
:letter-background {:type "rectangle"
:x 40
:y 24
:fill 0xFF9000
:width 104
:height 104
:border-radius 52
:filters [{:name "brightness" :value 0}]
:transition "letter-target-background"}
:letter-target {:type "text"
:x 56
:y 35
:width 72
:height 88
:transition "letter-target"
:align "center"
:fill 0xFFFFFF
:font-family "Lexend Deca"
:font-size 72
:text " "
:vertical-align "middle"}
:box-target-background {:type "rectangle"
:x 168
:y 24
:fill 0xECECEC
:width 104
:height 104
:border-radius 52}
:box-target {:type "image",
:x 184,
:y 40,
:transition "box-target"
:width 72,
:height 72,
:src ""}
:counter-background {:type "rectangle"
:x 232
:y 24
:fill 0xECECEC
:width 104
:height 104
:border-radius 52
:filters [{:name "brightness" :value 0}
{:name "glow" :outer-strength 0 :color 0xffd700}]
:transition "counter-background"}
:counter {:type "counter"
:transition "counter"
:x 284
:y 48,
:font-family "Roboto"
:font-size 48
:color 0x000000}
:line-1 {:type "transparent"
:x 0
:y 610
:width 1920
:height 150
:actions {:click {:id "go-line-check" :on "click" :type "action" :params {:line "box1"}}
:pointerdown {:id "go-line-check" :on "pointerdown" :type "action" :params {:line "box1"}}
:pointerover {:id "go-line-check" :on "pointerover" :type "action" :params {:line "box1"}}}}
:line-2 {:type "transparent"
:x 0
:y 780
:width 1920
:height 170
:actions {:click {:id "go-line-check" :on "click" :type "action" :params {:line "box2"}}
:pointerdown {:id "go-line-check" :on "pointerdown" :type "action" :params {:line "box2"}}
:pointerover {:id "go-line-check" :on "pointerover" :type "action" :params {:line "box2"}}}}
:line-3 {:type "transparent"
:x 0
:y 950
:width 1920
:height 180
:actions {:click {:id "go-line-check" :on "click" :type "action" :params {:line "box3"}}
:pointerdown {:id "go-line-check" :on "pointerdown" :type "action" :params {:line "box3"}}
:pointerover {:id "go-line-check" :on "pointerover" :type "action" :params {:line "box3"}}}}
:mari {:type "animation"
:x 1365
:y 311
:width 473
:height 511
:scene-name "mari"
:transition "mari"
:anim "idle"
:anim-offset {:x 0 :y -150}
:name "mari"
:scale-x 0.5
:scale-y 0.5
:speed 0.5
:start true
:editable? {:select true :drag true :show-in-tree? true}}
:vera-group {:type "group"
:x 500
:y 865
:transition "vera-group"
:visible false
:children ["vera" "vera-collision-test"]}
:emit-group {:type "group"}
:vera {:type "animation"
:x 0
:y -55
:width 727
:height 1091
:scene-name "vera"
:transition "vera"
:anim "run"
:meshes true
:name "vera-90"
:scale-x 0.4
:scale-y 0.4
:skin "default"
:speed 1
:start true}
:vera-stopped {:type "image",
:x 300
:y 370
:width 727
:scale {:x 0.75 :y 0.75}
:height 1091
:src "/raw/img/vera.png"}
:vera-collision-test {:type "transparent"
:x 150
:y -55
:width 10
:height 10
:transition "vera-collision-test"
:collidable? true
:actions {:collide {:on "collide-enter"
:collision-type "bounds"
:test ["#^target-letter-.*"]
:type "action"
:id "check-box"
:pick-event-param ["custom-data" "transition-name"]}}}}
:scene-objects [["background"]
["frame"]
["emit-group"]
["vera-stopped" "vera-group" "mari"]
["target-group" "timer" "line-1" "line-2" "line-3"]]
:actions {:dialog-1-welcome {:type "sequence-data"
:editor-type "dialog"
:available-activities ["highlight-target-letter", "highlight-timer" "highlight-counter"]
:concept-var concept-var
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence" :phrase-text "New action" :audio nil}]}]
:phrase "welcome"
:phrase-description "Welcome dialog"
:dialog-track "1 Welcome"
:skippable true}
:dialog-2-intro-concept {:type "sequence-data"
:editor-type "dialog"
:available-activities ["highlight-target-letter", "highlight-timer" "highlight-counter"]
:concept-var concept-var
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence" :phrase-text "New action" :audio nil}]}]
:phrase "concept"
:phrase-description "Introduce concept"
:dialog-track "2 Introduce"
:tags ["instruction"]}
:dialog-3-intro-timer {:type "sequence-data"
:editor-type "dialog"
:available-activities ["highlight-target-letter", "highlight-timer" "highlight-counter"]
:concept-var concept-var
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence" :phrase-text "New action" :audio nil}]}]
:phrase "timer"
:phrase-description "Introduce timer"
:dialog-track "2 Introduce"
:tags ["instruction"]}
:dialog-4-ready-go {:type "sequence-data"
:editor-type "dialog"
:available-activities ["highlight-target-letter", "highlight-timer" "highlight-counter"]
:concept-var concept-var
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence" :phrase-text "New action" :audio nil}]}]
:phrase "ready-go"
:phrase-description "Ready-Go"
:dialog-track "3 Start"}
:highlight-target-letter {:type "transition"
:transition-id "letter-target-background"
:return-immediately true
:from {:brightness 0 :hue 0},
:to {:brightness 1 :yoyo true :duration 0.5 :repeat 5}
}
:highlight-counter {:type "transition"
:transition-id "counter-background"
:return-immediately true
:from {:brightness 0 :glow 0}
:to {:brightness 0.1 :glow 10 :yoyo true :duration 0.5 :repeat 5}
}
:highlight-timer {:type "transition"
:transition-id "timer"
:return-immediately true
:from {:brightness 0 :hue 0},
:to {:brightness 1 :yoyo true :duration 0.5 :repeat 5}
}
:dialog-5-starting-noise {:type "sequence-data"
:editor-type "dialog"
:concept-var concept-var
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence" :phrase-text "New action" :audio nil}]}]
:phrase "noise"
:phrase-description "Starting noise"
:dialog-track "3 Start"}
:dialog-6-correct {:type "sequence-data"
:editor-type "dialog"
:available-activities ["highlight-target-letter", "highlight-timer" "highlight-counter"]
:concept-var concept-var
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence" :phrase-text "New action" :audio nil}]}]
:phrase "correct"
:phrase-description "Correct dialog"
:dialog-track "4 Options"}
:dialog-7-wrong {:type "sequence-data"
:editor-type "dialog"
:available-activities ["highlight-target-letter", "highlight-timer" "highlight-counter"]
:concept-var concept-var
:data [{:type "sequence-data"
:data [{:type "empty" :duration 0}
{:type "animation-sequence" :phrase-text "New action" :audio nil}]}]
:phrase "wrong"
:phrase-description "Wrong dialog"
:dialog-track "4 Options"}
:go-line-check {:type "test-value"
:fail "go-line"
:from-var [{:var-name "current-line" :action-property "value1"}]
:from-params [{:param-property "line" :action-property "value2"}]}
:go-line {:type "sequence-data"
:data [{:type "set-variable"
:var-name "current-line"
:from-params [{:param-property "line" :action-property "var-value"}]}
{:type "case"
:options {:box1 {:id "go-to-box1-line" :type "action"}
:box2 {:id "go-to-box2-line" :type "action"}
:box3 {:id "go-to-box3-line" :type "action"}}
:from-var [{:var-name "current-line" :action-property "value"}]}]}
:go-to-box1-line {:type "transition"
:to {:y 685 :duration 0.5}
:transition-id "vera-group"}
:go-to-box2-line {:type "transition"
:to {:y 865 :duration 0.5}
:transition-id "vera-group"}
:go-to-box3-line {:type "transition"
:to {:y 1040 :duration 0.5}
:transition-id "vera-group"}
:init-vars {:type "parallel"
:data [{:type "set-variable" :var-name "game-finished" :var-value false}
{:type "set-variable" :var-name "current-line" :var-value "box2"}]}
:init-concept {:type "parallel"
:data []}
:check-box {:type "test-var-scalar"
:var-name "game-finished"
:value false
:success {:type "test-value"
:from-var [{:action-property "value1" :var-name concept-var}]
:from-params [{:action-property "value2" :param-property "custom-data"}]
:success "pick-correct"
:fail "pick-wrong"}}
:pick-correct {:type "sequence-data"
:data [{:id "dialog-6-correct" :type "action" :return-immediately true}
{:type "set-attribute" :attr-name "visible" :attr-value false
:from-params [{:action-property "target" :param-property "transition-name"}]}
{:type "counter-inc" :target "counter"}
{:data [{:data [{:id "run_jump" :type "animation" :target "vera" :loop false}
{:id "run" :loop true :type "add-animation" :target "vera"}]
:return-immediately true
:type "sequence-data"}]
:type "parallel"}]}
:pick-wrong {:type "sequence-data"
:data [{:id "dialog-7-wrong" :type "action"}]}
:welcome {:type "sequence-data"
:data [{:type "action" :id "dialog-1-welcome"}]}
:intro {:type "sequence-data"
:data [{:type "action" :id "dialog-2-intro-concept"}
{:type "action" :id "dialog-3-intro-timer"}]}
:start {:type "sequence-data"
:data [{:type "action" :id "dialog-4-ready-go"}
{:type "action" :id "dialog-5-starting-noise"}]}
:emit-objects {:type "sequence-data"
:data [{:type "action" :id "shuffle-boxes"}
{:type "parallel"
:data [{:type "action" :id "emit-object-line-1"}
{:type "action" :id "emit-object-line-2"}
{:type "action" :id "emit-object-line-3"}]}
{:type "empty" :from-var [{:var-name "emit-duration" :action-property "duration"}]}
{:type "test-var-scalar"
:var-name "game-finished"
:value false
:success "emit-objects"
:fail "finish-activity"}]}
:shuffle-boxes {:type "sequence-data"
:data [{:from ["item-1" "item-2" "item-3" "item-4" "item-5" "item-6" "item-7" "item-8"]
:type "vars-var-provider"
:unique true
:from-var [{:var-key "concept-name"
:var-name concept-var
:action-property "exclude-property-values"}]
:shuffled true
:variables ["pair-concept-1" "pair-concept-2"]}
{:from [concept-var "pair-concept-1" "pair-concept-2"]
:type "vars-var-provider"
:shuffled true
:variables ["box1" "box2" "box3"]}]}
:emit-object-line-1 {:type "test-random"
:chance 0.7
:success {:type "create-object"
:target "emit-group"
:root-object "target-letter"
:return-immediately true
:on-emit {:type "action" :id "move-emitted-letter"}
:data {:target-letter {:type "group"
:x 2100
:y 685
:custom-data ""
:collidable? true
:children ["target-letter-box"
"target-letter-text"]}
:target-letter-box {:type "image"
:x -95
:y -135
:src "/raw/img/running-with-letters/box.png"
}
:target-letter-text {:type "text"
:x 0
:y -60
:align "center"
:vertical-align "middle"
:fill 0x000000
:font-family "Lexend Deca"
:font-size 120
:text ""}}
:from-var [{:var-name "box1" :action-property "data.target-letter.custom-data"}
{:var-name "box1" :action-property "data.target-letter-text.text"}]}}
:emit-object-line-2 {:type "test-random"
:chance 0.7
:success {:type "create-object"
:target "emit-group"
:root-object "target-letter"
:return-immediately true
:on-emit {:type "action" :id "move-emitted-letter"}
:data {:target-letter {:type "group"
:x 2200
:y 865
:custom-data ""
:children ["target-letter-box"
"target-letter-text"]}
:target-letter-box {:type "image"
:x -95
:y -135
:src "/raw/img/running-with-letters/box.png"
}
:target-letter-text {:type "text"
:x 0
:y -60
:align "center"
:vertical-align "middle"
:fill 0x000000
:font-family "Lexend Deca"
:font-size 120
:text ""}}
:from-var [{:var-name "box2" :action-property "data.target-letter.custom-data"}
{:var-name "box2" :action-property "data.target-letter-text.text"}]}}
:emit-object-line-3 {:type "test-random"
:chance 0.7
:success {:type "create-object"
:target "emit-group"
:root-object "target-letter"
:return-immediately true
:on-emit {:type "action" :id "move-emitted-letter"}
:data {:target-letter {:type "group"
:x 2300
:y 1040
:custom-data ""
:children ["target-letter-box"
"target-letter-text"]}
:target-letter-box {:type "image"
:x -95
:y -135
:src "/raw/img/running-with-letters/box.png"
}
:target-letter-text {:type "text"
:x 0
:y -60
:align "center"
:vertical-align "middle"
:fill 0x000000
:font-family "Lexend Deca"
:font-size 120
:text ""}}
:from-var [{:var-name "box3" :action-property "data.target-letter.custom-data"}
{:var-name "box3" :action-property "data.target-letter-text.text"}]}}
:move-emitted-letter {:type "transition"
:from-params [{:param-property "transition", :action-property "transition-id"}]
:from-var [{:var-name "move-letter-to" :action-property "to"}]}
:dialog-tap-instructions (-> (dialog/default "Tap instructions")
(assoc :concept-var concept-var))
:start-running {:type "sequence-data"
:data [{:type "set-attribute" :target "vera-stopped" :attr-name "visible" :attr-value false}
{:type "set-attribute" :target "vera-group" :attr-name "visible" :attr-value true}
{:type "set-attribute" :target "background" :attr-name "speed"
:from-var [{:var-name "background-speed" :action-property "attr-value"}]}
{:type "set-attribute" :target "vera" :attr-name "speed"
:from-var [{:var-name "animation-speed" :action-property "attr-value"}]}]}
:stop-running {:type "sequence-data"
:data [{:type "set-attribute" :target "emit-group" :attr-name "visible" :attr-value false}
{:type "set-attribute" :target "background" :attr-name "speed" :attr-value 0}
{:type "set-attribute" :target "vera-group" :attr-name "visible" :attr-value false}
{:type "set-attribute" :target "vera-stopped" :attr-name "visible" :attr-value true}]}
:start-scene {:type "sequence"
:data ["start-activity"
"init-concept"
"init-incorrect"
"welcome"
"intro"
"start"
"init-vars"
"start-running"
"start-timer"
"emit-objects"]}
:finish-game {:type "sequence-data"
:data [{:type "action" :id "stop-running"}
{:type "set-variable" :var-name "game-finished" :var-value true}]}
:start-timer {:type "timer-start" :target "timer"}
:stay-on-line {:type "empty" :duration 100}
:stop-scene {:type "sequence" :data ["stop-activity"]}
:start-activity {:type "start-activity"}
:stop-activity {:type "stop-activity"}
:finish-activity {:type "sequence-data"
:data [{:type "action" :id "finish-activity-dialog"}
{:type "finish-activity"}]}
:finish-activity-dialog (-> (dialog/default "Finish activity dialog")
(assoc :concept-var concept-var)
(assoc :available-activities ["highlight-target-letter", "highlight-timer", "highlight-counter"]))
:wait-for-box-animations {:type "empty" :duration 100}
:init-incorrect {:type "parallel"
:data []}}
:triggers {:stop {:on "back" :action "stop-scene"} :start {:on "start" :action "start-scene"}}
:metadata {:autostart true}})
(defn map-between-ranges [value min-a max-a min-b max-b]
(let [a-size (- max-a min-a)
b-size (- max-b min-b)
proportion (/ (- value min-a) a-size)]
(float (+ (* proportion b-size) min-b))))
(defn set-speed [data speed]
(let [s (map-between-ranges (if (string? speed)
(Integer/parseInt speed)
speed)
1 10 2 10)]
(-> data
(update-in [:actions :init-vars :data] concat
[{:type "set-variable" :var-name "background-speed" :var-value s}
{:type "set-variable" :var-name "animation-speed" :var-value (/ s 4)}
{:type "set-variable" :var-name "emit-duration" :var-value (/ 12000 s)}
{:type "set-variable" :var-name "move-letter-to" :var-value {:x -700 :duration (/ 40 s)}}])
(assoc-in [:metadata :saved-props :change-speed] {:speed speed}))))
(defn- init-correct
[t {:keys [correct-letter]}]
(assoc-in t [:actions :init-concept :data]
[{:type "set-variable"
:var-name concept-var
:var-value correct-letter}
{:type "set-attribute"
:target "letter-target"
:attr-value correct-letter
:attr-name "text"}]))
(defn- init-incorrect
[t {:keys [incorrect-letter-1 incorrect-letter-2 incorrect-letter-3]}]
(let [actions (->> [incorrect-letter-1
incorrect-letter-2
incorrect-letter-3]
(remove empty?)
(repeat 4)
(apply concat)
(take 8)
(map-indexed (fn [idx letter]
{:type "set-variable" :var-name (str "item-" (inc idx)) :var-value letter})))]
(assoc-in t [:actions :init-incorrect :data] actions)))
(defn- init-font-size
[t {:keys [font-scale] :or {font-scale 1}}]
(let [scale (if (string? font-scale)
(Float/parseFloat font-scale)
font-scale)]
(-> t
(assoc-in [:objects :letter-target :font-size] (* scale 72))
(assoc-in [:actions :emit-object-line-1 :success :data :target-letter-text :font-size] (* scale 120))
(assoc-in [:actions :emit-object-line-2 :success :data :target-letter-text :font-size] (* scale 120))
(assoc-in [:actions :emit-object-line-3 :success :data :target-letter-text :font-size] (* scale 120)))))
(defn f
[args]
(-> (common/init-metadata m t args)
(init-correct args)
(init-incorrect args)
(init-font-size args)
(assoc-in [:objects :timer :time] (:time args))
(set-speed 5)
(assoc-in [:metadata :saved-props :template-options] (assoc args :speed 5))
(common/add-available-action "highlight-target-letter" "Highlight letter")
(common/add-available-action "highlight-timer" "Highlight timer")
(common/add-available-action "highlight-counter" "Highlight counter")))
(defn change-speed [data speed]
(-> data
(update-in [:actions :init-vars :data] #(vec (drop-last 4 %)))
(set-speed speed)
(assoc-in [:metadata :saved-props :template-options :speed] speed)))
(defn- template-options
[activity-data args]
(-> activity-data
(init-correct args)
(init-incorrect args)
(init-font-size args)
(assoc-in [:objects :timer :time] (:time args))
(set-speed (:speed args))
(assoc-in [:metadata :saved-props :template-options] args)))
(defn fu
[old-data {:keys [action-name] :as args}]
(case (keyword action-name)
:change-time (assoc-in old-data [:objects :timer :time] (:time args))
:change-speed (change-speed old-data (:speed args))
:template-options (template-options old-data args)))
(core/register-template
m f fu)
|
7963eb3c2d41053ba649ef864b619897804db4b7666fe6bb65a92247d15c04f4 | synrc/cr | cr_tcp.erl | -module(cr_tcp).
-description('prim_inet based TCP non-blocking listener').
-copyright('Synrc Research Center s.r.o.').
-behaviour(gen_server).
-include("cr.hrl").
-export(?GEN_SERVER).
-compile(export_all).
-record(state, {listener,acceptor,module,name,port,ring}).
handle_info({inet_async,ListSock,Ref,Message},
#state{listener=ListSock,acceptor=Ref,module=Module,name=Name,port=Port,ring=HashRing} = State) ->
{ok,CliSocket} = Message,
set_sockopt(ListSock, CliSocket),
{ok, Pid} = cr_connection:start_connection(Module,CliSocket,HashRing),
gen_tcp:controlling_process(CliSocket, Pid),
cr:set_socket(Pid, CliSocket),
Acceptor = case prim_inet:async_accept(ListSock, -1) of
{ok, NewRef} -> NewRef;
{error, Reason} ->
io:format("TCP: Accept Error: ~p~n",[Reason]),
Reason end,
{noreply, State#state{acceptor=Acceptor}};
handle_info(_Info, State) -> {noreply, State}.
terminate(_Reason, State) -> gen_tcp:close(State#state.listener), ok.
code_change(_OldVsn, State, _Extra) -> {ok, State}.
handle_call(Request, _From, State) -> {stop, {unknown_call, Request}, State}.
handle_cast(_Msg, State) -> {noreply, State}.
start_link(Name, Port, Module, HashRing) ->
gen_server:start_link({local, Name}, ?MODULE, [Name, Port, Module, HashRing], []).
init([Name, Port, Module, HashRing]) ->
process_flag(trap_exit, true),
Opts = [binary,{packet,1},{reuseaddr,true},{keepalive,true},{backlog,30},{active,false}],
case gen_tcp:listen(Port, Opts) of
{ok, Listen_socket} ->
{ok, Ref} = prim_inet:async_accept(Listen_socket, -1),
{ok, #state{ listener = Listen_socket,
acceptor = Ref,
ring = HashRing,
module = Module,
port=Port,
name=Name}};
{error, Reason} -> {stop, Reason} end.
set_sockopt(ListSock, CliSocket) ->
true = inet_db:register_socket(CliSocket, inet_tcp),
case prim_inet:getopts(ListSock,[active, nodelay, keepalive, delay_send, priority, tos]) of
{ok, Opts} -> case prim_inet:setopts(CliSocket, Opts) of
ok -> ok;
Error ->
io:format("TCP OPT Socket Error ~p~n",[Error]),
gen_tcp:close(CliSocket), Error end;
Error -> gen_tcp:close(CliSocket),
io:format("TCP Socket Error ~p~n",[Error]),
exit({set_sockopt, Error}) end.
| null | https://raw.githubusercontent.com/synrc/cr/b4a30dc55d30500a1c239d6234444e1ecff5aab5/src/tcp/cr_tcp.erl | erlang | -module(cr_tcp).
-description('prim_inet based TCP non-blocking listener').
-copyright('Synrc Research Center s.r.o.').
-behaviour(gen_server).
-include("cr.hrl").
-export(?GEN_SERVER).
-compile(export_all).
-record(state, {listener,acceptor,module,name,port,ring}).
handle_info({inet_async,ListSock,Ref,Message},
#state{listener=ListSock,acceptor=Ref,module=Module,name=Name,port=Port,ring=HashRing} = State) ->
{ok,CliSocket} = Message,
set_sockopt(ListSock, CliSocket),
{ok, Pid} = cr_connection:start_connection(Module,CliSocket,HashRing),
gen_tcp:controlling_process(CliSocket, Pid),
cr:set_socket(Pid, CliSocket),
Acceptor = case prim_inet:async_accept(ListSock, -1) of
{ok, NewRef} -> NewRef;
{error, Reason} ->
io:format("TCP: Accept Error: ~p~n",[Reason]),
Reason end,
{noreply, State#state{acceptor=Acceptor}};
handle_info(_Info, State) -> {noreply, State}.
terminate(_Reason, State) -> gen_tcp:close(State#state.listener), ok.
code_change(_OldVsn, State, _Extra) -> {ok, State}.
handle_call(Request, _From, State) -> {stop, {unknown_call, Request}, State}.
handle_cast(_Msg, State) -> {noreply, State}.
start_link(Name, Port, Module, HashRing) ->
gen_server:start_link({local, Name}, ?MODULE, [Name, Port, Module, HashRing], []).
init([Name, Port, Module, HashRing]) ->
process_flag(trap_exit, true),
Opts = [binary,{packet,1},{reuseaddr,true},{keepalive,true},{backlog,30},{active,false}],
case gen_tcp:listen(Port, Opts) of
{ok, Listen_socket} ->
{ok, Ref} = prim_inet:async_accept(Listen_socket, -1),
{ok, #state{ listener = Listen_socket,
acceptor = Ref,
ring = HashRing,
module = Module,
port=Port,
name=Name}};
{error, Reason} -> {stop, Reason} end.
set_sockopt(ListSock, CliSocket) ->
true = inet_db:register_socket(CliSocket, inet_tcp),
case prim_inet:getopts(ListSock,[active, nodelay, keepalive, delay_send, priority, tos]) of
{ok, Opts} -> case prim_inet:setopts(CliSocket, Opts) of
ok -> ok;
Error ->
io:format("TCP OPT Socket Error ~p~n",[Error]),
gen_tcp:close(CliSocket), Error end;
Error -> gen_tcp:close(CliSocket),
io:format("TCP Socket Error ~p~n",[Error]),
exit({set_sockopt, Error}) end.
| |
1d9117040608d289e9591e825281a2c257a58c2f5f8011e5998ed2bb4ea2f67e | beamspirit/bigwig | bigwig_http_rb.erl | %%
%% report browser api
%%
-module(bigwig_http_rb).
-behaviour(cowboy_http_handler).
-export([init/3, handle/2, terminate/2]).
init({tcp, http}, Req, _Opts) ->
bigwig_report_reader:start(), %% will only be started once anyway, registered name
bigwig_report_reader:rescan(), %% ouch
{ok, Req, undefined_state}.
handle(Req, State) ->
{Path, Req2} = cowboy_http_req:path(Req),
handle_path(Path, Req2, State).
%% /rb/reports
handle_path([<<"rb">>, <<"reports">>], Req0, State) ->
{ReportFilter, Req} = make_report_filter_from_qs(Req0),
Body = jsx:term_to_json(list_reports(ReportFilter)),
Headers = [{<<"Content-Type">>, <<"application/json">>}],
{ok, Req2} = cowboy_http_req:reply(200, Headers, Body, Req),
{ok, Req2, State};
%% /rb/reports/123
handle_path([<<"rb">>, <<"reports">>, IdBin], Req, State) ->
Id = list_to_integer(binary_to_list(IdBin)),
Rep = bigwig_report_reader:load_number(Id),
Headers = [{<<"Content-Type">>, <<"application/json">>}],
{ok, Req2} = cowboy_http_req:reply(200, Headers, report_to_json(Rep), Req),
{ok, Req2, State};
handle_path(Path, Req, State) ->
FIXME injection
{ok, Req2, State}.
terminate(_Req, _State) ->
ok.
report_to_json({_, {ok, Date0, Report, ReportStr}}) ->
Date = list_to_binary(Date0),
jsx:term_to_json([{date, Date}, {report, Report}, {report_str, ReportStr}]).
list_reports(Filter) ->
Reports = bigwig_report_reader:load_list(Filter),
format_reports(Reports).
format_reports(Reports) ->
[{report, lists:map(fun format_report/1, Reports)}].
NB : added a is_list guard , hpefully all reports are proplists here ?
%% if not, add a format_report that wraps it into an obj that includes hash?
format_report({Hash,_Type,_Pid,_Date,Rep,Str}) when is_list(Rep) ->
[ {'_hash', list_to_binary(Hash)},
{'_str', Str}
| Rep
].
%% Make a proplist to pass to make_filter, from the querstring
make_report_filter_from_qs(Req0) ->
A version of qs_val that url - decodes values ( ie no % 20 etc ) , and to_list
Qsval = fun(K,R) -> case cowboy_http_req:qs_val(K, R) of
{undefined, R2} -> {undefined, R2};
{ValEnc, R2} -> {bigwig_util:url_decode(ValEnc), R2}
end
end,
%% Create a rb filter based on query params
{Opts1,Req1} = case Qsval(<<"type">>, Req0) of
{undefined, R1} -> {[], R1};
{TypeBin, R1} -> {[{type, list_to_atom(TypeBin)}], R1}
end,
{Opts2,Req2} = case Qsval(<<"startdate">>, Req1) of
{undefined, R2} -> {Opts1, R2};
{SD, R2} -> {[{startdate, SD}|Opts1], R2}
end,
{Opts3,Req3} = case Qsval(<<"enddate">>, Req2) of
{undefined, R3} -> {Opts2, R3};
{ED, R3} -> {[{enddate, ED}|Opts2], R3}
end,
{Opts4,Req4} = case Qsval(<<"limit">>, Req3) of
{undefined, R4} -> {Opts3, R4};
{IntStr, R4} -> {[{limit, list_to_integer(IntStr)}|Opts3], R4}
end,
{Opts5,Req5} = case Qsval(<<"level">>, Req4) of
{undefined, R5} -> {Opts4, R5};
{LevelBin, R5} -> {[{level, list_to_atom(LevelBin)}|Opts4], R5}
end,
Filter = bigwig_report_reader:make_filter(Opts5),
{Filter, Req5}.
| null | https://raw.githubusercontent.com/beamspirit/bigwig/552ac9968d2457286056a17f3be7f53ec72a31cc/src/bigwig_http_rb.erl | erlang |
report browser api
will only be started once anyway, registered name
ouch
/rb/reports
/rb/reports/123
if not, add a format_report that wraps it into an obj that includes hash?
Make a proplist to pass to make_filter, from the querstring
20 etc ) , and to_list
Create a rb filter based on query params | -module(bigwig_http_rb).
-behaviour(cowboy_http_handler).
-export([init/3, handle/2, terminate/2]).
init({tcp, http}, Req, _Opts) ->
{ok, Req, undefined_state}.
handle(Req, State) ->
{Path, Req2} = cowboy_http_req:path(Req),
handle_path(Path, Req2, State).
handle_path([<<"rb">>, <<"reports">>], Req0, State) ->
{ReportFilter, Req} = make_report_filter_from_qs(Req0),
Body = jsx:term_to_json(list_reports(ReportFilter)),
Headers = [{<<"Content-Type">>, <<"application/json">>}],
{ok, Req2} = cowboy_http_req:reply(200, Headers, Body, Req),
{ok, Req2, State};
handle_path([<<"rb">>, <<"reports">>, IdBin], Req, State) ->
Id = list_to_integer(binary_to_list(IdBin)),
Rep = bigwig_report_reader:load_number(Id),
Headers = [{<<"Content-Type">>, <<"application/json">>}],
{ok, Req2} = cowboy_http_req:reply(200, Headers, report_to_json(Rep), Req),
{ok, Req2, State};
handle_path(Path, Req, State) ->
FIXME injection
{ok, Req2, State}.
terminate(_Req, _State) ->
ok.
report_to_json({_, {ok, Date0, Report, ReportStr}}) ->
Date = list_to_binary(Date0),
jsx:term_to_json([{date, Date}, {report, Report}, {report_str, ReportStr}]).
list_reports(Filter) ->
Reports = bigwig_report_reader:load_list(Filter),
format_reports(Reports).
format_reports(Reports) ->
[{report, lists:map(fun format_report/1, Reports)}].
NB : added a is_list guard , hpefully all reports are proplists here ?
format_report({Hash,_Type,_Pid,_Date,Rep,Str}) when is_list(Rep) ->
[ {'_hash', list_to_binary(Hash)},
{'_str', Str}
| Rep
].
make_report_filter_from_qs(Req0) ->
Qsval = fun(K,R) -> case cowboy_http_req:qs_val(K, R) of
{undefined, R2} -> {undefined, R2};
{ValEnc, R2} -> {bigwig_util:url_decode(ValEnc), R2}
end
end,
{Opts1,Req1} = case Qsval(<<"type">>, Req0) of
{undefined, R1} -> {[], R1};
{TypeBin, R1} -> {[{type, list_to_atom(TypeBin)}], R1}
end,
{Opts2,Req2} = case Qsval(<<"startdate">>, Req1) of
{undefined, R2} -> {Opts1, R2};
{SD, R2} -> {[{startdate, SD}|Opts1], R2}
end,
{Opts3,Req3} = case Qsval(<<"enddate">>, Req2) of
{undefined, R3} -> {Opts2, R3};
{ED, R3} -> {[{enddate, ED}|Opts2], R3}
end,
{Opts4,Req4} = case Qsval(<<"limit">>, Req3) of
{undefined, R4} -> {Opts3, R4};
{IntStr, R4} -> {[{limit, list_to_integer(IntStr)}|Opts3], R4}
end,
{Opts5,Req5} = case Qsval(<<"level">>, Req4) of
{undefined, R5} -> {Opts4, R5};
{LevelBin, R5} -> {[{level, list_to_atom(LevelBin)}|Opts4], R5}
end,
Filter = bigwig_report_reader:make_filter(Opts5),
{Filter, Req5}.
|
ebe35459c327dc3c57d49300f118f902f7a9a73918a7ae8bdf844a8c47eeae8c | mkurtak/clj-salt-api | http.clj | Copyright ( c )
;; All rights reserved.
(ns ^:no-doc salt.http
(:require [aleph.http :as http]
[byte-streams :as bs]
[cheshire.core :as json]
[clojure.core.async :as a]
[clojure.string :as str]
[manifold.deferred :as d]
[manifold.stream :as ms]
[salt.core :as s]))
(def retriable-status-codes
"Set pof retriable status codes based on
"
#{408 500 502 503 504 509 520 521 522 523 524 598 599})
(defn- status-code->category
[{:keys [:status]}]
(if status
(cond
(= 200 status) :ok
(= 401 status) :unauthorized
(contains? retriable-status-codes status) :retriable
:else :error)
:error))
(defn- parse-json
[body]
(json/parse-string (bs/to-string body) true))
(defn response->channel-response
[resp]
(let [category (status-code->category resp)]
(if (= category :ok)
resp
(throw (ex-info "Request error."
{::s/response resp ::s/response-category category}
(when (instance? Throwable resp)
resp))))))
(defn- connect
"Connects deferred created in `create-deferred-fn` with core.async `resp-chan`"
[create-deferred-fn resp-chan]
(try (d/on-realized (d/catch
(create-deferred-fn)
identity)
(fn [r]
(a/>!! resp-chan (if (nil? r) {} r))
(a/close! resp-chan))
(fn [r]
(a/>!! resp-chan (if (nil? r) {} r))
(a/close! resp-chan)))
resp-chan
;; Exception is only thrown from create-deferred-fn (from calling thread).
;; It must be put on promise/chan asynchronously to prevent deadlock.
(catch Exception e
(a/put! resp-chan e)
(a/close! resp-chan)
resp-chan)))
(defn request
"Invoke `aleph.http/request` and transform manifold deferred to core.async channel.
Return new channel delivering response:
* Ring response if ok
* Exception if error occurs"
[req]
(let [resp-chan (a/promise-chan)]
(connect #(-> (http/request (merge req {:throw-exceptions? false}))
(d/chain response->channel-response))
resp-chan)))
(def empty-line-pattern
"New line followed by whitespace chars ending with new line."
#"\r?\n[ \t\\x0B\f\r]?\n")
(def last-empty-line-pattern
"Empty line not followed by another empty line.
This regex uses negative lookahead and DOTALL mode."
(re-pattern (str "(?s)" empty-line-pattern "(?!.*" empty-line-pattern ")")))
(def sse-supported-attrs #{:event :data :id})
(defn- line->field
[line]
(let [splits (str/split line #":" 2)
attr (first splits)
value (second splits)]
[(keyword attr)
(if (nil? value) "" value)]))
(defn- sse-supported-field?
[[attr val]]
(or
(contains? sse-supported-attrs attr)
(and (= :retry attr)
(every? #(Character/isDigit %) (str/trim val)))))
(defn- reduce-sse-fields
"Join values of field with same attributes with newline."
[fields]
(assoc (apply merge-with
#(str/join "\n" [%1 %2])
(map #(into {} [%]) fields))
:type :data))
(defn- reduce-retry-fields
"Leave only last retry field."
[fields]
{:type :retry
:retry (Short/parseShort (str/trim (second (last fields))))})
(defn- not-comment-line?
[field]
(not= ':' (first field)))
(defn- retry-field?
[field]
(= :retry (first field)))
(defn- split-buffer
[buf]
(let [buf-splits (str/split buf last-empty-line-pattern 2)
can-split? (< 1 (count buf-splits))
chunks (when can-split? (str/split (first buf-splits) empty-line-pattern))
next-buffer (if can-split? (second buf-splits) (first buf-splits))]
[chunks next-buffer]))
(defn sse-buffer->events
"Converts sse buffer to collection of events and next buffer value"
[buf prev-buf]
(let [[chunks next-buf] (split-buffer (str prev-buf buf))]
[(->> chunks
(mapcat (fn [chunk]
(->> chunk
(str/split-lines)
(filter not-comment-line?)
(map line->field)
(filter sse-supported-field?)
(group-by retry-field?)
(map (fn [group]
(let [[retry? fields] group]
(if retry?
(reduce-retry-fields fields)
(reduce-sse-fields fields)))))))))
next-buf]))
(defn mapcat-with-accumulator
"Returns transducer similar to mapcat but with accumulator.
f accepts current val and accumulator from previous call of f
f returns coll and next val of accumulator"
[f]
(comp
(fn [rf]
(let [acc (volatile! nil)]
(fn
([] (rf))
([result] (rf result)) ; TODO what to do with remaining pv?
([result input]
(let [[items next-acc-val] (f input @acc)]
(vreset! acc next-acc-val)
(rf result items))))))
cat))
(defn- sse-pool
"Creates aleph http connection-pool merge options with default options"
[{:keys [:connection-options] :as opts}]
(http/connection-pool (merge opts
wanted 1 but did not work
:max-queue-size 1
:target-utilization 1
:connection-options (merge
connection-options
{:raw-stream? true})})))
(defn sse
"Invoke [[aleph.http/request]] on SSE endpoint and stream response to core.async channel.
Uses pool defined in `sse-pool` -> with 1 connection and raw-stream? option.
Pool could be customized with `pool-opts` (see [[aleph.http/request]] documentation).
Server-sent events is a stream of text lines separated by empty lines.
Returns new channel delivering server-sent events with following types
| Type | Attributes | Description
| -----------| -----------|
| `:connect` | :stream | Connection is established. Use :stream to close SSE.
| `:data` | :id :data | One SSE event. When event contains multiple data attributes, they are concatenated with newline character.
| `:retry` | :retry | SSE indicates to set retry-timeout.
| `:close` | | Sent before stream and respective core.async channel is closed.
If SSE request could not be made, exception is written to the channel and channel is closed.
"
[req pool-opts resp-chan]
(connect (fn [] (-> (http/request (merge req {:throw-exceptions? false
:pool (sse-pool pool-opts)}))
(d/chain response->channel-response
:body
#(do (a/>!! resp-chan {:type :connect
:stream %})
%)
#(ms/map bs/to-string %)
#(ms/filter some? %)
#(ms/transform (mapcat-with-accumulator
sse-buffer->events)
%)
#(ms/map (fn [x] (a/>!! resp-chan x) x) %)
#(ms/reduce (fn [r _] r) {:type :close} %))))
resp-chan))
(defn close-sse
"Close manifold stream if any."
[{:keys [:stream]}]
(when stream
(ms/close! stream)))
(defn parse-body
"Parse body from `response` using content-type header to determine format.
If content-type is missing or unsupported, or parsing error occurs, throw ex with response in data.
If body is not present, return nil.
If body is successfully parsed, return map and full response in meta."
[response]
(if-let [body (:body response)]
(if-let [content-type (get-in response [:headers "content-type"])]
(cond
(= "application/json" content-type) (with-meta
(parse-json body)
{::s/presponse response})
(str/starts-with? content-type "text/") (with-meta
[(bs/to-string body)]
{::s/response response})
:else (throw
(ex-info "Unsupported content-type" {::s/response response
:content-type content-type})))
(throw (ex-info "No content-type." {::s/response response})))))
(defn parse-sse
"Parse JSON from `event` if event is type of :data"
[event]
(if (= :data (:type event))
{:type :data
:data (json/parse-string (:data event) true)}
event))
| null | https://raw.githubusercontent.com/mkurtak/clj-salt-api/6c6889640c1dc8717f4268202d05d330f6a6d162/src/salt/http.clj | clojure | All rights reserved.
Exception is only thrown from create-deferred-fn (from calling thread).
It must be put on promise/chan asynchronously to prevent deadlock.
TODO what to do with remaining pv? | Copyright ( c )
(ns ^:no-doc salt.http
(:require [aleph.http :as http]
[byte-streams :as bs]
[cheshire.core :as json]
[clojure.core.async :as a]
[clojure.string :as str]
[manifold.deferred :as d]
[manifold.stream :as ms]
[salt.core :as s]))
(def retriable-status-codes
"Set pof retriable status codes based on
"
#{408 500 502 503 504 509 520 521 522 523 524 598 599})
(defn- status-code->category
[{:keys [:status]}]
(if status
(cond
(= 200 status) :ok
(= 401 status) :unauthorized
(contains? retriable-status-codes status) :retriable
:else :error)
:error))
(defn- parse-json
[body]
(json/parse-string (bs/to-string body) true))
(defn response->channel-response
[resp]
(let [category (status-code->category resp)]
(if (= category :ok)
resp
(throw (ex-info "Request error."
{::s/response resp ::s/response-category category}
(when (instance? Throwable resp)
resp))))))
(defn- connect
"Connects deferred created in `create-deferred-fn` with core.async `resp-chan`"
[create-deferred-fn resp-chan]
(try (d/on-realized (d/catch
(create-deferred-fn)
identity)
(fn [r]
(a/>!! resp-chan (if (nil? r) {} r))
(a/close! resp-chan))
(fn [r]
(a/>!! resp-chan (if (nil? r) {} r))
(a/close! resp-chan)))
resp-chan
(catch Exception e
(a/put! resp-chan e)
(a/close! resp-chan)
resp-chan)))
(defn request
"Invoke `aleph.http/request` and transform manifold deferred to core.async channel.
Return new channel delivering response:
* Ring response if ok
* Exception if error occurs"
[req]
(let [resp-chan (a/promise-chan)]
(connect #(-> (http/request (merge req {:throw-exceptions? false}))
(d/chain response->channel-response))
resp-chan)))
(def empty-line-pattern
"New line followed by whitespace chars ending with new line."
#"\r?\n[ \t\\x0B\f\r]?\n")
(def last-empty-line-pattern
"Empty line not followed by another empty line.
This regex uses negative lookahead and DOTALL mode."
(re-pattern (str "(?s)" empty-line-pattern "(?!.*" empty-line-pattern ")")))
(def sse-supported-attrs #{:event :data :id})
(defn- line->field
[line]
(let [splits (str/split line #":" 2)
attr (first splits)
value (second splits)]
[(keyword attr)
(if (nil? value) "" value)]))
(defn- sse-supported-field?
[[attr val]]
(or
(contains? sse-supported-attrs attr)
(and (= :retry attr)
(every? #(Character/isDigit %) (str/trim val)))))
(defn- reduce-sse-fields
"Join values of field with same attributes with newline."
[fields]
(assoc (apply merge-with
#(str/join "\n" [%1 %2])
(map #(into {} [%]) fields))
:type :data))
(defn- reduce-retry-fields
"Leave only last retry field."
[fields]
{:type :retry
:retry (Short/parseShort (str/trim (second (last fields))))})
(defn- not-comment-line?
[field]
(not= ':' (first field)))
(defn- retry-field?
[field]
(= :retry (first field)))
(defn- split-buffer
[buf]
(let [buf-splits (str/split buf last-empty-line-pattern 2)
can-split? (< 1 (count buf-splits))
chunks (when can-split? (str/split (first buf-splits) empty-line-pattern))
next-buffer (if can-split? (second buf-splits) (first buf-splits))]
[chunks next-buffer]))
(defn sse-buffer->events
"Converts sse buffer to collection of events and next buffer value"
[buf prev-buf]
(let [[chunks next-buf] (split-buffer (str prev-buf buf))]
[(->> chunks
(mapcat (fn [chunk]
(->> chunk
(str/split-lines)
(filter not-comment-line?)
(map line->field)
(filter sse-supported-field?)
(group-by retry-field?)
(map (fn [group]
(let [[retry? fields] group]
(if retry?
(reduce-retry-fields fields)
(reduce-sse-fields fields)))))))))
next-buf]))
(defn mapcat-with-accumulator
"Returns transducer similar to mapcat but with accumulator.
f accepts current val and accumulator from previous call of f
f returns coll and next val of accumulator"
[f]
(comp
(fn [rf]
(let [acc (volatile! nil)]
(fn
([] (rf))
([result input]
(let [[items next-acc-val] (f input @acc)]
(vreset! acc next-acc-val)
(rf result items))))))
cat))
(defn- sse-pool
"Creates aleph http connection-pool merge options with default options"
[{:keys [:connection-options] :as opts}]
(http/connection-pool (merge opts
wanted 1 but did not work
:max-queue-size 1
:target-utilization 1
:connection-options (merge
connection-options
{:raw-stream? true})})))
(defn sse
"Invoke [[aleph.http/request]] on SSE endpoint and stream response to core.async channel.
Uses pool defined in `sse-pool` -> with 1 connection and raw-stream? option.
Pool could be customized with `pool-opts` (see [[aleph.http/request]] documentation).
Server-sent events is a stream of text lines separated by empty lines.
Returns new channel delivering server-sent events with following types
| Type | Attributes | Description
| -----------| -----------|
| `:connect` | :stream | Connection is established. Use :stream to close SSE.
| `:data` | :id :data | One SSE event. When event contains multiple data attributes, they are concatenated with newline character.
| `:retry` | :retry | SSE indicates to set retry-timeout.
| `:close` | | Sent before stream and respective core.async channel is closed.
If SSE request could not be made, exception is written to the channel and channel is closed.
"
[req pool-opts resp-chan]
(connect (fn [] (-> (http/request (merge req {:throw-exceptions? false
:pool (sse-pool pool-opts)}))
(d/chain response->channel-response
:body
#(do (a/>!! resp-chan {:type :connect
:stream %})
%)
#(ms/map bs/to-string %)
#(ms/filter some? %)
#(ms/transform (mapcat-with-accumulator
sse-buffer->events)
%)
#(ms/map (fn [x] (a/>!! resp-chan x) x) %)
#(ms/reduce (fn [r _] r) {:type :close} %))))
resp-chan))
(defn close-sse
"Close manifold stream if any."
[{:keys [:stream]}]
(when stream
(ms/close! stream)))
(defn parse-body
"Parse body from `response` using content-type header to determine format.
If content-type is missing or unsupported, or parsing error occurs, throw ex with response in data.
If body is not present, return nil.
If body is successfully parsed, return map and full response in meta."
[response]
(if-let [body (:body response)]
(if-let [content-type (get-in response [:headers "content-type"])]
(cond
(= "application/json" content-type) (with-meta
(parse-json body)
{::s/presponse response})
(str/starts-with? content-type "text/") (with-meta
[(bs/to-string body)]
{::s/response response})
:else (throw
(ex-info "Unsupported content-type" {::s/response response
:content-type content-type})))
(throw (ex-info "No content-type." {::s/response response})))))
(defn parse-sse
"Parse JSON from `event` if event is type of :data"
[event]
(if (= :data (:type event))
{:type :data
:data (json/parse-string (:data event) true)}
event))
|
d49633c26df0a8114e3ea16d50dbcdef2305e8eb35b2b1219f42d8a7778af8eb | dparis/gen-phzr | collision_group.cljs | (ns phzr.physics.p2.collision-group
(:require [phzr.impl.utils.core :refer [clj->phaser phaser->clj]]
[phzr.impl.extend :as ex]
[cljsjs.phaser]))
(defn ->CollisionGroup
"Collision Group
Parameters:
* bitmask (number) - The CollisionGroup bitmask."
([bitmask]
(js/Phaser.Physics.P2.CollisionGroup. (clj->phaser bitmask))))
| null | https://raw.githubusercontent.com/dparis/gen-phzr/e4c7b272e225ac343718dc15fc84f5f0dce68023/out/physics/p2/collision_group.cljs | clojure | (ns phzr.physics.p2.collision-group
(:require [phzr.impl.utils.core :refer [clj->phaser phaser->clj]]
[phzr.impl.extend :as ex]
[cljsjs.phaser]))
(defn ->CollisionGroup
"Collision Group
Parameters:
* bitmask (number) - The CollisionGroup bitmask."
([bitmask]
(js/Phaser.Physics.P2.CollisionGroup. (clj->phaser bitmask))))
| |
6d9d56981de78c0e015456e69df93f2be95941202e042bb3f2fa125382c60357 | rjnw/sham | info-values.rkt | #lang racket
(require "private/utils.rkt")
(provide (all-defined-out))
| null | https://raw.githubusercontent.com/rjnw/sham/6e0524b1eb01bcda83ae7a5be6339da4257c6781/sham-sam/sham/sam/syntax/info-values.rkt | racket | #lang racket
(require "private/utils.rkt")
(provide (all-defined-out))
| |
7c1b1459b12883d7ffd8a907c63ac72a677c2e0b84c9c4d182b56f994bac5a26 | inhabitedtype/ocaml-aws | errors_internal.ml | type t =
| AlreadyExistsException
| AssociatedInstances
| AssociationAlreadyExists
| AssociationDoesNotExist
| AssociationExecutionDoesNotExist
| AssociationLimitExceeded
| AssociationVersionLimitExceeded
| AuthFailure
| AutomationDefinitionNotFoundException
| AutomationDefinitionVersionNotFoundException
| AutomationExecutionLimitExceededException
| AutomationExecutionNotFoundException
| AutomationStepNotFoundException
| Blocked
| ComplianceTypeCountLimitExceededException
| CustomSchemaCountLimitExceededException
| DocumentAlreadyExists
| DocumentLimitExceeded
| DocumentPermissionLimit
| DocumentVersionLimitExceeded
| DoesNotExistException
| DryRunOperation
| DuplicateDocumentContent
| DuplicateDocumentVersionName
| DuplicateInstanceId
| FeatureNotAvailableException
| HierarchyLevelLimitExceededException
| HierarchyTypeMismatchException
| IdempotentParameterMismatch
| IncompatiblePolicyException
| IncompleteSignature
| InternalFailure
| InternalServerError
| InvalidAction
| InvalidActivation
| InvalidActivationId
| InvalidAggregatorException
| InvalidAllowedPatternException
| InvalidAssociation
| InvalidAssociationVersion
| InvalidAutomationExecutionParametersException
| InvalidAutomationSignalException
| InvalidAutomationStatusUpdateException
| InvalidClientTokenId
| InvalidCommandId
| InvalidDeleteInventoryParametersException
| InvalidDeletionIdException
| InvalidDocument
| InvalidDocumentContent
| InvalidDocumentOperation
| InvalidDocumentSchemaVersion
| InvalidDocumentType
| InvalidDocumentVersion
| InvalidFilter
| InvalidFilterKey
| InvalidFilterOption
| InvalidFilterValue
| InvalidInstanceId
| InvalidInstanceInformationFilterValue
| InvalidInventoryGroupException
| InvalidInventoryItemContextException
| InvalidInventoryRequestException
| InvalidItemContentException
| InvalidKeyId
| InvalidNextToken
| InvalidNotificationConfig
| InvalidOptionException
| InvalidOutputFolder
| InvalidOutputLocation
| InvalidParameter
| InvalidParameterCombination
| InvalidParameterValue
| InvalidParameters
| InvalidPermissionType
| InvalidPluginName
| InvalidPolicyAttributeException
| InvalidPolicyTypeException
| InvalidQueryParameter
| InvalidResourceId
| InvalidResourceType
| InvalidResultAttributeException
| InvalidRole
| InvalidSchedule
| InvalidTarget
| InvalidTypeNameException
| InvalidUpdate
| InvocationDoesNotExist
| ItemContentMismatchException
| ItemSizeLimitExceededException
| MalformedQueryString
| MaxDocumentSizeExceeded
| MissingAction
| MissingAuthenticationToken
| MissingParameter
| OpsItemAlreadyExistsException
| OpsItemInvalidParameterException
| OpsItemLimitExceededException
| OpsItemNotFoundException
| OptInRequired
| ParameterAlreadyExists
| ParameterLimitExceeded
| ParameterMaxVersionLimitExceeded
| ParameterNotFound
| ParameterPatternMismatchException
| ParameterVersionLabelLimitExceeded
| ParameterVersionNotFound
| PendingVerification
| PoliciesLimitExceededException
| RequestExpired
| RequestLimitExceeded
| ResourceDataSyncAlreadyExistsException
| ResourceDataSyncConflictException
| ResourceDataSyncCountExceededException
| ResourceDataSyncInvalidConfigurationException
| ResourceDataSyncNotFoundException
| ResourceInUseException
| ResourceLimitExceededException
| ServiceSettingNotFound
| ServiceUnavailable
| StatusUnchanged
| SubTypeCountLimitExceededException
| TargetInUseException
| TargetNotConnected
| Throttling
| TooManyTagsError
| TooManyUpdates
| TotalSizeLimitExceededException
| UnauthorizedOperation
| UnknownParameter
| UnsupportedCalendarException
| UnsupportedFeatureRequiredException
| UnsupportedInventoryItemContextException
| UnsupportedInventorySchemaVersionException
| UnsupportedOperatingSystem
| UnsupportedParameterType
| UnsupportedPlatformType
| UnsupportedProtocol
| ValidationError
| Uninhabited
let common =
[ UnsupportedProtocol
; UnknownParameter
; UnauthorizedOperation
; RequestLimitExceeded
; PendingVerification
; InvalidParameter
; IdempotentParameterMismatch
; DryRunOperation
; Blocked
; AuthFailure
; ValidationError
; Throttling
; ServiceUnavailable
; RequestExpired
; OptInRequired
; MissingParameter
; MissingAuthenticationToken
; MissingAction
; MalformedQueryString
; InvalidQueryParameter
; InvalidParameterValue
; InvalidParameterCombination
; InvalidClientTokenId
; InvalidAction
; InternalFailure
; IncompleteSignature
]
let to_http_code e =
match e with
| AlreadyExistsException -> None
| AssociatedInstances -> None
| AssociationAlreadyExists -> None
| AssociationDoesNotExist -> None
| AssociationExecutionDoesNotExist -> None
| AssociationLimitExceeded -> None
| AssociationVersionLimitExceeded -> None
| AuthFailure -> None
| AutomationDefinitionNotFoundException -> None
| AutomationDefinitionVersionNotFoundException -> None
| AutomationExecutionLimitExceededException -> None
| AutomationExecutionNotFoundException -> None
| AutomationStepNotFoundException -> None
| Blocked -> None
| ComplianceTypeCountLimitExceededException -> None
| CustomSchemaCountLimitExceededException -> None
| DocumentAlreadyExists -> None
| DocumentLimitExceeded -> None
| DocumentPermissionLimit -> None
| DocumentVersionLimitExceeded -> None
| DoesNotExistException -> None
| DryRunOperation -> None
| DuplicateDocumentContent -> None
| DuplicateDocumentVersionName -> None
| DuplicateInstanceId -> None
| FeatureNotAvailableException -> None
| HierarchyLevelLimitExceededException -> None
| HierarchyTypeMismatchException -> None
| IdempotentParameterMismatch -> None
| IncompatiblePolicyException -> None
| IncompleteSignature -> Some 400
| InternalFailure -> Some 500
| InternalServerError -> None
| InvalidAction -> Some 400
| InvalidActivation -> None
| InvalidActivationId -> None
| InvalidAggregatorException -> None
| InvalidAllowedPatternException -> None
| InvalidAssociation -> None
| InvalidAssociationVersion -> None
| InvalidAutomationExecutionParametersException -> None
| InvalidAutomationSignalException -> None
| InvalidAutomationStatusUpdateException -> None
| InvalidClientTokenId -> Some 403
| InvalidCommandId -> None
| InvalidDeleteInventoryParametersException -> None
| InvalidDeletionIdException -> None
| InvalidDocument -> None
| InvalidDocumentContent -> None
| InvalidDocumentOperation -> None
| InvalidDocumentSchemaVersion -> None
| InvalidDocumentType -> None
| InvalidDocumentVersion -> None
| InvalidFilter -> None
| InvalidFilterKey -> None
| InvalidFilterOption -> None
| InvalidFilterValue -> None
| InvalidInstanceId -> None
| InvalidInstanceInformationFilterValue -> None
| InvalidInventoryGroupException -> None
| InvalidInventoryItemContextException -> None
| InvalidInventoryRequestException -> None
| InvalidItemContentException -> None
| InvalidKeyId -> None
| InvalidNextToken -> None
| InvalidNotificationConfig -> None
| InvalidOptionException -> None
| InvalidOutputFolder -> None
| InvalidOutputLocation -> None
| InvalidParameter -> None
| InvalidParameterCombination -> Some 400
| InvalidParameterValue -> Some 400
| InvalidParameters -> None
| InvalidPermissionType -> None
| InvalidPluginName -> None
| InvalidPolicyAttributeException -> None
| InvalidPolicyTypeException -> None
| InvalidQueryParameter -> Some 400
| InvalidResourceId -> None
| InvalidResourceType -> None
| InvalidResultAttributeException -> None
| InvalidRole -> None
| InvalidSchedule -> None
| InvalidTarget -> None
| InvalidTypeNameException -> None
| InvalidUpdate -> None
| InvocationDoesNotExist -> None
| ItemContentMismatchException -> None
| ItemSizeLimitExceededException -> None
| MalformedQueryString -> Some 404
| MaxDocumentSizeExceeded -> None
| MissingAction -> Some 400
| MissingAuthenticationToken -> Some 403
| MissingParameter -> Some 400
| OpsItemAlreadyExistsException -> None
| OpsItemInvalidParameterException -> None
| OpsItemLimitExceededException -> None
| OpsItemNotFoundException -> None
| OptInRequired -> Some 403
| ParameterAlreadyExists -> None
| ParameterLimitExceeded -> None
| ParameterMaxVersionLimitExceeded -> None
| ParameterNotFound -> None
| ParameterPatternMismatchException -> None
| ParameterVersionLabelLimitExceeded -> None
| ParameterVersionNotFound -> None
| PendingVerification -> None
| PoliciesLimitExceededException -> None
| RequestExpired -> Some 400
| RequestLimitExceeded -> None
| ResourceDataSyncAlreadyExistsException -> None
| ResourceDataSyncConflictException -> None
| ResourceDataSyncCountExceededException -> None
| ResourceDataSyncInvalidConfigurationException -> None
| ResourceDataSyncNotFoundException -> None
| ResourceInUseException -> None
| ResourceLimitExceededException -> None
| ServiceSettingNotFound -> None
| ServiceUnavailable -> Some 503
| StatusUnchanged -> None
| SubTypeCountLimitExceededException -> None
| TargetInUseException -> None
| TargetNotConnected -> None
| Throttling -> Some 400
| TooManyTagsError -> None
| TooManyUpdates -> None
| TotalSizeLimitExceededException -> None
| UnauthorizedOperation -> None
| UnknownParameter -> None
| UnsupportedCalendarException -> None
| UnsupportedFeatureRequiredException -> None
| UnsupportedInventoryItemContextException -> None
| UnsupportedInventorySchemaVersionException -> None
| UnsupportedOperatingSystem -> None
| UnsupportedParameterType -> None
| UnsupportedPlatformType -> None
| UnsupportedProtocol -> None
| ValidationError -> Some 400
| Uninhabited -> None
let to_string e =
match e with
| AlreadyExistsException -> "AlreadyExistsException"
| AssociatedInstances -> "AssociatedInstances"
| AssociationAlreadyExists -> "AssociationAlreadyExists"
| AssociationDoesNotExist -> "AssociationDoesNotExist"
| AssociationExecutionDoesNotExist -> "AssociationExecutionDoesNotExist"
| AssociationLimitExceeded -> "AssociationLimitExceeded"
| AssociationVersionLimitExceeded -> "AssociationVersionLimitExceeded"
| AuthFailure -> "AuthFailure"
| AutomationDefinitionNotFoundException -> "AutomationDefinitionNotFoundException"
| AutomationDefinitionVersionNotFoundException ->
"AutomationDefinitionVersionNotFoundException"
| AutomationExecutionLimitExceededException ->
"AutomationExecutionLimitExceededException"
| AutomationExecutionNotFoundException -> "AutomationExecutionNotFoundException"
| AutomationStepNotFoundException -> "AutomationStepNotFoundException"
| Blocked -> "Blocked"
| ComplianceTypeCountLimitExceededException ->
"ComplianceTypeCountLimitExceededException"
| CustomSchemaCountLimitExceededException -> "CustomSchemaCountLimitExceededException"
| DocumentAlreadyExists -> "DocumentAlreadyExists"
| DocumentLimitExceeded -> "DocumentLimitExceeded"
| DocumentPermissionLimit -> "DocumentPermissionLimit"
| DocumentVersionLimitExceeded -> "DocumentVersionLimitExceeded"
| DoesNotExistException -> "DoesNotExistException"
| DryRunOperation -> "DryRunOperation"
| DuplicateDocumentContent -> "DuplicateDocumentContent"
| DuplicateDocumentVersionName -> "DuplicateDocumentVersionName"
| DuplicateInstanceId -> "DuplicateInstanceId"
| FeatureNotAvailableException -> "FeatureNotAvailableException"
| HierarchyLevelLimitExceededException -> "HierarchyLevelLimitExceededException"
| HierarchyTypeMismatchException -> "HierarchyTypeMismatchException"
| IdempotentParameterMismatch -> "IdempotentParameterMismatch"
| IncompatiblePolicyException -> "IncompatiblePolicyException"
| IncompleteSignature -> "IncompleteSignature"
| InternalFailure -> "InternalFailure"
| InternalServerError -> "InternalServerError"
| InvalidAction -> "InvalidAction"
| InvalidActivation -> "InvalidActivation"
| InvalidActivationId -> "InvalidActivationId"
| InvalidAggregatorException -> "InvalidAggregatorException"
| InvalidAllowedPatternException -> "InvalidAllowedPatternException"
| InvalidAssociation -> "InvalidAssociation"
| InvalidAssociationVersion -> "InvalidAssociationVersion"
| InvalidAutomationExecutionParametersException ->
"InvalidAutomationExecutionParametersException"
| InvalidAutomationSignalException -> "InvalidAutomationSignalException"
| InvalidAutomationStatusUpdateException -> "InvalidAutomationStatusUpdateException"
| InvalidClientTokenId -> "InvalidClientTokenId"
| InvalidCommandId -> "InvalidCommandId"
| InvalidDeleteInventoryParametersException ->
"InvalidDeleteInventoryParametersException"
| InvalidDeletionIdException -> "InvalidDeletionIdException"
| InvalidDocument -> "InvalidDocument"
| InvalidDocumentContent -> "InvalidDocumentContent"
| InvalidDocumentOperation -> "InvalidDocumentOperation"
| InvalidDocumentSchemaVersion -> "InvalidDocumentSchemaVersion"
| InvalidDocumentType -> "InvalidDocumentType"
| InvalidDocumentVersion -> "InvalidDocumentVersion"
| InvalidFilter -> "InvalidFilter"
| InvalidFilterKey -> "InvalidFilterKey"
| InvalidFilterOption -> "InvalidFilterOption"
| InvalidFilterValue -> "InvalidFilterValue"
| InvalidInstanceId -> "InvalidInstanceId"
| InvalidInstanceInformationFilterValue -> "InvalidInstanceInformationFilterValue"
| InvalidInventoryGroupException -> "InvalidInventoryGroupException"
| InvalidInventoryItemContextException -> "InvalidInventoryItemContextException"
| InvalidInventoryRequestException -> "InvalidInventoryRequestException"
| InvalidItemContentException -> "InvalidItemContentException"
| InvalidKeyId -> "InvalidKeyId"
| InvalidNextToken -> "InvalidNextToken"
| InvalidNotificationConfig -> "InvalidNotificationConfig"
| InvalidOptionException -> "InvalidOptionException"
| InvalidOutputFolder -> "InvalidOutputFolder"
| InvalidOutputLocation -> "InvalidOutputLocation"
| InvalidParameter -> "InvalidParameter"
| InvalidParameterCombination -> "InvalidParameterCombination"
| InvalidParameterValue -> "InvalidParameterValue"
| InvalidParameters -> "InvalidParameters"
| InvalidPermissionType -> "InvalidPermissionType"
| InvalidPluginName -> "InvalidPluginName"
| InvalidPolicyAttributeException -> "InvalidPolicyAttributeException"
| InvalidPolicyTypeException -> "InvalidPolicyTypeException"
| InvalidQueryParameter -> "InvalidQueryParameter"
| InvalidResourceId -> "InvalidResourceId"
| InvalidResourceType -> "InvalidResourceType"
| InvalidResultAttributeException -> "InvalidResultAttributeException"
| InvalidRole -> "InvalidRole"
| InvalidSchedule -> "InvalidSchedule"
| InvalidTarget -> "InvalidTarget"
| InvalidTypeNameException -> "InvalidTypeNameException"
| InvalidUpdate -> "InvalidUpdate"
| InvocationDoesNotExist -> "InvocationDoesNotExist"
| ItemContentMismatchException -> "ItemContentMismatchException"
| ItemSizeLimitExceededException -> "ItemSizeLimitExceededException"
| MalformedQueryString -> "MalformedQueryString"
| MaxDocumentSizeExceeded -> "MaxDocumentSizeExceeded"
| MissingAction -> "MissingAction"
| MissingAuthenticationToken -> "MissingAuthenticationToken"
| MissingParameter -> "MissingParameter"
| OpsItemAlreadyExistsException -> "OpsItemAlreadyExistsException"
| OpsItemInvalidParameterException -> "OpsItemInvalidParameterException"
| OpsItemLimitExceededException -> "OpsItemLimitExceededException"
| OpsItemNotFoundException -> "OpsItemNotFoundException"
| OptInRequired -> "OptInRequired"
| ParameterAlreadyExists -> "ParameterAlreadyExists"
| ParameterLimitExceeded -> "ParameterLimitExceeded"
| ParameterMaxVersionLimitExceeded -> "ParameterMaxVersionLimitExceeded"
| ParameterNotFound -> "ParameterNotFound"
| ParameterPatternMismatchException -> "ParameterPatternMismatchException"
| ParameterVersionLabelLimitExceeded -> "ParameterVersionLabelLimitExceeded"
| ParameterVersionNotFound -> "ParameterVersionNotFound"
| PendingVerification -> "PendingVerification"
| PoliciesLimitExceededException -> "PoliciesLimitExceededException"
| RequestExpired -> "RequestExpired"
| RequestLimitExceeded -> "RequestLimitExceeded"
| ResourceDataSyncAlreadyExistsException -> "ResourceDataSyncAlreadyExistsException"
| ResourceDataSyncConflictException -> "ResourceDataSyncConflictException"
| ResourceDataSyncCountExceededException -> "ResourceDataSyncCountExceededException"
| ResourceDataSyncInvalidConfigurationException ->
"ResourceDataSyncInvalidConfigurationException"
| ResourceDataSyncNotFoundException -> "ResourceDataSyncNotFoundException"
| ResourceInUseException -> "ResourceInUseException"
| ResourceLimitExceededException -> "ResourceLimitExceededException"
| ServiceSettingNotFound -> "ServiceSettingNotFound"
| ServiceUnavailable -> "ServiceUnavailable"
| StatusUnchanged -> "StatusUnchanged"
| SubTypeCountLimitExceededException -> "SubTypeCountLimitExceededException"
| TargetInUseException -> "TargetInUseException"
| TargetNotConnected -> "TargetNotConnected"
| Throttling -> "Throttling"
| TooManyTagsError -> "TooManyTagsError"
| TooManyUpdates -> "TooManyUpdates"
| TotalSizeLimitExceededException -> "TotalSizeLimitExceededException"
| UnauthorizedOperation -> "UnauthorizedOperation"
| UnknownParameter -> "UnknownParameter"
| UnsupportedCalendarException -> "UnsupportedCalendarException"
| UnsupportedFeatureRequiredException -> "UnsupportedFeatureRequiredException"
| UnsupportedInventoryItemContextException -> "UnsupportedInventoryItemContextException"
| UnsupportedInventorySchemaVersionException ->
"UnsupportedInventorySchemaVersionException"
| UnsupportedOperatingSystem -> "UnsupportedOperatingSystem"
| UnsupportedParameterType -> "UnsupportedParameterType"
| UnsupportedPlatformType -> "UnsupportedPlatformType"
| UnsupportedProtocol -> "UnsupportedProtocol"
| ValidationError -> "ValidationError"
| Uninhabited -> "Uninhabited"
let of_string e =
match e with
| "AlreadyExistsException" -> Some AlreadyExistsException
| "AssociatedInstances" -> Some AssociatedInstances
| "AssociationAlreadyExists" -> Some AssociationAlreadyExists
| "AssociationDoesNotExist" -> Some AssociationDoesNotExist
| "AssociationExecutionDoesNotExist" -> Some AssociationExecutionDoesNotExist
| "AssociationLimitExceeded" -> Some AssociationLimitExceeded
| "AssociationVersionLimitExceeded" -> Some AssociationVersionLimitExceeded
| "AuthFailure" -> Some AuthFailure
| "AutomationDefinitionNotFoundException" -> Some AutomationDefinitionNotFoundException
| "AutomationDefinitionVersionNotFoundException" ->
Some AutomationDefinitionVersionNotFoundException
| "AutomationExecutionLimitExceededException" ->
Some AutomationExecutionLimitExceededException
| "AutomationExecutionNotFoundException" -> Some AutomationExecutionNotFoundException
| "AutomationStepNotFoundException" -> Some AutomationStepNotFoundException
| "Blocked" -> Some Blocked
| "ComplianceTypeCountLimitExceededException" ->
Some ComplianceTypeCountLimitExceededException
| "CustomSchemaCountLimitExceededException" ->
Some CustomSchemaCountLimitExceededException
| "DocumentAlreadyExists" -> Some DocumentAlreadyExists
| "DocumentLimitExceeded" -> Some DocumentLimitExceeded
| "DocumentPermissionLimit" -> Some DocumentPermissionLimit
| "DocumentVersionLimitExceeded" -> Some DocumentVersionLimitExceeded
| "DoesNotExistException" -> Some DoesNotExistException
| "DryRunOperation" -> Some DryRunOperation
| "DuplicateDocumentContent" -> Some DuplicateDocumentContent
| "DuplicateDocumentVersionName" -> Some DuplicateDocumentVersionName
| "DuplicateInstanceId" -> Some DuplicateInstanceId
| "FeatureNotAvailableException" -> Some FeatureNotAvailableException
| "HierarchyLevelLimitExceededException" -> Some HierarchyLevelLimitExceededException
| "HierarchyTypeMismatchException" -> Some HierarchyTypeMismatchException
| "IdempotentParameterMismatch" -> Some IdempotentParameterMismatch
| "IncompatiblePolicyException" -> Some IncompatiblePolicyException
| "IncompleteSignature" -> Some IncompleteSignature
| "InternalFailure" -> Some InternalFailure
| "InternalServerError" -> Some InternalServerError
| "InvalidAction" -> Some InvalidAction
| "InvalidActivation" -> Some InvalidActivation
| "InvalidActivationId" -> Some InvalidActivationId
| "InvalidAggregatorException" -> Some InvalidAggregatorException
| "InvalidAllowedPatternException" -> Some InvalidAllowedPatternException
| "InvalidAssociation" -> Some InvalidAssociation
| "InvalidAssociationVersion" -> Some InvalidAssociationVersion
| "InvalidAutomationExecutionParametersException" ->
Some InvalidAutomationExecutionParametersException
| "InvalidAutomationSignalException" -> Some InvalidAutomationSignalException
| "InvalidAutomationStatusUpdateException" ->
Some InvalidAutomationStatusUpdateException
| "InvalidClientTokenId" -> Some InvalidClientTokenId
| "InvalidCommandId" -> Some InvalidCommandId
| "InvalidDeleteInventoryParametersException" ->
Some InvalidDeleteInventoryParametersException
| "InvalidDeletionIdException" -> Some InvalidDeletionIdException
| "InvalidDocument" -> Some InvalidDocument
| "InvalidDocumentContent" -> Some InvalidDocumentContent
| "InvalidDocumentOperation" -> Some InvalidDocumentOperation
| "InvalidDocumentSchemaVersion" -> Some InvalidDocumentSchemaVersion
| "InvalidDocumentType" -> Some InvalidDocumentType
| "InvalidDocumentVersion" -> Some InvalidDocumentVersion
| "InvalidFilter" -> Some InvalidFilter
| "InvalidFilterKey" -> Some InvalidFilterKey
| "InvalidFilterOption" -> Some InvalidFilterOption
| "InvalidFilterValue" -> Some InvalidFilterValue
| "InvalidInstanceId" -> Some InvalidInstanceId
| "InvalidInstanceInformationFilterValue" -> Some InvalidInstanceInformationFilterValue
| "InvalidInventoryGroupException" -> Some InvalidInventoryGroupException
| "InvalidInventoryItemContextException" -> Some InvalidInventoryItemContextException
| "InvalidInventoryRequestException" -> Some InvalidInventoryRequestException
| "InvalidItemContentException" -> Some InvalidItemContentException
| "InvalidKeyId" -> Some InvalidKeyId
| "InvalidNextToken" -> Some InvalidNextToken
| "InvalidNotificationConfig" -> Some InvalidNotificationConfig
| "InvalidOptionException" -> Some InvalidOptionException
| "InvalidOutputFolder" -> Some InvalidOutputFolder
| "InvalidOutputLocation" -> Some InvalidOutputLocation
| "InvalidParameter" -> Some InvalidParameter
| "InvalidParameterCombination" -> Some InvalidParameterCombination
| "InvalidParameterValue" -> Some InvalidParameterValue
| "InvalidParameters" -> Some InvalidParameters
| "InvalidPermissionType" -> Some InvalidPermissionType
| "InvalidPluginName" -> Some InvalidPluginName
| "InvalidPolicyAttributeException" -> Some InvalidPolicyAttributeException
| "InvalidPolicyTypeException" -> Some InvalidPolicyTypeException
| "InvalidQueryParameter" -> Some InvalidQueryParameter
| "InvalidResourceId" -> Some InvalidResourceId
| "InvalidResourceType" -> Some InvalidResourceType
| "InvalidResultAttributeException" -> Some InvalidResultAttributeException
| "InvalidRole" -> Some InvalidRole
| "InvalidSchedule" -> Some InvalidSchedule
| "InvalidTarget" -> Some InvalidTarget
| "InvalidTypeNameException" -> Some InvalidTypeNameException
| "InvalidUpdate" -> Some InvalidUpdate
| "InvocationDoesNotExist" -> Some InvocationDoesNotExist
| "ItemContentMismatchException" -> Some ItemContentMismatchException
| "ItemSizeLimitExceededException" -> Some ItemSizeLimitExceededException
| "MalformedQueryString" -> Some MalformedQueryString
| "MaxDocumentSizeExceeded" -> Some MaxDocumentSizeExceeded
| "MissingAction" -> Some MissingAction
| "MissingAuthenticationToken" -> Some MissingAuthenticationToken
| "MissingParameter" -> Some MissingParameter
| "OpsItemAlreadyExistsException" -> Some OpsItemAlreadyExistsException
| "OpsItemInvalidParameterException" -> Some OpsItemInvalidParameterException
| "OpsItemLimitExceededException" -> Some OpsItemLimitExceededException
| "OpsItemNotFoundException" -> Some OpsItemNotFoundException
| "OptInRequired" -> Some OptInRequired
| "ParameterAlreadyExists" -> Some ParameterAlreadyExists
| "ParameterLimitExceeded" -> Some ParameterLimitExceeded
| "ParameterMaxVersionLimitExceeded" -> Some ParameterMaxVersionLimitExceeded
| "ParameterNotFound" -> Some ParameterNotFound
| "ParameterPatternMismatchException" -> Some ParameterPatternMismatchException
| "ParameterVersionLabelLimitExceeded" -> Some ParameterVersionLabelLimitExceeded
| "ParameterVersionNotFound" -> Some ParameterVersionNotFound
| "PendingVerification" -> Some PendingVerification
| "PoliciesLimitExceededException" -> Some PoliciesLimitExceededException
| "RequestExpired" -> Some RequestExpired
| "RequestLimitExceeded" -> Some RequestLimitExceeded
| "ResourceDataSyncAlreadyExistsException" ->
Some ResourceDataSyncAlreadyExistsException
| "ResourceDataSyncConflictException" -> Some ResourceDataSyncConflictException
| "ResourceDataSyncCountExceededException" ->
Some ResourceDataSyncCountExceededException
| "ResourceDataSyncInvalidConfigurationException" ->
Some ResourceDataSyncInvalidConfigurationException
| "ResourceDataSyncNotFoundException" -> Some ResourceDataSyncNotFoundException
| "ResourceInUseException" -> Some ResourceInUseException
| "ResourceLimitExceededException" -> Some ResourceLimitExceededException
| "ServiceSettingNotFound" -> Some ServiceSettingNotFound
| "ServiceUnavailable" -> Some ServiceUnavailable
| "StatusUnchanged" -> Some StatusUnchanged
| "SubTypeCountLimitExceededException" -> Some SubTypeCountLimitExceededException
| "TargetInUseException" -> Some TargetInUseException
| "TargetNotConnected" -> Some TargetNotConnected
| "Throttling" -> Some Throttling
| "TooManyTagsError" -> Some TooManyTagsError
| "TooManyUpdates" -> Some TooManyUpdates
| "TotalSizeLimitExceededException" -> Some TotalSizeLimitExceededException
| "UnauthorizedOperation" -> Some UnauthorizedOperation
| "UnknownParameter" -> Some UnknownParameter
| "UnsupportedCalendarException" -> Some UnsupportedCalendarException
| "UnsupportedFeatureRequiredException" -> Some UnsupportedFeatureRequiredException
| "UnsupportedInventoryItemContextException" ->
Some UnsupportedInventoryItemContextException
| "UnsupportedInventorySchemaVersionException" ->
Some UnsupportedInventorySchemaVersionException
| "UnsupportedOperatingSystem" -> Some UnsupportedOperatingSystem
| "UnsupportedParameterType" -> Some UnsupportedParameterType
| "UnsupportedPlatformType" -> Some UnsupportedPlatformType
| "UnsupportedProtocol" -> Some UnsupportedProtocol
| "ValidationError" -> Some ValidationError
| "Uninhabited" -> Some Uninhabited
| _ -> None
| null | https://raw.githubusercontent.com/inhabitedtype/ocaml-aws/3bc554af7ae7ef9e2dcea44a1b72c9e687435fa9/libraries/ssm/lib/errors_internal.ml | ocaml | type t =
| AlreadyExistsException
| AssociatedInstances
| AssociationAlreadyExists
| AssociationDoesNotExist
| AssociationExecutionDoesNotExist
| AssociationLimitExceeded
| AssociationVersionLimitExceeded
| AuthFailure
| AutomationDefinitionNotFoundException
| AutomationDefinitionVersionNotFoundException
| AutomationExecutionLimitExceededException
| AutomationExecutionNotFoundException
| AutomationStepNotFoundException
| Blocked
| ComplianceTypeCountLimitExceededException
| CustomSchemaCountLimitExceededException
| DocumentAlreadyExists
| DocumentLimitExceeded
| DocumentPermissionLimit
| DocumentVersionLimitExceeded
| DoesNotExistException
| DryRunOperation
| DuplicateDocumentContent
| DuplicateDocumentVersionName
| DuplicateInstanceId
| FeatureNotAvailableException
| HierarchyLevelLimitExceededException
| HierarchyTypeMismatchException
| IdempotentParameterMismatch
| IncompatiblePolicyException
| IncompleteSignature
| InternalFailure
| InternalServerError
| InvalidAction
| InvalidActivation
| InvalidActivationId
| InvalidAggregatorException
| InvalidAllowedPatternException
| InvalidAssociation
| InvalidAssociationVersion
| InvalidAutomationExecutionParametersException
| InvalidAutomationSignalException
| InvalidAutomationStatusUpdateException
| InvalidClientTokenId
| InvalidCommandId
| InvalidDeleteInventoryParametersException
| InvalidDeletionIdException
| InvalidDocument
| InvalidDocumentContent
| InvalidDocumentOperation
| InvalidDocumentSchemaVersion
| InvalidDocumentType
| InvalidDocumentVersion
| InvalidFilter
| InvalidFilterKey
| InvalidFilterOption
| InvalidFilterValue
| InvalidInstanceId
| InvalidInstanceInformationFilterValue
| InvalidInventoryGroupException
| InvalidInventoryItemContextException
| InvalidInventoryRequestException
| InvalidItemContentException
| InvalidKeyId
| InvalidNextToken
| InvalidNotificationConfig
| InvalidOptionException
| InvalidOutputFolder
| InvalidOutputLocation
| InvalidParameter
| InvalidParameterCombination
| InvalidParameterValue
| InvalidParameters
| InvalidPermissionType
| InvalidPluginName
| InvalidPolicyAttributeException
| InvalidPolicyTypeException
| InvalidQueryParameter
| InvalidResourceId
| InvalidResourceType
| InvalidResultAttributeException
| InvalidRole
| InvalidSchedule
| InvalidTarget
| InvalidTypeNameException
| InvalidUpdate
| InvocationDoesNotExist
| ItemContentMismatchException
| ItemSizeLimitExceededException
| MalformedQueryString
| MaxDocumentSizeExceeded
| MissingAction
| MissingAuthenticationToken
| MissingParameter
| OpsItemAlreadyExistsException
| OpsItemInvalidParameterException
| OpsItemLimitExceededException
| OpsItemNotFoundException
| OptInRequired
| ParameterAlreadyExists
| ParameterLimitExceeded
| ParameterMaxVersionLimitExceeded
| ParameterNotFound
| ParameterPatternMismatchException
| ParameterVersionLabelLimitExceeded
| ParameterVersionNotFound
| PendingVerification
| PoliciesLimitExceededException
| RequestExpired
| RequestLimitExceeded
| ResourceDataSyncAlreadyExistsException
| ResourceDataSyncConflictException
| ResourceDataSyncCountExceededException
| ResourceDataSyncInvalidConfigurationException
| ResourceDataSyncNotFoundException
| ResourceInUseException
| ResourceLimitExceededException
| ServiceSettingNotFound
| ServiceUnavailable
| StatusUnchanged
| SubTypeCountLimitExceededException
| TargetInUseException
| TargetNotConnected
| Throttling
| TooManyTagsError
| TooManyUpdates
| TotalSizeLimitExceededException
| UnauthorizedOperation
| UnknownParameter
| UnsupportedCalendarException
| UnsupportedFeatureRequiredException
| UnsupportedInventoryItemContextException
| UnsupportedInventorySchemaVersionException
| UnsupportedOperatingSystem
| UnsupportedParameterType
| UnsupportedPlatformType
| UnsupportedProtocol
| ValidationError
| Uninhabited
let common =
[ UnsupportedProtocol
; UnknownParameter
; UnauthorizedOperation
; RequestLimitExceeded
; PendingVerification
; InvalidParameter
; IdempotentParameterMismatch
; DryRunOperation
; Blocked
; AuthFailure
; ValidationError
; Throttling
; ServiceUnavailable
; RequestExpired
; OptInRequired
; MissingParameter
; MissingAuthenticationToken
; MissingAction
; MalformedQueryString
; InvalidQueryParameter
; InvalidParameterValue
; InvalidParameterCombination
; InvalidClientTokenId
; InvalidAction
; InternalFailure
; IncompleteSignature
]
let to_http_code e =
match e with
| AlreadyExistsException -> None
| AssociatedInstances -> None
| AssociationAlreadyExists -> None
| AssociationDoesNotExist -> None
| AssociationExecutionDoesNotExist -> None
| AssociationLimitExceeded -> None
| AssociationVersionLimitExceeded -> None
| AuthFailure -> None
| AutomationDefinitionNotFoundException -> None
| AutomationDefinitionVersionNotFoundException -> None
| AutomationExecutionLimitExceededException -> None
| AutomationExecutionNotFoundException -> None
| AutomationStepNotFoundException -> None
| Blocked -> None
| ComplianceTypeCountLimitExceededException -> None
| CustomSchemaCountLimitExceededException -> None
| DocumentAlreadyExists -> None
| DocumentLimitExceeded -> None
| DocumentPermissionLimit -> None
| DocumentVersionLimitExceeded -> None
| DoesNotExistException -> None
| DryRunOperation -> None
| DuplicateDocumentContent -> None
| DuplicateDocumentVersionName -> None
| DuplicateInstanceId -> None
| FeatureNotAvailableException -> None
| HierarchyLevelLimitExceededException -> None
| HierarchyTypeMismatchException -> None
| IdempotentParameterMismatch -> None
| IncompatiblePolicyException -> None
| IncompleteSignature -> Some 400
| InternalFailure -> Some 500
| InternalServerError -> None
| InvalidAction -> Some 400
| InvalidActivation -> None
| InvalidActivationId -> None
| InvalidAggregatorException -> None
| InvalidAllowedPatternException -> None
| InvalidAssociation -> None
| InvalidAssociationVersion -> None
| InvalidAutomationExecutionParametersException -> None
| InvalidAutomationSignalException -> None
| InvalidAutomationStatusUpdateException -> None
| InvalidClientTokenId -> Some 403
| InvalidCommandId -> None
| InvalidDeleteInventoryParametersException -> None
| InvalidDeletionIdException -> None
| InvalidDocument -> None
| InvalidDocumentContent -> None
| InvalidDocumentOperation -> None
| InvalidDocumentSchemaVersion -> None
| InvalidDocumentType -> None
| InvalidDocumentVersion -> None
| InvalidFilter -> None
| InvalidFilterKey -> None
| InvalidFilterOption -> None
| InvalidFilterValue -> None
| InvalidInstanceId -> None
| InvalidInstanceInformationFilterValue -> None
| InvalidInventoryGroupException -> None
| InvalidInventoryItemContextException -> None
| InvalidInventoryRequestException -> None
| InvalidItemContentException -> None
| InvalidKeyId -> None
| InvalidNextToken -> None
| InvalidNotificationConfig -> None
| InvalidOptionException -> None
| InvalidOutputFolder -> None
| InvalidOutputLocation -> None
| InvalidParameter -> None
| InvalidParameterCombination -> Some 400
| InvalidParameterValue -> Some 400
| InvalidParameters -> None
| InvalidPermissionType -> None
| InvalidPluginName -> None
| InvalidPolicyAttributeException -> None
| InvalidPolicyTypeException -> None
| InvalidQueryParameter -> Some 400
| InvalidResourceId -> None
| InvalidResourceType -> None
| InvalidResultAttributeException -> None
| InvalidRole -> None
| InvalidSchedule -> None
| InvalidTarget -> None
| InvalidTypeNameException -> None
| InvalidUpdate -> None
| InvocationDoesNotExist -> None
| ItemContentMismatchException -> None
| ItemSizeLimitExceededException -> None
| MalformedQueryString -> Some 404
| MaxDocumentSizeExceeded -> None
| MissingAction -> Some 400
| MissingAuthenticationToken -> Some 403
| MissingParameter -> Some 400
| OpsItemAlreadyExistsException -> None
| OpsItemInvalidParameterException -> None
| OpsItemLimitExceededException -> None
| OpsItemNotFoundException -> None
| OptInRequired -> Some 403
| ParameterAlreadyExists -> None
| ParameterLimitExceeded -> None
| ParameterMaxVersionLimitExceeded -> None
| ParameterNotFound -> None
| ParameterPatternMismatchException -> None
| ParameterVersionLabelLimitExceeded -> None
| ParameterVersionNotFound -> None
| PendingVerification -> None
| PoliciesLimitExceededException -> None
| RequestExpired -> Some 400
| RequestLimitExceeded -> None
| ResourceDataSyncAlreadyExistsException -> None
| ResourceDataSyncConflictException -> None
| ResourceDataSyncCountExceededException -> None
| ResourceDataSyncInvalidConfigurationException -> None
| ResourceDataSyncNotFoundException -> None
| ResourceInUseException -> None
| ResourceLimitExceededException -> None
| ServiceSettingNotFound -> None
| ServiceUnavailable -> Some 503
| StatusUnchanged -> None
| SubTypeCountLimitExceededException -> None
| TargetInUseException -> None
| TargetNotConnected -> None
| Throttling -> Some 400
| TooManyTagsError -> None
| TooManyUpdates -> None
| TotalSizeLimitExceededException -> None
| UnauthorizedOperation -> None
| UnknownParameter -> None
| UnsupportedCalendarException -> None
| UnsupportedFeatureRequiredException -> None
| UnsupportedInventoryItemContextException -> None
| UnsupportedInventorySchemaVersionException -> None
| UnsupportedOperatingSystem -> None
| UnsupportedParameterType -> None
| UnsupportedPlatformType -> None
| UnsupportedProtocol -> None
| ValidationError -> Some 400
| Uninhabited -> None
let to_string e =
match e with
| AlreadyExistsException -> "AlreadyExistsException"
| AssociatedInstances -> "AssociatedInstances"
| AssociationAlreadyExists -> "AssociationAlreadyExists"
| AssociationDoesNotExist -> "AssociationDoesNotExist"
| AssociationExecutionDoesNotExist -> "AssociationExecutionDoesNotExist"
| AssociationLimitExceeded -> "AssociationLimitExceeded"
| AssociationVersionLimitExceeded -> "AssociationVersionLimitExceeded"
| AuthFailure -> "AuthFailure"
| AutomationDefinitionNotFoundException -> "AutomationDefinitionNotFoundException"
| AutomationDefinitionVersionNotFoundException ->
"AutomationDefinitionVersionNotFoundException"
| AutomationExecutionLimitExceededException ->
"AutomationExecutionLimitExceededException"
| AutomationExecutionNotFoundException -> "AutomationExecutionNotFoundException"
| AutomationStepNotFoundException -> "AutomationStepNotFoundException"
| Blocked -> "Blocked"
| ComplianceTypeCountLimitExceededException ->
"ComplianceTypeCountLimitExceededException"
| CustomSchemaCountLimitExceededException -> "CustomSchemaCountLimitExceededException"
| DocumentAlreadyExists -> "DocumentAlreadyExists"
| DocumentLimitExceeded -> "DocumentLimitExceeded"
| DocumentPermissionLimit -> "DocumentPermissionLimit"
| DocumentVersionLimitExceeded -> "DocumentVersionLimitExceeded"
| DoesNotExistException -> "DoesNotExistException"
| DryRunOperation -> "DryRunOperation"
| DuplicateDocumentContent -> "DuplicateDocumentContent"
| DuplicateDocumentVersionName -> "DuplicateDocumentVersionName"
| DuplicateInstanceId -> "DuplicateInstanceId"
| FeatureNotAvailableException -> "FeatureNotAvailableException"
| HierarchyLevelLimitExceededException -> "HierarchyLevelLimitExceededException"
| HierarchyTypeMismatchException -> "HierarchyTypeMismatchException"
| IdempotentParameterMismatch -> "IdempotentParameterMismatch"
| IncompatiblePolicyException -> "IncompatiblePolicyException"
| IncompleteSignature -> "IncompleteSignature"
| InternalFailure -> "InternalFailure"
| InternalServerError -> "InternalServerError"
| InvalidAction -> "InvalidAction"
| InvalidActivation -> "InvalidActivation"
| InvalidActivationId -> "InvalidActivationId"
| InvalidAggregatorException -> "InvalidAggregatorException"
| InvalidAllowedPatternException -> "InvalidAllowedPatternException"
| InvalidAssociation -> "InvalidAssociation"
| InvalidAssociationVersion -> "InvalidAssociationVersion"
| InvalidAutomationExecutionParametersException ->
"InvalidAutomationExecutionParametersException"
| InvalidAutomationSignalException -> "InvalidAutomationSignalException"
| InvalidAutomationStatusUpdateException -> "InvalidAutomationStatusUpdateException"
| InvalidClientTokenId -> "InvalidClientTokenId"
| InvalidCommandId -> "InvalidCommandId"
| InvalidDeleteInventoryParametersException ->
"InvalidDeleteInventoryParametersException"
| InvalidDeletionIdException -> "InvalidDeletionIdException"
| InvalidDocument -> "InvalidDocument"
| InvalidDocumentContent -> "InvalidDocumentContent"
| InvalidDocumentOperation -> "InvalidDocumentOperation"
| InvalidDocumentSchemaVersion -> "InvalidDocumentSchemaVersion"
| InvalidDocumentType -> "InvalidDocumentType"
| InvalidDocumentVersion -> "InvalidDocumentVersion"
| InvalidFilter -> "InvalidFilter"
| InvalidFilterKey -> "InvalidFilterKey"
| InvalidFilterOption -> "InvalidFilterOption"
| InvalidFilterValue -> "InvalidFilterValue"
| InvalidInstanceId -> "InvalidInstanceId"
| InvalidInstanceInformationFilterValue -> "InvalidInstanceInformationFilterValue"
| InvalidInventoryGroupException -> "InvalidInventoryGroupException"
| InvalidInventoryItemContextException -> "InvalidInventoryItemContextException"
| InvalidInventoryRequestException -> "InvalidInventoryRequestException"
| InvalidItemContentException -> "InvalidItemContentException"
| InvalidKeyId -> "InvalidKeyId"
| InvalidNextToken -> "InvalidNextToken"
| InvalidNotificationConfig -> "InvalidNotificationConfig"
| InvalidOptionException -> "InvalidOptionException"
| InvalidOutputFolder -> "InvalidOutputFolder"
| InvalidOutputLocation -> "InvalidOutputLocation"
| InvalidParameter -> "InvalidParameter"
| InvalidParameterCombination -> "InvalidParameterCombination"
| InvalidParameterValue -> "InvalidParameterValue"
| InvalidParameters -> "InvalidParameters"
| InvalidPermissionType -> "InvalidPermissionType"
| InvalidPluginName -> "InvalidPluginName"
| InvalidPolicyAttributeException -> "InvalidPolicyAttributeException"
| InvalidPolicyTypeException -> "InvalidPolicyTypeException"
| InvalidQueryParameter -> "InvalidQueryParameter"
| InvalidResourceId -> "InvalidResourceId"
| InvalidResourceType -> "InvalidResourceType"
| InvalidResultAttributeException -> "InvalidResultAttributeException"
| InvalidRole -> "InvalidRole"
| InvalidSchedule -> "InvalidSchedule"
| InvalidTarget -> "InvalidTarget"
| InvalidTypeNameException -> "InvalidTypeNameException"
| InvalidUpdate -> "InvalidUpdate"
| InvocationDoesNotExist -> "InvocationDoesNotExist"
| ItemContentMismatchException -> "ItemContentMismatchException"
| ItemSizeLimitExceededException -> "ItemSizeLimitExceededException"
| MalformedQueryString -> "MalformedQueryString"
| MaxDocumentSizeExceeded -> "MaxDocumentSizeExceeded"
| MissingAction -> "MissingAction"
| MissingAuthenticationToken -> "MissingAuthenticationToken"
| MissingParameter -> "MissingParameter"
| OpsItemAlreadyExistsException -> "OpsItemAlreadyExistsException"
| OpsItemInvalidParameterException -> "OpsItemInvalidParameterException"
| OpsItemLimitExceededException -> "OpsItemLimitExceededException"
| OpsItemNotFoundException -> "OpsItemNotFoundException"
| OptInRequired -> "OptInRequired"
| ParameterAlreadyExists -> "ParameterAlreadyExists"
| ParameterLimitExceeded -> "ParameterLimitExceeded"
| ParameterMaxVersionLimitExceeded -> "ParameterMaxVersionLimitExceeded"
| ParameterNotFound -> "ParameterNotFound"
| ParameterPatternMismatchException -> "ParameterPatternMismatchException"
| ParameterVersionLabelLimitExceeded -> "ParameterVersionLabelLimitExceeded"
| ParameterVersionNotFound -> "ParameterVersionNotFound"
| PendingVerification -> "PendingVerification"
| PoliciesLimitExceededException -> "PoliciesLimitExceededException"
| RequestExpired -> "RequestExpired"
| RequestLimitExceeded -> "RequestLimitExceeded"
| ResourceDataSyncAlreadyExistsException -> "ResourceDataSyncAlreadyExistsException"
| ResourceDataSyncConflictException -> "ResourceDataSyncConflictException"
| ResourceDataSyncCountExceededException -> "ResourceDataSyncCountExceededException"
| ResourceDataSyncInvalidConfigurationException ->
"ResourceDataSyncInvalidConfigurationException"
| ResourceDataSyncNotFoundException -> "ResourceDataSyncNotFoundException"
| ResourceInUseException -> "ResourceInUseException"
| ResourceLimitExceededException -> "ResourceLimitExceededException"
| ServiceSettingNotFound -> "ServiceSettingNotFound"
| ServiceUnavailable -> "ServiceUnavailable"
| StatusUnchanged -> "StatusUnchanged"
| SubTypeCountLimitExceededException -> "SubTypeCountLimitExceededException"
| TargetInUseException -> "TargetInUseException"
| TargetNotConnected -> "TargetNotConnected"
| Throttling -> "Throttling"
| TooManyTagsError -> "TooManyTagsError"
| TooManyUpdates -> "TooManyUpdates"
| TotalSizeLimitExceededException -> "TotalSizeLimitExceededException"
| UnauthorizedOperation -> "UnauthorizedOperation"
| UnknownParameter -> "UnknownParameter"
| UnsupportedCalendarException -> "UnsupportedCalendarException"
| UnsupportedFeatureRequiredException -> "UnsupportedFeatureRequiredException"
| UnsupportedInventoryItemContextException -> "UnsupportedInventoryItemContextException"
| UnsupportedInventorySchemaVersionException ->
"UnsupportedInventorySchemaVersionException"
| UnsupportedOperatingSystem -> "UnsupportedOperatingSystem"
| UnsupportedParameterType -> "UnsupportedParameterType"
| UnsupportedPlatformType -> "UnsupportedPlatformType"
| UnsupportedProtocol -> "UnsupportedProtocol"
| ValidationError -> "ValidationError"
| Uninhabited -> "Uninhabited"
let of_string e =
match e with
| "AlreadyExistsException" -> Some AlreadyExistsException
| "AssociatedInstances" -> Some AssociatedInstances
| "AssociationAlreadyExists" -> Some AssociationAlreadyExists
| "AssociationDoesNotExist" -> Some AssociationDoesNotExist
| "AssociationExecutionDoesNotExist" -> Some AssociationExecutionDoesNotExist
| "AssociationLimitExceeded" -> Some AssociationLimitExceeded
| "AssociationVersionLimitExceeded" -> Some AssociationVersionLimitExceeded
| "AuthFailure" -> Some AuthFailure
| "AutomationDefinitionNotFoundException" -> Some AutomationDefinitionNotFoundException
| "AutomationDefinitionVersionNotFoundException" ->
Some AutomationDefinitionVersionNotFoundException
| "AutomationExecutionLimitExceededException" ->
Some AutomationExecutionLimitExceededException
| "AutomationExecutionNotFoundException" -> Some AutomationExecutionNotFoundException
| "AutomationStepNotFoundException" -> Some AutomationStepNotFoundException
| "Blocked" -> Some Blocked
| "ComplianceTypeCountLimitExceededException" ->
Some ComplianceTypeCountLimitExceededException
| "CustomSchemaCountLimitExceededException" ->
Some CustomSchemaCountLimitExceededException
| "DocumentAlreadyExists" -> Some DocumentAlreadyExists
| "DocumentLimitExceeded" -> Some DocumentLimitExceeded
| "DocumentPermissionLimit" -> Some DocumentPermissionLimit
| "DocumentVersionLimitExceeded" -> Some DocumentVersionLimitExceeded
| "DoesNotExistException" -> Some DoesNotExistException
| "DryRunOperation" -> Some DryRunOperation
| "DuplicateDocumentContent" -> Some DuplicateDocumentContent
| "DuplicateDocumentVersionName" -> Some DuplicateDocumentVersionName
| "DuplicateInstanceId" -> Some DuplicateInstanceId
| "FeatureNotAvailableException" -> Some FeatureNotAvailableException
| "HierarchyLevelLimitExceededException" -> Some HierarchyLevelLimitExceededException
| "HierarchyTypeMismatchException" -> Some HierarchyTypeMismatchException
| "IdempotentParameterMismatch" -> Some IdempotentParameterMismatch
| "IncompatiblePolicyException" -> Some IncompatiblePolicyException
| "IncompleteSignature" -> Some IncompleteSignature
| "InternalFailure" -> Some InternalFailure
| "InternalServerError" -> Some InternalServerError
| "InvalidAction" -> Some InvalidAction
| "InvalidActivation" -> Some InvalidActivation
| "InvalidActivationId" -> Some InvalidActivationId
| "InvalidAggregatorException" -> Some InvalidAggregatorException
| "InvalidAllowedPatternException" -> Some InvalidAllowedPatternException
| "InvalidAssociation" -> Some InvalidAssociation
| "InvalidAssociationVersion" -> Some InvalidAssociationVersion
| "InvalidAutomationExecutionParametersException" ->
Some InvalidAutomationExecutionParametersException
| "InvalidAutomationSignalException" -> Some InvalidAutomationSignalException
| "InvalidAutomationStatusUpdateException" ->
Some InvalidAutomationStatusUpdateException
| "InvalidClientTokenId" -> Some InvalidClientTokenId
| "InvalidCommandId" -> Some InvalidCommandId
| "InvalidDeleteInventoryParametersException" ->
Some InvalidDeleteInventoryParametersException
| "InvalidDeletionIdException" -> Some InvalidDeletionIdException
| "InvalidDocument" -> Some InvalidDocument
| "InvalidDocumentContent" -> Some InvalidDocumentContent
| "InvalidDocumentOperation" -> Some InvalidDocumentOperation
| "InvalidDocumentSchemaVersion" -> Some InvalidDocumentSchemaVersion
| "InvalidDocumentType" -> Some InvalidDocumentType
| "InvalidDocumentVersion" -> Some InvalidDocumentVersion
| "InvalidFilter" -> Some InvalidFilter
| "InvalidFilterKey" -> Some InvalidFilterKey
| "InvalidFilterOption" -> Some InvalidFilterOption
| "InvalidFilterValue" -> Some InvalidFilterValue
| "InvalidInstanceId" -> Some InvalidInstanceId
| "InvalidInstanceInformationFilterValue" -> Some InvalidInstanceInformationFilterValue
| "InvalidInventoryGroupException" -> Some InvalidInventoryGroupException
| "InvalidInventoryItemContextException" -> Some InvalidInventoryItemContextException
| "InvalidInventoryRequestException" -> Some InvalidInventoryRequestException
| "InvalidItemContentException" -> Some InvalidItemContentException
| "InvalidKeyId" -> Some InvalidKeyId
| "InvalidNextToken" -> Some InvalidNextToken
| "InvalidNotificationConfig" -> Some InvalidNotificationConfig
| "InvalidOptionException" -> Some InvalidOptionException
| "InvalidOutputFolder" -> Some InvalidOutputFolder
| "InvalidOutputLocation" -> Some InvalidOutputLocation
| "InvalidParameter" -> Some InvalidParameter
| "InvalidParameterCombination" -> Some InvalidParameterCombination
| "InvalidParameterValue" -> Some InvalidParameterValue
| "InvalidParameters" -> Some InvalidParameters
| "InvalidPermissionType" -> Some InvalidPermissionType
| "InvalidPluginName" -> Some InvalidPluginName
| "InvalidPolicyAttributeException" -> Some InvalidPolicyAttributeException
| "InvalidPolicyTypeException" -> Some InvalidPolicyTypeException
| "InvalidQueryParameter" -> Some InvalidQueryParameter
| "InvalidResourceId" -> Some InvalidResourceId
| "InvalidResourceType" -> Some InvalidResourceType
| "InvalidResultAttributeException" -> Some InvalidResultAttributeException
| "InvalidRole" -> Some InvalidRole
| "InvalidSchedule" -> Some InvalidSchedule
| "InvalidTarget" -> Some InvalidTarget
| "InvalidTypeNameException" -> Some InvalidTypeNameException
| "InvalidUpdate" -> Some InvalidUpdate
| "InvocationDoesNotExist" -> Some InvocationDoesNotExist
| "ItemContentMismatchException" -> Some ItemContentMismatchException
| "ItemSizeLimitExceededException" -> Some ItemSizeLimitExceededException
| "MalformedQueryString" -> Some MalformedQueryString
| "MaxDocumentSizeExceeded" -> Some MaxDocumentSizeExceeded
| "MissingAction" -> Some MissingAction
| "MissingAuthenticationToken" -> Some MissingAuthenticationToken
| "MissingParameter" -> Some MissingParameter
| "OpsItemAlreadyExistsException" -> Some OpsItemAlreadyExistsException
| "OpsItemInvalidParameterException" -> Some OpsItemInvalidParameterException
| "OpsItemLimitExceededException" -> Some OpsItemLimitExceededException
| "OpsItemNotFoundException" -> Some OpsItemNotFoundException
| "OptInRequired" -> Some OptInRequired
| "ParameterAlreadyExists" -> Some ParameterAlreadyExists
| "ParameterLimitExceeded" -> Some ParameterLimitExceeded
| "ParameterMaxVersionLimitExceeded" -> Some ParameterMaxVersionLimitExceeded
| "ParameterNotFound" -> Some ParameterNotFound
| "ParameterPatternMismatchException" -> Some ParameterPatternMismatchException
| "ParameterVersionLabelLimitExceeded" -> Some ParameterVersionLabelLimitExceeded
| "ParameterVersionNotFound" -> Some ParameterVersionNotFound
| "PendingVerification" -> Some PendingVerification
| "PoliciesLimitExceededException" -> Some PoliciesLimitExceededException
| "RequestExpired" -> Some RequestExpired
| "RequestLimitExceeded" -> Some RequestLimitExceeded
| "ResourceDataSyncAlreadyExistsException" ->
Some ResourceDataSyncAlreadyExistsException
| "ResourceDataSyncConflictException" -> Some ResourceDataSyncConflictException
| "ResourceDataSyncCountExceededException" ->
Some ResourceDataSyncCountExceededException
| "ResourceDataSyncInvalidConfigurationException" ->
Some ResourceDataSyncInvalidConfigurationException
| "ResourceDataSyncNotFoundException" -> Some ResourceDataSyncNotFoundException
| "ResourceInUseException" -> Some ResourceInUseException
| "ResourceLimitExceededException" -> Some ResourceLimitExceededException
| "ServiceSettingNotFound" -> Some ServiceSettingNotFound
| "ServiceUnavailable" -> Some ServiceUnavailable
| "StatusUnchanged" -> Some StatusUnchanged
| "SubTypeCountLimitExceededException" -> Some SubTypeCountLimitExceededException
| "TargetInUseException" -> Some TargetInUseException
| "TargetNotConnected" -> Some TargetNotConnected
| "Throttling" -> Some Throttling
| "TooManyTagsError" -> Some TooManyTagsError
| "TooManyUpdates" -> Some TooManyUpdates
| "TotalSizeLimitExceededException" -> Some TotalSizeLimitExceededException
| "UnauthorizedOperation" -> Some UnauthorizedOperation
| "UnknownParameter" -> Some UnknownParameter
| "UnsupportedCalendarException" -> Some UnsupportedCalendarException
| "UnsupportedFeatureRequiredException" -> Some UnsupportedFeatureRequiredException
| "UnsupportedInventoryItemContextException" ->
Some UnsupportedInventoryItemContextException
| "UnsupportedInventorySchemaVersionException" ->
Some UnsupportedInventorySchemaVersionException
| "UnsupportedOperatingSystem" -> Some UnsupportedOperatingSystem
| "UnsupportedParameterType" -> Some UnsupportedParameterType
| "UnsupportedPlatformType" -> Some UnsupportedPlatformType
| "UnsupportedProtocol" -> Some UnsupportedProtocol
| "ValidationError" -> Some ValidationError
| "Uninhabited" -> Some Uninhabited
| _ -> None
| |
b4f850ac1c2eb925883e0e76c12c8a4499597a78ef87def002dc03e90eacd080 | BillHallahan/G2 | InitRewrite.hs | module G2.Equiv.InitRewrite (initWithRHS, initWithLHS) where
import G2.Language
import qualified G2.Language.ExprEnv as E
import qualified G2.Language.Typing as T
import qualified G2.Language.Expr as X
import G2.Execution.Memory
-- | Set up a state for execution of a rewrite rule's right-hand side:
-- the RHS becomes the current expression, and every rule binder is added
-- to the expression environment as a symbolic variable.
initWithRHS :: State t -> Bindings -> RewriteRule -> (State t, Bindings)
initWithRHS s b r =
  let s' = s {
            curr_expr = CurrExpr Evaluate (ru_rhs r)
          , expr_env = foldr E.insertSymbolic (expr_env s) (ru_bndrs r)
          }
      -- the rule binders become the (symbolic) inputs of the run
      b' = b { input_names = map idName $ ru_bndrs r }
  in
  -- garbage-collect everything unreachable from the new state
  markAndSweepPreserving emptyMemConfig s' b'
-- | Set up a state for execution of a rewrite rule's left-hand side.
-- The LHS is the rule's head function applied to the rule's arguments, so
-- the head is looked up in the expression environment and the application
-- is rebuilt as a single expression.
initWithLHS :: State t -> Bindings -> RewriteRule -> (State t, Bindings)
initWithLHS s b r =
  -- make LHS into a single expr
  let f_name = ru_head r
      f_maybe = E.lookup f_name (expr_env s)
  in
  case f_maybe of
    Nothing -> error "initWithLHS: function name not found"
    Just f -> let t = T.typeOf f
                  i = Id f_name t
                  v = Var i
                  -- the LHS as one expression: (f arg1 arg2 ...)
                  app = X.mkApp (v:ru_args r)
                  s' = s {
                      curr_expr = CurrExpr Evaluate app
                    , expr_env = foldr E.insertSymbolic (expr_env s) (ru_bndrs r)
                  }
                  b' = b { input_names = map idName $ ru_bndrs r }
              in
              markAndSweepPreserving emptyMemConfig s' b'
| null | https://raw.githubusercontent.com/BillHallahan/G2/8a28e05ae500f0a6893c10e58d39674c76ecbefd/src/G2/Equiv/InitRewrite.hs | haskell | module G2.Equiv.InitRewrite (initWithRHS, initWithLHS) where
import G2.Language
import qualified G2.Language.ExprEnv as E
import qualified G2.Language.Typing as T
import qualified G2.Language.Expr as X
import G2.Execution.Memory
initWithRHS :: State t -> Bindings -> RewriteRule -> (State t, Bindings)
initWithRHS s b r =
let s' = s {
curr_expr = CurrExpr Evaluate (ru_rhs r)
, expr_env = foldr E.insertSymbolic (expr_env s) (ru_bndrs r)
}
b' = b { input_names = map idName $ ru_bndrs r }
in
markAndSweepPreserving emptyMemConfig s' b'
initWithLHS :: State t -> Bindings -> RewriteRule -> (State t, Bindings)
initWithLHS s b r =
make LHS into a single expr
let f_name = ru_head r
f_maybe = E.lookup f_name (expr_env s)
in
case f_maybe of
Nothing -> error "function name not found"
Just f -> let t = T.typeOf f
i = Id f_name t
v = Var i
app = X.mkApp (v:ru_args r)
s' = s {
curr_expr = CurrExpr Evaluate app
, expr_env = foldr E.insertSymbolic (expr_env s) (ru_bndrs r)
}
b' = b { input_names = map idName $ ru_bndrs r }
in
markAndSweepPreserving emptyMemConfig s' b'
| |
f6a6324c50e213a7bf9af8ceb7888d1a343b582ebb7ed90acca72daf28588a3a | typedclojure/typedclojure | runtime_infer.clj | (ns typed.clj.annotator.test.runtime-infer
(:require [clojure.test :refer :all]
[clojure.pprint :as pp]
[clojure.repl :as repl]
[clojure.set :as set]
[com.gfredericks.test.chuck :as chuck]
[com.gfredericks.test.chuck.clojure-test :refer [checking]]
[com.gfredericks.test.chuck.generators :as gen']
[clojure.test.check.generators :as gen]
[typed.clojure :as t]
[typed.cljc.analyzer :as ana2]
[clojure.tools.analyzer.jvm :as taj]
[typed.clj.analyzer :as jana2]
[typed.clj.annotator :refer :all :as infer]
[clojure.core.typed.coerce-utils :as coerce]
[typed.clj.annotator.pprint :refer [pprint]]
[typed.clj.annotator.parse :refer [prs parse-type]]
[typed.clj.annotator.rep :refer [infer-result
var-path
key-path
fn-rng-path
fn-dom-path]
:as rep]
[typed.clj.annotator.track :refer [track-var] :as track]
[typed.clj.annotator.join :refer [make-Union
join*
join-HMaps
join]]
[typed.clj.annotator.env :as env]
[typed.clj.annotator.frontend.spec :refer [unparse-spec'
envs-to-specs]]
[typed.clj.annotator.insert :refer [delete-generated-annotations-in-str
generate-ann-start
generate-ann-end]
:as insert]
[typed.clj.annotator.util :refer [*ann-for-ns*
spec-ns
current-ns
unparse-type
*debug*
update-alias-env
type-env
*envs*
update-type-env
HMap-req-keyset]]
)
(:import (clojure.lang IExceptionInfo)))
; ppenv : Env -> nil
(defn ppenv
  "Pretty-print an environment map, rendering each type value with unparse-type."
  [env]
  (pprint (reduce-kv (fn [m k v] (assoc m k (unparse-type v))) {} env)))
(defn add-tmp-aliases
  "Register every symbol in `as` as a not-yet-resolved (nil) alias in env's alias environment."
  [env as]
  (update-alias-env env merge (into {} (map (fn [a] [a nil])) as)))
;; Evaluates `body` with *envs* rebound to a fresh atom holding `env`
;; extended with temporary (nil) aliases for each symbol in `as`.
(defmacro with-tmp-aliases [env as & body]
  `(binding [*envs* (atom (add-tmp-aliases ~env ~as))]
     ~@body))
#_(defmacro with-type-and-alias-env
[t a & body]
`(binding [*envs*
(atom {(current-ns)
{:type-env ~t
:alias-env ~a}})]
~@body))
;; make-Union should collapse Long and Double into the common upcast Number,
;; and explicitly including Number in the union must not change the result.
(deftest union-test
  (is (= (make-Union [(prs Long)
                      (prs Double)])
         (prs Number)
         (make-Union [(prs Long)
                      (prs Double)
                      (prs Number)])
         )))
;; Exercises the type-join operations (join, join*, join-HMaps) on singletons,
;; maps, nested maps, and function types. The stripped `;; FIXME` comment
;; markers are restored below; without them the bare FIXME symbols would fail
;; to resolve at compile time.
(deftest join-test
  (is (= (join* (prs String))
         (make-Union [(prs String)])
         (prs String)))
  (is (= (join* (prs Sym))
         (make-Union [(prs Sym)])
         (prs Sym)))
  (is (not= (prs Any)
            (prs (U Sym String))))
  ;; FIXME
  #_
  (is (=
       (prs (U '{:f [? :-> java.lang.Long], :a ?}
               '{:f [? :-> java.lang.Long]}))
       (prs (HMap :mandatory {:f [? :-> java.lang.Long]}
                  :optional {:a ?}))))
  ;; FIXME
  #_
  (is (=
       (join-HMaps
        (prs '{:f ':op1, :a Any})
        (prs '{:f ':op2, :a Any}))
       (join
        (prs '{:f ':op1, :a Any})
        (prs '{:f ':op2, :a Any}))
       (prs (U
             '{:f ':op1, :a Any}
             '{:f ':op2, :a Any}))))
  (checking
   "join maps"
   5
   [ts (gen/shuffle
        [(prs
          '{:E ':false})
         (prs
          '{:args (Vec '{:name clojure.lang.Symbol, :E ':var}),
            :fun
            (U
             '{:E ':lambda,
               :arg clojure.lang.Symbol,
               :body '{:name clojure.lang.Symbol, :E ':var},
               :arg-type
               '{:T ':intersection, :types (Seqable Any)}}
             '{:name clojure.lang.Symbol, :E ':var}),
            :E ':app})])]
   (is (= (apply join ts)
          (prs
           (U
            '{:E ':false}
            '{:args (Vec '{:name clojure.lang.Symbol, :E ':var}),
              :fun
              (U
               '{:E ':lambda,
                 :arg clojure.lang.Symbol,
                 :body '{:name clojure.lang.Symbol, :E ':var},
                 :arg-type
                 '{:T ':intersection, :types (Seqable Any)}}
               '{:name clojure.lang.Symbol, :E ':var}),
              :E ':app})))))
  (checking
   "inner vec"
   5
   [ts (gen/shuffle
        [(prs (Vec '{:a ?}))
         (prs (Vec '{:b ?}))])]
   (is
    (= (apply join* ts)
       (prs (Vec (U '{:a ?} '{:b ?}))))))
  (checking
   "functions"
   30
   [ts (gen/shuffle
        [(prs '{:a ?})
         (prs '{:a [? :-> ?]})
         (prs '{:a [? :-> Long]})
         (prs '{:a [Long :-> Long]})])]
   ;; NOTE(review): this assertion was not wrapped in `is`, so clojure.test
   ;; never actually checked it; wrap it so failures are reported.
   (is (= (apply join* ts)
          (prs '{:a [Long :-> Long]}))))
  (checking
   "HOF"
   5
   [ts (gen/shuffle
        [(prs '{:f ?, :a java.lang.Long})
         (prs '{:f [[? :-> java.lang.Long] :-> ?], :a ?})])]
   (is
    (= (apply join* ts)
       (prs '{:f [[? :-> java.lang.Long] :-> ?], :a java.lang.Long}))))
  (checking
   "map return"
   5
   [ts (gen/shuffle
        [(prs ['{:f ?, :a java.lang.Long} :-> '{:b ?, :f clojure.lang.IFn, :a ?}])
         (prs ['{:f [[? :-> java.lang.Long] :-> ?], :a ?} :-> ?])])]
   (is (= (apply join ts)
          (prs
           ['{:f [[? :-> java.lang.Long] :-> ?], :a java.lang.Long}
            :->
            '{:b ?, :f clojure.lang.IFn, :a ?}]))))
  (checking
   "join union"
   5
   [ts (gen/shuffle
        [(prs
          (U '{:f [? :-> java.lang.Long], :a ?}
             '{:f clojure.lang.IFn}))
         (prs
          '{:f [? :-> java.lang.Long]})])]
   (is (= (apply join ts)
          (prs
           (U '{:f [? :-> java.lang.Long], :a ?}
              '{:f [? :-> java.lang.Long]})))))
  (checking
   "join fn in map"
   5
   [ts (gen/shuffle
        [(prs '{:f [[java.lang.Long :-> java.lang.Long] :-> ?]})
         (prs '{:f [[java.lang.Long :-> java.lang.Long] :-> java.lang.Long]})])]
   (is (= (apply join ts)
          (prs '{:f [[java.lang.Long :-> java.lang.Long] :-> java.lang.Long]})))))
#_
(deftest squash-test
#_
(let [config (init-config)
env (init-env)
env (update-alias-env env merge
(with-tmp-aliases env '[a1 a2]
{'a1 (prs '{:a a2})
'a2 (prs '{:a nil})}))]
(binding [*envs* (atom env)]
(is (= (alias-env (squash env config (prs a1)))
{'a1 (prs '{:a (U nil a1)})
'a2 (prs a1)}))))
#_
(let [aenv (with-tmp-aliases '[a1 a2 a3 a4]
{'a1 (prs a2)
'a2 (prs '{:a a3})
'a3 (prs a4)
'a4 (prs
'{:a nil})})]
(with-type-and-alias-env
(type-env @*envs*)
aenv
(is (= (squash-all (prs a1))
(prs a1)))
(is (= (alias-env)
(with-tmp-aliases '[a1 a2]
{'a1 (prs '{:a (U nil a1)})
'a2 (prs a1)
TODO < ^v
'a4 (prs a3)
}))))))
;; testing only
(defn update-path'
  "Testing helper: feed `infer-results` through generate-tenv under a fresh
  config and return the resulting type environment."
  [env infer-results]
  (-> env
      (generate-tenv (init-config) {:infer-results infer-results})
      type-env))
(deftest update-path-test
(checking
"update map"
10
[infers (gen/shuffle
[(infer-result [(var-path 'use-map)
(key-path #{:a} :a)]
-unknown)
(infer-result [(var-path 'use-map)
(key-path #{:a} :a)]
(prs Long))])]
(is (= (update-path' (init-env) infers)
{'use-map (prs '{:a Long})})))
(checking
"update nested map"
20
[infers (gen/shuffle
[(infer-result [(var-path 'use-map)]
(prs clojure.lang.IFn))
(infer-result [(var-path 'use-map)
(fn-rng-path 1)
(key-path #{:b :f :a} :f)]
(prs clojure.lang.IFn))
(infer-result [(var-path 'use-map)
(fn-dom-path 1 0)
(key-path #{:f :a} :a)]
(prs Long))
(infer-result [(var-path 'use-map)
(fn-dom-path 1 0)
(key-path #{:f :a} :f)
(fn-dom-path 1 0)
(fn-rng-path 1)]
(prs Long))])]
(is (=
(update-path' (init-env) infers)
{'use-map
(-> (prs ['{:f [[? :-> Long] :-> ?], :a java.lang.Long} :-> '{:b ?, :f clojure.lang.IFn, :a ?}])
(assoc :top-level-def 'use-map))})))
(checking
"function dom rng"
10
[infers (gen/shuffle
[(infer-result [(var-path 'foo) (fn-rng-path 1)]
(parse-type 'Long))
(infer-result [(var-path 'foo) (fn-dom-path 1 0)]
(parse-type 'Long))])]
(is (= (update-path' (init-env) infers)
{'foo (-> (prs [Long :-> Long])
(assoc :top-level-def 'foo))})))
(checking
"unknown with function"
10
[infers (gen/shuffle
[(infer-result [(var-path 'foo)] (prs ?))
(infer-result [(var-path 'foo)] (prs [Long :-> ?]))])]
(is (= (update-path' (init-env) infers)
{'foo (-> (prs [java.lang.Long :-> ?])
(assoc :top-level-def 'foo))})))
(checking
"combine functions"
10
[ts (gen/shuffle
[(prs [(U '{:f [? :-> java.lang.Long], :a ?}
'{:f [? :-> java.lang.Long]}) :->
'{:b ?, :f ?, :a java.lang.Long}])
(prs ['{:f clojure.lang.IFn, :a ?} :-> ?])])]
(is
(= (apply join ts)
(prs
[(U '{:f [? :-> java.lang.Long], :a ?} '{:f [? :-> java.lang.Long]})
:->
'{:b ?, :f ?, :a java.lang.Long}]))))
(checking
"IFn with fns"
10
[ts (gen/shuffle
[(prs (U '{:f [? :-> java.lang.Long], :a ?}
'{:f [? :-> java.lang.Long]}))
(prs '{:f clojure.lang.IFn, :a ?})])]
(is
(= (apply join ts)
(prs (U '{:f [? :-> java.lang.Long], :a ?}
'{:f [? :-> java.lang.Long]})))))
)
;; Deleting the generated-annotation region (everything between the start and
;; end markers) should leave only the hand-written text around it.
(deftest delete-generated-annotations-in-str-test
  (is (= (delete-generated-annotations-in-str
          (str "\n"
               generate-ann-start
               "\n"
               "foo\n"
               generate-ann-end "\n"
               "bar\n\n"))
         "\nbar\n")))
#_(deftest group-arities-test
(is (group-arities
{:op :IFn,
:arities [{:op :IFn1, :dom [], :rng {:op :class, :class java.lang.Long, :args []}}]}
{:op :IFn, :arities [{:op :IFn1, :dom [{:op :unknown}], :rng {:op :class, :class java.lang.Long, :args []}}]})))
;; After squashing aliases horizontally (merging structurally similar aliases)
;; and following alias chains, two vars annotated with the distinct-but-similar
;; aliases foo and bar should end up with the same type.
(deftest squash-horizonally-test
  (is (let [config (init-config)
            env (as-> (init-env) env
                  (update-alias-env env
                                    assoc
                                    'foo (prs '{:baz Long})
                                    'bar (prs '{:baz Boolean}))
                  (binding [*envs* (atom env)]
                    (update-type-env env
                                     assoc
                                     `var1 (prs foo)
                                     `var2 (prs bar))))
            env (squash-horizonally env config)
            env (follow-all env (assoc config :simplify? false))]
        (pprint env)
        (= (get (type-env env) `var1)
           (get (type-env env) `var2)))))
;; Defines a throwaway fn whose body is `body`, replaces its var with a
;; tracked wrapper (track-var), then immediately invokes it with invoke-args.
;; Used below to exercise tracking of primitive-hinted fns.
(defmacro try-prim [invoke-args & body]
  (let [name (gensym)]
    `(do (defn ~name ~@body)
         (alter-var-root
           (var ~name)
           (constantly
             (track-var ~name)))
         (~name ~@invoke-args))))
(deftest track-prim-fn
(is (=
:ok
(try-prim
[1]
[^long a]
:ok)))
(is (=
10
(try-prim
[1 'a]
^long [^long a ^Object b]
10)))
(is (=
10
(try-prim
[]
^long []
10)))
(is (=
10.5
(try-prim
[]
^double []
10.5)))
(is (=
:ok
(try-prim
[1]
(^double [] 10.5)
(^Object [^long a] :ok))))
(is (=
10.5
(try-prim
[]
(^double [] 10.5)
(^Object [^long a] :ok))))
)
;; Smoke test: (re)loading the mini-occ companion test namespace must succeed.
(deftest mini-occ-test
  (is
   (do
     (require 'typed.clj.annotator.test.mini-occ :reload)
     :ok)))
(deftest optional-keys-test
(is
(=
(join-HMaps
(prs
(HMap :optional {:a Any}))
(prs
(HMap :optional {:a Any})))
(prs
(HMap :optional {:a Any}))))
(is
(=
(join-HMaps
(prs
(HMap :optional {:a String}))
(prs
(HMap :mandatory {:a Long})))
(prs
(HMap :optional {:a (U String Long)}))))
(is
(=
(join-HMaps
(prs
(HMap :mandatory {:op ':Foo}))
(prs
(HMap :mandatory {:op ':Foo}
:optional {:bar String})))
(prs
(HMap :mandatory {:op ':Foo}
:optional {:bar String}))))
(is (=
(HMap-req-keyset
(prs
(HMap :mandatory {:a Long})))
#{:a}))
(is (=
(HMap-req-keyset
(prs
(HMap :optional {:a Long})))
#{}))
(is (=
(HMap-req-keyset
(prs
(HMap :optional {:a Long}
:mandatory {:b Long})))
#{:b}))
(is (=
(HMap-req-opt-keysets
(prs
(HMap :optional {:a Long}
:mandatory {:b Long})))
#{{:req-keyset #{:b}, :opt-keyset #{:a}}}))
(is (=
(make-Union
[(prs
(HMap :mandatory {:op ':Foo}))
(prs
(HMap :mandatory {:op ':Foo}
:optional {:opt String}))])
(prs (HMap :mandatory {:op ':Foo}
:optional {:opt String}))))
(is (=
(make-Union
[(prs
(HMap :mandatory {:op ':Foo}
:optional {:opt Long}))
(prs
(HMap :optional {:op ':Foo
:opt String}))])
(prs (HMap :optional {:op ':Foo
:opt (U Long String)}))))
(is
(=
(join-HMaps
(prs
'{:op ':the-foo
:the-bar Sym
:opt Sym})
(prs
'{:op ':the-foo
:the-bar Sym}))
(prs
(HMap :mandatory {:op ':the-foo
:the-bar Sym}
:optional {:opt Sym}))))
)
(def ^:dynamic *print-anns* true)
(defn *-from-tenv
  "Run generator `f` (annotations or specs) over type env `tenv` in a scratch
  namespace. Creates a throwaway ns, points *ann-for-ns* at it, populates the
  inference environments from `tenv` under `config`, and pretty-prints the
  generated output when *print-anns* is true. Returns :ok."
  [f tenv config]
  (let [ns (create-ns (gensym))]
    (binding [*ann-for-ns* (constantly ns)
              *ns* ns
              *debug* (if-let [[_ debug] (find config :debug)]
                        debug
                        *debug*)]
      ;; set up ns refers
      (refer-clojure)
      (require '[typed.clojure
                 :as t
                 :refer [defalias ann
                         ;Any U Vec Map
                         ;Sym HMap Nothing
                         ]])
      (when spec-ns
        (require [spec-ns :as 's]))
      (let [_ (prn "Current ns:" (current-ns))
            env (as-> (init-env) env
                  (update-type-env env merge tenv))
            env (populate-envs env config)
            anns (f env config)]
        (when *print-anns*
          (pprint anns)))
      :ok)))
(defn anns-from-tenv
  "Generate (and optionally print) typed.clojure annotations for `tenv`."
  [tenv & [config]]
  (binding [unparse-type unparse-type']
    (*-from-tenv envs-to-annotations tenv (or config {}))))
(defn specs-from-tenv
  "Generate (and optionally print) clojure.spec forms for `tenv`."
  [tenv & [config]]
  (binding [unparse-type unparse-spec']
    (*-from-tenv envs-to-specs tenv (assoc config :spec? true))))
(deftest gen-anns
(is (let [st-type (prs
'{:quads
(Vec
'{:klingons java.lang.Long,
:quadrant (Vec java.lang.Long),
:stars java.lang.Long,
:bases java.lang.Long}),
:stardate
'{:start java.lang.Long,
:current java.lang.Long,
:end java.lang.Long},
:current-klingons (Vec Nothing),
:starting-klingons java.lang.Long,
:lrs-history (Vec java.lang.String),
:current-sector (Vec java.lang.Long),
:enterprise
'{:photon_torpedoes java.lang.Long,
:sector (Vec java.lang.Long),
:quadrant (Vec (U java.lang.Integer java.lang.Long)),
:energy java.lang.Long,
:damage
'{:phasers java.lang.Long,
:warp_engines java.lang.Long,
:damage_control java.lang.Long,
:long_range_sensors java.lang.Long,
:short_range_sensors java.lang.Long,
:computer_display java.lang.Long,
:photon_torpedo_tubes java.lang.Long,
:shields java.lang.Long},
:is_docked false,
:shields java.lang.Long}})]
(anns-from-tenv {'t1 st-type
't2 st-type})))
(is
(let [t (prs
[(U
'{:exp '{:name Sym, :E ':var},
:P ':is,
:type '{:T ':intersection, :types (Set Nothing)}}
'{:P ':not,
:p
'{:P ':=,
:exps
(Set
(U
'{:name Sym, :E ':var}
'{:args (Vec '{:name Sym, :E ':var}),
:fun '{:name Sym, :E ':var},
:E ':app}))}}
'{:P ':=,
:exps
(Set
(U
'{:name Sym, :E ':var}
'{:args (Vec '{:name Sym, :E ':var}),
:fun '{:name Sym, :E ':var},
:E ':app}))}
'{:P (U ':or ':and),
:ps
(Set
(U
'{:exp '{:name Sym, :E ':var},
:P ':is,
:type '{:T ':intersection, :types (Set Nothing)}
:foo Sym}
'{:P ':=,
:exps
(Set
(U
'{:name Sym, :E ':var}
'{:args (Vec '{:name Sym, :E ':var}),
:fun '{:name Sym, :E ':var},
:E ':app}))}))})
:->
Any])]
(anns-from-tenv {;'unparse-prop1 t
'unparse-prop2 t}
{:debug :iterations})))
(is
(let [t (prs
['{:P ':or
:ps
(Set
'{:P ':and
:ps
(Set
'{:P ':Top})})}
:->
Any])]
(anns-from-tenv {;'unparse-prop1 t
'unparse-prop2 t}
{:debug #{:iterations :squash}})))
(is
(let [t (prs
['{:P ':or
:ps
(Set
(U '{:P ':Top}
'{:P ':and
:ps
(Set
'{:P ':Top})}))}
:->
Any])]
(anns-from-tenv {;'unparse-prop1 t
'unparse-prop2 t}
{:debug #{:iterations :squash}})))
(is
(let [t (prs
'{:P ':or
:ps
(Set
'{:P ':and
:ps
(Set
(U '{:P ':Top}
'{:P ':Bottom}))})}
)]
(anns-from-tenv {;'unparse-prop1 t
'unparse-prop2 t}
{:debug #{:squash :iterations
:squash-horizontally}}))))
;; Each case generates annotations for a single fn type and exercises how
;; unions of maps are combined/upcast. Two stripped `;;` comment markers are
;; restored below; the bare words would otherwise fail symbol resolution.
(deftest ann-combine
  ;; collapse maps with completely disjoint keys
  (is
   (let [t (prs
            [(U '{:entry1 String}
                '{:entry2 Boolean}
                '{:entry3 Boolean})
             :->
             Any])]
     (anns-from-tenv {'config-in t})))
  ;; don't collapse common keys with keyword entry
  (is
   (let [t (prs
            [(U '{:op :foo
                  :entry1 String}
                '{:op :bar
                  :entry2 Boolean}
                '{:op :baz
                  :entry3 Boolean})
             :->
             Any])]
     (anns-from-tenv {'config-in t}
                     {:debug true})))
  ;; upcast Kw + HMap to Any
  (is
   (let [t (prs
            [(U ':foo
                '{:op :bar
                  :entry2 Boolean})
             :->
             Any])]
     (anns-from-tenv {'config-in t})))
  ;; simplify keywords + seqables to Any
  (is
   (let [t (prs
            [(U ':foo
                (clojure.lang.Seqable String))
             :->
             Any])]
     (anns-from-tenv {'config-in t})))
  ;; simplify Sym / Kw + seqable to Any
  (is
   (let [t (prs
            [(U Sym
                (clojure.lang.Seqable String))
             :->
             Any])]
     (anns-from-tenv {'config-in t})))
  ;; don't simplify Seqable + nil
  (is
   (let [t (prs
            [(U nil
                (clojure.lang.Seqable String))
             :->
             Any])]
     (anns-from-tenv {'config-in t})))
  ;; use optional keys
  (is
   (let [t (prs
            [(U '{:foo String}
                '{:foo String
                  :bar Boolean})
             :->
             Any])]
     (anns-from-tenv {'config-in t}
                     {:debug true})))
  ;; upcast union to Any
  (is
   (let [t (prs
            [(U Any
              '{:foo String
                :bar Boolean})
             :->
             Any])]
     (anns-from-tenv {'config-in t})))
  ; Kw simplification
  (is
   (let [t (prs
            [(U ':foo ':bar)
             :->
             Any])]
     (anns-from-tenv {'config-in t})))
  ; join on class arguments
  (is
   (let [t (prs
            [(U (Vec Integer)
                (Vec Long))
             :->
             Any])]
     (anns-from-tenv {'config-in t})))
  ; don't alias args implicitly
  (is
   (let [t (prs
            [[Any :-> Any]
             :->
             Any])]
     (anns-from-tenv {'config-in t}))))
;; HMap-specific annotation generation cases. This block was corrupted by
;; comment-marker stripping: three bare comment lines and three
;; paren-imbalanced `(;specs-from-tenv ... anns-from-tenv ...)` call forms
;; are reconstructed below, following the intact analog later in this test.
(deftest ann-hmaps
  ; upcast HMaps to Map if they appear in a union
  (is
   (let [t (prs
            [(U '{:foo Any}
                (clojure.lang.IPersistentMap Any Any))
             :->
             Any])]
     (anns-from-tenv {'config-in t})))
  ; upcast HMaps to Map if they appear in a union
  (is
   (let [t (prs
            [(U '{:foo Any}
                (clojure.lang.IPersistentMap Any Any))
             :->
             Any])]
     (anns-from-tenv {'config-in t})))
  ; optional HMaps test
  (is
   (let [t (prs
            [(HMap :optional {:foo String})
             :->
             Any])]
     (anns-from-tenv {'config-in t})))
  (is
   (let [t (prs
            [(U '{:op ':the-foo
                  :the-bar Sym
                  :opt Sym}
                '{:op ':the-foo
                  :the-bar Sym})
             :->
             Any])]
     (anns-from-tenv {'config-in t}
                     {:debug true})))
  ; namespaced entry + spec
  (is
   (let [t (prs
            ['{::op ':the-foo}
             :->
             Any])]
     (specs-from-tenv {'config-in t})))
  ;; TODO recursive example of this test
  (is
   (let [t (prs
            [(U '{:op ':the-bar
                  :the-foo String
                  :the-baz String}
                '{:op ':the-foo
                  :the-foo Sym
                  :the-baz Sym})
             :->
             Any])]
     (anns-from-tenv {'config-in t}
                     {:debug true})))
  ;; HMap alias naming test
  (is
   (let [t (prs
            [
             '{:op ':foo
               :the-bar '{:op ':term
                          :val Sym}}
             :->
             Any])]
     ((juxt specs-from-tenv
            anns-from-tenv)
      {'config-in t})))
  ; recursive HMaps test
  (is
   (let [t (prs
            [(U
              '{:op ':foo
                :the-bar '{:op ':bar
                           :the-foo '{:op ':foo
                                      :the-bar '{:op ':term
                                                 :val Sym}}}}
              '{:op ':foo
                :opt Sym
                :the-bar '{:op ':bar
                           :the-foo '{:op ':foo
                                      :the-bar '{:op ':term
                                                 :val Sym}}}}
              #_
              '{:op ':bar
                :the-foo '{:op ':foo
                           :the-bar '{:op ':bar
                                      :the-foo '{:op ':term
                                                 :val Sym}}}}
              )
             :->
             Any])]
     (;specs-from-tenv ;; FIXME
      anns-from-tenv
      {'config-in t}
      {:debug true})))
  ;; FIXME prefer :op over :type ?
  (is
   (let [t (prs
            [
             (U
              '{:op ':foo
                :type (U ':int ':nil ':faz)
                :the-bar '{:op ':bar
                           :type (U ':int ':nil ':faz)
                           :the-foo '{:op ':foo
                                      :type (U ':int ':nil ':faz)
                                      :the-bar '{:op ':term
                                                 :val Sym}}}}
              '{:op ':foo
                :type (U ':int ':nil ':faz)
                :opt Sym
                :the-bar '{:op ':bar
                           :type (U ':int ':nil ':faz)
                           :the-foo '{:op ':foo
                                      :type (U ':int ':nil ':faz)
                                      :the-bar '{:op ':term
                                                 :val Sym}}}}
              '{:op ':bar
                :type (U ':int ':nil ':faz)
                :the-foo '{:op ':foo
                           :type (U ':int ':nil ':faz)
                           :the-bar '{:op ':bar
                                      :type (U ':int ':nil ':faz)
                                      :the-foo '{:op ':term
                                                 :val Sym}}}})
             :->
             Any])]
     (;specs-from-tenv
      anns-from-tenv
      {'config-in t}
      {:fuel 0})))
  (is
   (let [t (prs
            [':a
             Integer
             ':b
             Boolean
             :->
             Any])]
     (;specs-from-tenv ;; FIXME
      anns-from-tenv
      {'config-in t}
      {:debug true})))
  ;; combine maps that look similar
  (is
   (let [t (prs
            ['{:a Long
               :b Long
               :c Long
               :d Long}
             '{:a Long
               :b Long
               :c Long}
             :->
             Any])]
     (;specs-from-tenv ;; FIXME
      anns-from-tenv
      {'config-in t}
      ))))
;; performance tests
(defn gen-height
  "Build a quoted cons-list type expression of nesting depth n,
  e.g. (gen-height 1) => ''{:tag ':cons :cdr '{:tag ':null}}."
  [n]
  {:pre [(not (neg? n))]}
  (reduce (fn [inner _] `'{:tag ':cons :cdr ~inner})
          `'{:tag ':null}
          (range n)))
(defn gen-tagged-union
  "Build a quoted tagged-union type expression of depth n, where level i
  (1-based, counting from the innermost cons) is tagged :cons<i> and the
  innermost node is {:tag ':null}."
  [n]
  {:pre [(not (neg? n))]}
  (reduce (fn [inner i] `'{:tag '~(keyword (str "cons" i)) :cdr ~inner})
          `'{:tag ':null}
          (range 1 (inc n))))
(defmacro bench
  "Evaluates expr and returns the time it took."
  [expr]
  ;; Expands to [result elapsed-ms]; wall-clock timing via System/nanoTime.
  `(let [start# (. System (nanoTime))
         ret# ~expr
         msduration# (/ (double (- (. System (nanoTime)) start#)) 1000000.0)]
     [ret# msduration#]))
(defn bench-iteratively
  "Call (f i) for every i from start (default 0) through n inclusive, and
  return a vector of the second element (the timing) of each result."
  ([f n] (bench-iteratively f 0 n))
  ([f start n]
   (loop [timings []
          i start]
     (if (<= i n)
       (recur (conj timings (second (f i))) (inc i))
       timings))))
(defn write-csv
  "Write vector v to file n as a single comma-separated line."
  [n v]
  {:pre [(vector? v)]}
  (->> v (interpose ",") (apply str) (spit n)))
(comment
(def bench-height
(write-csv "height-bench.csv"
(bench-iteratively
#(let [t (parse-type (gen-height %))]
(binding [*print-anns* false]
(bench
(anns-from-tenv {'prop t}
{}))))
120)))
(let [t (parse-type (gen-height 5))]
(anns-from-tenv {'prop t}
{}))
(pprint (gen-tagged-union 5))
(let [t (parse-type (gen-tagged-union 10))]
(anns-from-tenv {'prop t}
{}))
(def bench-tagged
(write-csv
"tagged-bench-past110.csv"
(bench-iteratively
#(let [t (parse-type (gen-tagged-union %))]
(binding [*print-anns* false]
(bench
(anns-from-tenv {'prop t}
{}
#_{:debug #{:squash :iterations
:squash-horizontally}}))))
111
120)))
)
;; Map-merging annotation cases. The stripped `;; TODO`/`;; FIXME` comment
;; markers and the two paren-imbalanced call forms are reconstructed below,
;; following the intact `(;specs-from-tenv anns-from-tenv ...)` pattern used
;; elsewhere in this file.
(deftest map-merge-test
  ;; TODO
  ; other types don't collapse tagged maps
  ; should combining tagged and untagged maps upcast to (Map Any Any)?
  (is
   (let [t (prs
            (U nil
               '{:a ':b}
               '{:op ':foo
                 :b Long
                 :c Long
                 :d Long}
               '{:op ':bar
                 :e Long
                 :w Long
                 :q Long}))]
     (;specs-from-tenv ;; FIXME
      anns-from-tenv
      {'config-in t}
      )))
  ; merging sufficiently similar maps
  (is
   (let [t (prs
            (U nil
               (HMap
                :mandatory {:name Sym}
                :optional {:blah Sym})
               (HMap
                :mandatory {:name Sym
                            :ns Sym}
                :optional {:tag Sym
                           :tag1 Sym
                           :tag2 Sym
                           :tag3 Sym
                           :tag4 Sym
                           :tag5 Sym
                           :tag6 Sym
                           :tag7 Sym
                           :tag8 Sym
                           :tag9 Sym
                           :tag10 Sym
                           :tag11 Sym
                           })))]
     (;specs-from-tenv ;; FIXME
      anns-from-tenv
      {'config-in t}
      ))))
;; instrument-top-level-form must evaluate the given form (side effects
;; included), both for a single expression and for a `do` of several forms.
(deftest instrument-top-level-form-test
  (is (.contains (with-out-str (instrument-top-level-form '(println "a")))
                 "a\n"))
  (is (.contains
       (with-out-str (instrument-top-level-form '(do (def a "a") (println a))))
       "a\n")))
(defn code-to-gen-spec [root-ns root-key samples {:keys [spec?] :as config}]
(binding [*ns* *ns*]
(in-ns root-ns)
(binding [*ns* (the-ns root-ns)
*ann-for-ns* #(the-ns root-ns)
unparse-type (if spec?
unparse-spec'
unparse-type')]
(let [results-atom (atom (env/initial-results))
;instrument and track
_ (run!
(fn [e]
(track/track
(track/gen-track-config)
results-atom
e
#{[(rep/var-path root-ns root-key)]}
#{}))
(concat (map eval (:eval samples))
(:edn samples)))
_ (prn @results-atom)
infer-out
(infer/infer-anns root-ns
{:spec? spec?
:allow-top-level-non-IFn true
:results-atom results-atom})
_ (prn "infer out" infer-out)
spec-out
(apply insert/prepare-ann
((juxt :requires :top-level :local-fns) infer-out))]
spec-out))))
;; End-to-end: code-to-gen-spec should produce spec output for a tracked var
;; fed with both evaluated samples (:eval) and plain edn samples (:edn).
(deftest manual-track-test
  (is (code-to-gen-spec 'user 'user/foo-bar {:eval '[(inc 1)] :edn '[a]}
                        {:spec? true})))
(declare *-from-infer-results)
;; TODO remove # suffixes
(defn infer-test*
[{:keys [defs tests expected-specs expected-types] :as opts}]
(assert (vector? defs))
(assert (vector? tests))
(let [ns# (create-ns (gensym))]
(binding [*ns* ns#
*ann-for-ns* (constantly ns#)]
(refer-clojure)
(require '[typed.clojure :as t])
(when spec-ns
(require [spec-ns :as 's]))
(let [result# (atom :ok)
run-until-bad-result!# (fn [f# c#]
(loop [c# c#]
(when @result#
(when (seq c#)
(do (f# (first c#))
(recur (rest c#)))))))
defs# defs
tests# tests
_# (infer/refresh-runtime-infer)
_# (run-until-bad-result!#
(fn [f#]
(try (instrument-top-level-form f#)
(catch Throwable e#
(println (str "Failed to evaluate " f# " with error " e#))
(reset! result# nil))))
(concat defs# tests#))]
(when @result#
(let [expected-specs# (let [s# expected-specs]
(if (string? s#)
(read-string s#)
s#))
_# (assert ((some-fn nil? vector?) expected-specs#))
expected-types# (let [t# expected-types]
(if (string? t#)
(read-string t#)
t#))
_# (assert ((some-fn nil? vector?) expected-types#))
specs# (*-from-infer-results *ns* {:spec? true})
types# (*-from-infer-results *ns* {})
assert-equal# (fn [actual# expected# msg#]
(when-not (= actual# expected#)
(println msg#)
(println "Actual:")
(pprint actual#)
(println "Expected:")
(pprint expected#)
(reset! result# nil)))
]
(if expected-specs#
(assert-equal# (:top-level specs#) expected-specs#
"Actual specs didn't match expected specs")
(do (println "Here are the generated specs:")
(pprint (:top-level specs#))))
(if expected-types#
(assert-equal# (:top-level types#) expected-types#
"Actual types didn't match expected types")
(do (println "Here are the generated types:")
(pprint (:top-level types#))))
(when spec-ns
(let [instrumentable-syms# (set
(keep (fn [spc#]
(when (seq? spc#)
(when (= 's/fdef (first spc#))
(let [^clojure.lang.Var v# (resolve (second spc#))]
(when (var? v#)
(coerce/var->symbol v#))))))
(:top-level specs#)))
spec-defs# (set
(keep (fn [spc#]
(when (seq? spc#)
(when (= 's/def (first spc#))
(let [kw# (second spc#)]
(when (keyword? kw#)
(when (namespace kw#)
kw#))))))
(:top-level specs#)))
_# (require ['clojure.spec.test.alpha])
instrument# (resolve 'clojure.spec.test.alpha/instrument)
_# (assert instrument#)
exercise# (resolve 'clojure.spec.alpha/exercise)
_# (assert exercise#)
exercise-fn# (resolve 'clojure.spec.alpha/exercise-fn)
_# (assert exercise-fn#)
exercise-fn-or-fail# (fn [sym#]
(try (doall (exercise-fn# sym#))
(catch Throwable e#
(println "Function failed to exercise:" sym#)
(println "With the following error:")
(binding [*err* *out*]
(repl/pst e#))
(when (instance? IExceptionInfo e#)
(pp/pprint (ex-data e#)))
(reset! result# nil))))
exercise-or-fail# (fn [spc#]
(try (doall (exercise# spc#))
(catch Throwable e#
(println "Spec failed to exercise")
(pp/pprint spc#)
(println "With the following error:")
(binding [*err* *out*]
(repl/pst e#))
(when (instance? IExceptionInfo e#)
(pp/pprint (ex-data e#)))
(reset! result# nil))))
eval-or-fail# (fn [form#]
(try (eval form#)
(catch Throwable e#
(println "Expression failed to evaluate:")
(pp/pprint form#)
(println "With the following error:")
(repl/pst e#)
(reset! result# nil))))]
(testing "spec declarations should evaluate"
(run-until-bad-result!# eval-or-fail# (concat (:requires specs#) (:top-level specs#))))
(testing "should be able to exercise spec defs"
(run-until-bad-result!# exercise-or-fail# spec-defs#))
(testing "should be able to exercise fns"
(run-until-bad-result!# exercise-fn-or-fail# instrumentable-syms#))
(when @result#
(testing "specs should instrument"
(when-not (= instrumentable-syms#
(set (instrument# instrumentable-syms#)))
(println "Expected to instrument "
(set/difference
instrumentable-syms#
(set (instrument# instrumentable-syms#)))
" but didn't")
(reset! result# nil))))
(testing "tests should evaluate under instrumentation"
(run-until-bad-result!# eval-or-fail# tests#))))
@result#))))))
(defmacro infer-test
"Given a vector of definitions :defs and a vector of tests :tests, then
does these steps in order. Short-circuits and returns a false value if previous steps fail.
Returns a true value on success.
1. Generates specs and types for the definitions
2. Ensure generated specs and types are identical to :expected-types and :expected-specs, respectively (when provided).
These are either vectors of annotations, or a string that contains vectors
of annotations that will be `read` in the correct namespace (useful to aid keyword namespace
resolution in specs).
If one of these is not provided, the respective annotations are pprint'ed so they can
be easily copied into the test.
3. Evaluates generated specs.
4. Exercises spec aliases.
5. Exercises spec'd functions.
6. Instruments all spec'd functions.
7. Runs :test code again under spec instrumentation."
[& {:keys [defs tests expected-specs expected-types] :as opts}]
`(infer-test* '~opts))
(defn *-from-infer-results
  "Generate annotations (or specs when (:spec? config) is true) for *ns* from
  the currently collected runtime-inference results."
  [ns config]
  ;; NOTE(review): the `ns` parameter is ignored; the dynamic *ns* is used
  ;; instead — callers happen to pass *ns*. Confirm before relying on it.
  (binding [*ann-for-ns* (constantly *ns*)
            *debug* (if-let [[_ debug] (find config :debug)]
                      debug
                      *debug*)
            unparse-type (if (:spec? config)
                           unparse-spec'
                           unparse-type')]
    (infer/infer-anns *ns* config)))
(deftest test-infer-test
(is (infer-test :defs [(defn blah [a] (inc a))]
:tests [(blah 1)]
:expected-specs [(s/fdef blah :args (s/cat :a int?) :ret int?)]
:expected-types [(declare) (t/ann blah [t/Int :-> t/Int])]))
(testing "detects Exception always thrown in :defs"
(is (not
(infer-test :defs [(throw (Exception.))]
:tests []
:expected-specs [(s/fdef blah :args (s/cat :a int?) :ret int?)]
:expected-types [(declare) (t/ann blah [t/Int :-> t/Int])]))))
(testing "detects Exception always thrown in :tests"
(is (not
(infer-test :defs [(defn blah [a] (inc a))]
:tests [(throw (Exception.))]
:expected-specs [(s/fdef blah :args (s/cat :a int?) :ret int?)]
:expected-types [(declare) (t/ann blah [t/Int :-> t/Int])]))))
(testing "detects bad exercise-fn"
(is (not
(infer-test :defs [(defn blah [a]
(assert (zero? a)))]
:tests [(blah 0)]
:expected-specs [(s/fdef blah :args (s/cat :a int?) :ret nil?)]
:expected-types [(declare) (t/ann blah [t/Int :-> nil])]))))
(testing "detects bad exercise-fn"
(is (not
(infer-test :defs [(defn blah [f]
(f))]
:tests [(blah (constantly nil))]
:expected-specs [(s/fdef blah :args (s/cat :f ifn?) :ret nil?)]
:expected-types [(declare) (t/ann blah [[:-> nil] :-> nil])]))))
(testing "detects bad provided specs"
(is (not
(infer-test :defs [(defn blah [f])]
:tests [(blah identity)]
:expected-specs [(s/fdef blah :args (s/cat :asdf ifn?) :ret nil?)]
:expected-types [(declare) (t/ann blah [AnyFunction :-> nil])]))))
(testing "detects bad provided specs (wrong quantity)"
(is (not
(infer-test :defs [(defn blah [f]
(f))]
:tests [(blah (constantly nil))]
:expected-specs [(s/fdef food :args (s/cat :asdf ifn?) :ret nil?)
(s/fdef blah :args (s/cat :f ifn?) :ret nil?)]
:expected-types [(declare) (t/ann blah [[:-> nil] :-> nil])]))))
(testing "detects bad provided types"
(is (not
(infer-test :defs [(defn blah [f]
(f))]
:tests [(blah (constantly nil))]
:expected-specs [(s/fdef blah :args (s/cat :f ifn?) :ret nil?)]
:expected-types [(declare) (t/ann blah [t/Int :-> nil])]))))
(testing "detects bad provided types (wrong quantity)"
(is (not
(infer-test :defs [(defn blah [f]
(f))]
:tests [(blah (constantly nil))]
:expected-specs [(s/fdef blah :args (s/cat :f ifn?) :ret nil?)]
:expected-types [(declare) (t/ann blah [[:-> nil] :-> nil])
(t/ann food [t/Int :-> nil])]))))
)
(deftest HMap-infer-test
(is (infer-test :defs [(defn takes-map [m]
{:pre [(or (some-> m :a #{1})
true)]}
(mapv identity m))]
:tests [(takes-map {})
(takes-map {:a 1})
(takes-map {:b 1})]
:expected-specs "[(s/def ::ABMap (s/keys :opt-un [::a ::b]))
(s/def ::a int?)
(s/def ::b int?)
(s/fdef
takes-map
:args
(s/cat :m ::ABMap)
:ret
(s/coll-of (s/tuple #{:b :a} int?) :into vector?))]"
:expected-types [(declare ABMap)
(t/defalias ABMap (t/HMap :optional {:a t/Int, :b t/Int}))
(t/ann
takes-map
[ABMap
:->
(t/Vec '[(t/U ':a ':b) t/Int])])]))
(is (infer-test :defs [(defn gives-map []
{:a 1})]
:tests [(require '[clojure.walk :as w])
(w/prewalk identity (gives-map))]))
(is (infer-test :defs [(require '[clojure.walk :as w])
(defn takes-map [m]
(w/prewalk identity m))]
:tests [(takes-map {})
(takes-map {:a {:b 1}})
(takes-map {:b {:a 1}})]
))
;; clashes detected between :req-un keys and normal aliases
(is (infer-test :defs [(require '[clojure.walk :as w])
(defn takes-op [a]
(w/prewalk identity a))]
:tests [(takes-op {:Op :val :val 'blah})
(takes-op {:Op :if
:test {:Op :val :val 'blah}
:then {:Op :val :val 'blah}
:else {:Op :val :val 'blah}})]))
;; clashes detected for multi-specs
(is (infer-test :defs [(require '[clojure.walk :as w])
(def Op-multi-spec nil)
(defn takes-op [a]
(w/prewalk identity a))]
:tests [(takes-op {:Op :val :val 'blah})
(takes-op {:Op :if
:test {:Op :val :val 'blah}
:then {:Op :val :val 'blah}
:else {:Op :val :val 'blah}})]))
;; ensure unvisited map entries have type Any/? not (s/or)
(is (infer-test :defs [(require '[clojure.walk :as w])
(def Op-multi-spec nil)
(defn takes-op [a]
(mapv identity a))]
:tests [(takes-op {:Op :val :val 'blah
:children {:foo [:a :b :c]}
})
(takes-op {:Op :if
:children {:foo [:a :b :c]}})]))
)
| null | https://raw.githubusercontent.com/typedclojure/typedclojure/1b3c9ef6786a792ae991c438ea8dca31175aa4a7/typed/clj.annotator/test/typed/clj/annotator/test/runtime_infer.clj | clojure | ppenv : Env -> nil
testing only
set up ns refers
Any U Vec Map
Sym HMap Nothing
'unparse-prop1 t
'unparse-prop1 t
'unparse-prop1 t
'unparse-prop1 t
collapse maps with completely disjoint keys
don't collapse common keys with keyword entry
upcast Kw + HMap to Any
simplify keywords + seqables to Any
don't simplify Seqable + nil
use optional keys
Kw simplification
join on class arguments
don't alias args implicitly
upcast HMaps to Map if they appear in a union
upcast HMaps to Map if they appear in a union
optional HMaps test
namespaced entry + spec
recursive HMaps test
specs-from-tenv
combine maps that look similar
performance tests
other types don't collapse tagged maps
should combining tagged and untagged maps upcast to (Map Any Any)?
merging sufficiently similar maps
instrument and track
clashes detected between :req-un keys and normal aliases
clashes detected for multi-specs
ensure unvisited map entries have type Any/? not (s/or) | (ns typed.clj.annotator.test.runtime-infer
(:require [clojure.test :refer :all]
[clojure.pprint :as pp]
[clojure.repl :as repl]
[clojure.set :as set]
[com.gfredericks.test.chuck :as chuck]
[com.gfredericks.test.chuck.clojure-test :refer [checking]]
[com.gfredericks.test.chuck.generators :as gen']
[clojure.test.check.generators :as gen]
[typed.clojure :as t]
[typed.cljc.analyzer :as ana2]
[clojure.tools.analyzer.jvm :as taj]
[typed.clj.analyzer :as jana2]
[typed.clj.annotator :refer :all :as infer]
[clojure.core.typed.coerce-utils :as coerce]
[typed.clj.annotator.pprint :refer [pprint]]
[typed.clj.annotator.parse :refer [prs parse-type]]
[typed.clj.annotator.rep :refer [infer-result
var-path
key-path
fn-rng-path
fn-dom-path]
:as rep]
[typed.clj.annotator.track :refer [track-var] :as track]
[typed.clj.annotator.join :refer [make-Union
join*
join-HMaps
join]]
[typed.clj.annotator.env :as env]
[typed.clj.annotator.frontend.spec :refer [unparse-spec'
envs-to-specs]]
[typed.clj.annotator.insert :refer [delete-generated-annotations-in-str
generate-ann-start
generate-ann-end]
:as insert]
[typed.clj.annotator.util :refer [*ann-for-ns*
spec-ns
current-ns
unparse-type
*debug*
update-alias-env
type-env
*envs*
update-type-env
HMap-req-keyset]]
)
(:import (clojure.lang IExceptionInfo)))
(defn ppenv [env]
(pprint (into {}
(map (fn [[k v]]
[k (unparse-type v)]))
env)))
(defn add-tmp-aliases [env as]
(update-alias-env env merge (zipmap as (repeat nil))))
(defmacro with-tmp-aliases [env as & body]
`(binding [*envs* (atom (add-tmp-aliases ~env ~as))]
~@body))
#_(defmacro with-type-and-alias-env
[t a & body]
`(binding [*envs*
(atom {(current-ns)
{:type-env ~t
:alias-env ~a}})]
~@body))
(deftest union-test
(is (= (make-Union [(prs Long)
(prs Double)])
(prs Number)
(make-Union [(prs Long)
(prs Double)
(prs Number)])
)))
(deftest join-test
(is (= (join* (prs String))
(make-Union [(prs String)])
(prs String)))
(is (= (join* (prs Sym))
(make-Union [(prs Sym)])
(prs Sym)))
(is (not= (prs Any)
(prs (U Sym String))))
FIXME
#_
(is (=
(prs (U '{:f [? :-> java.lang.Long], :a ?}
'{:f [? :-> java.lang.Long]}))
(prs (HMap :mandatory {:f [? :-> java.lang.Long]}
:optional {:a ?}))))
FIXME
#_
(is (=
(join-HMaps
(prs '{:f ':op1, :a Any})
(prs '{:f ':op2, :a Any}))
(join
(prs '{:f ':op1, :a Any})
(prs '{:f ':op2, :a Any}))
(prs (U
'{:f ':op1, :a Any}
'{:f ':op2, :a Any}))))
(checking
"join maps"
5
[ts (gen/shuffle
[(prs
'{:E ':false})
(prs
'{:args (Vec '{:name clojure.lang.Symbol, :E ':var}),
:fun
(U
'{:E ':lambda,
:arg clojure.lang.Symbol,
:body '{:name clojure.lang.Symbol, :E ':var},
:arg-type
'{:T ':intersection, :types (Seqable Any)}}
'{:name clojure.lang.Symbol, :E ':var}),
:E ':app})])]
(is (= (apply join ts)
(prs
(U
'{:E ':false}
'{:args (Vec '{:name clojure.lang.Symbol, :E ':var}),
:fun
(U
'{:E ':lambda,
:arg clojure.lang.Symbol,
:body '{:name clojure.lang.Symbol, :E ':var},
:arg-type
'{:T ':intersection, :types (Seqable Any)}}
'{:name clojure.lang.Symbol, :E ':var}),
:E ':app})))))
(checking
"inner vec"
5
[ts (gen/shuffle
[(prs (Vec '{:a ?}))
(prs (Vec '{:b ?}))])]
(is
(= (apply join* ts)
(prs (Vec (U '{:a ?} '{:b ?}))))))
(checking
"functions"
30
[ts (gen/shuffle
[(prs '{:a ?})
(prs '{:a [? :-> ?]})
(prs '{:a [? :-> Long]})
(prs '{:a [Long :-> Long]})])]
(= (apply join* ts)
(prs '{:a [Long :-> Long]})))
(checking
"HOF"
5
[ts (gen/shuffle
[(prs '{:f ?, :a java.lang.Long})
(prs '{:f [[? :-> java.lang.Long] :-> ?], :a ?})])]
(is
(= (apply join* ts)
(prs '{:f [[? :-> java.lang.Long] :-> ?], :a java.lang.Long}))))
(checking
"map return"
5
[ts (gen/shuffle
[(prs ['{:f ?, :a java.lang.Long} :-> '{:b ?, :f clojure.lang.IFn, :a ?}])
(prs ['{:f [[? :-> java.lang.Long] :-> ?], :a ?} :-> ?])])]
(is (= (apply join ts)
(prs
['{:f [[? :-> java.lang.Long] :-> ?], :a java.lang.Long}
:->
'{:b ?, :f clojure.lang.IFn, :a ?}]))))
(checking
"join union"
5
[ts (gen/shuffle
[(prs
(U '{:f [? :-> java.lang.Long], :a ?}
'{:f clojure.lang.IFn}))
(prs
'{:f [? :-> java.lang.Long]})])]
(is (= (apply join ts)
(prs
(U '{:f [? :-> java.lang.Long], :a ?}
'{:f [? :-> java.lang.Long]})))))
(checking
"join fn in map"
5
[ts (gen/shuffle
[(prs '{:f [[java.lang.Long :-> java.lang.Long] :-> ?]})
(prs '{:f [[java.lang.Long :-> java.lang.Long] :-> java.lang.Long]})])]
(is (= (apply join ts)
(prs '{:f [[java.lang.Long :-> java.lang.Long] :-> java.lang.Long]})))))
#_
(deftest squash-test
#_
(let [config (init-config)
env (init-env)
env (update-alias-env env merge
(with-tmp-aliases env '[a1 a2]
{'a1 (prs '{:a a2})
'a2 (prs '{:a nil})}))]
(binding [*envs* (atom env)]
(is (= (alias-env (squash env config (prs a1)))
{'a1 (prs '{:a (U nil a1)})
'a2 (prs a1)}))))
#_
(let [aenv (with-tmp-aliases '[a1 a2 a3 a4]
{'a1 (prs a2)
'a2 (prs '{:a a3})
'a3 (prs a4)
'a4 (prs
'{:a nil})})]
(with-type-and-alias-env
(type-env @*envs*)
aenv
(is (= (squash-all (prs a1))
(prs a1)))
(is (= (alias-env)
(with-tmp-aliases '[a1 a2]
{'a1 (prs '{:a (U nil a1)})
'a2 (prs a1)
TODO < ^v
'a4 (prs a3)
}))))))
(defn update-path' [env infer-results]
(let [config (init-config)
env (generate-tenv
env
config
{:infer-results infer-results})]
(type-env env)))
(deftest update-path-test
(checking
"update map"
10
[infers (gen/shuffle
[(infer-result [(var-path 'use-map)
(key-path #{:a} :a)]
-unknown)
(infer-result [(var-path 'use-map)
(key-path #{:a} :a)]
(prs Long))])]
(is (= (update-path' (init-env) infers)
{'use-map (prs '{:a Long})})))
(checking
"update nested map"
20
[infers (gen/shuffle
[(infer-result [(var-path 'use-map)]
(prs clojure.lang.IFn))
(infer-result [(var-path 'use-map)
(fn-rng-path 1)
(key-path #{:b :f :a} :f)]
(prs clojure.lang.IFn))
(infer-result [(var-path 'use-map)
(fn-dom-path 1 0)
(key-path #{:f :a} :a)]
(prs Long))
(infer-result [(var-path 'use-map)
(fn-dom-path 1 0)
(key-path #{:f :a} :f)
(fn-dom-path 1 0)
(fn-rng-path 1)]
(prs Long))])]
(is (=
(update-path' (init-env) infers)
{'use-map
(-> (prs ['{:f [[? :-> Long] :-> ?], :a java.lang.Long} :-> '{:b ?, :f clojure.lang.IFn, :a ?}])
(assoc :top-level-def 'use-map))})))
(checking
"function dom rng"
10
[infers (gen/shuffle
[(infer-result [(var-path 'foo) (fn-rng-path 1)]
(parse-type 'Long))
(infer-result [(var-path 'foo) (fn-dom-path 1 0)]
(parse-type 'Long))])]
(is (= (update-path' (init-env) infers)
{'foo (-> (prs [Long :-> Long])
(assoc :top-level-def 'foo))})))
(checking
"unknown with function"
10
[infers (gen/shuffle
[(infer-result [(var-path 'foo)] (prs ?))
(infer-result [(var-path 'foo)] (prs [Long :-> ?]))])]
(is (= (update-path' (init-env) infers)
{'foo (-> (prs [java.lang.Long :-> ?])
(assoc :top-level-def 'foo))})))
(checking
"combine functions"
10
[ts (gen/shuffle
[(prs [(U '{:f [? :-> java.lang.Long], :a ?}
'{:f [? :-> java.lang.Long]}) :->
'{:b ?, :f ?, :a java.lang.Long}])
(prs ['{:f clojure.lang.IFn, :a ?} :-> ?])])]
(is
(= (apply join ts)
(prs
[(U '{:f [? :-> java.lang.Long], :a ?} '{:f [? :-> java.lang.Long]})
:->
'{:b ?, :f ?, :a java.lang.Long}]))))
(checking
"IFn with fns"
10
[ts (gen/shuffle
[(prs (U '{:f [? :-> java.lang.Long], :a ?}
'{:f [? :-> java.lang.Long]}))
(prs '{:f clojure.lang.IFn, :a ?})])]
(is
(= (apply join ts)
(prs (U '{:f [? :-> java.lang.Long], :a ?}
'{:f [? :-> java.lang.Long]})))))
)
(deftest delete-generated-annotations-in-str-test
(is (= (delete-generated-annotations-in-str
(str "\n"
generate-ann-start
"\n"
"foo\n"
generate-ann-end "\n"
"bar\n\n"))
"\nbar\n")))
#_(deftest group-arities-test
(is (group-arities
{:op :IFn,
:arities [{:op :IFn1, :dom [], :rng {:op :class, :class java.lang.Long, :args []}}]}
{:op :IFn, :arities [{:op :IFn1, :dom [{:op :unknown}], :rng {:op :class, :class java.lang.Long, :args []}}]})))
(deftest squash-horizonally-test
(is (let [config (init-config)
env (as-> (init-env) env
(update-alias-env env
assoc
'foo (prs '{:baz Long})
'bar (prs '{:baz Boolean}))
(binding [*envs* (atom env)]
(update-type-env env
assoc
`var1 (prs foo)
`var2 (prs bar))))
env (squash-horizonally env config)
env (follow-all env (assoc config :simplify? false))]
(pprint env)
(= (get (type-env env) `var1)
(get (type-env env) `var2)))))
(defmacro try-prim [invoke-args & body]
(let [name (gensym)]
`(do (defn ~name ~@body)
(alter-var-root
(var ~name)
(constantly
(track-var ~name)))
(~name ~@invoke-args))))
(deftest track-prim-fn
(is (=
:ok
(try-prim
[1]
[^long a]
:ok)))
(is (=
10
(try-prim
[1 'a]
^long [^long a ^Object b]
10)))
(is (=
10
(try-prim
[]
^long []
10)))
(is (=
10.5
(try-prim
[]
^double []
10.5)))
(is (=
:ok
(try-prim
[1]
(^double [] 10.5)
(^Object [^long a] :ok))))
(is (=
10.5
(try-prim
[]
(^double [] 10.5)
(^Object [^long a] :ok))))
)
(deftest mini-occ-test
(is
(do
(require 'typed.clj.annotator.test.mini-occ :reload)
:ok)))
(deftest optional-keys-test
(is
(=
(join-HMaps
(prs
(HMap :optional {:a Any}))
(prs
(HMap :optional {:a Any})))
(prs
(HMap :optional {:a Any}))))
(is
(=
(join-HMaps
(prs
(HMap :optional {:a String}))
(prs
(HMap :mandatory {:a Long})))
(prs
(HMap :optional {:a (U String Long)}))))
(is
(=
(join-HMaps
(prs
(HMap :mandatory {:op ':Foo}))
(prs
(HMap :mandatory {:op ':Foo}
:optional {:bar String})))
(prs
(HMap :mandatory {:op ':Foo}
:optional {:bar String}))))
(is (=
(HMap-req-keyset
(prs
(HMap :mandatory {:a Long})))
#{:a}))
(is (=
(HMap-req-keyset
(prs
(HMap :optional {:a Long})))
#{}))
(is (=
(HMap-req-keyset
(prs
(HMap :optional {:a Long}
:mandatory {:b Long})))
#{:b}))
(is (=
(HMap-req-opt-keysets
(prs
(HMap :optional {:a Long}
:mandatory {:b Long})))
#{{:req-keyset #{:b}, :opt-keyset #{:a}}}))
(is (=
(make-Union
[(prs
(HMap :mandatory {:op ':Foo}))
(prs
(HMap :mandatory {:op ':Foo}
:optional {:opt String}))])
(prs (HMap :mandatory {:op ':Foo}
:optional {:opt String}))))
(is (=
(make-Union
[(prs
(HMap :mandatory {:op ':Foo}
:optional {:opt Long}))
(prs
(HMap :optional {:op ':Foo
:opt String}))])
(prs (HMap :optional {:op ':Foo
:opt (U Long String)}))))
(is
(=
(join-HMaps
(prs
'{:op ':the-foo
:the-bar Sym
:opt Sym})
(prs
'{:op ':the-foo
:the-bar Sym}))
(prs
(HMap :mandatory {:op ':the-foo
:the-bar Sym}
:optional {:opt Sym}))))
)
(def ^:dynamic *print-anns* true)
(defn *-from-tenv [f tenv config]
(let [ns (create-ns (gensym))]
(binding [*ann-for-ns* (constantly ns)
*ns* ns
*debug* (if-let [[_ debug] (find config :debug)]
debug
*debug*)]
(refer-clojure)
(require '[typed.clojure
:as t
:refer [defalias ann
]])
(when spec-ns
(require [spec-ns :as 's]))
(let [_ (prn "Current ns:" (current-ns))
env (as-> (init-env) env
(update-type-env env merge tenv))
env (populate-envs env config)
anns (f env config)]
(when *print-anns*
(pprint anns)))
:ok)))
(defn anns-from-tenv [tenv & [config]]
(binding [unparse-type unparse-type']
(*-from-tenv envs-to-annotations
tenv
(or config {}))))
(defn specs-from-tenv [tenv & [config]]
(binding [unparse-type unparse-spec']
(*-from-tenv envs-to-specs
tenv
(merge config
{:spec? true}))))
(deftest gen-anns
(is (let [st-type (prs
'{:quads
(Vec
'{:klingons java.lang.Long,
:quadrant (Vec java.lang.Long),
:stars java.lang.Long,
:bases java.lang.Long}),
:stardate
'{:start java.lang.Long,
:current java.lang.Long,
:end java.lang.Long},
:current-klingons (Vec Nothing),
:starting-klingons java.lang.Long,
:lrs-history (Vec java.lang.String),
:current-sector (Vec java.lang.Long),
:enterprise
'{:photon_torpedoes java.lang.Long,
:sector (Vec java.lang.Long),
:quadrant (Vec (U java.lang.Integer java.lang.Long)),
:energy java.lang.Long,
:damage
'{:phasers java.lang.Long,
:warp_engines java.lang.Long,
:damage_control java.lang.Long,
:long_range_sensors java.lang.Long,
:short_range_sensors java.lang.Long,
:computer_display java.lang.Long,
:photon_torpedo_tubes java.lang.Long,
:shields java.lang.Long},
:is_docked false,
:shields java.lang.Long}})]
(anns-from-tenv {'t1 st-type
't2 st-type})))
(is
(let [t (prs
[(U
'{:exp '{:name Sym, :E ':var},
:P ':is,
:type '{:T ':intersection, :types (Set Nothing)}}
'{:P ':not,
:p
'{:P ':=,
:exps
(Set
(U
'{:name Sym, :E ':var}
'{:args (Vec '{:name Sym, :E ':var}),
:fun '{:name Sym, :E ':var},
:E ':app}))}}
'{:P ':=,
:exps
(Set
(U
'{:name Sym, :E ':var}
'{:args (Vec '{:name Sym, :E ':var}),
:fun '{:name Sym, :E ':var},
:E ':app}))}
'{:P (U ':or ':and),
:ps
(Set
(U
'{:exp '{:name Sym, :E ':var},
:P ':is,
:type '{:T ':intersection, :types (Set Nothing)}
:foo Sym}
'{:P ':=,
:exps
(Set
(U
'{:name Sym, :E ':var}
'{:args (Vec '{:name Sym, :E ':var}),
:fun '{:name Sym, :E ':var},
:E ':app}))}))})
:->
Any])]
'unparse-prop2 t}
{:debug :iterations})))
(is
(let [t (prs
['{:P ':or
:ps
(Set
'{:P ':and
:ps
(Set
'{:P ':Top})})}
:->
Any])]
'unparse-prop2 t}
{:debug #{:iterations :squash}})))
(is
(let [t (prs
['{:P ':or
:ps
(Set
(U '{:P ':Top}
'{:P ':and
:ps
(Set
'{:P ':Top})}))}
:->
Any])]
'unparse-prop2 t}
{:debug #{:iterations :squash}})))
(is
(let [t (prs
'{:P ':or
:ps
(Set
'{:P ':and
:ps
(Set
(U '{:P ':Top}
'{:P ':Bottom}))})}
)]
'unparse-prop2 t}
{:debug #{:squash :iterations
:squash-horizontally}}))))
(deftest ann-combine
(is
(let [t (prs
[(U '{:entry1 String}
'{:entry2 Boolean}
'{:entry3 Boolean})
:->
Any])]
(anns-from-tenv {'config-in t})))
(is
(let [t (prs
[(U '{:op :foo
:entry1 String}
'{:op :bar
:entry2 Boolean}
'{:op :baz
:entry3 Boolean})
:->
Any])]
(anns-from-tenv {'config-in t}
{:debug true})))
(is
(let [t (prs
[(U ':foo
'{:op :bar
:entry2 Boolean})
:->
Any])]
(anns-from-tenv {'config-in t})))
(is
(let [t (prs
[(U ':foo
(clojure.lang.Seqable String))
:->
Any])]
(anns-from-tenv {'config-in t})))
simplify Sym / Kw + seqable to Any
(is
(let [t (prs
[(U Sym
(clojure.lang.Seqable String))
:->
Any])]
(anns-from-tenv {'config-in t})))
(is
(let [t (prs
[(U nil
(clojure.lang.Seqable String))
:->
Any])]
(anns-from-tenv {'config-in t})))
(is
(let [t (prs
[(U '{:foo String}
'{:foo String
:bar Boolean})
:->
Any])]
(anns-from-tenv {'config-in t}
{:debug true})))
upcast union to Any
(is
(let [t (prs
[(U Any
'{:foo String
:bar Boolean})
:->
Any])]
(anns-from-tenv {'config-in t})))
(is
(let [t (prs
[(U ':foo ':bar)
:->
Any])]
(anns-from-tenv {'config-in t})))
(is
(let [t (prs
[(U (Vec Integer)
(Vec Long))
:->
Any])]
(anns-from-tenv {'config-in t})))
(is
(let [t (prs
[[Any :-> Any]
:->
Any])]
(anns-from-tenv {'config-in t}))))
(deftest ann-hmaps
(is
(let [t (prs
[(U '{:foo Any}
(clojure.lang.IPersistentMap Any Any))
:->
Any])]
(anns-from-tenv {'config-in t})))
(is
(let [t (prs
[(U '{:foo Any}
(clojure.lang.IPersistentMap Any Any))
:->
Any])]
(anns-from-tenv {'config-in t})))
(is
(let [t (prs
[(HMap :optional {:foo String})
:->
Any])]
(anns-from-tenv {'config-in t})))
(is
(let [t (prs
[(U '{:op ':the-foo
:the-bar Sym
:opt Sym}
'{:op ':the-foo
:the-bar Sym})
:->
Any])]
(anns-from-tenv {'config-in t}
{:debug true})))
(is
(let [t (prs
['{::op ':the-foo}
:->
Any])]
(specs-from-tenv {'config-in t})))
TODO recursive example of this test
(is
(let [t (prs
[(U '{:op ':the-bar
:the-foo String
:the-baz String}
'{:op ':the-foo
:the-foo Sym
:the-baz Sym})
:->
Any])]
(anns-from-tenv {'config-in t}
{:debug true})))
HMap alias naming test
(is
(let [t (prs
[
'{:op ':foo
:the-bar '{:op ':term
:val Sym}}
:->
Any])]
((juxt specs-from-tenv
anns-from-tenv)
{'config-in t})))
(is
(let [t (prs
[(U
'{:op ':foo
:the-bar '{:op ':bar
:the-foo '{:op ':foo
:the-bar '{:op ':term
:val Sym}}}}
'{:op ':foo
:opt Sym
:the-bar '{:op ':bar
:the-foo '{:op ':foo
:the-bar '{:op ':term
:val Sym}}}}
#_
'{:op ':bar
:the-foo '{:op ':foo
:the-bar '{:op ':bar
:the-foo '{:op ':term
:val Sym}}}}
)
:->
Any])]
FIXME
anns-from-tenv)
{'config-in t}
{:debug true})))
FIXME prefer : op over : type ?
(is
(let [t (prs
[
(U
'{:op ':foo
:type (U ':int ':nil ':faz)
:the-bar '{:op ':bar
:type (U ':int ':nil ':faz)
:the-foo '{:op ':foo
:type (U ':int ':nil ':faz)
:the-bar '{:op ':term
:val Sym}}}}
'{:op ':foo
:type (U ':int ':nil ':faz)
:opt Sym
:the-bar '{:op ':bar
:type (U ':int ':nil ':faz)
:the-foo '{:op ':foo
:type (U ':int ':nil ':faz)
:the-bar '{:op ':term
:val Sym}}}}
'{:op ':bar
:type (U ':int ':nil ':faz)
:the-foo '{:op ':foo
:type (U ':int ':nil ':faz)
:the-bar '{:op ':bar
:type (U ':int ':nil ':faz)
:the-foo '{:op ':term
:val Sym}}}})
:->
Any])]
anns-from-tenv
{'config-in t}
{:fuel 0})))
(is
(let [t (prs
[':a
Integer
':b
Boolean
:->
Any])]
FIXME
anns-from-tenv)
{'config-in t}
{:debug true})))
(is
(let [t (prs
['{:a Long
:b Long
:c Long
:d Long}
'{:a Long
:b Long
:c Long}
:->
Any])]
FIXME
anns-from-tenv)
{'config-in t}
))))
(defn gen-height [n]
{:pre [(not (neg? n))]}
(if (zero? n)
`'{:tag ':null}
`'{:tag ':cons :cdr ~(gen-height (dec n))}))
(defn gen-tagged-union [n]
{:pre [(not (neg? n))]}
(if (zero? n)
`'{:tag ':null}
`'{:tag '~(keyword (str "cons" n)) :cdr ~(gen-tagged-union (dec n))}))
(defmacro bench
"Evaluates expr and returns the time it took."
[expr]
`(let [start# (. System (nanoTime))
ret# ~expr
msduration# (/ (double (- (. System (nanoTime)) start#)) 1000000.0)]
[ret# msduration#]))
(defn bench-iteratively
([f n] (bench-iteratively f 0 n))
([f start n]
(loop [times []
i start]
(if (< n i)
times
(let [[_ t] (f i)]
(recur (conj times t)
(inc i)))))))
(defn write-csv [n v]
{:pre [(vector? v)]}
(spit n (apply str (interpose "," v))))
(comment
(def bench-height
(write-csv "height-bench.csv"
(bench-iteratively
#(let [t (parse-type (gen-height %))]
(binding [*print-anns* false]
(bench
(anns-from-tenv {'prop t}
{}))))
120)))
(let [t (parse-type (gen-height 5))]
(anns-from-tenv {'prop t}
{}))
(pprint (gen-tagged-union 5))
(let [t (parse-type (gen-tagged-union 10))]
(anns-from-tenv {'prop t}
{}))
(def bench-tagged
(write-csv
"tagged-bench-past110.csv"
(bench-iteratively
#(let [t (parse-type (gen-tagged-union %))]
(binding [*print-anns* false]
(bench
(anns-from-tenv {'prop t}
{}
#_{:debug #{:squash :iterations
:squash-horizontally}}))))
111
120)))
)
(deftest map-merge-test
TODO
(is
(let [t (prs
(U nil
'{:a ':b}
'{:op ':foo
:b Long
:c Long
:d Long}
'{:op ':bar
:e Long
:w Long
:q Long}))]
FIXME
anns-from-tenv)
{'config-in t}
)))
(is
(let [t (prs
(U nil
(HMap
:mandatory {:name Sym}
:optional {:blah Sym})
(HMap
:mandatory {:name Sym
:ns Sym}
:optional {:tag Sym
:tag1 Sym
:tag2 Sym
:tag3 Sym
:tag4 Sym
:tag5 Sym
:tag6 Sym
:tag7 Sym
:tag8 Sym
:tag9 Sym
:tag10 Sym
:tag11 Sym
})))]
FIXME
anns-from-tenv)
{'config-in t}
))))
(deftest instrument-top-level-form-test
(is (.contains (with-out-str (instrument-top-level-form '(println "a")))
"a\n"))
(is (.contains
(with-out-str (instrument-top-level-form '(do (def a "a") (println a))))
"a\n")))
(defn code-to-gen-spec [root-ns root-key samples {:keys [spec?] :as config}]
(binding [*ns* *ns*]
(in-ns root-ns)
(binding [*ns* (the-ns root-ns)
*ann-for-ns* #(the-ns root-ns)
unparse-type (if spec?
unparse-spec'
unparse-type')]
(let [results-atom (atom (env/initial-results))
_ (run!
(fn [e]
(track/track
(track/gen-track-config)
results-atom
e
#{[(rep/var-path root-ns root-key)]}
#{}))
(concat (map eval (:eval samples))
(:edn samples)))
_ (prn @results-atom)
infer-out
(infer/infer-anns root-ns
{:spec? spec?
:allow-top-level-non-IFn true
:results-atom results-atom})
_ (prn "infer out" infer-out)
spec-out
(apply insert/prepare-ann
((juxt :requires :top-level :local-fns) infer-out))]
spec-out))))
(deftest manual-track-test
(is (code-to-gen-spec 'user 'user/foo-bar {:eval '[(inc 1)] :edn '[a]}
{:spec? true})))
(declare *-from-infer-results)
TODO remove # suffixes
(defn infer-test*
[{:keys [defs tests expected-specs expected-types] :as opts}]
(assert (vector? defs))
(assert (vector? tests))
(let [ns# (create-ns (gensym))]
(binding [*ns* ns#
*ann-for-ns* (constantly ns#)]
(refer-clojure)
(require '[typed.clojure :as t])
(when spec-ns
(require [spec-ns :as 's]))
(let [result# (atom :ok)
run-until-bad-result!# (fn [f# c#]
(loop [c# c#]
(when @result#
(when (seq c#)
(do (f# (first c#))
(recur (rest c#)))))))
defs# defs
tests# tests
_# (infer/refresh-runtime-infer)
_# (run-until-bad-result!#
(fn [f#]
(try (instrument-top-level-form f#)
(catch Throwable e#
(println (str "Failed to evaluate " f# " with error " e#))
(reset! result# nil))))
(concat defs# tests#))]
(when @result#
(let [expected-specs# (let [s# expected-specs]
(if (string? s#)
(read-string s#)
s#))
_# (assert ((some-fn nil? vector?) expected-specs#))
expected-types# (let [t# expected-types]
(if (string? t#)
(read-string t#)
t#))
_# (assert ((some-fn nil? vector?) expected-types#))
specs# (*-from-infer-results *ns* {:spec? true})
types# (*-from-infer-results *ns* {})
assert-equal# (fn [actual# expected# msg#]
(when-not (= actual# expected#)
(println msg#)
(println "Actual:")
(pprint actual#)
(println "Expected:")
(pprint expected#)
(reset! result# nil)))
]
(if expected-specs#
(assert-equal# (:top-level specs#) expected-specs#
"Actual specs didn't match expected specs")
(do (println "Here are the generated specs:")
(pprint (:top-level specs#))))
(if expected-types#
(assert-equal# (:top-level types#) expected-types#
"Actual types didn't match expected types")
(do (println "Here are the generated types:")
(pprint (:top-level types#))))
(when spec-ns
(let [instrumentable-syms# (set
(keep (fn [spc#]
(when (seq? spc#)
(when (= 's/fdef (first spc#))
(let [^clojure.lang.Var v# (resolve (second spc#))]
(when (var? v#)
(coerce/var->symbol v#))))))
(:top-level specs#)))
spec-defs# (set
(keep (fn [spc#]
(when (seq? spc#)
(when (= 's/def (first spc#))
(let [kw# (second spc#)]
(when (keyword? kw#)
(when (namespace kw#)
kw#))))))
(:top-level specs#)))
_# (require ['clojure.spec.test.alpha])
instrument# (resolve 'clojure.spec.test.alpha/instrument)
_# (assert instrument#)
exercise# (resolve 'clojure.spec.alpha/exercise)
_# (assert exercise#)
exercise-fn# (resolve 'clojure.spec.alpha/exercise-fn)
_# (assert exercise-fn#)
exercise-fn-or-fail# (fn [sym#]
(try (doall (exercise-fn# sym#))
(catch Throwable e#
(println "Function failed to exercise:" sym#)
(println "With the following error:")
(binding [*err* *out*]
(repl/pst e#))
(when (instance? IExceptionInfo e#)
(pp/pprint (ex-data e#)))
(reset! result# nil))))
exercise-or-fail# (fn [spc#]
(try (doall (exercise# spc#))
(catch Throwable e#
(println "Spec failed to exercise")
(pp/pprint spc#)
(println "With the following error:")
(binding [*err* *out*]
(repl/pst e#))
(when (instance? IExceptionInfo e#)
(pp/pprint (ex-data e#)))
(reset! result# nil))))
eval-or-fail# (fn [form#]
(try (eval form#)
(catch Throwable e#
(println "Expression failed to evaluate:")
(pp/pprint form#)
(println "With the following error:")
(repl/pst e#)
(reset! result# nil))))]
(testing "spec declarations should evaluate"
(run-until-bad-result!# eval-or-fail# (concat (:requires specs#) (:top-level specs#))))
(testing "should be able to exercise spec defs"
(run-until-bad-result!# exercise-or-fail# spec-defs#))
(testing "should be able to exercise fns"
(run-until-bad-result!# exercise-fn-or-fail# instrumentable-syms#))
(when @result#
(testing "specs should instrument"
(when-not (= instrumentable-syms#
(set (instrument# instrumentable-syms#)))
(println "Expected to instrument "
(set/difference
instrumentable-syms#
(set (instrument# instrumentable-syms#)))
" but didn't")
(reset! result# nil))))
(testing "tests should evaluate under instrumentation"
(run-until-bad-result!# eval-or-fail# tests#))))
@result#))))))
(defmacro infer-test
"Given a vector of definitions :defs and a vector of tests :tests, then
does these steps in order. Short-circuits and returns a false value if previous steps fail.
Returns a true value on success.
1. Generates specs and types for the definitions
2. Ensure generated specs and types are identical to :expected-types and :expected-specs, respectively (when provided).
These are either vectors of annotations, or a string that contains vectors
of annotations that will be `read` in the correct namespace (useful to aid keyword namespace
resolution in specs).
If one of these is not provided, the respective annotations are pprint'ed so they can
be easily copied into the test.
3. Evaluates generated specs.
4. Exercises spec aliases.
5. Exercises spec'd functions.
6. Instruments all spec'd functions.
7. Runs :test code again under spec instrumentation."
[& {:keys [defs tests expected-specs expected-types] :as opts}]
`(infer-test* '~opts))
(defn *-from-infer-results [ns config]
(binding [*ann-for-ns* (constantly *ns*)
*debug* (if-let [[_ debug] (find config :debug)]
debug
*debug*)
unparse-type (if (:spec? config)
unparse-spec'
unparse-type')]
(infer/infer-anns *ns* config)))
(deftest test-infer-test
(is (infer-test :defs [(defn blah [a] (inc a))]
:tests [(blah 1)]
:expected-specs [(s/fdef blah :args (s/cat :a int?) :ret int?)]
:expected-types [(declare) (t/ann blah [t/Int :-> t/Int])]))
(testing "detects Exception always thrown in :defs"
(is (not
(infer-test :defs [(throw (Exception.))]
:tests []
:expected-specs [(s/fdef blah :args (s/cat :a int?) :ret int?)]
:expected-types [(declare) (t/ann blah [t/Int :-> t/Int])]))))
(testing "detects Exception always thrown in :tests"
(is (not
(infer-test :defs [(defn blah [a] (inc a))]
:tests [(throw (Exception.))]
:expected-specs [(s/fdef blah :args (s/cat :a int?) :ret int?)]
:expected-types [(declare) (t/ann blah [t/Int :-> t/Int])]))))
(testing "detects bad exercise-fn"
(is (not
(infer-test :defs [(defn blah [a]
(assert (zero? a)))]
:tests [(blah 0)]
:expected-specs [(s/fdef blah :args (s/cat :a int?) :ret nil?)]
:expected-types [(declare) (t/ann blah [t/Int :-> nil])]))))
(testing "detects bad exercise-fn"
(is (not
(infer-test :defs [(defn blah [f]
(f))]
:tests [(blah (constantly nil))]
:expected-specs [(s/fdef blah :args (s/cat :f ifn?) :ret nil?)]
:expected-types [(declare) (t/ann blah [[:-> nil] :-> nil])]))))
(testing "detects bad provided specs"
(is (not
(infer-test :defs [(defn blah [f])]
:tests [(blah identity)]
:expected-specs [(s/fdef blah :args (s/cat :asdf ifn?) :ret nil?)]
:expected-types [(declare) (t/ann blah [AnyFunction :-> nil])]))))
(testing "detects bad provided specs (wrong quantity)"
(is (not
(infer-test :defs [(defn blah [f]
(f))]
:tests [(blah (constantly nil))]
:expected-specs [(s/fdef food :args (s/cat :asdf ifn?) :ret nil?)
(s/fdef blah :args (s/cat :f ifn?) :ret nil?)]
:expected-types [(declare) (t/ann blah [[:-> nil] :-> nil])]))))
(testing "detects bad provided types"
(is (not
(infer-test :defs [(defn blah [f]
(f))]
:tests [(blah (constantly nil))]
:expected-specs [(s/fdef blah :args (s/cat :f ifn?) :ret nil?)]
:expected-types [(declare) (t/ann blah [t/Int :-> nil])]))))
(testing "detects bad provided types (wrong quantity)"
(is (not
(infer-test :defs [(defn blah [f]
(f))]
:tests [(blah (constantly nil))]
:expected-specs [(s/fdef blah :args (s/cat :f ifn?) :ret nil?)]
:expected-types [(declare) (t/ann blah [[:-> nil] :-> nil])
(t/ann food [t/Int :-> nil])]))))
)
(deftest HMap-infer-test
(is (infer-test :defs [(defn takes-map [m]
{:pre [(or (some-> m :a #{1})
true)]}
(mapv identity m))]
:tests [(takes-map {})
(takes-map {:a 1})
(takes-map {:b 1})]
:expected-specs "[(s/def ::ABMap (s/keys :opt-un [::a ::b]))
(s/def ::a int?)
(s/def ::b int?)
(s/fdef
takes-map
:args
(s/cat :m ::ABMap)
:ret
(s/coll-of (s/tuple #{:b :a} int?) :into vector?))]"
:expected-types [(declare ABMap)
(t/defalias ABMap (t/HMap :optional {:a t/Int, :b t/Int}))
(t/ann
takes-map
[ABMap
:->
(t/Vec '[(t/U ':a ':b) t/Int])])]))
(is (infer-test :defs [(defn gives-map []
{:a 1})]
:tests [(require '[clojure.walk :as w])
(w/prewalk identity (gives-map))]))
(is (infer-test :defs [(require '[clojure.walk :as w])
(defn takes-map [m]
(w/prewalk identity m))]
:tests [(takes-map {})
(takes-map {:a {:b 1}})
(takes-map {:b {:a 1}})]
))
(is (infer-test :defs [(require '[clojure.walk :as w])
(defn takes-op [a]
(w/prewalk identity a))]
:tests [(takes-op {:Op :val :val 'blah})
(takes-op {:Op :if
:test {:Op :val :val 'blah}
:then {:Op :val :val 'blah}
:else {:Op :val :val 'blah}})]))
(is (infer-test :defs [(require '[clojure.walk :as w])
(def Op-multi-spec nil)
(defn takes-op [a]
(w/prewalk identity a))]
:tests [(takes-op {:Op :val :val 'blah})
(takes-op {:Op :if
:test {:Op :val :val 'blah}
:then {:Op :val :val 'blah}
:else {:Op :val :val 'blah}})]))
(is (infer-test :defs [(require '[clojure.walk :as w])
(def Op-multi-spec nil)
(defn takes-op [a]
(mapv identity a))]
:tests [(takes-op {:Op :val :val 'blah
:children {:foo [:a :b :c]}
})
(takes-op {:Op :if
:children {:foo [:a :b :c]}})]))
)
|
6d1ca832233966570ef9c9226bd5d442e204fea662e78ca9961349572b5904ae | archimag/cliki2 | articles.lisp | articles.lisp
(in-package #:cliki2)
(restas:define-route view-article (":title")
(or (let ((article (article-with-title title)))
(when (and article
(not (string= (article-title article) title)))
(restas:redirect 'view-article
:title (article-title article)))
article)
(list :article-not-found-page
:title title)))
(restas:define-route view-article-source ("raw/:title"
:content-type "text/plain")
(article-content (check-article title)))
(restas:define-route edit-article ("edit/:title")
(check-article-edit-access)
(list :edit-article-page
:title title
:article (article-with-title title)))
(restas:define-route save-article ("edit/:title"
:method :post
:requirement (check-edit-command "save"))
(check-article-edit-access)
(add-revision (or (article-with-title title)
(make-instance 'article :title title))
(hunchentoot:post-parameter "summary")
(hunchentoot:post-parameter "content"))
(restas:redirect 'view-article :title title))
(restas:define-route preview-article ("edit/:title"
:method :post
:requirement (check-edit-command "preview"))
(check-article-edit-access)
(list :preview-article-page
:title title
:content (hunchentoot:post-parameter "content")))
(restas:define-route cancel-edit-article ("edit/:title"
:method :post
:requirement (check-edit-command "cancel"))
(check-article-edit-access)
(restas:redirect 'view-article
:title title))
(restas:define-route view-article-history ("history/:(title)")
(list :article-history-page
:article (check-article title)))
(restas:define-route view-article-history/post ("history/:(title)"
:method :post)
(restas:redirect 'compare-article-revisions
:title title
:origin-date (parse-integer (hunchentoot:post-parameter "old"))
:modified-date (parse-integer (hunchentoot:post-parameter "diff"))))
(defun find-article-revision (article date)
(find date
(article-revisions article)
:key #'revision-date))
(restas:define-route compare-article-revisions ("history/:(title)/:(origin-date)-:(modified-date)"
:parse-vars (list :origin-date #'parse-integer
:modified-date #'parse-integer))
(let* ((article (check-article title)))
(list :revisions-diff-page
:article article
:origin (find-article-revision article origin-date)
:modified (find-article-revision article modified-date))))
(restas:define-route view-article-revision ("history/:title/:date"
:parse-vars (list :date #'parse-integer))
(let ((article (check-article title)))
(list :article-revision-page
:article article
:revision (find-article-revision article date))))
| null | https://raw.githubusercontent.com/archimag/cliki2/f0b6910f040907c70fd842ed76472af2d645c984/src/routes/articles.lisp | lisp | articles.lisp
(in-package #:cliki2)
(restas:define-route view-article (":title")
(or (let ((article (article-with-title title)))
(when (and article
(not (string= (article-title article) title)))
(restas:redirect 'view-article
:title (article-title article)))
article)
(list :article-not-found-page
:title title)))
(restas:define-route view-article-source ("raw/:title"
:content-type "text/plain")
(article-content (check-article title)))
(restas:define-route edit-article ("edit/:title")
(check-article-edit-access)
(list :edit-article-page
:title title
:article (article-with-title title)))
(restas:define-route save-article ("edit/:title"
:method :post
:requirement (check-edit-command "save"))
(check-article-edit-access)
(add-revision (or (article-with-title title)
(make-instance 'article :title title))
(hunchentoot:post-parameter "summary")
(hunchentoot:post-parameter "content"))
(restas:redirect 'view-article :title title))
(restas:define-route preview-article ("edit/:title"
:method :post
:requirement (check-edit-command "preview"))
(check-article-edit-access)
(list :preview-article-page
:title title
:content (hunchentoot:post-parameter "content")))
(restas:define-route cancel-edit-article ("edit/:title"
:method :post
:requirement (check-edit-command "cancel"))
(check-article-edit-access)
(restas:redirect 'view-article
:title title))
(restas:define-route view-article-history ("history/:(title)")
(list :article-history-page
:article (check-article title)))
(restas:define-route view-article-history/post ("history/:(title)"
:method :post)
(restas:redirect 'compare-article-revisions
:title title
:origin-date (parse-integer (hunchentoot:post-parameter "old"))
:modified-date (parse-integer (hunchentoot:post-parameter "diff"))))
(defun find-article-revision (article date)
(find date
(article-revisions article)
:key #'revision-date))
(restas:define-route compare-article-revisions ("history/:(title)/:(origin-date)-:(modified-date)"
:parse-vars (list :origin-date #'parse-integer
:modified-date #'parse-integer))
(let* ((article (check-article title)))
(list :revisions-diff-page
:article article
:origin (find-article-revision article origin-date)
:modified (find-article-revision article modified-date))))
(restas:define-route view-article-revision ("history/:title/:date"
:parse-vars (list :date #'parse-integer))
(let ((article (check-article title)))
(list :article-revision-page
:article article
:revision (find-article-revision article date))))
| |
96668e52b4db6e523398790e2d4e5d1534b7632379d5bd88cb3d88d3d00f4905 | Clozure/ccl-tests | ceiling-aux.lsp | ;-*- Mode: Lisp -*-
Author :
Created : Tue Aug 19 06:52:02 2003
;;;; Contains: Aux. functions for CEILING
(in-package :cl-test)
(defun ceiling.1-fn ()
(loop for n = (- (random 2000000000)
1000000000)
for d = (1+ (random 10000))
for vals = (multiple-value-list (ceiling n d))
for (q r) = vals
for n2 = (+ (* q d) r)
repeat 1000
unless (and (eql (length vals) 2)
(integerp q)
(= n n2)
(integerp r)
(< (- d) r 1))
collect (list n d q r n2)))
(defun ceiling.2-fn ()
(loop for num = (random 1000000000)
for denom = (1+ (random 1000))
for n = (/ num denom)
for d = (1+ (random 10000))
for vals = (multiple-value-list (ceiling n d))
for (q r) = vals
for n2 = (+ (* q d) r)
repeat 1000
unless (and (eql (length vals) 2)
(integerp q)
(<= r 0)
(< (- d) r)
(= n n2))
collect (list n d q r n2)))
(defun ceiling.3-fn (width)
(loop for n = (- (random width) (/ width 2))
for vals = (multiple-value-list (ceiling n))
for (q r) = vals
for n2 = (+ q r)
repeat 1000
unless (and (eql (length vals) 2)
(integerp q)
(= n n2)
(<= 0 (- r))
(< (- r) 1)
)
collect (list n q r n2)))
(defun ceiling.7-fn ()
(loop for numerator = (- (random 10000000000) 5000000000)
for denominator = (1+ (random 100000))
for n = (/ numerator denominator)
for vals = (multiple-value-list (ceiling n))
for (q r) = vals
for n2 = (+ q r)
repeat 1000
unless (and (eql (length vals) 2)
(integerp q)
(rationalp r)
(= n n2)
(<= 0 (- r))
(< (- r) 1)
)
collect (list n q r n2)))
(defun ceiling.8-fn ()
(loop for num1 = (- (random 10000000000) 5000000000)
for den1 = (1+ (random 100000))
for n = (/ num1 den1)
for num2 = (- (1+ (random 1000000)))
for den2 = (1+ (random 1000000))
for d = (/ num2 den2)
for vals = (multiple-value-list (ceiling n d))
for (q r) = vals
for n2 = (+ (* q d) r)
repeat 1000
unless (and (eql (length vals) 2)
(integerp q)
(rationalp r)
(<= 0 r)
(< r (- d))
(= n n2))
collect (list n q d r n2)))
(defun ceiling.9-fn ()
(loop for num1 = (- (random 1000000000000000) 500000000000000)
for den1 = (1+ (random 10000000000))
for n = (/ num1 den1)
for num2 = (- (1+ (random 1000000000)))
for den2 = (1+ (random 10000000))
for d = (/ num2 den2)
for vals = (multiple-value-list (ceiling n d))
for (q r) = vals
for n2 = (+ (* q d) r)
repeat 1000
unless (and (eql (length vals) 2)
(integerp q)
(rationalp r)
(<= 0 r)
(< r (- d))
(= n n2))
collect (list n q d r n2)))
| null | https://raw.githubusercontent.com/Clozure/ccl-tests/0478abddb34dbc16487a1975560d8d073a988060/ansi-tests/ceiling-aux.lsp | lisp | -*- Mode: Lisp -*-
Contains: Aux. functions for CEILING | Author :
Created : Tue Aug 19 06:52:02 2003
(in-package :cl-test)
(defun ceiling.1-fn ()
(loop for n = (- (random 2000000000)
1000000000)
for d = (1+ (random 10000))
for vals = (multiple-value-list (ceiling n d))
for (q r) = vals
for n2 = (+ (* q d) r)
repeat 1000
unless (and (eql (length vals) 2)
(integerp q)
(= n n2)
(integerp r)
(< (- d) r 1))
collect (list n d q r n2)))
(defun ceiling.2-fn ()
(loop for num = (random 1000000000)
for denom = (1+ (random 1000))
for n = (/ num denom)
for d = (1+ (random 10000))
for vals = (multiple-value-list (ceiling n d))
for (q r) = vals
for n2 = (+ (* q d) r)
repeat 1000
unless (and (eql (length vals) 2)
(integerp q)
(<= r 0)
(< (- d) r)
(= n n2))
collect (list n d q r n2)))
(defun ceiling.3-fn (width)
(loop for n = (- (random width) (/ width 2))
for vals = (multiple-value-list (ceiling n))
for (q r) = vals
for n2 = (+ q r)
repeat 1000
unless (and (eql (length vals) 2)
(integerp q)
(= n n2)
(<= 0 (- r))
(< (- r) 1)
)
collect (list n q r n2)))
(defun ceiling.7-fn ()
(loop for numerator = (- (random 10000000000) 5000000000)
for denominator = (1+ (random 100000))
for n = (/ numerator denominator)
for vals = (multiple-value-list (ceiling n))
for (q r) = vals
for n2 = (+ q r)
repeat 1000
unless (and (eql (length vals) 2)
(integerp q)
(rationalp r)
(= n n2)
(<= 0 (- r))
(< (- r) 1)
)
collect (list n q r n2)))
(defun ceiling.8-fn ()
(loop for num1 = (- (random 10000000000) 5000000000)
for den1 = (1+ (random 100000))
for n = (/ num1 den1)
for num2 = (- (1+ (random 1000000)))
for den2 = (1+ (random 1000000))
for d = (/ num2 den2)
for vals = (multiple-value-list (ceiling n d))
for (q r) = vals
for n2 = (+ (* q d) r)
repeat 1000
unless (and (eql (length vals) 2)
(integerp q)
(rationalp r)
(<= 0 r)
(< r (- d))
(= n n2))
collect (list n q d r n2)))
(defun ceiling.9-fn ()
(loop for num1 = (- (random 1000000000000000) 500000000000000)
for den1 = (1+ (random 10000000000))
for n = (/ num1 den1)
for num2 = (- (1+ (random 1000000000)))
for den2 = (1+ (random 10000000))
for d = (/ num2 den2)
for vals = (multiple-value-list (ceiling n d))
for (q r) = vals
for n2 = (+ (* q d) r)
repeat 1000
unless (and (eql (length vals) 2)
(integerp q)
(rationalp r)
(<= 0 r)
(< r (- d))
(= n n2))
collect (list n q d r n2)))
|
92ae4990fcf4c521f54f25bc60c2bbd4df3bdd3e78615ba822427f1bb4a90f53 | felixmulder/haskell-in-production | Log.hs | module Log
( Loggable(..)
, Log(..)
, Logger(..)
) where
import Prelude
import Control.Monad.Reader (ReaderT, lift)
import Control.Monad.Reader.Class (asks)
import Data.Has (Has(..))
import Data.Text (Text)
import GHC.Stack (HasCallStack)
class Loggable a where
fromLoggable :: a -> Text
class Monad m => Log m where
logLn :: HasCallStack => Loggable a => a -> m ()
data Logger m = Logger
{ dologLn :: HasCallStack => Text -> m ()
}
instance
( Has (Logger m) r
, Monad m
) => Log (ReaderT r m) where
logLn a =
asks getter >>= \(Logger doLog) -> lift . doLog . fromLoggable $ a
instance Loggable Text where
fromLoggable = id
| null | https://raw.githubusercontent.com/felixmulder/haskell-in-production/ff3431f01342b8689d3449007759706d0bba6488/part-1-testable-components/src/Log.hs | haskell | module Log
( Loggable(..)
, Log(..)
, Logger(..)
) where
import Prelude
import Control.Monad.Reader (ReaderT, lift)
import Control.Monad.Reader.Class (asks)
import Data.Has (Has(..))
import Data.Text (Text)
import GHC.Stack (HasCallStack)
class Loggable a where
fromLoggable :: a -> Text
class Monad m => Log m where
logLn :: HasCallStack => Loggable a => a -> m ()
data Logger m = Logger
{ dologLn :: HasCallStack => Text -> m ()
}
instance
( Has (Logger m) r
, Monad m
) => Log (ReaderT r m) where
logLn a =
asks getter >>= \(Logger doLog) -> lift . doLog . fromLoggable $ a
instance Loggable Text where
fromLoggable = id
| |
a6480c1b79656c9952754dc207d863deadee5be56c73c2ddb4bc4c844c1cd496 | rescript-lang/rescript-compiler | belt_Option.ml | Copyright ( C ) 2017 , Authors of ReScript
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
let keepU opt p = match opt with
| Some x as some when (p x [@bs]) -> some
| _ -> None
let keep opt p = keepU opt (fun[@bs] x -> p x)
let forEachU opt f = match opt with
| Some x -> (f x [@bs])
| None -> ()
let forEach opt f = forEachU opt (fun[@bs] x -> f x)
let getExn = function
| Some x -> x
| None -> raise Not_found
external getUnsafe : 'a option -> 'a = "%identity"
let mapWithDefaultU opt default f = match opt with
| Some x -> (f x [@bs])
| None -> default
let mapWithDefault opt default f = mapWithDefaultU opt default (fun[@bs] x -> f x)
let mapU opt f = match opt with
| Some x -> Some (f x [@bs])
| None -> None
let map opt f = mapU opt (fun[@bs] x -> f x)
let flatMapU opt f = match opt with
| Some x -> (f x [@bs])
| None -> None
let flatMap opt f = flatMapU opt (fun[@bs] x -> f x)
let getWithDefault opt default = match opt with
| Some x -> x
| None -> default
let orElse opt other = match opt with
| Some _ as some -> some
| None -> other
let isSome = function
| Some _ -> true
| None -> false
let isNone x = x = None
let eqU a b f =
match a with
| Some a ->
begin match b with
| None -> false
| Some b -> f a b [@bs]
end
| None -> b = None
let eq a b f = eqU a b (fun[@bs] x y -> f x y)
let cmpU a b f = match (a, b) with
| (Some a, Some b) -> f a b [@bs]
| (None, Some _) -> -1
| (Some _, None) -> 1
| (None, None) -> 0
let cmp a b f = cmpU a b (fun[@bs] x y -> f x y)
| null | https://raw.githubusercontent.com/rescript-lang/rescript-compiler/f716534c90d57017cad7e3176fa5d0b8b26fc88f/jscomp/others/belt_Option.ml | ocaml | Copyright ( C ) 2017 , Authors of ReScript
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
let keepU opt p = match opt with
| Some x as some when (p x [@bs]) -> some
| _ -> None
let keep opt p = keepU opt (fun[@bs] x -> p x)
let forEachU opt f = match opt with
| Some x -> (f x [@bs])
| None -> ()
let forEach opt f = forEachU opt (fun[@bs] x -> f x)
let getExn = function
| Some x -> x
| None -> raise Not_found
external getUnsafe : 'a option -> 'a = "%identity"
let mapWithDefaultU opt default f = match opt with
| Some x -> (f x [@bs])
| None -> default
let mapWithDefault opt default f = mapWithDefaultU opt default (fun[@bs] x -> f x)
let mapU opt f = match opt with
| Some x -> Some (f x [@bs])
| None -> None
let map opt f = mapU opt (fun[@bs] x -> f x)
let flatMapU opt f = match opt with
| Some x -> (f x [@bs])
| None -> None
let flatMap opt f = flatMapU opt (fun[@bs] x -> f x)
let getWithDefault opt default = match opt with
| Some x -> x
| None -> default
let orElse opt other = match opt with
| Some _ as some -> some
| None -> other
let isSome = function
| Some _ -> true
| None -> false
let isNone x = x = None
let eqU a b f =
match a with
| Some a ->
begin match b with
| None -> false
| Some b -> f a b [@bs]
end
| None -> b = None
let eq a b f = eqU a b (fun[@bs] x y -> f x y)
let cmpU a b f = match (a, b) with
| (Some a, Some b) -> f a b [@bs]
| (None, Some _) -> -1
| (Some _, None) -> 1
| (None, None) -> 0
let cmp a b f = cmpU a b (fun[@bs] x y -> f x y)
| |
f9583e566a7a1deb7d00106b183d9cb7621014893d75a8416ce1ef760ca63e79 | CryptoKami/cryptokami-core | Assurance.hs | -- | Assurance levels info.
module Pos.Wallet.Web.Assurance
( AssuranceLevel (..)
, assuredBlockDepth
) where
import Pos.Core.Common (BlockCount)
import Pos.Wallet.Web.ClientTypes (CWalletAssurance (..))
data AssuranceLevel
= HighAssurance
-- | For given assurance level, recommended transaction depth to assure that
-- transaction won't be canceled by some fork.
--
-- Values are taken from this table:
-- -assurance/
assuredBlockDepth :: AssuranceLevel -> CWalletAssurance -> BlockCount
assuredBlockDepth HighAssurance CWANormal = 9
assuredBlockDepth HighAssurance CWAStrict = 15
| null | https://raw.githubusercontent.com/CryptoKami/cryptokami-core/12ca60a9ad167b6327397b3b2f928c19436ae114/wallet/src/Pos/Wallet/Web/Assurance.hs | haskell | | Assurance levels info.
| For given assurance level, recommended transaction depth to assure that
transaction won't be canceled by some fork.
Values are taken from this table:
-assurance/ |
module Pos.Wallet.Web.Assurance
( AssuranceLevel (..)
, assuredBlockDepth
) where
import Pos.Core.Common (BlockCount)
import Pos.Wallet.Web.ClientTypes (CWalletAssurance (..))
data AssuranceLevel
= HighAssurance
assuredBlockDepth :: AssuranceLevel -> CWalletAssurance -> BlockCount
assuredBlockDepth HighAssurance CWANormal = 9
assuredBlockDepth HighAssurance CWAStrict = 15
|
71c9fcb7dff4d1045834c0d5016c4151b85712b575a766577125a7218c4d91df | rsnikhil/Forvis_RISCV-ISA-Spec | Main_TandemVerifier.hs | module Main_TandemVerifier (main_TandemVerifier) where
-- ================================================================
-- This is the 'main' function for the use-case where we use the
-- formal spec as a tandem verifier. Here, it receives commands
on stdin and produces responses on stdout .
-- ================================================================
Standard Haskell imports
import System.IO
import Numeric (showHex, readHex)
import System.Exit
-- Project imports
import BitManipulation
import ArchDefs
import ArchState
import RunProgram
-- ================================================================
main_TandemVerifier :: IO ()
main_TandemVerifier = do
Parse commands from stdin
all_input <- getContents
let ws = words all_input
ws' = skip_until_cmd_start ws
cmds = split_cmds [] ws'
-- Create initial architectural state
let initial_PC = 0
addr_byte_list = []
astate = mkArchState RV64 initial_PC addr_byte_list
putStrLn "Initial arch state"
print_ArchState "____" astate
-- Process the commands against the architectural state
astate1 <- process_cmds astate cmds
putStrLn "Final arch state"
print_ArchState "____" astate1
-- ================================================================
Parsing stdin into commands
cmd_start :: String
cmd_start = "$"
-- Establishes invariant for arg of split_cmds
namely : list of strings is either empty , or first string is the command - start word
skip_until_cmd_start :: [String] -> [String]
skip_until_cmd_start [] = []
skip_until_cmd_start (x:xs) | x == cmd_start = (x:xs)
Invariant : ' cmds ' is either empty or starts with the cmd_start word
-- Split list of lines (line = list of words) into commands
where each command begins with cmd_start word .
The word is dropped .
split_cmds :: [[String]] -> [String] -> [[String]]
split_cmds cmds [] = cmds
split_cmds cmds (x:xs) | x == cmd_start = collect_cmd cmds [] xs
| True = error ("split_cmds arg does not start with cmd_start word: " ++ cmd_start)
The word has been consumed by split_cmds
Collect the rest of the commmand ( until next cmd_start word or end of input )
-- and resume splitting rest of input into commands
collect_cmd :: [[String]] -> [String] -> [String] -> [[String]]
collect_cmd cmds cmd [] = cmds ++ [cmd]
collect_cmd cmds cmd (x:xs) | x == cmd_start = split_cmds (cmds ++ [cmd]) ("$":xs)
| True = collect_cmd cmds (cmd ++ [x]) xs
-- ================================================================
-- Processing commands against the architectural state
process_cmds :: ArchState -> [[String]] -> IO ArchState
process_cmds astate [] = return astate
process_cmds astate (cmd:cmds) = do
astate1 <- process_cmd astate cmd
hFlush stdout
process_cmds astate1 cmds
-- ================
-- Process one command
process_cmd :: ArchState -> [String] -> IO ArchState
-- ================
-- Read/Write PC.
process_cmd astate ["read_PC"] = do
putStrLn (" Doing read_PC")
let pc_val = archstate_pc_read astate
putStrLn ("OK " ++ show pc_val)
return astate
process_cmd astate ["write_PC", v_s] = do
putStrLn (" Doing write_PC " ++ v_s)
let v = fromIntegral (read_hex 32 v_s)
astate1 <- archstate_pc_write astate v
putStrLn "OK"
return astate1
-- ================
-- Read/Write GPR
process_cmd astate ["read_GPR", r_s] = do
putStrLn (" Doing read_GPR " ++ r_s)
let r = toEnum (fromIntegral (read_hex 5 r_s))
gpr_val = archstate_gpr_read astate r
putStrLn ("OK " ++ show gpr_val)
return astate
process_cmd astate ["write_GPR", r_s, v_s] = do
putStrLn (" Doing write_GPR " ++ r_s ++ " " ++ v_s)
let r = toEnum (fromIntegral (read_hex 5 r_s))
v = fromIntegral (read_hex 32 v_s)
astate1 <- archstate_gpr_write astate r v
putStrLn "OK"
return astate1
-- ================
-- Read/Write CSR
process_cmd astate ["read_CSR", csr_addr_s] = do
putStrLn (" Doing read_CSR " ++ csr_addr_s)
let csr_addr = fromIntegral (read_hex 12 csr_addr_s)
csr_val = archstate_csr_read astate csr_addr
putStrLn ("OK " ++ show csr_val)
return astate
process_cmd astate ["write_CSR", csr_addr_s, v_s] = do
putStrLn (" Doing write_CSR " ++ csr_addr_s ++ " " ++ v_s)
let csr_addr = fromIntegral (read_hex 12 csr_addr_s)
v = fromIntegral (read_hex 32 v_s)
astate1 <- archstate_csr_write astate csr_addr v
putStrLn "OK"
return astate1
-- ================
-- Read mem bytes
-- TODO: Read all data before responding, check for errors
process_cmd astate ["read_mem_8", n_s, addr_s] = do
putStrLn (" Doing read_mem_8 " ++ n_s ++ " " ++ addr_s)
putStr "OK"
let n = read_hex 32 n_s
addr = read_hex 32 addr_s
read_bytes :: ArchState -> Integer -> IO ()
read_bytes astate j | j == n = return ()
| True = do
let addr_j = fromIntegral (addr + j)
(res_j, astate') = archstate_mem_read8 astate addr_j
case res_j of
LoadResult_Err cause ->
do
putStrLn ("ERROR: read_mem_8 encountered LoadResult_Err: " ++ show cause)
exitWith (ExitFailure 1)
LoadResult_Ok val_j ->
do
putStr (" " ++ (showHex val_j ""))
read_bytes astate' (j+1)
read_bytes astate 0
putStrLn ""
return astate
-- ================
-- Write mem bytes
-- TODO: Check for errors
process_cmd astate ("write_mem_8": addr_s: val_ss) = do
putStr (" Doing write_mem_8 " ++ addr_s)
mapM_ (\val_s -> putStr (" " ++ val_s)) val_ss
putStrLn ""
let addr = read_hex 32 addr_s
write_bytes :: ArchState -> Integer -> [String] -> IO ArchState
write_bytes astate j [] = return astate
write_bytes astate j (val_s:val_ss) = do
let addr_j = addr + j
val_j = read_hex 32 val_s
putStrLn ("(" ++ (showHex addr_j "") ++ "," ++ (showHex val_j "") ++ ")")
astate1 <- archstate_mem_write8 astate (fromIntegral addr_j) (fromIntegral val_j)
write_bytes astate1 (j+1) val_ss
astate1 <- write_bytes astate 0 val_ss
putStrLn "OK"
return astate1
-- ================
-- Read mem 32b words
-- TODO: Read all data before responding, check for errors
process_cmd astate ["read_mem_32", n_s, addr_s] = do
putStrLn (" Doing read_mem_32 " ++ n_s ++ " " ++ addr_s)
putStr "OK"
let n = read_hex 32 n_s
addr = read_hex 32 addr_s
read_words :: ArchState -> Integer -> IO ()
read_words astate j | j == n = return ()
| True = do
let addr_j = fromIntegral (addr + j*4)
(res_j, astate') = archstate_mem_read32 astate addr_j
case res_j of
LoadResult_Err cause ->
do
putStrLn ("ERROR: read_mem_32 encountered LoadResult_Err: " ++ show cause)
exitWith (ExitFailure 1)
LoadResult_Ok val_j ->
do
putStr (" " ++ (showHex val_j ""))
read_words astate' (j+1)
read_words astate 0
putStrLn ""
return astate
-- ================
-- Write mem 32b words
-- TODO: Check for errors
process_cmd astate ("write_mem_32": addr_s: val_ss) = do
putStr (" Doing write_mem_32 " ++ addr_s)
mapM_ (\val_s -> putStr (" " ++ val_s)) val_ss
putStrLn ""
let addr = read_hex 32 addr_s
write_words :: ArchState -> Integer -> [String] -> IO ArchState
write_words astate j [] = return astate
write_words astate j (val_s:val_ss) = do
let addr_j = addr + j*4
val_j = read_hex 32 val_s
putStrLn ("(" ++ (showHex addr_j "") ++ "," ++ (showHex val_j "") ++ ")")
astate1 <- archstate_mem_write32 astate (fromIntegral addr_j) (fromIntegral val_j)
write_words astate1 (j+1) val_ss
astate1 <- write_words astate 0 val_ss
putStrLn "OK"
return astate1
-- ================
Execute N instructions and optionally return Tandem - Verification packets
TODO : return Tandem - Verification packets
process_cmd astate ["exec", n_s, tv_s] = do
putStrLn (" Doing exec " ++ n_s ++ " " ++ tv_s)
let n = read n_s
astate1 <- runProgram (fromIntegral n) astate
let stop_reason = archstate_stop_read astate1
putStrLn ("OK " ++ (show stop_reason))
return astate1
-- ================
-- Read/Write trace verbosity
-- TODO: Clip to legal values
process_cmd astate ["read_verbosity"] = do
putStrLn (" Doing read_verbosity")
let verbosity = archstate_verbosity_read astate
putStrLn ("OK " ++ show (verbosity))
return astate
process_cmd astate ["write_verbosity", v_s] = do
putStrLn (" Doing write_verbosity " ++ v_s)
let verbosity = read_hex 1 v_s
astate1 <- archstate_verbosity_write astate (fromIntegral verbosity)
putStrLn "OK"
return astate1
-- ================
-- Unrecognized command
process_cmd astate cmd = do
putStr " Doing "
mapM_ (\val_s -> putStr (" " ++ val_s)) cmd
putStrLn ""
putStrLn "NOT_OK"
return astate
| null | https://raw.githubusercontent.com/rsnikhil/Forvis_RISCV-ISA-Spec/0c5590a12f4b39644d0497fa6285ad5e33003dfc/ZZ_OLD/v1/src/Main_TandemVerifier.hs | haskell | ================================================================
This is the 'main' function for the use-case where we use the
formal spec as a tandem verifier. Here, it receives commands
================================================================
Project imports
================================================================
Create initial architectural state
Process the commands against the architectural state
================================================================
Establishes invariant for arg of split_cmds
Split list of lines (line = list of words) into commands
and resume splitting rest of input into commands
================================================================
Processing commands against the architectural state
================
Process one command
================
Read/Write PC.
================
Read/Write GPR
================
Read/Write CSR
================
Read mem bytes
TODO: Read all data before responding, check for errors
================
Write mem bytes
TODO: Check for errors
================
Read mem 32b words
TODO: Read all data before responding, check for errors
================
Write mem 32b words
TODO: Check for errors
================
================
Read/Write trace verbosity
TODO: Clip to legal values
================
Unrecognized command | module Main_TandemVerifier (main_TandemVerifier) where
on stdin and produces responses on stdout .
Standard Haskell imports
import System.IO
import Numeric (showHex, readHex)
import System.Exit
import BitManipulation
import ArchDefs
import ArchState
import RunProgram
main_TandemVerifier :: IO ()
main_TandemVerifier = do
Parse commands from stdin
all_input <- getContents
let ws = words all_input
ws' = skip_until_cmd_start ws
cmds = split_cmds [] ws'
let initial_PC = 0
addr_byte_list = []
astate = mkArchState RV64 initial_PC addr_byte_list
putStrLn "Initial arch state"
print_ArchState "____" astate
astate1 <- process_cmds astate cmds
putStrLn "Final arch state"
print_ArchState "____" astate1
Parsing stdin into commands
cmd_start :: String
cmd_start = "$"
namely : list of strings is either empty , or first string is the command - start word
skip_until_cmd_start :: [String] -> [String]
skip_until_cmd_start [] = []
skip_until_cmd_start (x:xs) | x == cmd_start = (x:xs)
Invariant : ' cmds ' is either empty or starts with the cmd_start word
where each command begins with cmd_start word .
The word is dropped .
split_cmds :: [[String]] -> [String] -> [[String]]
split_cmds cmds [] = cmds
split_cmds cmds (x:xs) | x == cmd_start = collect_cmd cmds [] xs
| True = error ("split_cmds arg does not start with cmd_start word: " ++ cmd_start)
The word has been consumed by split_cmds
Collect the rest of the commmand ( until next cmd_start word or end of input )
collect_cmd :: [[String]] -> [String] -> [String] -> [[String]]
collect_cmd cmds cmd [] = cmds ++ [cmd]
collect_cmd cmds cmd (x:xs) | x == cmd_start = split_cmds (cmds ++ [cmd]) ("$":xs)
| True = collect_cmd cmds (cmd ++ [x]) xs
process_cmds :: ArchState -> [[String]] -> IO ArchState
process_cmds astate [] = return astate
process_cmds astate (cmd:cmds) = do
astate1 <- process_cmd astate cmd
hFlush stdout
process_cmds astate1 cmds
process_cmd :: ArchState -> [String] -> IO ArchState
process_cmd astate ["read_PC"] = do
putStrLn (" Doing read_PC")
let pc_val = archstate_pc_read astate
putStrLn ("OK " ++ show pc_val)
return astate
process_cmd astate ["write_PC", v_s] = do
putStrLn (" Doing write_PC " ++ v_s)
let v = fromIntegral (read_hex 32 v_s)
astate1 <- archstate_pc_write astate v
putStrLn "OK"
return astate1
process_cmd astate ["read_GPR", r_s] = do
putStrLn (" Doing read_GPR " ++ r_s)
let r = toEnum (fromIntegral (read_hex 5 r_s))
gpr_val = archstate_gpr_read astate r
putStrLn ("OK " ++ show gpr_val)
return astate
process_cmd astate ["write_GPR", r_s, v_s] = do
putStrLn (" Doing write_GPR " ++ r_s ++ " " ++ v_s)
let r = toEnum (fromIntegral (read_hex 5 r_s))
v = fromIntegral (read_hex 32 v_s)
astate1 <- archstate_gpr_write astate r v
putStrLn "OK"
return astate1
process_cmd astate ["read_CSR", csr_addr_s] = do
putStrLn (" Doing read_CSR " ++ csr_addr_s)
let csr_addr = fromIntegral (read_hex 12 csr_addr_s)
csr_val = archstate_csr_read astate csr_addr
putStrLn ("OK " ++ show csr_val)
return astate
process_cmd astate ["write_CSR", csr_addr_s, v_s] = do
putStrLn (" Doing write_CSR " ++ csr_addr_s ++ " " ++ v_s)
let csr_addr = fromIntegral (read_hex 12 csr_addr_s)
v = fromIntegral (read_hex 32 v_s)
astate1 <- archstate_csr_write astate csr_addr v
putStrLn "OK"
return astate1
-- | Read n bytes starting at addr (both parsed with read_hex 32) and
-- print them in hex on one line.  A memory-load error aborts the whole
-- process with exit code 1.  Note: "OK" is printed before the reads start.
process_cmd astate ["read_mem_8", n_s, addr_s] = do
  putStrLn (" Doing read_mem_8 " ++ n_s ++ " " ++ addr_s)
  putStr "OK"
  let n = read_hex 32 n_s
      addr = read_hex 32 addr_s
      -- Local loop over byte offsets; the inner 'astate' shadows the
      -- clause argument and threads the state returned by each read.
      read_bytes :: ArchState -> Integer -> IO ()
      read_bytes astate j
        | j == n = return ()
        | True = do
            let addr_j = fromIntegral (addr + j)
                (res_j, astate') = archstate_mem_read8 astate addr_j
            case res_j of
              LoadResult_Err cause ->
                do
                  putStrLn ("ERROR: read_mem_8 encountered LoadResult_Err: " ++ show cause)
                  exitWith (ExitFailure 1)
              LoadResult_Ok val_j ->
                do
                  putStr (" " ++ (showHex val_j ""))
                  read_bytes astate' (j+1)
  -- The state threaded through read_bytes is discarded; the original
  -- astate is returned.
  read_bytes astate 0
  putStrLn ""
  return astate

-- | Write one byte per remaining argument, starting at addr; each value
-- is parsed with read_hex 32 and truncated via fromIntegral on write.
process_cmd astate ("write_mem_8": addr_s: val_ss) = do
  putStr (" Doing write_mem_8 " ++ addr_s)
  mapM_ (\val_s -> putStr (" " ++ val_s)) val_ss
  putStrLn ""
  let addr = read_hex 32 addr_s
      -- Walk the value strings, writing byte j at addr + j and threading
      -- the updated state.
      write_bytes :: ArchState -> Integer -> [String] -> IO ArchState
      write_bytes astate j [] = return astate
      write_bytes astate j (val_s:val_ss) = do
        let addr_j = addr + j
            val_j = read_hex 32 val_s
        putStrLn ("(" ++ (showHex addr_j "") ++ "," ++ (showHex val_j "") ++ ")")
        astate1 <- archstate_mem_write8 astate (fromIntegral addr_j) (fromIntegral val_j)
        write_bytes astate1 (j+1) val_ss
  astate1 <- write_bytes astate 0 val_ss
  putStrLn "OK"
  return astate1
-- | Read n 32-bit words starting at addr (word j at addr + j*4) and print
-- them in hex on one line.  A memory-load error aborts the process with
-- exit code 1.  Note: "OK" is printed before the reads start.
process_cmd astate ["read_mem_32", n_s, addr_s] = do
  putStrLn (" Doing read_mem_32 " ++ n_s ++ " " ++ addr_s)
  putStr "OK"
  let n = read_hex 32 n_s
      addr = read_hex 32 addr_s
      -- Local loop over word indices; the inner 'astate' shadows the
      -- clause argument and threads the state returned by each read.
      read_words :: ArchState -> Integer -> IO ()
      read_words astate j
        | j == n = return ()
        | True = do
            let addr_j = fromIntegral (addr + j*4)
                (res_j, astate') = archstate_mem_read32 astate addr_j
            case res_j of
              LoadResult_Err cause ->
                do
                  putStrLn ("ERROR: read_mem_32 encountered LoadResult_Err: " ++ show cause)
                  exitWith (ExitFailure 1)
              LoadResult_Ok val_j ->
                do
                  putStr (" " ++ (showHex val_j ""))
                  read_words astate' (j+1)
  -- The state threaded through read_words is discarded; the original
  -- astate is returned.
  read_words astate 0
  putStrLn ""
  return astate

-- | Write one 32-bit word per remaining argument: word j goes to
-- addr + j*4, values parsed with read_hex 32.
process_cmd astate ("write_mem_32": addr_s: val_ss) = do
  putStr (" Doing write_mem_32 " ++ addr_s)
  mapM_ (\val_s -> putStr (" " ++ val_s)) val_ss
  putStrLn ""
  let addr = read_hex 32 addr_s
      write_words :: ArchState -> Integer -> [String] -> IO ArchState
      write_words astate j [] = return astate
      write_words astate j (val_s:val_ss) = do
        let addr_j = addr + j*4
            val_j = read_hex 32 val_s
        putStrLn ("(" ++ (showHex addr_j "") ++ "," ++ (showHex val_j "") ++ ")")
        astate1 <- archstate_mem_write32 astate (fromIntegral addr_j) (fromIntegral val_j)
        write_words astate1 (j+1) val_ss
  astate1 <- write_words astate 0 val_ss
  putStrLn "OK"
  return astate1
-- Execute N instructions and optionally return Tandem-Verification packets
-- TODO: return Tandem-Verification packets
-- | Execute n instructions (decimal n via 'read'; tv_s is echoed but
-- otherwise unused in this clause) and report the stop reason.
process_cmd astate ["exec", n_s, tv_s] = do
  putStrLn (" Doing exec " ++ n_s ++ " " ++ tv_s)
  let n = read n_s
  astate1 <- runProgram (fromIntegral n) astate
  let stop_reason = archstate_stop_read astate1
  putStrLn ("OK " ++ (show stop_reason))
  return astate1

-- | Report the current verbosity level; state is unchanged.
process_cmd astate ["read_verbosity"] = do
  putStrLn (" Doing read_verbosity")
  let verbosity = archstate_verbosity_read astate
  putStrLn ("OK " ++ show (verbosity))
  return astate

-- | Set the verbosity level (parsed with read_hex 1).
process_cmd astate ["write_verbosity", v_s] = do
  putStrLn (" Doing write_verbosity " ++ v_s)
  let verbosity = read_hex 1 v_s
  astate1 <- archstate_verbosity_write astate (fromIntegral verbosity)
  putStrLn "OK"
  return astate1

-- | Fallback: echo the unrecognized command and answer NOT_OK.
process_cmd astate cmd = do
  putStr " Doing "
  mapM_ (\val_s -> putStr (" " ++ val_s)) cmd
  putStrLn ""
  putStrLn "NOT_OK"
  return astate
|
3ec34be878b27191b1dccea797dfa76fbd1b54b1323a26abbbf05048aae125bc | michalkonecny/aern2 | Integration.hs | module AERN2.PPoly.Integration where
import MixedTypesNumPrelude
import Data.List
import AERN2.MP.Ball
import AERN2.MP.Dyadic
import AERN2.Interval
import AERN2.Poly.Cheb
import AERN2.PPoly.Type
import AERN2.RealFun.Operations
-- | Approximate the integral of a piecewise polynomial over @[l, r]@.
-- The domain is rescaled to the unit interval @[-1, 1]@ (hence the
-- @0.5*(domR - domL)@ factor); each piece that intersects the rescaled
-- integration range contributes the integral of its centre polynomial
-- plus an error ball derived from the piece's radius.
integral :: PPoly -> MPBall -> MPBall -> MPBall
integral (PPoly ps dom) l r =
  0.5*(domR - domL) *
  foldl' (+)
    (mpBall 0)
    [pieceIntegral i p | (i,p) <- ppoly_pieces f, intersectsLR i]
  where
  (Interval domL domR) = dom
  -- Endpoints mapped into the unit interval at the function's accuracy
  -- guide precision.
  lI = fromDomToUnitInterval dom (setPrecision (ac2prec $ getAccuracyGuide f) l)
  rI = fromDomToUnitInterval dom (setPrecision (ac2prec $ getAccuracyGuide f) r) -- TODO: properly work out required endpoint precision
  unit = Interval (dyadic $ -1) (dyadic 1)
  f = PPoly ps unit
  lrInterval = Interval (mpBall lI) (mpBall rI)
  -- Keep pieces overlapping the integration range, excluding those that
  -- merely touch it at an endpoint (b == lI or a == rI).
  intersectsLR (Interval a b) =
    lrInterval `intersects` Interval (mpBall a) (mpBall b)
    && (b == lI) /= Just True
    && (a == rI) /= Just True
  -- Integral of one piece clipped to [lI, rI]: evaluate the primitive of
  -- the centre polynomial at both ends, then widen by +/- radius*(b'-a').
  pieceIntegral (Interval a b) p =
    let
      cp = centre p
      q = primitive_function cp
      a' = max a lI
      b' = min b rI
      eps = (mpBall $ radius p)*(b' - a')
      err = hullMPBall (-eps) eps
    in
      (evalDf q cp b' - evalDf q cp a') + err -- TODO: eval direct?
-- | Integrate over a dyadic interval by converting the endpoints to
-- balls and delegating to 'integral'.
instance CanIntegrateOverDom PPoly DyadicInterval where
  type IntegralOverDomType PPoly DyadicInterval = MPBall
  integrateOverDom f (Interval l r) =
    integral f (mpBall l) (mpBall r)
| null | https://raw.githubusercontent.com/michalkonecny/aern2/1c8f12dfcb287bd8e3353802a94865d7c2c121ec/aern2-fun-univariate/src/AERN2/PPoly/Integration.hs | haskell | TODO: properly work out required endpoint precision
TODO: eval direct? | module AERN2.PPoly.Integration where
import MixedTypesNumPrelude
import Data.List
import AERN2.MP.Ball
import AERN2.MP.Dyadic
import AERN2.Interval
import AERN2.Poly.Cheb
import AERN2.PPoly.Type
import AERN2.RealFun.Operations
integral :: PPoly -> MPBall -> MPBall -> MPBall
integral (PPoly ps dom) l r =
0.5*(domR - domL) *
foldl' (+)
(mpBall 0)
[pieceIntegral i p | (i,p) <- ppoly_pieces f, intersectsLR i]
where
(Interval domL domR) = dom
lI = fromDomToUnitInterval dom (setPrecision (ac2prec $ getAccuracyGuide f) l)
unit = Interval (dyadic $ -1) (dyadic 1)
f = PPoly ps unit
lrInterval = Interval (mpBall lI) (mpBall rI)
intersectsLR (Interval a b) =
lrInterval `intersects` Interval (mpBall a) (mpBall b)
&& (b == lI) /= Just True
&& (a == rI) /= Just True
pieceIntegral (Interval a b) p =
let
cp = centre p
q = primitive_function cp
a' = max a lI
b' = min b rI
eps = (mpBall $ radius p)*(b' - a')
err = hullMPBall (-eps) eps
in
instance CanIntegrateOverDom PPoly DyadicInterval where
type IntegralOverDomType PPoly DyadicInterval = MPBall
integrateOverDom f (Interval l r) =
integral f (mpBall l) (mpBall r)
|
ef37cd42dd4b84aec5c6ad1d3314302add249433714e4a644c3ba09cdc91f0c2 | lambe-lang/mitch | transpiler.mli | val run : 'a Mitch_lang.Term.t -> (Mitch_ir.Objcode.t list, string) result
| null | https://raw.githubusercontent.com/lambe-lang/mitch/2c47f6627c3a219c31afd078836e9e7be3e26719/lib/mitch/system/s01_transpiler/transpiler.mli | ocaml | val run : 'a Mitch_lang.Term.t -> (Mitch_ir.Objcode.t list, string) result
| |
8fcf8a562073630523e199cb50c3ffb7fa161ca5a58e3fe3e4bb22a2dcb282fd | rd--/hsc3 | pv_PlayBuf.help.hs | pv_PlayBuf ;
-- Example 1: third pv_PlayBuf argument driven by mouseX over (-1, 1).
let rec_buf = control kr "rec" 10
    fft_buf = localBufId 'α' 1024 1
    x = mouseX kr (-1) 1 Linear 0.2
    c = X.pv_PlayBuf fft_buf rec_buf x 50 1
in ifft c 1 0

-- pv_PlayBuf
-- Example 2: third argument driven by lfNoise2 mapped into (-1, 2).
let rec_buf = control kr "rec" 10
    fft_buf = localBufId 'β' 1024 1
    n = range (-1) 2 (lfNoise2Id 'γ' kr 0.2)
    c = X.pv_PlayBuf fft_buf rec_buf n 0 1
in ifft c 1 0
| null | https://raw.githubusercontent.com/rd--/hsc3/60cb422f0e2049f00b7e15076b2667b85ad8f638/Help/Ugen/pv_PlayBuf.help.hs | haskell | pv_PlayBuf | pv_PlayBuf ;
let rec_buf = control kr "rec" 10
fft_buf = localBufId 'α' 1024 1
x = mouseX kr (-1) 1 Linear 0.2
c = X.pv_PlayBuf fft_buf rec_buf x 50 1
in ifft c 1 0
let rec_buf = control kr "rec" 10
fft_buf = localBufId 'β' 1024 1
n = range (-1) 2 (lfNoise2Id 'γ' kr 0.2)
c = X.pv_PlayBuf fft_buf rec_buf n 0 1
in ifft c 1 0
|
538faa597db20e5e24ddd9a594e8c871e5322187677f1f05e5ac3d7e75a24012 | haguenau/wyrd | tmkStruct.ml | module Geom = struct
  (* Mutable rectangle geometry: position (x, y) and size (w, h). *)
  type t = {
    mutable x: int;
    mutable y: int;
    mutable w: int;
    mutable h: int;
  }

  (* A fresh all-zero geometry record. *)
  let null () =
    { x = 0; y = 0; w = 0; h = 0 }

  (* Overwrite [g] in place with the given (x, y, w, h). *)
  let record (x,y,w,h) g =
    g.x <- x;
    g.y <- y;
    g.w <- w;
    g.h <- h
end
module State = struct
  (* Widget state, packed as (has_focus, is_selected, is_sensitive). *)
  type t = bool * bool * bool

  (* Default state: unfocused, unselected, sensitive. *)
  let normal : t = (false, false, true)

  (* Encode a state as a small integer:
     3 = insensitive, 1 = focused, 2 = selected (unfocused), 0 = plain. *)
  let to_int = function
    | (_, _, false) -> 3
    | (true, _, true) -> 1
    | (_, true, true) -> 2
    | _ -> 0

  (* Largest value [to_int] can return. *)
  let to_int_max = 3

  (* Functional updates of the individual flags. *)
  let set_focus (_, sel, sens) focus = (focus, sel, sens)
  let set_selected (focus, _, sens) sel = (focus, sel, sens)
  let set_sensitive (focus, sel, _) sens = (focus, sel, sens)

  (* Flag accessors. *)
  let has_focus (focus, _, _) = focus
  let is_selected (_, sel, _) = sel
  let is_sensitive (_, _, sens) = sens
end
module Direction = struct
  (* Movement directions; [Previous]/[Next] are order-based, the other
     four are spatial. *)
  type t =
    | Previous
    | Next
    | Left
    | Right
    | Up
    | Down
end
module Class = struct
  (* A named class with (multiple) inheritance through [parents]. *)
  type t = {
    name : string;
    parents : t list
  }

  (* Global registry of every created class, keyed by name.
     NOTE(review): [Hashtbl.add] shadows rather than replaces, so two
     [create] calls with the same name leave the older class hidden. *)
  let all_classes = Hashtbl.create 127

  (* Create a class and register it under [n]. *)
  let create n p =
    let c = { name = n; parents = p } in
    Hashtbl.add all_classes n c;
    c

  (* Look up a class by name; raises [Not_found] when absent. *)
  let get = Hashtbl.find all_classes

  (* [is_a p c]: is [c] the class [p] or a (transitive) descendant of it?
     Uses physical equality ([==]) on class records. *)
  let rec is_a p c =
    (c == p) ||
    (List.exists (is_a p) c.parents)
end
module Toplevel = struct
  (* Events handled at toplevel level: activation, deactivation, and a
     key press carrying an int code. *)
  type t =
    | Activate
    | Desactivate
    | Key of int

  (* Messages sent back up to the toplevel, parameterized by the widget
     type ['w]. *)
  type 'w m =
    | Give_focus of 'w
end
module Cache = struct
  (* Single-slot cache: a weakly-referenced value plus the thunk used to
     (re)compute it after the GC collects it. *)
  type 'a t = 'a Weak.t * (unit -> 'a)

  (* Build an empty cache around producer [f]; nothing is computed yet. *)
  let create f =
    let t = Weak.create 1 in
    ((t,f) : _ t)

  (* Return the cached value, recomputing and re-storing it when the
     weak pointer has been cleared. *)
  let get ((t,f) : _ t) =
    match Weak.get t 0 with
    | Some v -> v
    | None ->
        let v = f () in
        Weak.set t 0 (Some v);
        v

  (* Drop the cached value so the next [get] recomputes it. *)
  let clear ((t,_) : _ t) =
    Weak.set t 0 None
end
module Once = struct
  (* A once-only action attached to a work queue. *)
  type t = {
    mutable already: bool;          (* true once the action has been queued *)
    queue: (unit -> unit) Queue.t;  (* queue the action is delivered to *)
    func: (unit -> unit)            (* the action itself *)
  }

  (* NOTE(review): [already] starts out [true], which makes [add] below a
     permanent no-op, and [func] is fixed to [ignore].  This looks like
     stubbed-out/disabled code — confirm against the original source
     before relying on it. *)
  let create q =
    { already = true; queue = q; func = ignore }

  (* Callback enqueued by [add]; currently does nothing with [o]. *)
  let deliver o () =
    ()

  (* Enqueue [deliver o] at most once.
     NOTE(review): the argument [f] is unused. *)
  let add o f =
    if not o.already then (
      o.already <- true;
      Queue.add (deliver o) o.queue
    )
end
| null | https://raw.githubusercontent.com/haguenau/wyrd/490ce39ad9ecf36969eb74b9f882f85a1ef14ba3/curses/tmk/tmkStruct.ml | ocaml | focus, selected, sensitive | module Geom = struct
type t = {
mutable x: int;
mutable y: int;
mutable w: int;
mutable h: int;
}
let null () =
{ x = 0; y = 0; w = 0; h = 0 }
let record (x,y,w,h) g =
g.x <- x;
g.y <- y;
g.w <- w;
g.h <- h
end
module State = struct
type t = bool * bool * bool
let normal : t = (false, false, true)
let to_int (f,s,a) =
if a then (if f then 1 else if s then 2 else 0) else 3
let to_int_max = 3
let set_focus (_,s,a) f = (f,s,a)
let set_selected (f,_,a) s = (f,s,a)
let set_sensitive (f,s,_) a = (f,s,a)
let has_focus (f,_,_) = f
let is_selected (_,s,_) = s
let is_sensitive (_,_,a) = a
end
module Direction = struct
type t =
| Previous
| Next
| Left
| Right
| Up
| Down
end
module Class = struct
type t = {
name : string;
parents : t list
}
let all_classes = Hashtbl.create 127
let create n p =
let c = { name = n; parents = p } in
Hashtbl.add all_classes n c;
c
let get = Hashtbl.find all_classes
let rec is_a p c =
(c == p) ||
(List.exists (is_a p) c.parents)
end
module Toplevel = struct
type t =
| Activate
| Desactivate
| Key of int
type 'w m =
| Give_focus of 'w
end
module Cache = struct
type 'a t = 'a Weak.t * (unit -> 'a)
let create f =
let t = Weak.create 1 in
((t,f) : _ t)
let get ((t,f) : _ t) =
match Weak.get t 0 with
| Some v -> v
| None ->
let v = f () in
Weak.set t 0 (Some v);
v
let clear ((t,_) : _ t) =
Weak.set t 0 None
end
module Once = struct
type t = {
mutable already: bool;
queue: (unit -> unit) Queue.t;
func: (unit -> unit)
}
let create q =
{ already = true; queue = q; func = ignore }
let deliver o () =
()
let add o f =
if not o.already then (
o.already <- true;
Queue.add (deliver o) o.queue
)
end
|
39b4f1c1419deb32aaa38bdc673449b69935f54f97ac8b952feec2866130d451 | eponai/sulolive | main.cljs | (ns env.ios.main
(:require [eponai.mobile.ios.core :as core]))
;; Load-time side effect: initialize the mobile app core with an empty
;; server address.
(core/init {:server-address ""})
| null | https://raw.githubusercontent.com/eponai/sulolive/7a70701bbd3df6bbb92682679dcedb53f8822c18/sulo-native/env/prod/env/ios/main.cljs | clojure | (ns env.ios.main
(:require [eponai.mobile.ios.core :as core]))
(core/init {:server-address ""})
| |
c8b42769888b24055964424ead0236bd83df5fd5937f162b396187f096d62d36 | VictorNicollet/Ohm | couchDB.mli | Ohm is © 2012
(* Generic CouchDB failure. *)
exception CouchDB_Error

(* Opaque handle to the CouchDB connection machinery. *)
type implementation

(* Execution context required by the operations below: carries the
   connection [implementation] plus the current [time]. *)
class virtual ctx : object ('self)
  method couchDB : implementation
  method virtual time : float
end

(* Upcast any context to the plain [ctx] type. *)
val ctx_decay : #ctx -> ctx

(* A self-contained, concrete context (construction details are in the
   implementation). *)
class init_ctx : object
  method couchDB : implementation
  method time : float
end

(* Opaque handle to one CouchDB database. *)
type database
(* Host/port/name triple identifying a CouchDB database. *)
module type CONFIG = sig
  val host : string
  val port : int
  val database : string
end

(* Opaque document parsers, consumed by [READ_TABLE.parse]. *)
module Parser : sig
  type 'a t
end

(* Read-only view of a table: documents of type [elt] keyed by [id]. *)
module type READ_TABLE = sig
  val database : database
  type id
  type elt
  (* Fetch a document by id; [None] when missing. *)
  val get : id -> (#ctx, elt option) Run.t
  (* Apply a function to the document, when present. *)
  val using : id -> (elt -> 'a) -> (#ctx,'a option) Run.t
  (* Run a custom parser over the document, when present. *)
  val parse : id -> 'a Parser.t -> (#ctx,'a option) Run.t
  (* Page through ids: up to [count] ids starting from the optional id;
     the second component is presumably the id to resume from — confirm
     in the implementation. *)
  val all_ids : count:int -> id option -> (#ctx,id list * id option) Run.t
end
(* Read-write table operations on top of [READ_TABLE]. *)
module type TABLE = sig
  include READ_TABLE
  (* Insert a fresh document, returning its new id. *)
  val create : elt -> (#ctx,id) Run.t
  (* Return the stored document, presumably storing the (lazily forced)
     default first when absent — confirm in the implementation. *)
  val ensure : id -> elt Lazy.t -> (#ctx,elt) Run.t
  val delete : id -> (#ctx,unit) Run.t
  (* Delete only when the stored document satisfies the predicate. *)
  val delete_if : id -> (elt -> bool) -> (#ctx,unit) Run.t
  (* Store the result of applying the function to the current document
     ([None] when absent). *)
  val replace : id -> (elt option -> elt) -> (#ctx,unit) Run.t
  (* Like [replace], but the function only sees existing documents. *)
  val update : id -> (elt -> elt) -> (#ctx,unit) Run.t
  val set : id -> elt -> (#ctx,unit) Run.t

  (* Low-level writes that report revision conflicts as [`collision]. *)
  module Raw : sig
    val put : id -> elt -> (#ctx,[> `ok | `collision]) Run.t
    val delete : id -> (#ctx,[> `ok | `collision]) Run.t
    val transaction :
      id
      -> (id -> (#ctx as 'ctx,'a * [`put of elt | `keep | `delete]) Run.t)
      -> ('ctx,'a) Run.t
  end

  (* A read-modify-write step: given the current document (if any),
     return a result plus the action to perform. *)
  type ('ctx,'a) update = elt option -> ('ctx,'a * [`put of elt | `keep | `delete]) Run.t
  val transact : id -> (#ctx as 'ctx,'a) update -> ('ctx,'a) Run.t
end
(* A raw-JSON table covering a whole database. *)
module type DATABASE = TABLE with type id = Id.t and type elt = Json_type.t

(* Build a [DATABASE] from connection parameters. *)
module Database :
  functor (Config:CONFIG) ->
    DATABASE

(* Conversion between a custom id type and the raw [Id.t]. *)
module type ID = sig
  type t
  val to_id : t -> Id.t
  val of_id : Id.t -> t
end

(* Typed read-write table stored inside a [DATABASE]: ids are converted
   through [Id], documents (de)serialized through [Type]. *)
module Table :
  functor (Database:DATABASE) ->
    functor (Id:ID) ->
      functor (Type:Fmt.FMT) ->
        TABLE with type id = Id.t and type elt = Type.t

(* Read-only variant of [Table]; [Type] only needs to parse. *)
module ReadTable :
  functor (Database:DATABASE) ->
    functor (Id:ID) ->
      functor (Type:Fmt.READ_FMT) ->
        READ_TABLE with type id = Id.t and type elt = Type.t
(* A named design document living in [Database]. *)
module type DESIGN = sig
  module Database : DATABASE
  val name : string
end

(* A map view: [name] is the view name inside the [Design] document,
   [map] the source of its map function. *)
module type MAP_DEF = sig
  module Key : Fmt.FMT
  module Value : Fmt.READ_FMT
  module Design : DESIGN
  val name : string
  val map : string
end

(* A map view whose rows are read back together with the source document. *)
module type DOC_DEF = sig
  include MAP_DEF
  module Doc : Fmt.READ_FMT
end

(* A map/reduce view; [group] and [level] control result grouping. *)
module type REDUCE_DEF = sig
  include MAP_DEF
  val reduce : string
  val group : bool
  val level : int option
end
(* Queries over a map view. *)
module type MAP_VIEW = sig
  type map_key
  type map_value

  (* A row without its key: document id plus emitted value. *)
  type map_iv = <
    id : Id.t ;
    value : map_value
  > ;;

  (* All rows emitted under exactly [map_key]. *)
  val by_key : map_key -> (#ctx, map_iv list) Run.t

  (* A full row: key, document id and emitted value. *)
  type map_kiv = <
    id : Id.t ;
    key : map_key ;
    value : map_value
  >

  (* Range query over the view: start/end keys with document-id
     tie-breakers, limit, descending order and end-inclusiveness. *)
  val query :
    ?startkey:map_key
    -> ?startid:Id.t
    -> ?endkey:map_key
    -> ?endid:Id.t
    -> ?limit:int
    -> ?descending:bool
    -> ?endinclusive:bool
    -> unit
    -> (#ctx, map_kiv list) Run.t
end

(* Instantiate map-view queries from a view definition. *)
module MapView :
  functor(Def:MAP_DEF) ->
    MAP_VIEW with type map_key = Def.Key.t
             and type map_value = Def.Value.t
(* Queries over a map view that also fetch the underlying documents. *)
module type DOC_VIEW = sig
  type doc_key
  type doc_value
  type doc_doc

  (* A row with its source document attached. *)
  type doc_ivd = <
    id : Id.t ;
    value : doc_value ;
    doc : doc_doc
  >

  (* All rows (with documents) emitted under exactly [doc_key]. *)
  val doc : doc_key -> (#ctx, doc_ivd list) Run.t

  (* A full row: key, document id, emitted value and source document. *)
  type doc_kivd = <
    key : doc_key ;
    id : Id.t ;
    value : doc_value ;
    doc : doc_doc
  >

  (* Range query, as [MAP_VIEW.query], with documents included. *)
  val doc_query :
    ?startkey:doc_key
    -> ?startid:Id.t
    -> ?endkey:doc_key
    -> ?endid:Id.t
    -> ?limit:int
    -> ?descending:bool
    -> ?endinclusive:bool
    -> unit
    -> (#ctx, doc_kivd list) Run.t

  (* First row of the corresponding range query, if any. *)
  val doc_query_first :
    ?startkey:doc_key
    -> ?startid:Id.t
    -> ?endkey:doc_key
    -> ?endid:Id.t
    -> ?descending:bool
    -> ?endinclusive:bool
    -> unit
    -> (#ctx, doc_kivd option) Run.t
end

(* Instantiate document-fetching view queries from a definition. *)
module DocView :
  functor(Def:DOC_DEF) ->
    DOC_VIEW with type doc_key = Def.Key.t
             and type doc_value = Def.Value.t
             and type doc_doc = Def.Doc.t
(* Queries over a reduce view. *)
module type REDUCE_VIEW = sig
  type reduce_key
  type reduce_value
  (* Reduced value for one key; [None] when the key has no rows. *)
  val reduce : reduce_key -> (#ctx, reduce_value option) Run.t
  (* Grouped reduce over a key range. *)
  val reduce_query :
    ?startkey:reduce_key
    -> ?endkey:reduce_key
    -> ?limit:int
    -> ?endinclusive:bool
    -> unit
    -> (#ctx, (reduce_key * reduce_value) list) Run.t
end

(* Instantiate reduce-view queries from a definition. *)
module ReduceView :
  functor (Def:REDUCE_DEF) ->
    REDUCE_VIEW with type reduce_key = Def.Key.t
                and type reduce_value = Def.Value.t

(* Shorthand functors needing only a database name; host/port are
   presumably filled in from project defaults — confirm in couchDB.ml. *)
module Convenience : sig
  module type LOCAL_CONFIG = sig
    val db : string
  end
  module Config : functor(Config:LOCAL_CONFIG) -> CONFIG
  module Database : functor(Config:LOCAL_CONFIG) -> DATABASE
  module Table :
    functor (Config:LOCAL_CONFIG) ->
      functor (Id:ID) ->
        functor (Type:Fmt.FMT) ->
          sig
            module Tbl : TABLE with type id = Id.t and type elt = Type.t
            module Design : DESIGN
          end
end
| null | https://raw.githubusercontent.com/VictorNicollet/Ohm/ca90c162f6c49927c893114491f29d44aaf71feb/src/couchDB.mli | ocaml | Ohm is © 2012
exception CouchDB_Error
type implementation
class virtual ctx : object ('self)
method couchDB : implementation
method virtual time : float
end
val ctx_decay : #ctx -> ctx
class init_ctx : object
method couchDB : implementation
method time : float
end
type database
module type CONFIG = sig
val host : string
val port : int
val database : string
end
module Parser : sig
type 'a t
end
module type READ_TABLE = sig
val database : database
type id
type elt
val get : id -> (#ctx, elt option) Run.t
val using : id -> (elt -> 'a) -> (#ctx,'a option) Run.t
val parse : id -> 'a Parser.t -> (#ctx,'a option) Run.t
val all_ids : count:int -> id option -> (#ctx,id list * id option) Run.t
end
module type TABLE = sig
include READ_TABLE
val create : elt -> (#ctx,id) Run.t
val ensure : id -> elt Lazy.t -> (#ctx,elt) Run.t
val delete : id -> (#ctx,unit) Run.t
val delete_if : id -> (elt -> bool) -> (#ctx,unit) Run.t
val replace : id -> (elt option -> elt) -> (#ctx,unit) Run.t
val update : id -> (elt -> elt) -> (#ctx,unit) Run.t
val set : id -> elt -> (#ctx,unit) Run.t
module Raw : sig
val put : id -> elt -> (#ctx,[> `ok | `collision]) Run.t
val delete : id -> (#ctx,[> `ok | `collision]) Run.t
val transaction :
id
-> (id -> (#ctx as 'ctx,'a * [`put of elt | `keep | `delete]) Run.t)
-> ('ctx,'a) Run.t
end
type ('ctx,'a) update = elt option -> ('ctx,'a * [`put of elt | `keep | `delete]) Run.t
val transact : id -> (#ctx as 'ctx,'a) update -> ('ctx,'a) Run.t
end
module type DATABASE = TABLE with type id = Id.t and type elt = Json_type.t
module Database :
functor (Config:CONFIG) ->
DATABASE
module type ID = sig
type t
val to_id : t -> Id.t
val of_id : Id.t -> t
end
module Table :
functor (Database:DATABASE) ->
functor (Id:ID) ->
functor (Type:Fmt.FMT) ->
TABLE with type id = Id.t and type elt = Type.t
module ReadTable :
functor (Database:DATABASE) ->
functor (Id:ID) ->
functor (Type:Fmt.READ_FMT) ->
READ_TABLE with type id = Id.t and type elt = Type.t
module type DESIGN = sig
module Database : DATABASE
val name : string
end
module type MAP_DEF = sig
module Key : Fmt.FMT
module Value : Fmt.READ_FMT
module Design : DESIGN
val name : string
val map : string
end
module type DOC_DEF = sig
include MAP_DEF
module Doc : Fmt.READ_FMT
end
module type REDUCE_DEF = sig
include MAP_DEF
val reduce : string
val group : bool
val level : int option
end
module type MAP_VIEW = sig
type map_key
type map_value
type map_iv = <
id : Id.t ;
value : map_value
> ;;
val by_key : map_key -> (#ctx, map_iv list) Run.t
type map_kiv = <
id : Id.t ;
key : map_key ;
value : map_value
>
val query :
?startkey:map_key
-> ?startid:Id.t
-> ?endkey:map_key
-> ?endid:Id.t
-> ?limit:int
-> ?descending:bool
-> ?endinclusive:bool
-> unit
-> (#ctx, map_kiv list) Run.t
end
module MapView :
functor(Def:MAP_DEF) ->
MAP_VIEW with type map_key = Def.Key.t
and type map_value = Def.Value.t
module type DOC_VIEW = sig
type doc_key
type doc_value
type doc_doc
type doc_ivd = <
id : Id.t ;
value : doc_value ;
doc : doc_doc
>
val doc : doc_key -> (#ctx, doc_ivd list) Run.t
type doc_kivd = <
key : doc_key ;
id : Id.t ;
value : doc_value ;
doc : doc_doc
>
val doc_query :
?startkey:doc_key
-> ?startid:Id.t
-> ?endkey:doc_key
-> ?endid:Id.t
-> ?limit:int
-> ?descending:bool
-> ?endinclusive:bool
-> unit
-> (#ctx, doc_kivd list) Run.t
val doc_query_first :
?startkey:doc_key
-> ?startid:Id.t
-> ?endkey:doc_key
-> ?endid:Id.t
-> ?descending:bool
-> ?endinclusive:bool
-> unit
-> (#ctx, doc_kivd option) Run.t
end
module DocView :
functor(Def:DOC_DEF) ->
DOC_VIEW with type doc_key = Def.Key.t
and type doc_value = Def.Value.t
and type doc_doc = Def.Doc.t
module type REDUCE_VIEW = sig
type reduce_key
type reduce_value
val reduce : reduce_key -> (#ctx, reduce_value option) Run.t
val reduce_query :
?startkey:reduce_key
-> ?endkey:reduce_key
-> ?limit:int
-> ?endinclusive:bool
-> unit
-> (#ctx, (reduce_key * reduce_value) list) Run.t
end
module ReduceView :
functor (Def:REDUCE_DEF) ->
REDUCE_VIEW with type reduce_key = Def.Key.t
and type reduce_value = Def.Value.t
module Convenience : sig
module type LOCAL_CONFIG = sig
val db : string
end
module Config : functor(Config:LOCAL_CONFIG) -> CONFIG
module Database : functor(Config:LOCAL_CONFIG) -> DATABASE
module Table :
functor (Config:LOCAL_CONFIG) ->
functor (Id:ID) ->
functor (Type:Fmt.FMT) ->
sig
module Tbl : TABLE with type id = Id.t and type elt = Type.t
module Design : DESIGN
end
end
| |
3d15263d890452a5b0310734632fc941d4d1a2bff5d1a506956e95a056383d6d | helium/miner | miner_poc_SUITE.erl | -module(miner_poc_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("blockchain/include/blockchain_vars.hrl").
-export([
groups/0, all/0, init_per_group/2, end_per_group/2
]).
-export([
init_per_testcase/2,
end_per_testcase/2,
basic_test/1,
basic_test_light_gateway/1,
poc_dist_v1_test/1,
poc_dist_v2_test/1,
poc_dist_v4_test/1,
poc_dist_v4_partitioned_test/1,
poc_dist_v5_test/1,
poc_dist_v5_partitioned_test/1,
poc_dist_v5_partitioned_lying_test/1,
poc_dist_v6_test/1,
poc_dist_v6_partitioned_test/1,
poc_dist_v6_partitioned_lying_test/1,
poc_dist_v7_test/1,
poc_dist_v7_partitioned_test/1,
poc_dist_v7_partitioned_lying_test/1,
poc_dist_v8_test/1,
poc_dist_v8_partitioned_test/1,
poc_dist_v8_partitioned_lying_test/1,
no_status_v8_test/1,
restart_test/1,
poc_dist_v10_test/1,
poc_dist_v10_partitioned_test/1,
poc_dist_v10_partitioned_lying_test/1,
poc_dist_v11_test/1,
poc_dist_v11_cn_test/1,
poc_dist_v11_partitioned_test/1,
poc_dist_v11_partitioned_lying_test/1
]).
-define(SFLOCS, [631210968910285823, 631210968909003263, 631210968912894463, 631210968907949567]).
-define(NYLOCS, [631243922668565503, 631243922671147007, 631243922895615999, 631243922665907711]).
-define(AUSTINLOCS1, [631781084745290239, 631781089167934463, 631781054839691775, 631781050465723903]).
-define(AUSTINLOCS2, [631781452049762303, 631781453390764543, 631781452924144639, 631781452838965759]).
-define(LALOCS, [631236297173835263, 631236292179769855, 631236329165333503, 631236328049271807]).
-define(CNLOCS1, [
631649369216118271, %% spare-tortilla-raccoon
631649369235022335, %% kind-tangerine-octopus
631649369177018879, %% damp-hemp-pangolin
631649369175419391 %% fierce-lipstick-poodle
]).
-define(CNLOCS2, [
631649369213830655, %% raspy-parchment-pike
631649369205533183, %% fresh-gingham-porpoise
%% NOTE(review): this entry was corrupted in extraction — the location
%% literal was lost.  Original was a fourth H3 index commented
%% "innocent-irish-pheasant"; restore it from upstream before use.
631649368709059071 %% glorious-eggshell-finch
]).
%%--------------------------------------------------------------------
%% COMMON TEST CALLBACK FUNCTIONS
%%--------------------------------------------------------------------
%% CT groups: the suite runs once under poc_target_v3 (default targeting
%% vars, full case list) and once under poc_target_v4 (h3dex targeting
%% vars from init_per_group/2, v11-only case list).
groups() ->
    [ {poc_target_v3,
       [],
       all()
      },
      {poc_target_v4,
       [],
       v11_test_cases()
      }].
%%--------------------------------------------------------------------
%% @public
%% @doc
%% Running tests for this suite
%% @end
%%--------------------------------------------------------------------
%% Full list of test cases for the default group.  Two lines below lost
%% their comment markers during extraction (leaving bare words in the
%% list, which does not parse); they are restored as comments here.
all() ->
    [
     basic_test,
     basic_test_light_gateway,
     poc_dist_v8_test,
     poc_dist_v8_partitioned_test,
     poc_dist_v8_partitioned_lying_test,
     poc_dist_v10_test,
     poc_dist_v10_partitioned_test,
     poc_dist_v10_partitioned_lying_test,
     poc_dist_v11_test,
     poc_dist_v11_cn_test,
     poc_dist_v11_partitioned_test,
     poc_dist_v11_partitioned_lying_test,
     %% uncomment when poc placement enforcement starts.
     %% no_status_v8_test,
     restart_test].
%% Test cases run under the poc_target_v4 group (v11 targeting only).
%% As in all/0, a comment line had lost its %% marker and is restored.
v11_test_cases() ->
    [
     basic_test,
     basic_test_light_gateway,
     poc_dist_v11_test,
     poc_dist_v11_cn_test,
     poc_dist_v11_partitioned_test,
     poc_dist_v11_partitioned_lying_test,
     %% uncomment when poc placement enforcement starts.
     %% no_status_v8_test,
     restart_test].
%% Per-group config: poc_target_v3 keeps the chain-var defaults (empty
%% targeting map), while poc_target_v4 injects h3dex-based targeting
%% vars.  Each test case merges poc_targeting_vars into its poc vars.
init_per_group(poc_target_v3, Config) ->
    [
     {poc_targeting_vars, #{}}
     | Config
    ];
init_per_group(poc_target_v4, Config) ->
    [
     {poc_targeting_vars, #{h3dex_gc_width => 10,
                            poc_targeting_version => 4,
                            poc_target_pool_size => 2,
                            poc_hexing_type => hex_h3dex,
                            hip17_interactivity_blocks => 20}}
     | Config
    ].
%% basic_test, basic_test_light_gateway and restart_test start their own
%% local chain and only need a per-case base dir; every other case goes
%% through the full miner_ct_utils cluster setup.
init_per_testcase(basic_test = TestCase, Config) ->
    miner_ct_utils:init_base_dir_config(?MODULE, TestCase, Config);
init_per_testcase(basic_test_light_gateway = TestCase, Config) ->
    miner_ct_utils:init_base_dir_config(?MODULE, TestCase, Config);
init_per_testcase(restart_test = TestCase, Config) ->
    miner_ct_utils:init_base_dir_config(?MODULE, TestCase, Config);
init_per_testcase(TestCase, Config0) ->
    miner_ct_utils:init_per_testcase(?MODULE, TestCase, Config0).
%% Teardown.  The locally-run cases stop the poc statem and remove their
%% base dir only on success, keeping it around on failure for analysis.
%% All other cases stop the fake radio backplane and delegate cluster
%% teardown to miner_ct_utils.
end_per_testcase(TestCase, Config) when TestCase == basic_test;
                                        TestCase == basic_test_light_gateway ->
    catch gen_statem:stop(miner_poc_statem),
    case ?config(tc_status, Config) of
        ok ->
            %% test passed, we can cleanup
            BaseDir = ?config(base_dir, Config),
            os:cmd("rm -rf "++ BaseDir),
            ok;
        _ ->
            %% leave results alone for analysis
            ok
    end;
end_per_testcase(restart_test, Config) ->
    catch gen_statem:stop(miner_poc_statem),
    case ?config(tc_status, Config) of
        ok ->
            %% test passed, we can cleanup
            BaseDir = ?config(base_dir, Config),
            os:cmd("rm -rf "++BaseDir),
            ok;
        _ ->
            %% leave results alone for analysis
            ok
    end;
end_per_testcase(TestCase, Config) ->
    gen_server:stop(miner_fake_radio_backplane),
    miner_ct_utils:end_per_testcase(TestCase, Config).
%% Nothing group-scoped to tear down.
end_per_group(_, _Config) ->
    ok.
%%--------------------------------------------------------------------
%% TEST CASES
%%--------------------------------------------------------------------
%% Wrappers for the older poc versions (v1-v7).  Each builds that
%% version's chain vars on top of the common poc vars and passes its own
%% name plus the vars to run_dist_with_params/3.
poc_dist_v1_test(Config) ->
    ct:pal("Config ~p", [Config]),
    %% Dont think it matters if v1 takes all the other common vars
    %% Just don't set any poc_version here
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v1_test, Config, CommonPOCVars).

poc_dist_v2_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v2_test, Config, maps:put(?poc_version, 2, CommonPOCVars)).

poc_dist_v4_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v4_test, Config, maps:put(?poc_version, 4, CommonPOCVars)).

poc_dist_v4_partitioned_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v4_partitioned_test, Config, maps:put(?poc_version, 4, CommonPOCVars)).

poc_dist_v5_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v5_test, Config, maps:put(?poc_version, 5, CommonPOCVars)).

poc_dist_v5_partitioned_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v5_partitioned_test, Config, maps:put(?poc_version, 5, CommonPOCVars)).

poc_dist_v5_partitioned_lying_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v5_partitioned_lying_test, Config, maps:put(?poc_version, 5, CommonPOCVars)).

poc_dist_v6_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v6_test, Config, maps:put(?poc_version, 6, CommonPOCVars)).

poc_dist_v6_partitioned_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v6_partitioned_test, Config, maps:put(?poc_version, 6, CommonPOCVars)).

poc_dist_v6_partitioned_lying_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v6_partitioned_lying_test, Config, maps:put(?poc_version, 6, CommonPOCVars)).

poc_dist_v7_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v7_test, Config, maps:put(?poc_version, 7, CommonPOCVars)).

poc_dist_v7_partitioned_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v7_partitioned_test, Config, maps:put(?poc_version, 7, CommonPOCVars)).

poc_dist_v7_partitioned_lying_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v7_partitioned_lying_test, Config, maps:put(?poc_version, 7, CommonPOCVars)).
%% v8/v10 wrappers: merge the common poc vars with the version-specific
%% extra vars and run the shared distributed scenario under the
%% wrapper's own name.
poc_dist_v8_test(Config) ->
    Vars = maps:merge(common_poc_vars(Config), extra_vars(poc_v8)),
    run_dist_with_params(poc_dist_v8_test, Config, Vars).

poc_dist_v8_partitioned_test(Config) ->
    Vars = maps:merge(common_poc_vars(Config), extra_vars(poc_v8)),
    run_dist_with_params(poc_dist_v8_partitioned_test, Config, Vars).

poc_dist_v8_partitioned_lying_test(Config) ->
    Vars = maps:merge(common_poc_vars(Config), extra_vars(poc_v8)),
    run_dist_with_params(poc_dist_v8_partitioned_lying_test, Config, Vars).

%% Same scenario as poc_dist_v8_test; the extra 'false' flag is passed
%% through to run_dist_with_params/4 (presumably disabling gateway
%% status submission — see that helper).
no_status_v8_test(Config) ->
    Vars = maps:merge(common_poc_vars(Config), extra_vars(poc_v8)),
    run_dist_with_params(poc_dist_v8_test, Config, Vars, false).

poc_dist_v10_test(Config) ->
    Vars = maps:merge(common_poc_vars(Config), extra_vars(poc_v10)),
    run_dist_with_params(poc_dist_v10_test, Config, Vars).

poc_dist_v10_partitioned_test(Config) ->
    Vars = maps:merge(common_poc_vars(Config), extra_vars(poc_v10)),
    run_dist_with_params(poc_dist_v10_partitioned_test, Config, Vars).

poc_dist_v10_partitioned_lying_test(Config) ->
    Vars = maps:merge(common_poc_vars(Config), extra_vars(poc_v10)),
    run_dist_with_params(poc_dist_v10_partitioned_lying_test, Config, Vars).
%% v11 wrappers: like the v8/v10 ones, but additionally fold in the
%% per-group poc targeting vars placed in Config by init_per_group/2.
poc_dist_v11_test(Config) ->
    Vars = maps:merge(
             maps:merge(common_poc_vars(Config), ?config(poc_targeting_vars, Config)),
             extra_vars(poc_v11)),
    run_dist_with_params(poc_dist_v11_test, Config, Vars).

poc_dist_v11_cn_test(Config) ->
    Vars = maps:merge(
             maps:merge(common_poc_vars(Config), ?config(poc_targeting_vars, Config)),
             extra_vars(poc_v11)),
    run_dist_with_params(poc_dist_v11_cn_test, Config, Vars).

poc_dist_v11_partitioned_test(Config) ->
    Vars = maps:merge(
             maps:merge(common_poc_vars(Config), ?config(poc_targeting_vars, Config)),
             extra_vars(poc_v11)),
    run_dist_with_params(poc_dist_v11_partitioned_test, Config, Vars).

poc_dist_v11_partitioned_lying_test(Config) ->
    Vars = maps:merge(
             maps:merge(common_poc_vars(Config), ?config(poc_targeting_vars, Config)),
             extra_vars(poc_v11)),
    run_dist_with_params(poc_dist_v11_partitioned_lying_test, Config, Vars).
basic_test(Config) ->
BaseDir = ?config(base_dir, Config),
{PrivKey, PubKey} = new_random_key(ecc_compact),
SigFun = libp2p_crypto:mk_sig_fun(PrivKey),
ECDHFun = libp2p_crypto:mk_ecdh_fun(PrivKey),
Opts = [
{key, {PubKey, SigFun, ECDHFun}},
{seed_nodes, []},
{port, 0},
{num_consensus_members, 7},
{base_dir, BaseDir}
],
{ok, _Sup} = blockchain_sup:start_link(Opts),
?assert(erlang:is_pid(blockchain_swarm:swarm())),
% Now add genesis
% Generate fake blockchains (just the keys)
RandomKeys = miner_ct_utils:generate_keys(6),
Address = blockchain_swarm:pubkey_bin(),
ConsensusMembers = [
{Address, {PubKey, PrivKey, libp2p_crypto:mk_sig_fun(PrivKey)}}
] ++ RandomKeys,
% Create genesis block
Balance = 5000,
ConbaseTxns = [blockchain_txn_coinbase_v1:new(Addr, Balance)
|| {Addr, _} <- ConsensusMembers],
ConbaseDCTxns = [blockchain_txn_dc_coinbase_v1:new(Addr, Balance)
|| {Addr, _} <- ConsensusMembers],
GenConsensusGroupTx = blockchain_txn_consensus_group_v1:new([Addr || {Addr, _} <- ConsensusMembers], <<>>, 1, 0),
VarsKeys = libp2p_crypto:generate_keys(ecc_compact),
VarsTx = miner_ct_utils:make_vars(VarsKeys, #{?poc_challenge_interval => 20}),
Txs = ConbaseTxns ++ ConbaseDCTxns ++ [GenConsensusGroupTx] ++ VarsTx,
GenesisBlock = blockchain_block_v1:new_genesis_block(Txs),
ok = blockchain_worker:integrate_genesis_block(GenesisBlock),
Chain = blockchain_worker:blockchain(),
{ok, HeadBlock} = blockchain:head_block(Chain),
?assertEqual(blockchain_block:hash_block(GenesisBlock), blockchain_block:hash_block(HeadBlock)),
?assertEqual({ok, GenesisBlock}, blockchain:head_block(Chain)),
?assertEqual({ok, blockchain_block:hash_block(GenesisBlock)}, blockchain:genesis_hash(Chain)),
?assertEqual({ok, GenesisBlock}, blockchain:genesis_block(Chain)),
?assertEqual({ok, 1}, blockchain:height(Chain)),
% All these point are in a line one after the other (except last)
LatLongs = [
{{37.780586, -122.469471}, {PrivKey, PubKey}},
{{37.780959, -122.467496}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.78101, -122.465372}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781179, -122.463226}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781281, -122.461038}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781349, -122.458892}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781468, -122.456617}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781637, -122.4543}, miner_ct_utils:new_random_key(ecc_compact)}
],
%% Add a Gateway
AddGatewayTxs = miner_ct_utils:build_gateways(LatLongs, {PrivKey, PubKey}),
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, AddGatewayTxs),
true = miner_ct_utils:wait_until(fun() -> {ok, 2} =:= blockchain:height(Chain) end),
% Assert the Gateways location
AssertLocaltionTxns = miner_ct_utils:build_asserts(LatLongs, {PrivKey, PubKey}),
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, AssertLocaltionTxns),
true = miner_ct_utils:wait_until(fun() -> {ok, 3} =:= blockchain:height(Chain) end),
{ok, Statem} = miner_poc_statem:start_link(#{delay => 5}),
?assertEqual(requesting, erlang:element(1, sys:get_state(Statem))),
?assertEqual(Chain, erlang:element(3, erlang:element(2, sys:get_state(Statem)))), % Blockchain is = to Chain
%% State is requesting
% Mock submit_txn to actually add the block
meck:new(blockchain_worker, [passthrough]),
meck:expect(blockchain_worker, submit_txn, fun(Txn, _) ->
miner_ct_utils:add_block(Chain, ConsensusMembers, [Txn])
end),
meck:new(miner_onion, [passthrough]),
meck:expect(miner_onion, dial_framed_stream, fun(_, _, _) ->
{ok, self()}
end),
meck:new(miner_onion_handler, [passthrough]),
meck:expect(miner_onion_handler, send, fun(Stream, _Onion) ->
?assertEqual(self(), Stream)
end),
meck:new(blockchain_txn_poc_receipts_v1, [passthrough]),
meck:expect(blockchain_txn_poc_receipts_v1, is_valid, fun(_, _) -> ok end),
?assertEqual(30, erlang:element(15, erlang:element(2, sys:get_state(Statem)))),
% Add some block to start process
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, []),
%% 3 previous blocks + 1 block to start process + 1 block with txn
true = miner_ct_utils:wait_until(fun() -> {ok, 5} =:= blockchain:height(Chain) end),
% Moving threw targeting and challenging
true = miner_ct_utils:wait_until(fun() ->
case sys:get_state(Statem) of
{receiving, _} -> true;
_Other -> false
end
end),
%% Send 7 receipts and add blocks to pass timeout
?assertEqual(0, maps:size(erlang:element(11, erlang:element(2, sys:get_state(Statem))))),
Challengees = erlang:element(9, erlang:element(2, sys:get_state(Statem))),
ok = send_receipts(LatLongs, Challengees),
timer:sleep(100),
?assertEqual(receiving, erlang:element(6, erlang:element(2, sys:get_state(Statem)))),
?assert(maps:size(erlang:element(11, erlang:element(2, sys:get_state(Statem)))) > 0), % Get responses
% Passing receiving_timeout
lists:foreach(
fun(_) ->
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, []),
timer:sleep(100)
end,
lists:seq(1, 20)
),
?assertEqual(receiving, erlang:element(1, sys:get_state(Statem))),
?assertEqual(0, erlang:element(12, erlang:element(2, sys:get_state(Statem)))), % Get receiving_timeout
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, []),
true = miner_ct_utils:wait_until(fun() ->
case sys:get_state(Statem) of
{waiting, _} -> true;
{submitting, _} -> true;
{requesting, _} -> true;
{_Other, _} -> false
end
end),
?assert(meck:validate(blockchain_worker)),
meck:unload(blockchain_worker),
?assert(meck:validate(miner_onion)),
meck:unload(miner_onion),
?assert(meck:validate(miner_onion_handler)),
meck:unload(miner_onion_handler),
?assert(meck:validate(blockchain_txn_poc_receipts_v1)),
meck:unload(blockchain_txn_poc_receipts_v1),
ok = gen_statem:stop(Statem),
ok.
%% Same as basic_test, but the local gateway is switched from full mode to
%% light mode before the POC statem is started. A light gateway must start
%% in the requesting state and stay there forever, even after the poc
%% challenge interval has elapsed.
basic_test_light_gateway(Config) ->
    %% same test as above but this time we change the local gateway from full mode to light mode
    %% this is done before we start the POC statem
    %% when the POC statem is started it should default to requesting
    %% and remain in requesting even after it has exceeded the poc interval
    %% light gateways will never move out of requesting state
    BaseDir = ?config(base_dir, Config),
    {PrivKey, PubKey} = new_random_key(ecc_compact),
    SigFun = libp2p_crypto:mk_sig_fun(PrivKey),
    ECDHFun = libp2p_crypto:mk_ecdh_fun(PrivKey),
    Opts = [
        {key, {PubKey, SigFun, ECDHFun}},
        {seed_nodes, []},
        {port, 0},
        {num_consensus_members, 7},
        {base_dir, BaseDir}
    ],
    {ok, _Sup} = blockchain_sup:start_link(Opts),
    ?assert(erlang:is_pid(blockchain_swarm:swarm())),

    %% Now add genesis
    %% Generate fake blockchains (just the keys)
    RandomKeys = miner_ct_utils:generate_keys(6),
    Address = blockchain_swarm:pubkey_bin(),
    ConsensusMembers = [
        {Address, {PubKey, PrivKey, libp2p_crypto:mk_sig_fun(PrivKey)}}
    ] ++ RandomKeys,

    %% Create genesis block
    Balance = 5000,
    ConbaseTxns = [blockchain_txn_coinbase_v1:new(Addr, Balance)
                   || {Addr, _} <- ConsensusMembers],
    ConbaseDCTxns = [blockchain_txn_dc_coinbase_v1:new(Addr, Balance)
                     || {Addr, _} <- ConsensusMembers],
    GenConsensusGroupTx = blockchain_txn_consensus_group_v1:new([Addr || {Addr, _} <- ConsensusMembers], <<>>, 1, 0),
    VarsKeys = libp2p_crypto:generate_keys(ecc_compact),
    ExtraVars = #{?poc_challenge_interval => 20},
    ct:pal("extra vars: ~p", [ExtraVars]),
    VarsTx = miner_ct_utils:make_vars(VarsKeys, ExtraVars),

    Txs = ConbaseTxns ++ ConbaseDCTxns ++ [GenConsensusGroupTx] ++ VarsTx,
    GenesisBlock = blockchain_block_v1:new_genesis_block(Txs),
    ok = blockchain_worker:integrate_genesis_block(GenesisBlock),

    Chain = blockchain_worker:blockchain(),
    {ok, HeadBlock} = blockchain:head_block(Chain),
    ?assertEqual(blockchain_block:hash_block(GenesisBlock), blockchain_block:hash_block(HeadBlock)),
    ?assertEqual({ok, GenesisBlock}, blockchain:head_block(Chain)),
    ?assertEqual({ok, blockchain_block:hash_block(GenesisBlock)}, blockchain:genesis_hash(Chain)),
    ?assertEqual({ok, GenesisBlock}, blockchain:genesis_block(Chain)),
    ?assertEqual({ok, 1}, blockchain:height(Chain)),

    %% All these point are in a line one after the other (except last)
    LatLongs = [
        {{37.780586, -122.469471}, {PrivKey, PubKey}},
        {{37.780959, -122.467496}, miner_ct_utils:new_random_key(ecc_compact)},
        {{37.78101, -122.465372}, miner_ct_utils:new_random_key(ecc_compact)},
        {{37.781179, -122.463226}, miner_ct_utils:new_random_key(ecc_compact)},
        {{37.781281, -122.461038}, miner_ct_utils:new_random_key(ecc_compact)},
        {{37.781349, -122.458892}, miner_ct_utils:new_random_key(ecc_compact)},
        {{37.781468, -122.456617}, miner_ct_utils:new_random_key(ecc_compact)},
        {{37.781637, -122.4543}, miner_ct_utils:new_random_key(ecc_compact)}
    ],

    %% Add a Gateway
    AddGatewayTxs = miner_ct_utils:build_gateways(LatLongs, {PrivKey, PubKey}),
    ok = miner_ct_utils:add_block(Chain, ConsensusMembers, AddGatewayTxs),
    true = miner_ct_utils:wait_until(fun() -> {ok, 2} =:= blockchain:height(Chain) end),

    %% Assert the Gateways location
    AssertLocaltionTxns = miner_ct_utils:build_asserts(LatLongs, {PrivKey, PubKey}),
    ok = miner_ct_utils:add_block(Chain, ConsensusMembers, AssertLocaltionTxns),
    true = miner_ct_utils:wait_until(fun() -> {ok, 3} =:= blockchain:height(Chain) end),

    Chain = blockchain_worker:blockchain(),
    Ledger = blockchain:ledger(Chain),
    %% update the local gateway to light mode
    %% we do this before we start the poc statem
    %% thereafter it should never move out of requesting state
    Ledger1 = blockchain_ledger_v1:new_context(Ledger),
    {ok, GWInfo} = blockchain_gateway_cache:get(Address, Ledger1),
    GWInfo2 = blockchain_ledger_gateway_v2:mode(light, GWInfo),
    blockchain_ledger_v1:update_gateway(GWInfo2, Address, Ledger1),
    ok = blockchain_ledger_v1:commit_context(Ledger1),

    {ok, Statem} = miner_poc_statem:start_link(#{delay => 5}),
    %% assert default states
    ct:pal("got state ~p", [sys:get_state(Statem)]),
    ?assertEqual(requesting, erlang:element(1, sys:get_state(Statem))),
    %% State is requesting

    %% Mock submit_txn to add blocks
    meck:new(blockchain_worker, [passthrough]),
    meck:expect(blockchain_worker, submit_txn, fun(Txn, _) ->
        miner_ct_utils:add_block(Chain, ConsensusMembers, [Txn])
    end),

    %% Add some block to start process
    ok = miner_ct_utils:add_block(Chain, ConsensusMembers, []),
    %% 3 previous blocks + 1 block to start process (no POC req txn will have been submitted by the statem)
    true = miner_ct_utils:wait_until(fun() -> ct:pal("height: ~p", [blockchain:height(Chain)]), {ok, 4} =:= blockchain:height(Chain) end),

    %% confirm we DO NOT move from requesting state
    true = miner_ct_utils:wait_until(fun() ->
        case sys:get_state(Statem) of
            {requesting, _} -> true;
            _Other -> ct:pal("got other state ~p", [_Other]), false
        end
    end),
    ?assertEqual(requesting, erlang:element(6, erlang:element(2, sys:get_state(Statem)))),

    %% Passing poc interval
    lists:foreach(
        fun(_) ->
            ok = miner_ct_utils:add_block(Chain, ConsensusMembers, []),
            timer:sleep(100)
        end,
        lists:seq(1, 25)
    ),

    %% confirm we remain in requesting state
    ?assertEqual(requesting, erlang:element(1, sys:get_state(Statem))),

    ?assert(meck:validate(blockchain_worker)),
    meck:unload(blockchain_worker),
    ok = gen_statem:stop(Statem),
    ok.
%% Drive the POC statem into the receiving state, kill it, restart it with
%% the same base_dir and verify it restores the receiving state from disk,
%% then continue the receipt flow to completion.
restart_test(Config) ->
    BaseDir = ?config(base_dir, Config),
    {PrivKey, PubKey} = new_random_key(ecc_compact),
    SigFun = libp2p_crypto:mk_sig_fun(PrivKey),
    ECDHFun = libp2p_crypto:mk_ecdh_fun(PrivKey),
    Opts = [
        {key, {PubKey, SigFun, ECDHFun}},
        {seed_nodes, []},
        {port, 0},
        {num_consensus_members, 7},
        {base_dir, BaseDir}
    ],
    {ok, _Sup} = blockchain_sup:start_link(Opts),
    ?assert(erlang:is_pid(blockchain_swarm:swarm())),

    %% Now add genesis
    %% Generate fake blockchains (just the keys)
    RandomKeys = miner_ct_utils:generate_keys(6),
    Address = blockchain_swarm:pubkey_bin(),
    ConsensusMembers = [
        {Address, {PubKey, PrivKey, libp2p_crypto:mk_sig_fun(PrivKey)}}
    ] ++ RandomKeys,

    %% Create genesis block
    Balance = 5000,
    ConbaseTxns = [blockchain_txn_coinbase_v1:new(Addr, Balance)
                   || {Addr, _} <- ConsensusMembers],
    ConbaseDCTxns = [blockchain_txn_dc_coinbase_v1:new(Addr, Balance)
                     || {Addr, _} <- ConsensusMembers],
    GenConsensusGroupTx = blockchain_txn_consensus_group_v1:new([Addr || {Addr, _} <- ConsensusMembers], <<>>, 1, 0),
    VarsKeys = libp2p_crypto:generate_keys(ecc_compact),
    VarsTx = miner_ct_utils:make_vars(VarsKeys, #{?poc_challenge_interval => 20}),

    Txs = ConbaseTxns ++ ConbaseDCTxns ++ [GenConsensusGroupTx] ++ VarsTx,
    GenesisBlock = blockchain_block_v1:new_genesis_block(Txs),
    ok = blockchain_worker:integrate_genesis_block(GenesisBlock),

    Chain = blockchain_worker:blockchain(),
    {ok, HeadBlock} = blockchain:head_block(Chain),
    ?assertEqual(blockchain_block:hash_block(GenesisBlock), blockchain_block:hash_block(HeadBlock)),
    ?assertEqual({ok, GenesisBlock}, blockchain:head_block(Chain)),
    ?assertEqual({ok, blockchain_block:hash_block(GenesisBlock)}, blockchain:genesis_hash(Chain)),
    ?assertEqual({ok, GenesisBlock}, blockchain:genesis_block(Chain)),
    ?assertEqual({ok, 1}, blockchain:height(Chain)),

    %% All these point are in a line one after the other (except last)
    LatLongs = [
        {{37.780586, -122.469471}, {PrivKey, PubKey}},
        {{37.780959, -122.467496}, miner_ct_utils:new_random_key(ecc_compact)},
        {{37.78101, -122.465372}, miner_ct_utils:new_random_key(ecc_compact)},
        {{37.781179, -122.463226}, miner_ct_utils:new_random_key(ecc_compact)},
        {{37.781281, -122.461038}, miner_ct_utils:new_random_key(ecc_compact)},
        {{37.781349, -122.458892}, miner_ct_utils:new_random_key(ecc_compact)},
        {{37.781468, -122.456617}, miner_ct_utils:new_random_key(ecc_compact)},
        {{37.781637, -122.4543}, miner_ct_utils:new_random_key(ecc_compact)}
    ],

    %% Add a Gateway
    AddGatewayTxs = miner_ct_utils:build_gateways(LatLongs, {PrivKey, PubKey}),
    ok = miner_ct_utils:add_block(Chain, ConsensusMembers, AddGatewayTxs),
    true = miner_ct_utils:wait_until(fun() -> {ok, 2} =:= blockchain:height(Chain) end),

    %% Assert the Gateways location
    AssertLocaltionTxns = miner_ct_utils:build_asserts(LatLongs, {PrivKey, PubKey}),
    ok = miner_ct_utils:add_block(Chain, ConsensusMembers, AssertLocaltionTxns),
    true = miner_ct_utils:wait_until(fun() -> {ok, 3} =:= blockchain:height(Chain) end),

    {ok, Statem0} = miner_poc_statem:start_link(#{delay => 5,
                                                  base_dir => BaseDir}),
    ?assertEqual(requesting, erlang:element(1, sys:get_state(Statem0))),
    ?assertEqual(Chain, erlang:element(3, erlang:element(2, sys:get_state(Statem0)))), % Blockchain is = to Chain
    %% State is requesting

    %% Mock submit_txn to actually add the block
    meck:new(blockchain_worker, [passthrough]),
    meck:expect(blockchain_worker, submit_txn, fun(Txn, _) ->
        miner_ct_utils:add_block(Chain, ConsensusMembers, [Txn])
    end),
    meck:new(miner_onion, [passthrough]),
    meck:expect(miner_onion, dial_framed_stream, fun(_, _, _) ->
        {ok, self()}
    end),
    meck:new(miner_onion_handler, [passthrough]),
    meck:expect(miner_onion_handler, send, fun(Stream, _Onion) ->
        ?assertEqual(self(), Stream)
    end),
    meck:new(blockchain_txn_poc_receipts_v1, [passthrough]),
    meck:expect(blockchain_txn_poc_receipts_v1, is_valid, fun(_, _) -> ok end),

    ?assertEqual(30, erlang:element(15, erlang:element(2, sys:get_state(Statem0)))),

    %% Add some block to start process
    ok = miner_ct_utils:add_block(Chain, ConsensusMembers, []),
    %% 3 previous blocks + 1 block to start process + 1 block with txn
    true = miner_ct_utils:wait_until(fun() -> {ok, 5} =:= blockchain:height(Chain) end),

    %% Moving through targeting and challenging
    true = miner_ct_utils:wait_until(
             fun() ->
                     case sys:get_state(Statem0) of
                         {receiving, _} -> true;
                         _Other ->
                             ct:pal("other state ~p", [_Other]),
                             false
                     end
             end),

    %% KILLING STATEM AND RESTARTING
    ok = gen_statem:stop(Statem0),
    {ok, Statem1} = miner_poc_statem:start_link(#{delay => 5,
                                                  base_dir => BaseDir}),
    ?assertEqual(receiving, erlang:element(1, sys:get_state(Statem1))),
    ?assertEqual(receiving, erlang:element(6, erlang:element(2, sys:get_state(Statem1)))),

    %% Send 7 receipts and add blocks to pass timeout
    ?assertEqual(0, maps:size(erlang:element(11, erlang:element(2, sys:get_state(Statem1))))),
    Challengees = erlang:element(9, erlang:element(2, sys:get_state(Statem1))),
    ok = send_receipts(LatLongs, Challengees),
    timer:sleep(100),

    ?assertEqual(receiving, erlang:element(1, sys:get_state(Statem1))),
    ?assertEqual(receiving, erlang:element(6, erlang:element(2, sys:get_state(Statem1)))),
    ?assert(maps:size(erlang:element(11, erlang:element(2, sys:get_state(Statem1)))) > 0), % Get responses

    %% Passing receiving_timeout
    lists:foreach(
        fun(_) ->
            ok = miner_ct_utils:add_block(Chain, ConsensusMembers, []),
            timer:sleep(100)
        end,
        lists:seq(1, 10)
    ),

    ?assertEqual(receiving, erlang:element(1, sys:get_state(Statem1))),
    ?assertEqual(0, erlang:element(12, erlang:element(2, sys:get_state(Statem1)))), % Get receiving_timeout
    ok = miner_ct_utils:add_block(Chain, ConsensusMembers, []),

    true = miner_ct_utils:wait_until(
             fun() ->
                     case sys:get_state(Statem1) of
                         {waiting, _} -> true;
                         {submitting, _} -> true;
                         {requesting, _} -> true;
                         {_Other, _} ->
                             ct:pal("other state ~p", [_Other]),
                             false
                     end
             end),

    ?assert(meck:validate(blockchain_worker)),
    meck:unload(blockchain_worker),
    ?assert(meck:validate(miner_onion)),
    meck:unload(miner_onion),
    ?assert(meck:validate(miner_onion_handler)),
    meck:unload(miner_onion_handler),
    ?assert(meck:validate(blockchain_txn_poc_receipts_v1)),
    meck:unload(blockchain_txn_poc_receipts_v1),
    ok = gen_statem:stop(Statem1),
    ok.
%% ------------------------------------------------------------------
%% Internal Function Definitions
%% ------------------------------------------------------------------
%% For every gateway keypair in LatLongs that appears in the Challengees
%% keylist, build, sign and deliver a PoC receipt to the statem. Gateways
%% that were not challenged are skipped. Always returns ok.
send_receipts(LatLongs, Challengees) ->
    _ = [send_one_receipt(KeyPair, Challengees) || {_LatLong, KeyPair} <- LatLongs],
    ok.

%% Deliver a single signed receipt for one gateway, or do nothing if the
%% gateway's address is not among the challengees.
send_one_receipt({PrivKey, PubKey}, Challengees) ->
    Address = libp2p_crypto:pubkey_to_bin(PubKey),
    SigFun = libp2p_crypto:mk_sig_fun(PrivKey),
    {Mega, Sec, Micro} = os:timestamp(),
    Timestamp = Mega * 1000000 * 1000000 + Sec * 1000000 + Micro,
    case lists:keyfind(Address, 1, Challengees) of
        {Address, LayerData} ->
            Receipt = blockchain_poc_receipt_v1:new(Address, Timestamp, 0, LayerData, radio),
            SignedReceipt = blockchain_poc_receipt_v1:sign(Receipt, SigFun),
            miner_poc_statem:receipt(make_ref(), SignedReceipt, "/ip4/127.0.0.1/tcp/1234");
        _ ->
            ok
    end.
%% Generate a fresh keypair on the given curve, returned as {PrivKey, PubKey}.
new_random_key(Curve) ->
    Keys = libp2p_crypto:generate_keys(Curve),
    {maps:get(secret, Keys), maps:get(public, Keys)}.
%% Convenience wrapper around run_dist_with_params/4 with Status
%% defaulted to true (Status is forwarded to the fake radio backplane;
%% presumably "honest" behaviour — see setup_dist_test).
run_dist_with_params(TestCase, Config, VarMap) ->
    run_dist_with_params(TestCase, Config, VarMap, true).
%% Full driver for a distributed PoC test: set up the chain and the fake
%% radio backplane, run the per-testcase checks, then log the final
%% receipt map and counters for debugging.
run_dist_with_params(TestCase, Config, VarMap, Status) ->
    ok = setup_dist_test(TestCase, Config, VarMap, Status),
    %% Execute the test
    ok = exec_dist_test(TestCase, Config, VarMap, Status),
    %% show the final receipt counter
    Miners = ?config(miners, Config),
    FinalReceiptMap = challenger_receipts_map(find_receipts(Miners)),
    ct:pal("FinalReceiptMap: ~p", [FinalReceiptMap]),
    ct:pal("FinalReceiptCounter: ~p", [receipt_counter(FinalReceiptMap)]),
    %% The test endeth here
    ok.
%% Per-testcase assertions for the distributed PoC tests. The partitioned
%% and partitioned-lying variants delegate to their common check helpers;
%% the catch-all clause runs the version-dependent request/receipt/path/
%% reward checks.
exec_dist_test(poc_dist_v11_partitioned_lying_test, Config, VarMap, _Status) ->
    do_common_partition_lying_checks(poc_dist_v11_partitioned_lying_test, Config, VarMap);
exec_dist_test(poc_dist_v10_partitioned_lying_test, Config, VarMap, _Status) ->
    do_common_partition_lying_checks(poc_dist_v10_partitioned_lying_test, Config, VarMap);
exec_dist_test(poc_dist_v8_partitioned_lying_test, Config, VarMap, _Status) ->
    do_common_partition_lying_checks(poc_dist_v8_partitioned_lying_test, Config, VarMap);
exec_dist_test(poc_dist_v7_partitioned_lying_test, Config, VarMap, _Status) ->
    do_common_partition_lying_checks(poc_dist_v7_partitioned_lying_test, Config, VarMap);
exec_dist_test(poc_dist_v6_partitioned_lying_test, Config, VarMap, _Status) ->
    do_common_partition_lying_checks(poc_dist_v6_partitioned_lying_test, Config, VarMap);
exec_dist_test(poc_dist_v5_partitioned_lying_test, Config, VarMap, _Status) ->
    do_common_partition_lying_checks(poc_dist_v5_partitioned_lying_test, Config, VarMap);
exec_dist_test(poc_dist_v11_partitioned_test, Config, VarMap, _Status) ->
    do_common_partition_checks(poc_dist_v11_partitioned_test, Config, VarMap);
exec_dist_test(poc_dist_v10_partitioned_test, Config, VarMap, _Status) ->
    do_common_partition_checks(poc_dist_v10_partitioned_test, Config, VarMap);
exec_dist_test(poc_dist_v8_partitioned_test, Config, VarMap, _Status) ->
    do_common_partition_checks(poc_dist_v8_partitioned_test, Config, VarMap);
exec_dist_test(poc_dist_v7_partitioned_test, Config, VarMap, _Status) ->
    do_common_partition_checks(poc_dist_v7_partitioned_test, Config, VarMap);
exec_dist_test(poc_dist_v6_partitioned_test, Config, VarMap, _Status) ->
    do_common_partition_checks(poc_dist_v6_partitioned_test, Config, VarMap);
exec_dist_test(poc_dist_v5_partitioned_test, Config, VarMap, _Status) ->
    do_common_partition_checks(poc_dist_v5_partitioned_test, Config, VarMap);
exec_dist_test(poc_dist_v4_partitioned_test, Config, VarMap, _Status) ->
    do_common_partition_checks(poc_dist_v4_partitioned_test, Config, VarMap);
exec_dist_test(TestCase, Config, VarMap, Status) ->
    Miners = ?config(miners, Config),
    %% Print scores before we begin the test
    InitialScores = gateway_scores(Config),
    ct:pal("InitialScores: ~p", [InitialScores]),
    %% check that every miner has issued a challenge
    case Status of
        %% expect failure and exit
        false ->
            ?assertEqual(false, check_all_miners_can_challenge(Miners));
        true ->
            ?assert(check_all_miners_can_challenge(Miners)),
            %% Check that the receipts are growing ONLY for poc_v4
            %% More specifically, first receipt can have a single element path (beacon)
            %% but subsequent ones must have more than one element in the path, reason being
            %% the first receipt would have added witnesses and we should be able to make
            %% a next hop.
            case maps:get(?poc_version, VarMap, 1) of
                V when V >= 10 ->
                    %% There are no paths in v11 or v10 for that matter, so we'll consolidate
                    %% the checks for both poc-v10 and poc-v11 here
                    true = miner_ct_utils:wait_until(
                        fun() ->
                            %% Check that we have at least more than one request
                            %% If we have only one request, there's no guarantee
                            %% that the paths would eventually grow
                            C1 = check_multiple_requests(Miners),
                            %% Check if we have some receipts
                            C2 = maps:size(challenger_receipts_map(find_receipts(Miners))) > 0,
                            %% Check there are some poc rewards
                            RewardsMD = get_rewards_md(Config),
                            ct:pal("RewardsMD: ~p", [RewardsMD]),
                            C3 = check_non_empty_poc_rewards(take_poc_challengee_and_witness_rewards(RewardsMD)),
                            ct:pal("C1: ~p, C2: ~p, C3: ~p", [C1, C2, C3]),
                            C1 andalso C2 andalso C3
                        end,
                        300, 1000),
                    FinalRewards = get_rewards(Config),
                    ct:pal("FinalRewards: ~p", [FinalRewards]),
                    ok;
                V when V > 3 ->
                    true = miner_ct_utils:wait_until(
                        fun() ->
                            %% Check that we have at least more than one request
                            %% If we have only one request, there's no guarantee
                            %% that the paths would eventually grow
                            C1 = check_multiple_requests(Miners),
                            %% Now we can check whether we have path growth
                            C2 = (check_eventual_path_growth(TestCase, Miners) orelse
                                  check_subsequent_path_growth(challenger_receipts_map(find_receipts(Miners)))),
                            %% Check there are some poc rewards
                            C3 = check_poc_rewards(get_rewards(Config)),
                            ct:pal("C1: ~p, C2: ~p, C3: ~p", [C1, C2, C3]),
                            C1 andalso C2 andalso C3
                        end,
                        120, 1000),
                    FinalScores = gateway_scores(Config),
                    ct:pal("FinalScores: ~p", [FinalScores]),
                    FinalRewards = get_rewards(Config),
                    ct:pal("FinalRewards: ~p", [FinalRewards]),
                    ok;
                _ ->
                    %% By this point, we have ensured that every miner
                    %% has a valid request at least once, we just check
                    %% that we have N (length(Miners)) receipts.
                    ?assert(check_atleast_k_receipts(Miners, length(Miners))),
                    ok
            end
    end,
    ok.
%% Bring up the chain for a distributed PoC test: generate the gateway
%% locations, load the genesis block on every miner, wait for each miner's
%% lora port to open and wire the ports + claimed locations into the fake
%% radio backplane, then wait for the chain to reach height 10.
setup_dist_test(TestCase, Config, VarMap, Status) ->
    Miners = ?config(miners, Config),
    {_, Locations} = lists:unzip(initialize_chain(Miners, TestCase, Config, VarMap)),
    GenesisBlock = miner_ct_utils:get_genesis_block(Miners, Config),
    ok = miner_ct_utils:load_genesis_block(GenesisBlock, Miners, Config),
    %% the radio ports used to be fetched as part of init_per_testcase
    %% but the port is only opened now after a chain is up and been consulted to
    %% determine if validators are running POCs
    %% So now we have to wait until the chain is up and has opened the port
    true = miner_ct_utils:wait_for_lora_port(Miners, miner_lora, 30),
    RadioPorts = lists:map(
        fun(Miner) ->
            {ok, RandomPort} = ct_rpc:call(Miner, miner_lora, port, []),
            ct:pal("~p is listening for packet forwarder on ~p", [Miner, RandomPort]),
            RandomPort
        end,
        Miners),
    %% RadioPorts = [P || {_, {_TP, P, _JRPCP}} <- MinersAndPorts],
    {ok, _FakeRadioPid} = miner_fake_radio_backplane:start_link(maps:get(?poc_version, VarMap), 45000,
                                                                lists:zip(RadioPorts, Locations), Status),
    miner_fake_radio_backplane ! go,
    %% wait till height 10
    ok = miner_ct_utils:wait_for_gte(height, Miners, 10, all, 30),
    ok.
%% Return {ActualLocations, ClaimedLocations} for a dist test's gateways
%% (see initialize_chain/4 for which is which). The partitioned cases
%% split the gateways across two distant clusters; the "lying" variants
%% additionally collapse the claimed locations onto a single hex per
%% cluster (lists:duplicate of the cluster head) while the actual
%% locations stay spread out. The catch-all clause synthesizes locations
%% along a line, with tighter jitter for poc_version > 3.
gen_locations(poc_dist_v11_partitioned_lying_test, _, _) ->
    {?AUSTINLOCS1 ++ ?LALOCS, lists:duplicate(4, hd(?AUSTINLOCS1)) ++ lists:duplicate(4, hd(?LALOCS))};
gen_locations(poc_dist_v10_partitioned_lying_test, _, _) ->
    {?AUSTINLOCS1 ++ ?LALOCS, lists:duplicate(4, hd(?AUSTINLOCS1)) ++ lists:duplicate(4, hd(?LALOCS))};
gen_locations(poc_dist_v8_partitioned_lying_test, _, _) ->
    {?AUSTINLOCS1 ++ ?LALOCS, lists:duplicate(4, hd(?AUSTINLOCS1)) ++ lists:duplicate(4, hd(?LALOCS))};
gen_locations(poc_dist_v7_partitioned_lying_test, _, _) ->
    {?SFLOCS ++ ?NYLOCS, lists:duplicate(4, hd(?SFLOCS)) ++ lists:duplicate(4, hd(?NYLOCS))};
gen_locations(poc_dist_v6_partitioned_lying_test, _, _) ->
    {?SFLOCS ++ ?NYLOCS, lists:duplicate(4, hd(?SFLOCS)) ++ lists:duplicate(4, hd(?NYLOCS))};
gen_locations(poc_dist_v5_partitioned_lying_test, _, _) ->
    {?SFLOCS ++ ?NYLOCS, lists:duplicate(4, hd(?SFLOCS)) ++ lists:duplicate(4, hd(?NYLOCS))};
gen_locations(poc_dist_v11_partitioned_test, _, _) ->
    %% These are taken from the ledger
    {?AUSTINLOCS1 ++ ?LALOCS, ?AUSTINLOCS1 ++ ?LALOCS};
gen_locations(poc_dist_v10_partitioned_test, _, _) ->
    %% These are taken from the ledger
    {?AUSTINLOCS1 ++ ?LALOCS, ?AUSTINLOCS1 ++ ?LALOCS};
gen_locations(poc_dist_v8_partitioned_test, _, _) ->
    %% These are taken from the ledger
    {?AUSTINLOCS1 ++ ?LALOCS, ?AUSTINLOCS1 ++ ?LALOCS};
gen_locations(poc_dist_v7_partitioned_test, _, _) ->
    %% These are taken from the ledger
    {?SFLOCS ++ ?NYLOCS, ?SFLOCS ++ ?NYLOCS};
gen_locations(poc_dist_v6_partitioned_test, _, _) ->
    %% These are taken from the ledger
    {?SFLOCS ++ ?NYLOCS, ?SFLOCS ++ ?NYLOCS};
gen_locations(poc_dist_v5_partitioned_test, _, _) ->
    %% These are taken from the ledger
    {?SFLOCS ++ ?NYLOCS, ?SFLOCS ++ ?NYLOCS};
gen_locations(poc_dist_v4_partitioned_test, _, _) ->
    %% These are taken from the ledger
    {?SFLOCS ++ ?NYLOCS, ?SFLOCS ++ ?NYLOCS};
gen_locations(poc_dist_v8_test, _, _) ->
    %% Actual locations are the same as the claimed locations for the dist test
    {?AUSTINLOCS1 ++ ?AUSTINLOCS2, ?AUSTINLOCS1 ++ ?AUSTINLOCS2};
gen_locations(poc_dist_v11_cn_test, _, _) ->
    %% Actual locations are the same as the claimed locations for the dist test
    {?CNLOCS1 ++ ?CNLOCS2, ?CNLOCS1 ++ ?CNLOCS2};
gen_locations(poc_dist_v11_test, _, _) ->
    %% Actual locations are the same as the claimed locations for the dist test
    {?AUSTINLOCS1 ++ ?AUSTINLOCS2, ?AUSTINLOCS1 ++ ?AUSTINLOCS2};
gen_locations(poc_dist_v10_test, _, _) ->
    %% Actual locations are the same as the claimed locations for the dist test
    {?AUSTINLOCS1 ++ ?AUSTINLOCS2, ?AUSTINLOCS1 ++ ?AUSTINLOCS2};
gen_locations(_TestCase, Addresses, VarMap) ->
    %% Synthetic fallback: one h3 index per address along a line starting
    %% at a fixed lat/long; smaller jitter divisor spreads points further.
    LocationJitter = case maps:get(?poc_version, VarMap, 1) of
                         V when V > 3 ->
                             100;
                         _ ->
                             1000000
                     end,
    Locs = lists:foldl(
             fun(I, Acc) ->
                     [h3:from_geo({37.780586, -122.469470 + I/LocationJitter}, 13)|Acc]
             end,
             [],
             lists:seq(1, length(Addresses))
           ),
    {Locs, Locs}.
%% Build the genesis transaction set (chain vars, coinbases, gen-gateway
%% txns placed at the ACTUAL locations from gen_locations/3), run the
%% initial DKG across the miners and integrate the resulting genesis
%% block on the nodes that did not take part in the DKG. Returns the
%% addresses zipped with their CLAIMED locations, which setup_dist_test
%% feeds to the fake radio backplane.
initialize_chain(Miners, TestCase, Config, VarMap) ->
    Addresses = ?config(addresses, Config),
    N = ?config(num_consensus_members, Config),
    Curve = ?config(dkg_curve, Config),
    Keys = libp2p_crypto:generate_keys(ecc_compact),
    InitialVars = miner_ct_utils:make_vars(Keys, VarMap),
    InitialPaymentTransactions = [blockchain_txn_coinbase_v1:new(Addr, 5000) || Addr <- Addresses],
    {ActualLocations, ClaimedLocations} = gen_locations(TestCase, Addresses, VarMap),
    AddressesWithLocations = lists:zip(Addresses, ActualLocations),
    AddressesWithClaimedLocations = lists:zip(Addresses, ClaimedLocations),
    InitialGenGatewayTxns = [blockchain_txn_gen_gateway_v1:new(Addr, Addr, Loc, 0) || {Addr, Loc} <- AddressesWithLocations],
    InitialTransactions = InitialVars ++ InitialPaymentTransactions ++ InitialGenGatewayTxns,
    {ok, DKGCompletedNodes} = miner_ct_utils:initial_dkg(Miners, InitialTransactions, Addresses, N, Curve),
    %% integrate genesis block
    _GenesisLoadResults = miner_ct_utils:integrate_genesis_block(hd(DKGCompletedNodes), Miners -- DKGCompletedNodes),
    AddressesWithClaimedLocations.
%% Collect every PoC request transaction from every block of the chain,
%% as seen from the first miner in the list. Blocks are traversed in the
%% reverse of maps:to_list/1 order to match the original fold+flatten
%% result ordering.
find_requests([Miner | _]) ->
    Chain = ct_rpc:call(Miner, blockchain_worker, blockchain, []),
    Blocks = ct_rpc:call(Miner, blockchain, blocks, [Chain]),
    lists:append(
      [[Txn || Txn <- blockchain_block:transactions(Block),
               blockchain_txn:type(Txn) == blockchain_txn_poc_request_v1]
       || {_Hash, Block} <- lists:reverse(maps:to_list(Blocks))]).
%% Collect every PoC receipts transaction from every block of the chain,
%% tagged with the height of its containing block, as seen from the first
%% miner. Blocks are traversed in the reverse of maps:to_list/1 order to
%% match the original fold result ordering.
find_receipts([Miner | _]) ->
    Chain = ct_rpc:call(Miner, blockchain_worker, blockchain, []),
    Blocks = ct_rpc:call(Miner, blockchain, blocks, [Chain]),
    lists:append(
      [[{blockchain_block:height(Block), Txn}
        || Txn <- blockchain_block:transactions(Block),
           blockchain_txn:type(Txn) == blockchain_txn_poc_receipts_v1]
       || {_Hash, Block} <- lists:reverse(maps:to_list(Blocks))]).
%% Group height-tagged receipt txns by their challenger's animal name,
%% keeping each challenger's list sorted by block height.
challenger_receipts_map(Receipts) ->
    ReceiptMap =
        lists:foldl(
          fun({_Height, Receipt} = Tagged, Acc) ->
                  Challenger = blockchain_txn_poc_receipts_v1:challenger(Receipt),
                  {ok, Name} = erl_angry_purple_tiger:animal_name(libp2p_crypto:bin_to_b58(Challenger)),
                  maps:update_with(Name,
                                   fun(Existing) -> lists:keysort(1, [Tagged | Existing]) end,
                                   [Tagged],
                                   Acc)
          end,
          #{},
          Receipts),
    ct:pal("ReceiptMap: ~p", [ReceiptMap]),
    ReceiptMap.
%% Count PoC request txns per challenger, keyed by the challenger's
%% animal name. Non-positive or missing counts reset to 1, matching the
%% original defensive branches.
request_counter(TotalRequests) ->
    lists:foldl(
      fun(Req, Counts) ->
              Challenger = blockchain_txn_poc_request_v1:challenger(Req),
              {ok, Name} = erl_angry_purple_tiger:animal_name(libp2p_crypto:bin_to_b58(Challenger)),
              Current = maps:get(Name, Counts, undefined),
              Next = if
                         is_integer(Current), Current > 0 -> Current + 1;
                         true -> 1
                     end,
              maps:put(Name, Next, Counts)
      end,
      #{},
      TotalRequests).
%% True when every miner shows up as a challenger in the request counter,
%% i.e. each miner has issued at least one PoC request.
check_all_miners_can_challenge(Miners) ->
    RequestCounter = request_counter(find_requests(Miners)),
    ct:pal("RequestCounter: ~p~n", [RequestCounter]),
    maps:size(RequestCounter) == length(Miners).
%% Check (without the partition constraint) that every challenger's PoC
%% paths eventually grow past a single element.
check_eventual_path_growth(TestCase, Miners) ->
    Receipts = challenger_receipts_map(find_receipts(Miners)),
    ct:pal("ReceiptMap: ~p", [Receipts]),
    check_growing_paths(TestCase, Receipts, active_gateways(Miners), false).
%% In a partitioned network, check that subsequent paths still grow.
check_partitioned_path_growth(_TestCase, Miners) ->
    Receipts = challenger_receipts_map(find_receipts(Miners)),
    ct:pal("ReceiptMap: ~p", [Receipts]),
    check_subsequent_path_growth(Receipts).
%% With lying claimed locations the paths must NOT grow; succeed when
%% check_subsequent_path_growth fails.
check_partitioned_lying_path_growth(_TestCase, Miners) ->
    Receipts = challenger_receipts_map(find_receipts(Miners)),
    ct:pal("ReceiptMap: ~p", [Receipts]),
    not check_subsequent_path_growth(Receipts).
%% For every challenger: the first receipt may have a single-element path
%% (a beacon), but the remaining receipts must show growth (partitioned
%% variant additionally checks the path stays inside one partition).
%% Also requires that every active gateway appears as a challenger.
check_growing_paths(TestCase, ReceiptMap, ActiveGateways, PartitionFlag) ->
    Results = lists:foldl(
                fun({_Challenger, TaggedReceipts}, Acc) ->
                        [{_, FirstReceipt} | Rest] = TaggedReceipts,
                        %% It's possible that the first receipt itself has a multiple element path, I think
                        RemainingGrowthCond = case PartitionFlag of
                                                  true ->
                                                      check_remaining_partitioned_grow(TestCase, Rest, ActiveGateways);
                                                  false ->
                                                      check_remaining_grow(Rest)
                                              end,
                        Res = length(blockchain_txn_poc_receipts_v1:path(FirstReceipt)) >= 1 andalso RemainingGrowthCond,
                        [Res | Acc]
                end,
                [],
                maps:to_list(ReceiptMap)),
    lists:all(fun(R) -> R == true end, Results) andalso maps:size(ReceiptMap) == maps:size(ActiveGateways).
%% At least one of the remaining receipts must have a multi-element path.
%% Some may still carry a single-element path, but eventually one should
%% have grown; an empty remainder fails outright.
check_remaining_grow([]) ->
    false;
check_remaining_grow(TaggedReceipts) ->
    lists:any(fun({_Height, Receipt}) ->
                      length(blockchain_txn_poc_receipts_v1:path(Receipt)) > 1
              end,
              TaggedReceipts).
%% Like check_remaining_grow/1, but each qualifying path must also have
%% length in (1, 4] and lie entirely within one partition. Some receipts
%% may still have single-element paths; at least one must qualify.
check_remaining_partitioned_grow(_TestCase, [], _ActiveGateways) ->
    false;
check_remaining_partitioned_grow(TestCase, TaggedReceipts, ActiveGateways) ->
    Flags = [begin
                 Path = blockchain_txn_poc_receipts_v1:path(Receipt),
                 Len = length(Path),
                 ct:pal("PathLength: ~p", [Len]),
                 Len > 1 andalso Len =< 4 andalso check_partitions(TestCase, Path, ActiveGateways)
             end || {_Height, Receipt} <- TaggedReceipts],
    ct:pal("Res: ~p", [Flags]),
    lists:any(fun(F) -> F == true end, Flags).
%% Verify the challengee locations along Path fall entirely inside one of
%% the two expected location partitions for TestCase and do not leak into
%% the other (location_sets/1 supplies the two partitions).
check_partitions(TestCase, Path, ActiveGateways) ->
    PathLocs = sets:from_list(
                 lists:foldl(
                   fun(Element, Acc) ->
                           Challengee = blockchain_poc_path_element_v1:challengee(Element),
                           ChallengeeGw = maps:get(Challengee, ActiveGateways),
                           ChallengeeLoc = blockchain_ledger_gateway_v2:location(ChallengeeGw),
                           [ChallengeeLoc | Acc]
                   end,
                   [],
                   Path)),
    {LocSet1, LocSet2} = location_sets(TestCase),
    case sets:is_subset(PathLocs, LocSet1) of
        true ->
            %% Path is in LocSet1, check that it's not in LocSet2
            sets:is_disjoint(PathLocs, LocSet2);
        false ->
            %% Path is not in LocSet1, check that it's only in LocSet2
            sets:is_subset(PathLocs, LocSet2) andalso sets:is_disjoint(PathLocs, LocSet1)
    end.
%% True when the miners collectively issued more PoC requests than there
%% are miners, i.e. at least one miner has requested more than once.
check_multiple_requests(Miners) ->
    Counter = request_counter(find_requests(Miners)),
    ct:pal("RequestCounter: ~p", [Counter]),
    TotalRequests = lists:sum(maps:values(Counter)),
    TotalRequests > length(Miners).
%% True when the total number of receipts across all challengers is at
%% least K.
check_atleast_k_receipts(Miners, K) ->
    ReceiptMap = challenger_receipts_map(find_receipts(Miners)),
    TotalReceipts = lists:sum([length(ReceiptList) || ReceiptList <- maps:values(ReceiptMap)]),
    ct:pal("TotalReceipts: ~p", [TotalReceipts]),
    TotalReceipts >= K.
%% Reduce a challenger receipt map to {Height, PathLength} pairs per
%% challenger name, for debug logging.
receipt_counter(ReceiptMap) ->
    maps:map(fun(_Name, TaggedReceipts) ->
                     [{Height, length(blockchain_txn_poc_receipts_v1:path(Txn))}
                      || {Height, Txn} <- TaggedReceipts]
             end,
             ReceiptMap).
%% Fetch the ledger's active gateways via the first miner in the list
%% (a map of gateway address -> gateway record, judging by its use in
%% check_partitions/3).
active_gateways([Miner | _]=_Miners) ->
    %% Get active gateways to get the locations
    Chain = ct_rpc:call(Miner, blockchain_worker, blockchain, []),
    Ledger = ct_rpc:call(Miner, blockchain, ledger, [Chain]),
    ct_rpc:call(Miner, blockchain_ledger_v1, active_gateways, [Ledger]).
%% Map each configured gateway's animal name to its ledger score, queried
%% through the first miner.
gateway_scores(Config) ->
    [Miner | _] = ?config(miners, Config),
    Addresses = ?config(addresses, Config),
    Chain = ct_rpc:call(Miner, blockchain_worker, blockchain, []),
    Ledger = ct_rpc:call(Miner, blockchain, ledger, [Chain]),
    maps:from_list(
      [begin
           {ok, Score} = ct_rpc:call(Miner, blockchain_ledger_v1, gateway_score, [Address, Ledger]),
           {ok, Name} = erl_angry_purple_tiger:animal_name(libp2p_crypto:bin_to_b58(Address)),
           {Name, Score}
       end || Address <- Addresses]).
%% Chain variables shared by every poc dist test variant; individual tests
%% merge their poc-version-specific vars on top of this map.
common_poc_vars(Config) ->
    N = ?config(num_consensus_members, Config),
    BlockTime = ?config(block_time, Config),
    Interval = ?config(election_interval, Config),
    BatchSize = ?config(batch_size, Config),
    Curve = ?config(dkg_curve, Config),
    %% Don't put the poc version here
    %% Add it to the map in the tests above
    #{?block_time => BlockTime,
      ?election_interval => Interval,
      ?num_consensus_members => N,
      ?batch_size => BatchSize,
      ?dkg_curve => Curve,
      %% TODO validators
      ?poc_challenge_interval => 15,
      ?poc_v4_exclusion_cells => 10,
      ?poc_v4_parent_res => 11,
      ?poc_v4_prob_bad_rssi => 0.01,
      ?poc_v4_prob_count_wt => 0.3,
      ?poc_v4_prob_good_rssi => 1.0,
      ?poc_v4_prob_no_rssi => 0.5,
      ?poc_v4_prob_rssi_wt => 0.3,
      ?poc_v4_prob_time_wt => 0.3,
      ?poc_v4_randomness_wt => 0.1,
      ?poc_v4_target_challenge_age => 300,
      ?poc_v4_target_exclusion_cells => 6000,
      ?poc_v4_target_prob_edge_wt => 0.2,
      ?poc_v4_target_prob_score_wt => 0.8,
      ?poc_v4_target_score_curve => 5,
      ?poc_target_hex_parent_res => 5,
      ?poc_v5_target_prob_randomness_wt => 0.0,
      ?poc_witness_consideration_limit => 20}.
%% Shared assertions for the partitioned dist tests: wait until every miner
%% has challenged, multiple requests exist, and (pre-v10) paths grow within
%% the partition bounds, or (v10+) poc rewards were actually issued.
do_common_partition_checks(TestCase, Config, VarMap) ->
    Miners = ?config(miners, Config),
    %% Print scores before we begin the test
    InitialScores = gateway_scores(Config),
    ct:pal("InitialScores: ~p", [InitialScores]),
    true = miner_ct_utils:wait_until(
             fun() ->
                     case maps:get(poc_version, VarMap, 1) of
                         V when V >= 10 ->
                             %% There is no path to check, so do both poc-v10 and poc-v11 checks here
                             %% Check that every miner has issued a challenge
                             C1 = check_all_miners_can_challenge(Miners),
                             %% Check that we have at least more than one request
                             %% If we have only one request, there's no guarantee
                             %% that the paths would eventually grow
                             C2 = check_multiple_requests(Miners),
                             %% Check there are some poc rewards
                             RewardsMD = get_rewards_md(Config),
                             ct:pal("RewardsMD: ~p", [RewardsMD]),
                             C3 = check_non_empty_poc_rewards(take_poc_challengee_and_witness_rewards(RewardsMD)),
                             ct:pal("C1: ~p, C2: ~p, C3: ~p", [C1, C2, C3]),
                             C1 andalso C2 andalso C3;
                         _ ->
                             %% Check that every miner has issued a challenge
                             C1 = check_all_miners_can_challenge(Miners),
                             %% Check that we have at least more than one request
                             %% If we have only one request, there's no guarantee
                             %% that the paths would eventually grow
                             C2 = check_multiple_requests(Miners),
                             %% Since we have two static location partitioned networks, we
                             %% can assert that the subsequent path lengths must never be greater
                             %% than 4.
                             C3 = check_partitioned_path_growth(TestCase, Miners),
                             %% Check there are some poc rewards
                             C4 = check_poc_rewards(get_rewards(Config)),
                             ct:pal("all can challenge: ~p, multiple requests: ~p, paths grow: ~p, rewards given: ~p", [C1, C2, C3, C4]),
                             C1 andalso C2 andalso C3 andalso C4
                     end
             end, 60, 5000),
    %% Print scores after execution
    FinalScores = gateway_scores(Config),
    ct:pal("FinalScores: ~p", [FinalScores]),
    FinalRewards = get_rewards(Config),
    ct:pal("FinalRewards: ~p", [FinalRewards]),
    ok.
%% Current token balance for every configured address, queried via the first miner.
balances(Config) ->
    [Miner | _] = ?config(miners, Config),
    lists:map(fun(Addr) -> miner_ct_utils:get_balance(Miner, Addr) end,
              ?config(addresses, Config)).
%% Reduce per-height rewards metadata down to the poc-related categories.
%% Returns [{Height, #{poc_challengee | poc_witness => _}}] (order reversed
%% relative to the input, which callers do not depend on).
take_poc_challengee_and_witness_rewards(RewardsMD) ->
    %% only take poc_challengee and poc_witness rewards
    POCRewards = lists:foldl(
                   fun({Ht, MDMap}, Acc) ->
                           [{Ht, maps:with([poc_challengee, poc_witness], MDMap)} | Acc]
                   end,
                   [],
                   RewardsMD),
    ct:pal("POCRewards: ~p", [POCRewards]),
    POCRewards.
%% True when at least one epoch credited both poc challengee and witness rewards.
check_non_empty_poc_rewards(POCRewards) ->
    NonEmpty =
        fun({_Ht, #{poc_challengee := Challengees, poc_witness := Witnesses}}) ->
                maps:size(Challengees) > 0 andalso maps:size(Witnesses) > 0
        end,
    lists:any(NonEmpty, POCRewards).
%% Fold the whole chain on the first miner and, for every rewards_v2 txn
%% found, recompute its rewards metadata. Returns [{BlockHeight, Metadata}].
get_rewards_md(Config) ->
    %% NOTE: It's possible that the calculations below may blow up
    %% since we are folding the entire chain here and some subsequent
    %% ledger_at call in rewards_metadata blows up. Investigate
    [Miner | _] = ?config(miners, Config),
    Chain = ct_rpc:call(Miner, blockchain_worker, blockchain, []),
    {ok, Head} = ct_rpc:call(Miner, blockchain, head_block, [Chain]),
    %% Only rewards_v2 transactions are of interest.
    Filter = fun(T) -> blockchain_txn:type(T) == blockchain_txn_rewards_v2 end,
    Fun = fun(Block, Acc) ->
                  case blockchain_utils:find_txn(Block, Filter) of
                      [T] ->
                          Start = blockchain_txn_rewards_v2:start_epoch(T),
                          End = blockchain_txn_rewards_v2:end_epoch(T),
                          %% Recompute the metadata for this txn's epoch on the miner.
                          MDRes = ct_rpc:call(Miner, blockchain_txn_rewards_v2, calculate_rewards_metadata, [
                              Start,
                              End,
                              Chain
                          ]),
                          case MDRes of
                              {ok, MD} ->
                                  [{blockchain_block:height(Block), MD} | Acc];
                              _ ->
                                  %% metadata recomputation failed; skip this block
                                  Acc
                          end;
                      _ ->
                          %% zero or multiple matches: not a single rewards txn, skip
                          Acc
                  end
          end,
    Res = ct_rpc:call(Miner, blockchain, fold_chain, [Fun, [], Head, Chain]),
    Res.
%% Fetch all reward txns from the chain, defaulting to rewards_v1 txns.
get_rewards(Config) ->
    %% default to rewards_v1
    get_rewards(Config, blockchain_txn_rewards_v1).
%% Collect every transaction of RewardType across all blocks on the chain.
get_rewards(Config, RewardType) ->
    [Miner | _] = ?config(miners, Config),
    Chain = ct_rpc:call(Miner, blockchain_worker, blockchain, []),
    Blocks = ct_rpc:call(Miner, blockchain, blocks, [Chain]),
    maps:fold(
      fun(_Hash, Block, Acc) ->
              case blockchain_block:transactions(Block) of
                  [] ->
                      Acc;
                  Txns ->
                      Rewards = [T || T <- Txns, blockchain_txn:type(T) == RewardType],
                      lists:flatten([Rewards | Acc])
              end
      end,
      [],
      Blocks).
%% True when any reward in any rewards txn went to a poc challengee or witness.
check_poc_rewards(RewardsTxns) ->
    RewardTypes =
        lists:flatten(
          [[blockchain_txn_reward_v1:type(R)
            || R <- blockchain_txn_rewards_v1:rewards(RewardTxn)]
           || RewardTxn <- RewardsTxns]),
    lists:any(fun(T) ->
                      T == poc_challengees orelse T == poc_witnesses
              end,
              RewardTypes).
%% Shared assertions for the partitioned-and-lying dist tests: challenges and
%% requests must still happen, but since all gateways lie about location,
%% paths must stay single-hop and no poc rewards may be paid out.
do_common_partition_lying_checks(TestCase, Config, VarMap) ->
    Miners = ?config(miners, Config),
    %% Print scores before we begin the test
    InitialScores = gateway_scores(Config),
    ct:pal("InitialScores: ~p", [InitialScores]),
    %% Print balances before we begin the test
    InitialBalances = balances(Config),
    ct:pal("InitialBalances: ~p", [InitialBalances]),
    true = miner_ct_utils:wait_until(
             fun() ->
                     case maps:get(poc_version, VarMap, 1) of
                         V when V > 10 ->
                             %% Check that every miner has issued a challenge
                             C1 = check_all_miners_can_challenge(Miners),
                             %% Check that we have at least more than one request
                             %% If we have only one request, there's no guarantee
                             %% that the paths would eventually grow
                             C2 = check_multiple_requests(Miners),
                             %% TODO: What to check when the partitioned nodes are lying about their locations
                             C1 andalso C2;
                         _ ->
                             %% Check that every miner has issued a challenge
                             C1 = check_all_miners_can_challenge(Miners),
                             %% Check that we have at least more than one request
                             %% If we have only one request, there's no guarantee
                             %% that the paths would eventually grow
                             C2 = check_multiple_requests(Miners),
                             %% Since we have two static location partitioned networks, where
                             %% both are lying about their distances, the paths should
                             %% never get longer than 1
                             C3 = check_partitioned_lying_path_growth(TestCase, Miners),
                             C1 andalso C2 andalso C3
                     end
             end,
             40, 5000),
    %% Print scores after execution
    FinalScores = gateway_scores(Config),
    ct:pal("FinalScores: ~p", [FinalScores]),
    %% Print rewards
    Rewards = get_rewards(Config),
    ct:pal("Rewards: ~p", [Rewards]),
    %% Print balances after execution
    FinalBalances = balances(Config),
    ct:pal("FinalBalances: ~p", [FinalBalances]),
    %% There should be no poc_witness or poc_challengees rewards
    ?assert(not check_poc_rewards(Rewards)),
    ok.
%% Extra chain vars per poc version under test.
%% v11 layers the v11 poc vars and reward txn v2 vars on top of v10;
%% v10 layers reward split vars on top of the shared extra poc vars;
%% anything other than v8/v10/v11 is unsupported.
extra_vars(poc_v11) ->
    POCVars = maps:merge(extra_vars(poc_v10), miner_poc_test_utils:poc_v11_vars()),
    RewardVars = #{reward_version => 5, rewards_txn_version => 2, poc_witness_consideration_limit => 20},
    maps:merge(POCVars, RewardVars);
extra_vars(poc_v10) ->
    maps:merge(extra_poc_vars(),
               #{?poc_version => 10,
                 ?data_aggregation_version => 2,
                 ?consensus_percent => 0.06,
                 ?dc_percent => 0.325,
                 ?poc_challengees_percent => 0.18,
                 ?poc_challengers_percent => 0.0095,
                 ?poc_witnesses_percent => 0.0855,
                 ?securities_percent => 0.34,
                 ?reward_version => 5,
                 ?rewards_txn_version => 2
                });
extra_vars(poc_v8) ->
    maps:merge(extra_poc_vars(), #{?poc_version => 8});
extra_vars(_) ->
    {error, poc_v8_and_above_only}.
%% The two disjoint location sets used by the partition tests:
%% v8/v10/v11 partition tests split Austin vs LA; every other case SF vs NY.
location_sets(poc_dist_v11_partitioned_test) ->
    {sets:from_list(?AUSTINLOCS1), sets:from_list(?LALOCS)};
location_sets(poc_dist_v10_partitioned_test) ->
    {sets:from_list(?AUSTINLOCS1), sets:from_list(?LALOCS)};
location_sets(poc_dist_v8_partitioned_test) ->
    {sets:from_list(?AUSTINLOCS1), sets:from_list(?LALOCS)};
location_sets(_TestCase) ->
    {sets:from_list(?SFLOCS), sets:from_list(?NYLOCS)}.
%% Baseline poc chain vars shared by all v8+ variants (merged under
%% version-specific vars in extra_vars/1).
extra_poc_vars() ->
    #{?poc_good_bucket_low => -132,
      ?poc_good_bucket_high => -80,
      ?poc_v5_target_prob_randomness_wt => 1.0,
      ?poc_v4_target_prob_edge_wt => 0.0,
      ?poc_v4_target_prob_score_wt => 0.0,
      ?poc_v4_prob_rssi_wt => 0.0,
      ?poc_v4_prob_time_wt => 0.0,
      ?poc_v4_randomness_wt => 0.5,
      ?poc_v4_prob_count_wt => 0.0,
      ?poc_centrality_wt => 0.5,
      ?poc_max_hop_cells => 2000}.
%% True when at least one receipt txn in the map carries a multi-hop path.
check_subsequent_path_growth(ReceiptMap) ->
    Lengths = [length(blockchain_txn_poc_receipts_v1:path(Txn))
               || {_Height, Txn} <- lists:flatten(maps:values(ReceiptMap))],
    ct:pal("PathLengths: ~p", [Lengths]),
    lists:any(fun(Len) -> Len > 1 end, Lengths).
| null | https://raw.githubusercontent.com/helium/miner/fadfdf74ece9d3c515c03725d1c1c0f0e7c51abd/test/miner_poc_SUITE.erl | erlang | spare-tortilla-raccoon
kind-tangerine-octopus
damp-hemp-pangolin
fierce-lipstick-poodle
raspy-parchment-pike
fresh-gingham-porpoise
glorious-eggshell-finch
--------------------------------------------------------------------
COMMON TEST CALLBACK FUNCTIONS
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Running tests for this suite
@end
--------------------------------------------------------------------
no_status_v8_test,
no_status_v8_test,
test passed, we can cleanup
leave results alone for analysis
test passed, we can cleanup
leave results alone for analysis
--------------------------------------------------------------------
TEST CASES
--------------------------------------------------------------------
Dont think it matters if v1 takes all the other common vars
Just don't set any poc_version here
Now add genesis
Generate fake blockchains (just the keys)
Create genesis block
All these point are in a line one after the other (except last)
Assert the Gateways location
Blockchain is = to Chain
Mock submit_txn to actually add the block
Add some block to start process
Moving threw targeting and challenging
Get responses
Passing receiving_timeout
Get receiving_timeout
same test as above but this time we change the local gateway from full mode to light mode
this is done before we start the POC statem
light gateways will never move out of requesting state
Now add genesis
Generate fake blockchains (just the keys)
Create genesis block
All these point are in a line one after the other (except last)
Assert the Gateways location
update the local gateway to light mode
thereafter it should never move out of requesting state
assert default states
Mock submit_txn to add blocks
Add some block to start process
confirm we DO NOT move from receiving state
Passing poc interval
confirm we remain in requesting state
Now add genesis
Generate fake blockchains (just the keys)
Create genesis block
All these point are in a line one after the other (except last)
Assert the Gateways location
Blockchain is = to Chain
Mock submit_txn to actually add the block
Add some block to start process
Moving through targeting and challenging
KILLING STATEM AND RESTARTING
Get responses
Passing receiving_timeout
Get receiving_timeout
------------------------------------------------------------------
Internal Function Definitions
------------------------------------------------------------------
Execute the test
show the final receipt counter
The test endeth here
Print scores before we begin the test
check that every miner has issued a challenge
expect failure and exit
Check that the receipts are growing ONLY for poc_v4
a next hop.
Check that we have at least more than one request
that the paths would eventually grow
Check if we have some receipts
Check that we have at least more than one request
that the paths would eventually grow
Now we can check whether we have path growth
By this point, we have ensured that every miner
has a valid request at least once, we just check
that we have N (length(Miners)) receipts.
but the port is only opened now after a chain is up and been consulted to
determine if validators are running POCs
These are taken from the ledger
These are taken from the ledger
These are taken from the ledger
These are taken from the ledger
These are taken from the ledger
These are taken from the ledger
These are taken from the ledger
Actual locations are the same as the claimed locations for the dist test
Actual locations are the same as the claimed locations for the dist test
Actual locations are the same as the claimed locations for the dist test
Actual locations are the same as the claimed locations for the dist test
integrate genesis block
It's possible that even some of the remaining receipts have single path
but there should eventually be some which have multi element paths
It's possible that even some of the remaining receipts have single path
but there should eventually be some which have multi element paths
Path is in LocSet1, check that it's not in LocSet2
Get active gateways to get the locations
Add it to the map in the tests above
Print scores before we begin the test
Check that every miner has issued a challenge
Check that we have at least more than one request
that the paths would eventually grow
Check that every miner has issued a challenge
Check that we have at least more than one request
that the paths would eventually grow
can assert that the subsequent path lengths must never be greater
than 4.
Print scores after execution
NOTE: It's possible that the calculations below may blow up
since we are folding the entire chain here and some subsequent
ledger_at call in rewards_metadata blows up. Investigate
default to rewards_v1
Get all rewards types
Print scores before we begin the test
Print scores before we begin the test
Check that every miner has issued a challenge
Check that we have at least more than one request
that the paths would eventually grow
TODO: What to check when the partitioned nodes are lying about their locations
Check that every miner has issued a challenge
Check that we have at least more than one request
that the paths would eventually grow
both are lying about their distances, the paths should
Print scores after execution
Print rewards
Print balances after execution
There should be no poc_witness or poc_challengees rewards | -module(miner_poc_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("blockchain/include/blockchain_vars.hrl").
-export([
groups/0, all/0, init_per_group/2, end_per_group/2
]).
-export([
init_per_testcase/2,
end_per_testcase/2,
basic_test/1,
basic_test_light_gateway/1,
poc_dist_v1_test/1,
poc_dist_v2_test/1,
poc_dist_v4_test/1,
poc_dist_v4_partitioned_test/1,
poc_dist_v5_test/1,
poc_dist_v5_partitioned_test/1,
poc_dist_v5_partitioned_lying_test/1,
poc_dist_v6_test/1,
poc_dist_v6_partitioned_test/1,
poc_dist_v6_partitioned_lying_test/1,
poc_dist_v7_test/1,
poc_dist_v7_partitioned_test/1,
poc_dist_v7_partitioned_lying_test/1,
poc_dist_v8_test/1,
poc_dist_v8_partitioned_test/1,
poc_dist_v8_partitioned_lying_test/1,
no_status_v8_test/1,
restart_test/1,
poc_dist_v10_test/1,
poc_dist_v10_partitioned_test/1,
poc_dist_v10_partitioned_lying_test/1,
poc_dist_v11_test/1,
poc_dist_v11_cn_test/1,
poc_dist_v11_partitioned_test/1,
poc_dist_v11_partitioned_lying_test/1
]).
-define(SFLOCS, [631210968910285823, 631210968909003263, 631210968912894463, 631210968907949567]).
-define(NYLOCS, [631243922668565503, 631243922671147007, 631243922895615999, 631243922665907711]).
-define(AUSTINLOCS1, [631781084745290239, 631781089167934463, 631781054839691775, 631781050465723903]).
-define(AUSTINLOCS2, [631781452049762303, 631781453390764543, 631781452924144639, 631781452838965759]).
-define(LALOCS, [631236297173835263, 631236292179769855, 631236329165333503, 631236328049271807]).
-define(CNLOCS1, [
]).
-define(CNLOCS2, [
%% innocent-irish-pheasant
]).
%% CT group definitions: run the full suite under poc targeting v3, and only
%% the v11-capable cases under poc targeting v4.
groups() ->
    [ {poc_target_v3,
       [],
       all()
      },
      {poc_target_v4,
       [],
       v11_test_cases()
      }].
%% @public
all() ->
    [
     basic_test,
     basic_test_light_gateway,
     poc_dist_v8_test,
     poc_dist_v8_partitioned_test,
     poc_dist_v8_partitioned_lying_test,
     poc_dist_v10_test,
     poc_dist_v10_partitioned_test,
     poc_dist_v10_partitioned_lying_test,
     poc_dist_v11_test,
     poc_dist_v11_cn_test,
     poc_dist_v11_partitioned_test,
     poc_dist_v11_partitioned_lying_test,
     %% uncomment when poc placement enforcement starts.
     %% no_status_v8_test,
     restart_test].
%% The subset of cases that exercise poc v11 behaviour (run under the
%% poc_target_v4 group).
v11_test_cases() ->
    [
     basic_test,
     basic_test_light_gateway,
     poc_dist_v11_test,
     poc_dist_v11_cn_test,
     poc_dist_v11_partitioned_test,
     poc_dist_v11_partitioned_lying_test,
     %% uncomment when poc placement enforcement starts.
     %% no_status_v8_test,
     restart_test].
%% poc_target_v3 runs with default targeting; poc_target_v4 supplies the
%% h3dex-based targeting vars, which each test merges into its var map.
init_per_group(poc_target_v3, Config) ->
    [
     {poc_targeting_vars, #{}}
     | Config
    ];
init_per_group(poc_target_v4, Config) ->
    [
     {poc_targeting_vars, #{h3dex_gc_width => 10,
                            poc_targeting_version => 4,
                            poc_target_pool_size => 2,
                            poc_hexing_type => hex_h3dex,
                            hip17_interactivity_blocks => 20}}
     | Config
    ].
%% The basic/restart tests only need a base dir; everything else gets the
%% full miner cluster setup.
init_per_testcase(TestCase, Config) when TestCase == basic_test;
                                         TestCase == basic_test_light_gateway;
                                         TestCase == restart_test ->
    miner_ct_utils:init_base_dir_config(?MODULE, TestCase, Config);
init_per_testcase(TestCase, Config0) ->
    miner_ct_utils:init_per_testcase(?MODULE, TestCase, Config0).
%% The basic/restart tests manage their own statem and base dir: stop the
%% statem and remove the base dir only when the test passed.
end_per_testcase(TestCase, Config) when TestCase == basic_test;
                                        TestCase == basic_test_light_gateway ->
    catch gen_statem:stop(miner_poc_statem),
    case ?config(tc_status, Config) of
        ok ->
            %% test passed, we can cleanup
            BaseDir = ?config(base_dir, Config),
            os:cmd("rm -rf "++ BaseDir),
            ok;
        _ ->
            %% leave results alone for analysis
            ok
    end;
end_per_testcase(restart_test, Config) ->
    catch gen_statem:stop(miner_poc_statem),
    case ?config(tc_status, Config) of
        ok ->
            %% test passed, we can cleanup
            BaseDir = ?config(base_dir, Config),
            os:cmd("rm -rf "++BaseDir),
            ok;
        _ ->
            %% leave results alone for analysis
            ok
    end;
end_per_testcase(TestCase, Config) ->
    gen_server:stop(miner_fake_radio_backplane),
    miner_ct_utils:end_per_testcase(TestCase, Config).
%% Nothing to tear down at the group level.
end_per_group(_, _Config) ->
    ok.
%% Dist-test wrappers for poc versions v1-v7: each runs run_dist_with_params/3
%% with the shared vars plus the matching ?poc_version.
poc_dist_v1_test(Config) ->
    ct:pal("Config ~p", [Config]),
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v1_test, Config, CommonPOCVars).
poc_dist_v2_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v2_test, Config, maps:put(?poc_version, 2, CommonPOCVars)).
poc_dist_v4_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v4_test, Config, maps:put(?poc_version, 4, CommonPOCVars)).
poc_dist_v4_partitioned_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v4_partitioned_test, Config, maps:put(?poc_version, 4, CommonPOCVars)).
poc_dist_v5_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v5_test, Config, maps:put(?poc_version, 5, CommonPOCVars)).
poc_dist_v5_partitioned_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v5_partitioned_test, Config, maps:put(?poc_version, 5, CommonPOCVars)).
poc_dist_v5_partitioned_lying_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v5_partitioned_lying_test, Config, maps:put(?poc_version, 5, CommonPOCVars)).
poc_dist_v6_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v6_test, Config, maps:put(?poc_version, 6, CommonPOCVars)).
poc_dist_v6_partitioned_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v6_partitioned_test, Config, maps:put(?poc_version, 6, CommonPOCVars)).
poc_dist_v6_partitioned_lying_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v6_partitioned_lying_test, Config, maps:put(?poc_version, 6, CommonPOCVars)).
poc_dist_v7_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v7_test, Config, maps:put(?poc_version, 7, CommonPOCVars)).
poc_dist_v7_partitioned_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v7_partitioned_test, Config, maps:put(?poc_version, 7, CommonPOCVars)).
poc_dist_v7_partitioned_lying_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    run_dist_with_params(poc_dist_v7_partitioned_lying_test, Config, maps:put(?poc_version, 7, CommonPOCVars)).
%% Dist-test wrappers for poc v8+: these merge extra_vars/1 maps (and, for
%% v11, the group's poc_targeting_vars) into the shared var map.
poc_dist_v8_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    ExtraVars = extra_vars(poc_v8),
    run_dist_with_params(poc_dist_v8_test, Config, maps:merge(CommonPOCVars, ExtraVars)).
poc_dist_v8_partitioned_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    ExtraVars = extra_vars(poc_v8),
    run_dist_with_params(poc_dist_v8_partitioned_test, Config, maps:merge(CommonPOCVars, ExtraVars)).
poc_dist_v8_partitioned_lying_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    ExtraVars = extra_vars(poc_v8),
    run_dist_with_params(poc_dist_v8_partitioned_lying_test, Config, maps:merge(CommonPOCVars, ExtraVars)).
%% Same as poc_dist_v8_test but without submitting gateway status (final arg false).
no_status_v8_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    ExtraVars = extra_vars(poc_v8),
    run_dist_with_params(poc_dist_v8_test, Config, maps:merge(CommonPOCVars, ExtraVars), false).
poc_dist_v10_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    ExtraVars = extra_vars(poc_v10),
    run_dist_with_params(poc_dist_v10_test, Config, maps:merge(CommonPOCVars, ExtraVars)).
poc_dist_v10_partitioned_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    ExtraVars = extra_vars(poc_v10),
    run_dist_with_params(poc_dist_v10_partitioned_test, Config, maps:merge(CommonPOCVars, ExtraVars)).
poc_dist_v10_partitioned_lying_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    ExtraVars = extra_vars(poc_v10),
    run_dist_with_params(poc_dist_v10_partitioned_lying_test, Config, maps:merge(CommonPOCVars, ExtraVars)).
poc_dist_v11_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    POCTargetingVars = ?config(poc_targeting_vars, Config),
    CombinedVars = maps:merge(CommonPOCVars, POCTargetingVars),
    ExtraVars = extra_vars(poc_v11),
    run_dist_with_params(poc_dist_v11_test, Config, maps:merge(CombinedVars, ExtraVars)).
poc_dist_v11_cn_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    POCTargetingVars = ?config(poc_targeting_vars, Config),
    CombinedVars = maps:merge(CommonPOCVars, POCTargetingVars),
    ExtraVars = extra_vars(poc_v11),
    run_dist_with_params(poc_dist_v11_cn_test, Config, maps:merge(CombinedVars, ExtraVars)).
poc_dist_v11_partitioned_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    POCTargetingVars = ?config(poc_targeting_vars, Config),
    CombinedVars = maps:merge(CommonPOCVars, POCTargetingVars),
    ExtraVars = extra_vars(poc_v11),
    run_dist_with_params(poc_dist_v11_partitioned_test, Config, maps:merge(CombinedVars, ExtraVars)).
poc_dist_v11_partitioned_lying_test(Config) ->
    CommonPOCVars = common_poc_vars(Config),
    POCTargetingVars = ?config(poc_targeting_vars, Config),
    CombinedVars = maps:merge(CommonPOCVars, POCTargetingVars),
    ExtraVars = extra_vars(poc_v11),
    run_dist_with_params(poc_dist_v11_partitioned_lying_test, Config, maps:merge(CombinedVars, ExtraVars)).
basic_test(Config) ->
BaseDir = ?config(base_dir, Config),
{PrivKey, PubKey} = new_random_key(ecc_compact),
SigFun = libp2p_crypto:mk_sig_fun(PrivKey),
ECDHFun = libp2p_crypto:mk_ecdh_fun(PrivKey),
Opts = [
{key, {PubKey, SigFun, ECDHFun}},
{seed_nodes, []},
{port, 0},
{num_consensus_members, 7},
{base_dir, BaseDir}
],
{ok, _Sup} = blockchain_sup:start_link(Opts),
?assert(erlang:is_pid(blockchain_swarm:swarm())),
RandomKeys = miner_ct_utils:generate_keys(6),
Address = blockchain_swarm:pubkey_bin(),
ConsensusMembers = [
{Address, {PubKey, PrivKey, libp2p_crypto:mk_sig_fun(PrivKey)}}
] ++ RandomKeys,
Balance = 5000,
ConbaseTxns = [blockchain_txn_coinbase_v1:new(Addr, Balance)
|| {Addr, _} <- ConsensusMembers],
ConbaseDCTxns = [blockchain_txn_dc_coinbase_v1:new(Addr, Balance)
|| {Addr, _} <- ConsensusMembers],
GenConsensusGroupTx = blockchain_txn_consensus_group_v1:new([Addr || {Addr, _} <- ConsensusMembers], <<>>, 1, 0),
VarsKeys = libp2p_crypto:generate_keys(ecc_compact),
VarsTx = miner_ct_utils:make_vars(VarsKeys, #{?poc_challenge_interval => 20}),
Txs = ConbaseTxns ++ ConbaseDCTxns ++ [GenConsensusGroupTx] ++ VarsTx,
GenesisBlock = blockchain_block_v1:new_genesis_block(Txs),
ok = blockchain_worker:integrate_genesis_block(GenesisBlock),
Chain = blockchain_worker:blockchain(),
{ok, HeadBlock} = blockchain:head_block(Chain),
?assertEqual(blockchain_block:hash_block(GenesisBlock), blockchain_block:hash_block(HeadBlock)),
?assertEqual({ok, GenesisBlock}, blockchain:head_block(Chain)),
?assertEqual({ok, blockchain_block:hash_block(GenesisBlock)}, blockchain:genesis_hash(Chain)),
?assertEqual({ok, GenesisBlock}, blockchain:genesis_block(Chain)),
?assertEqual({ok, 1}, blockchain:height(Chain)),
LatLongs = [
{{37.780586, -122.469471}, {PrivKey, PubKey}},
{{37.780959, -122.467496}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.78101, -122.465372}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781179, -122.463226}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781281, -122.461038}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781349, -122.458892}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781468, -122.456617}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781637, -122.4543}, miner_ct_utils:new_random_key(ecc_compact)}
],
    %% Add a Gateway
AddGatewayTxs = miner_ct_utils:build_gateways(LatLongs, {PrivKey, PubKey}),
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, AddGatewayTxs),
true = miner_ct_utils:wait_until(fun() -> {ok, 2} =:= blockchain:height(Chain) end),
AssertLocaltionTxns = miner_ct_utils:build_asserts(LatLongs, {PrivKey, PubKey}),
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, AssertLocaltionTxns),
true = miner_ct_utils:wait_until(fun() -> {ok, 3} =:= blockchain:height(Chain) end),
{ok, Statem} = miner_poc_statem:start_link(#{delay => 5}),
?assertEqual(requesting, erlang:element(1, sys:get_state(Statem))),
    %% State is requesting
meck:new(blockchain_worker, [passthrough]),
meck:expect(blockchain_worker, submit_txn, fun(Txn, _) ->
miner_ct_utils:add_block(Chain, ConsensusMembers, [Txn])
end),
meck:new(miner_onion, [passthrough]),
meck:expect(miner_onion, dial_framed_stream, fun(_, _, _) ->
{ok, self()}
end),
meck:new(miner_onion_handler, [passthrough]),
meck:expect(miner_onion_handler, send, fun(Stream, _Onion) ->
?assertEqual(self(), Stream)
end),
meck:new(blockchain_txn_poc_receipts_v1, [passthrough]),
meck:expect(blockchain_txn_poc_receipts_v1, is_valid, fun(_, _) -> ok end),
?assertEqual(30, erlang:element(15, erlang:element(2, sys:get_state(Statem)))),
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, []),
    %% 3 previous blocks + 1 block to start process + 1 block with txn
true = miner_ct_utils:wait_until(fun() -> {ok, 5} =:= blockchain:height(Chain) end),
true = miner_ct_utils:wait_until(fun() ->
case sys:get_state(Statem) of
{receiving, _} -> true;
_Other -> false
end
end),
    %% Send 7 receipts and add blocks to pass timeout
?assertEqual(0, maps:size(erlang:element(11, erlang:element(2, sys:get_state(Statem))))),
Challengees = erlang:element(9, erlang:element(2, sys:get_state(Statem))),
ok = send_receipts(LatLongs, Challengees),
timer:sleep(100),
?assertEqual(receiving, erlang:element(6, erlang:element(2, sys:get_state(Statem)))),
lists:foreach(
fun(_) ->
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, []),
timer:sleep(100)
end,
lists:seq(1, 20)
),
?assertEqual(receiving, erlang:element(1, sys:get_state(Statem))),
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, []),
true = miner_ct_utils:wait_until(fun() ->
case sys:get_state(Statem) of
{waiting, _} -> true;
{submitting, _} -> true;
{requesting, _} -> true;
{_Other, _} -> false
end
end),
?assert(meck:validate(blockchain_worker)),
meck:unload(blockchain_worker),
?assert(meck:validate(miner_onion)),
meck:unload(miner_onion),
?assert(meck:validate(miner_onion_handler)),
meck:unload(miner_onion_handler),
?assert(meck:validate(blockchain_txn_poc_receipts_v1)),
meck:unload(blockchain_txn_poc_receipts_v1),
ok = gen_statem:stop(Statem),
ok.
basic_test_light_gateway(Config) ->
    %% when the POC statem is started it should default to requesting
    %% and remain in requesting even after it has exceeded the poc interval
BaseDir = ?config(base_dir, Config),
{PrivKey, PubKey} = new_random_key(ecc_compact),
SigFun = libp2p_crypto:mk_sig_fun(PrivKey),
ECDHFun = libp2p_crypto:mk_ecdh_fun(PrivKey),
Opts = [
{key, {PubKey, SigFun, ECDHFun}},
{seed_nodes, []},
{port, 0},
{num_consensus_members, 7},
{base_dir, BaseDir}
],
{ok, _Sup} = blockchain_sup:start_link(Opts),
?assert(erlang:is_pid(blockchain_swarm:swarm())),
RandomKeys = miner_ct_utils:generate_keys(6),
Address = blockchain_swarm:pubkey_bin(),
ConsensusMembers = [
{Address, {PubKey, PrivKey, libp2p_crypto:mk_sig_fun(PrivKey)}}
] ++ RandomKeys,
Balance = 5000,
ConbaseTxns = [blockchain_txn_coinbase_v1:new(Addr, Balance)
|| {Addr, _} <- ConsensusMembers],
ConbaseDCTxns = [blockchain_txn_dc_coinbase_v1:new(Addr, Balance)
|| {Addr, _} <- ConsensusMembers],
GenConsensusGroupTx = blockchain_txn_consensus_group_v1:new([Addr || {Addr, _} <- ConsensusMembers], <<>>, 1, 0),
VarsKeys = libp2p_crypto:generate_keys(ecc_compact),
ExtraVars = #{?poc_challenge_interval => 20},
ct:pal("extra vars: ~p", [ExtraVars]),
VarsTx = miner_ct_utils:make_vars(VarsKeys, ExtraVars),
Txs = ConbaseTxns ++ ConbaseDCTxns ++ [GenConsensusGroupTx] ++ VarsTx,
GenesisBlock = blockchain_block_v1:new_genesis_block(Txs),
ok = blockchain_worker:integrate_genesis_block(GenesisBlock),
Chain = blockchain_worker:blockchain(),
{ok, HeadBlock} = blockchain:head_block(Chain),
?assertEqual(blockchain_block:hash_block(GenesisBlock), blockchain_block:hash_block(HeadBlock)),
?assertEqual({ok, GenesisBlock}, blockchain:head_block(Chain)),
?assertEqual({ok, blockchain_block:hash_block(GenesisBlock)}, blockchain:genesis_hash(Chain)),
?assertEqual({ok, GenesisBlock}, blockchain:genesis_block(Chain)),
?assertEqual({ok, 1}, blockchain:height(Chain)),
LatLongs = [
{{37.780586, -122.469471}, {PrivKey, PubKey}},
{{37.780959, -122.467496}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.78101, -122.465372}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781179, -122.463226}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781281, -122.461038}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781349, -122.458892}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781468, -122.456617}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781637, -122.4543}, miner_ct_utils:new_random_key(ecc_compact)}
],
    %% Add a Gateway
AddGatewayTxs = miner_ct_utils:build_gateways(LatLongs, {PrivKey, PubKey}),
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, AddGatewayTxs),
true = miner_ct_utils:wait_until(fun() -> {ok, 2} =:= blockchain:height(Chain) end),
AssertLocaltionTxns = miner_ct_utils:build_asserts(LatLongs, {PrivKey, PubKey}),
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, AssertLocaltionTxns),
true = miner_ct_utils:wait_until(fun() -> {ok, 3} =:= blockchain:height(Chain) end),
Chain = blockchain_worker:blockchain(),
Ledger = blockchain:ledger(Chain),
    %% we do this before we start the poc statem
Ledger1 = blockchain_ledger_v1:new_context(Ledger),
{ok, GWInfo} = blockchain_gateway_cache:get(Address, Ledger1),
GWInfo2 = blockchain_ledger_gateway_v2:mode(light, GWInfo),
blockchain_ledger_v1:update_gateway(GWInfo2, Address, Ledger1),
ok = blockchain_ledger_v1:commit_context(Ledger1),
{ok, Statem} = miner_poc_statem:start_link(#{delay => 5}),
ct:pal("got state ~p", [sys:get_state(Statem)]),
?assertEqual(requesting, erlang:element(1, sys:get_state(Statem))),
State is requesting
meck:new(blockchain_worker, [passthrough]),
meck:expect(blockchain_worker, submit_txn, fun(Txn, _) ->
miner_ct_utils:add_block(Chain, ConsensusMembers, [Txn])
end),
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, []),
3 previous blocks + 1 block to start process ( no POC req txn will have been submitted by the statem )
true = miner_ct_utils:wait_until(fun() -> ct:pal("height: ~p", [blockchain:height(Chain)]), {ok, 4} =:= blockchain:height(Chain) end),
true = miner_ct_utils:wait_until(fun() ->
case sys:get_state(Statem) of
{requesting, _} -> true;
_Other -> ct:pal("got other state ~p", [_Other]), false
end
end),
?assertEqual(requesting, erlang:element(6, erlang:element(2, sys:get_state(Statem)))),
lists:foreach(
fun(_) ->
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, []),
timer:sleep(100)
end,
lists:seq(1, 25)
),
?assertEqual(requesting, erlang:element(1, sys:get_state(Statem))),
?assert(meck:validate(blockchain_worker)),
meck:unload(blockchain_worker),
ok = gen_statem:stop(Statem),
ok.
restart_test(Config) ->
BaseDir = ?config(base_dir, Config),
{PrivKey, PubKey} = new_random_key(ecc_compact),
SigFun = libp2p_crypto:mk_sig_fun(PrivKey),
ECDHFun = libp2p_crypto:mk_ecdh_fun(PrivKey),
Opts = [
{key, {PubKey, SigFun, ECDHFun}},
{seed_nodes, []},
{port, 0},
{num_consensus_members, 7},
{base_dir, BaseDir}
],
{ok, _Sup} = blockchain_sup:start_link(Opts),
?assert(erlang:is_pid(blockchain_swarm:swarm())),
RandomKeys = miner_ct_utils:generate_keys(6),
Address = blockchain_swarm:pubkey_bin(),
ConsensusMembers = [
{Address, {PubKey, PrivKey, libp2p_crypto:mk_sig_fun(PrivKey)}}
] ++ RandomKeys,
Balance = 5000,
ConbaseTxns = [blockchain_txn_coinbase_v1:new(Addr, Balance)
|| {Addr, _} <- ConsensusMembers],
ConbaseDCTxns = [blockchain_txn_dc_coinbase_v1:new(Addr, Balance)
|| {Addr, _} <- ConsensusMembers],
GenConsensusGroupTx = blockchain_txn_consensus_group_v1:new([Addr || {Addr, _} <- ConsensusMembers], <<>>, 1, 0),
VarsKeys = libp2p_crypto:generate_keys(ecc_compact),
VarsTx = miner_ct_utils:make_vars(VarsKeys, #{?poc_challenge_interval => 20}),
Txs = ConbaseTxns ++ ConbaseDCTxns ++ [GenConsensusGroupTx] ++ VarsTx,
GenesisBlock = blockchain_block_v1:new_genesis_block(Txs),
ok = blockchain_worker:integrate_genesis_block(GenesisBlock),
Chain = blockchain_worker:blockchain(),
{ok, HeadBlock} = blockchain:head_block(Chain),
?assertEqual(blockchain_block:hash_block(GenesisBlock), blockchain_block:hash_block(HeadBlock)),
?assertEqual({ok, GenesisBlock}, blockchain:head_block(Chain)),
?assertEqual({ok, blockchain_block:hash_block(GenesisBlock)}, blockchain:genesis_hash(Chain)),
?assertEqual({ok, GenesisBlock}, blockchain:genesis_block(Chain)),
?assertEqual({ok, 1}, blockchain:height(Chain)),
LatLongs = [
{{37.780586, -122.469471}, {PrivKey, PubKey}},
{{37.780959, -122.467496}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.78101, -122.465372}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781179, -122.463226}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781281, -122.461038}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781349, -122.458892}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781468, -122.456617}, miner_ct_utils:new_random_key(ecc_compact)},
{{37.781637, -122.4543}, miner_ct_utils:new_random_key(ecc_compact)}
],
Add a Gateway
AddGatewayTxs = miner_ct_utils:build_gateways(LatLongs, {PrivKey, PubKey}),
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, AddGatewayTxs),
true = miner_ct_utils:wait_until(fun() -> {ok, 2} =:= blockchain:height(Chain) end),
AssertLocaltionTxns = miner_ct_utils:build_asserts(LatLongs, {PrivKey, PubKey}),
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, AssertLocaltionTxns),
true = miner_ct_utils:wait_until(fun() -> {ok, 3} =:= blockchain:height(Chain) end),
{ok, Statem0} = miner_poc_statem:start_link(#{delay => 5,
base_dir => BaseDir}),
?assertEqual(requesting, erlang:element(1, sys:get_state(Statem0))),
State is requesting
meck:new(blockchain_worker, [passthrough]),
meck:expect(blockchain_worker, submit_txn, fun(Txn, _) ->
miner_ct_utils:add_block(Chain, ConsensusMembers, [Txn])
end),
meck:new(miner_onion, [passthrough]),
meck:expect(miner_onion, dial_framed_stream, fun(_, _, _) ->
{ok, self()}
end),
meck:new(miner_onion_handler, [passthrough]),
meck:expect(miner_onion_handler, send, fun(Stream, _Onion) ->
?assertEqual(self(), Stream)
end),
meck:new(blockchain_txn_poc_receipts_v1, [passthrough]),
meck:expect(blockchain_txn_poc_receipts_v1, is_valid, fun(_, _) -> ok end),
?assertEqual(30, erlang:element(15, erlang:element(2, sys:get_state(Statem0)))),
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, []),
3 previous blocks + 1 block to start process + 1 block with txn
true = miner_ct_utils:wait_until(fun() -> {ok, 5} =:= blockchain:height(Chain) end),
true = miner_ct_utils:wait_until(
fun() ->
case sys:get_state(Statem0) of
{receiving, _} -> true;
_Other ->
ct:pal("other state ~p", [_Other]),
false
end
end),
ok = gen_statem:stop(Statem0),
{ok, Statem1} = miner_poc_statem:start_link(#{delay => 5,
base_dir => BaseDir}),
?assertEqual(receiving, erlang:element(1, sys:get_state(Statem1))),
?assertEqual(receiving, erlang:element(6, erlang:element(2, sys:get_state(Statem1)))),
Send 7 receipts and add blocks to pass timeout
?assertEqual(0, maps:size(erlang:element(11, erlang:element(2, sys:get_state(Statem1))))),
Challengees = erlang:element(9, erlang:element(2, sys:get_state(Statem1))),
ok = send_receipts(LatLongs, Challengees),
timer:sleep(100),
?assertEqual(receiving, erlang:element(1, sys:get_state(Statem1))),
?assertEqual(receiving, erlang:element(6, erlang:element(2, sys:get_state(Statem1)))),
lists:foreach(
fun(_) ->
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, []),
timer:sleep(100)
end,
lists:seq(1, 10)
),
?assertEqual(receiving, erlang:element(1, sys:get_state(Statem1))),
ok = miner_ct_utils:add_block(Chain, ConsensusMembers, []),
true = miner_ct_utils:wait_until(
fun() ->
case sys:get_state(Statem1) of
{waiting, _} -> true;
{submitting, _} -> true;
{requesting, _} -> true;
{_Other, _} ->
ct:pal("other state ~p", [_Other]),
false
end
end),
?assert(meck:validate(blockchain_worker)),
meck:unload(blockchain_worker),
?assert(meck:validate(miner_onion)),
meck:unload(miner_onion),
?assert(meck:validate(miner_onion_handler)),
meck:unload(miner_onion_handler),
?assert(meck:validate(blockchain_txn_poc_receipts_v1)),
meck:unload(blockchain_txn_poc_receipts_v1),
ok = gen_statem:stop(Statem1),
ok.
send_receipts(LatLongs, Challengees) ->
lists:foreach(
fun({_LatLong, {PrivKey, PubKey}}) ->
Address = libp2p_crypto:pubkey_to_bin(PubKey),
SigFun = libp2p_crypto:mk_sig_fun(PrivKey),
{Mega, Sec, Micro} = os:timestamp(),
Timestamp = Mega * 1000000 * 1000000 + Sec * 1000000 + Micro,
case lists:keyfind(Address, 1, Challengees) of
{Address, LayerData} ->
Receipt = blockchain_poc_receipt_v1:new(Address, Timestamp, 0, LayerData, radio),
SignedReceipt = blockchain_poc_receipt_v1:sign(Receipt, SigFun),
miner_poc_statem:receipt(make_ref(), SignedReceipt, "/ip4/127.0.0.1/tcp/1234");
_ ->
ok
end
end,
LatLongs
).
new_random_key(Curve) ->
#{secret := PrivKey, public := PubKey} = libp2p_crypto:generate_keys(Curve),
{PrivKey, PubKey}.
run_dist_with_params(TestCase, Config, VarMap) ->
run_dist_with_params(TestCase, Config, VarMap, true).
run_dist_with_params(TestCase, Config, VarMap, Status) ->
ok = setup_dist_test(TestCase, Config, VarMap, Status),
ok = exec_dist_test(TestCase, Config, VarMap, Status),
Miners = ?config(miners, Config),
FinalReceiptMap = challenger_receipts_map(find_receipts(Miners)),
ct:pal("FinalReceiptMap: ~p", [FinalReceiptMap]),
ct:pal("FinalReceiptCounter: ~p", [receipt_counter(FinalReceiptMap)]),
ok.
exec_dist_test(poc_dist_v11_partitioned_lying_test, Config, VarMap, _Status) ->
do_common_partition_lying_checks(poc_dist_v11_partitioned_lying_test, Config, VarMap);
exec_dist_test(poc_dist_v10_partitioned_lying_test, Config, VarMap, _Status) ->
do_common_partition_lying_checks(poc_dist_v10_partitioned_lying_test, Config, VarMap);
exec_dist_test(poc_dist_v8_partitioned_lying_test, Config, VarMap, _Status) ->
do_common_partition_lying_checks(poc_dist_v8_partitioned_lying_test, Config, VarMap);
exec_dist_test(poc_dist_v7_partitioned_lying_test, Config, VarMap, _Status) ->
do_common_partition_lying_checks(poc_dist_v7_partitioned_lying_test, Config, VarMap);
exec_dist_test(poc_dist_v6_partitioned_lying_test, Config, VarMap, _Status) ->
do_common_partition_lying_checks(poc_dist_v6_partitioned_lying_test, Config, VarMap);
exec_dist_test(poc_dist_v5_partitioned_lying_test, Config, VarMap, _Status) ->
do_common_partition_lying_checks(poc_dist_v5_partitioned_lying_test, Config, VarMap);
exec_dist_test(poc_dist_v11_partitioned_test, Config, VarMap, _Status) ->
do_common_partition_checks(poc_dist_v11_partitioned_test, Config, VarMap);
exec_dist_test(poc_dist_v10_partitioned_test, Config, VarMap, _Status) ->
do_common_partition_checks(poc_dist_v10_partitioned_test, Config, VarMap);
exec_dist_test(poc_dist_v8_partitioned_test, Config, VarMap, _Status) ->
do_common_partition_checks(poc_dist_v8_partitioned_test, Config, VarMap);
exec_dist_test(poc_dist_v7_partitioned_test, Config, VarMap, _Status) ->
do_common_partition_checks(poc_dist_v7_partitioned_test, Config, VarMap);
exec_dist_test(poc_dist_v6_partitioned_test, Config, VarMap, _Status) ->
do_common_partition_checks(poc_dist_v6_partitioned_test, Config, VarMap);
exec_dist_test(poc_dist_v5_partitioned_test, Config, VarMap, _Status) ->
do_common_partition_checks(poc_dist_v5_partitioned_test, Config, VarMap);
exec_dist_test(poc_dist_v4_partitioned_test, Config, VarMap, _Status) ->
do_common_partition_checks(poc_dist_v4_partitioned_test, Config, VarMap);
exec_dist_test(TestCase, Config, VarMap, Status) ->
Miners = ?config(miners, Config),
InitialScores = gateway_scores(Config),
ct:pal("InitialScores: ~p", [InitialScores]),
case Status of
false ->
?assertEqual(false, check_all_miners_can_challenge(Miners));
true ->
?assert(check_all_miners_can_challenge(Miners)),
More specifically , first receipt can have a single element path ( beacon )
but subsequent ones must have more than one element in the path , reason being
the first receipt would have added witnesses and we should be able to make
case maps:get(?poc_version, VarMap, 1) of
V when V >= 10 ->
There are no paths in v11 or v10 for that matter , so we 'll consolidate
the checks for both poc - v10 and poc - v11 here
true = miner_ct_utils:wait_until(
fun() ->
If we have only one request , there 's no guarantee
C1 = check_multiple_requests(Miners),
C2 = maps:size(challenger_receipts_map(find_receipts(Miners))) > 0,
Check there are some poc rewards
RewardsMD = get_rewards_md(Config),
ct:pal("RewardsMD: ~p", [RewardsMD]),
C3 = check_non_empty_poc_rewards(take_poc_challengee_and_witness_rewards(RewardsMD)),
ct:pal("C1: ~p, C2: ~p, C3: ~p", [C1, C2, C3]),
C1 andalso C2 andalso C3
end,
300, 1000),
FinalRewards = get_rewards(Config),
ct:pal("FinalRewards: ~p", [FinalRewards]),
ok;
V when V > 3 ->
true = miner_ct_utils:wait_until(
fun() ->
If we have only one request , there 's no guarantee
C1 = check_multiple_requests(Miners),
C2 = (check_eventual_path_growth(TestCase, Miners) orelse
check_subsequent_path_growth(challenger_receipts_map(find_receipts(Miners)))),
Check there are some poc rewards
C3 = check_poc_rewards(get_rewards(Config)),
ct:pal("C1: ~p, C2: ~p, C3: ~p", [C1, C2, C3]),
C1 andalso C2 andalso C3
end,
120, 1000),
FinalScores = gateway_scores(Config),
ct:pal("FinalScores: ~p", [FinalScores]),
FinalRewards = get_rewards(Config),
ct:pal("FinalRewards: ~p", [FinalRewards]),
ok;
_ ->
?assert(check_atleast_k_receipts(Miners, length(Miners))),
ok
end
end,
ok.
setup_dist_test(TestCase, Config, VarMap, Status) ->
Miners = ?config(miners, Config),
{_, Locations} = lists:unzip(initialize_chain(Miners, TestCase, Config, VarMap)),
GenesisBlock = miner_ct_utils:get_genesis_block(Miners, Config),
ok = miner_ct_utils:load_genesis_block(GenesisBlock, Miners, Config),
the radio ports used to be fetched from as part of init_per_testcase
So now we have wait until the chain is up and has opened the port
true = miner_ct_utils:wait_for_lora_port(Miners, miner_lora, 30),
RadioPorts = lists:map(
fun(Miner) ->
{ok, RandomPort} = ct_rpc:call(Miner, miner_lora, port, []),
ct:pal("~p is listening for packet forwarder on ~p", [Miner, RandomPort]),
RandomPort
end,
Miners),
RadioPorts = [ P || { _ , { _ TP , P , _ JRPCP } } < - MinersAndPorts ] ,
{ok, _FakeRadioPid} = miner_fake_radio_backplane:start_link(maps:get(?poc_version, VarMap), 45000,
lists:zip(RadioPorts, Locations), Status),
miner_fake_radio_backplane ! go,
wait till height 10
ok = miner_ct_utils:wait_for_gte(height, Miners, 10, all, 30),
ok.
gen_locations(poc_dist_v11_partitioned_lying_test, _, _) ->
{?AUSTINLOCS1 ++ ?LALOCS, lists:duplicate(4, hd(?AUSTINLOCS1)) ++ lists:duplicate(4, hd(?LALOCS))};
gen_locations(poc_dist_v10_partitioned_lying_test, _, _) ->
{?AUSTINLOCS1 ++ ?LALOCS, lists:duplicate(4, hd(?AUSTINLOCS1)) ++ lists:duplicate(4, hd(?LALOCS))};
gen_locations(poc_dist_v8_partitioned_lying_test, _, _) ->
{?AUSTINLOCS1 ++ ?LALOCS, lists:duplicate(4, hd(?AUSTINLOCS1)) ++ lists:duplicate(4, hd(?LALOCS))};
gen_locations(poc_dist_v7_partitioned_lying_test, _, _) ->
{?SFLOCS ++ ?NYLOCS, lists:duplicate(4, hd(?SFLOCS)) ++ lists:duplicate(4, hd(?NYLOCS))};
gen_locations(poc_dist_v6_partitioned_lying_test, _, _) ->
{?SFLOCS ++ ?NYLOCS, lists:duplicate(4, hd(?SFLOCS)) ++ lists:duplicate(4, hd(?NYLOCS))};
gen_locations(poc_dist_v5_partitioned_lying_test, _, _) ->
{?SFLOCS ++ ?NYLOCS, lists:duplicate(4, hd(?SFLOCS)) ++ lists:duplicate(4, hd(?NYLOCS))};
gen_locations(poc_dist_v11_partitioned_test, _, _) ->
{?AUSTINLOCS1 ++ ?LALOCS, ?AUSTINLOCS1 ++ ?LALOCS};
gen_locations(poc_dist_v10_partitioned_test, _, _) ->
{?AUSTINLOCS1 ++ ?LALOCS, ?AUSTINLOCS1 ++ ?LALOCS};
gen_locations(poc_dist_v8_partitioned_test, _, _) ->
{?AUSTINLOCS1 ++ ?LALOCS, ?AUSTINLOCS1 ++ ?LALOCS};
gen_locations(poc_dist_v7_partitioned_test, _, _) ->
{?SFLOCS ++ ?NYLOCS, ?SFLOCS ++ ?NYLOCS};
gen_locations(poc_dist_v6_partitioned_test, _, _) ->
{?SFLOCS ++ ?NYLOCS, ?SFLOCS ++ ?NYLOCS};
gen_locations(poc_dist_v5_partitioned_test, _, _) ->
{?SFLOCS ++ ?NYLOCS, ?SFLOCS ++ ?NYLOCS};
gen_locations(poc_dist_v4_partitioned_test, _, _) ->
{?SFLOCS ++ ?NYLOCS, ?SFLOCS ++ ?NYLOCS};
gen_locations(poc_dist_v8_test, _, _) ->
{?AUSTINLOCS1 ++ ?AUSTINLOCS2, ?AUSTINLOCS1 ++ ?AUSTINLOCS2};
gen_locations(poc_dist_v11_cn_test, _, _) ->
{?CNLOCS1 ++ ?CNLOCS2, ?CNLOCS1 ++ ?CNLOCS2};
gen_locations(poc_dist_v11_test, _, _) ->
{?AUSTINLOCS1 ++ ?AUSTINLOCS2, ?AUSTINLOCS1 ++ ?AUSTINLOCS2};
gen_locations(poc_dist_v10_test, _, _) ->
{?AUSTINLOCS1 ++ ?AUSTINLOCS2, ?AUSTINLOCS1 ++ ?AUSTINLOCS2};
gen_locations(_TestCase, Addresses, VarMap) ->
LocationJitter = case maps:get(?poc_version, VarMap, 1) of
V when V > 3 ->
100;
_ ->
1000000
end,
Locs = lists:foldl(
fun(I, Acc) ->
[h3:from_geo({37.780586, -122.469470 + I/LocationJitter}, 13)|Acc]
end,
[],
lists:seq(1, length(Addresses))
),
{Locs, Locs}.
initialize_chain(Miners, TestCase, Config, VarMap) ->
Addresses = ?config(addresses, Config),
N = ?config(num_consensus_members, Config),
Curve = ?config(dkg_curve, Config),
Keys = libp2p_crypto:generate_keys(ecc_compact),
InitialVars = miner_ct_utils:make_vars(Keys, VarMap),
InitialPaymentTransactions = [blockchain_txn_coinbase_v1:new(Addr, 5000) || Addr <- Addresses],
{ActualLocations, ClaimedLocations} = gen_locations(TestCase, Addresses, VarMap),
AddressesWithLocations = lists:zip(Addresses, ActualLocations),
AddressesWithClaimedLocations = lists:zip(Addresses, ClaimedLocations),
InitialGenGatewayTxns = [blockchain_txn_gen_gateway_v1:new(Addr, Addr, Loc, 0) || {Addr, Loc} <- AddressesWithLocations],
InitialTransactions = InitialVars ++ InitialPaymentTransactions ++ InitialGenGatewayTxns,
{ok, DKGCompletedNodes} = miner_ct_utils:initial_dkg(Miners, InitialTransactions, Addresses, N, Curve),
_GenesisLoadResults = miner_ct_utils:integrate_genesis_block(hd(DKGCompletedNodes), Miners -- DKGCompletedNodes),
AddressesWithClaimedLocations.
find_requests(Miners) ->
[M | _] = Miners,
Chain = ct_rpc:call(M, blockchain_worker, blockchain, []),
Blocks = ct_rpc:call(M, blockchain, blocks, [Chain]),
lists:flatten(lists:foldl(fun({_Hash, Block}, Acc) ->
Txns = blockchain_block:transactions(Block),
Requests = lists:filter(fun(T) ->
blockchain_txn:type(T) == blockchain_txn_poc_request_v1
end,
Txns),
[Requests | Acc]
end,
[],
maps:to_list(Blocks))).
find_receipts(Miners) ->
[M | _] = Miners,
Chain = ct_rpc:call(M, blockchain_worker, blockchain, []),
Blocks = ct_rpc:call(M, blockchain, blocks, [Chain]),
lists:flatten(lists:foldl(fun({_Hash, Block}, Acc) ->
Txns = blockchain_block:transactions(Block),
Height = blockchain_block:height(Block),
Receipts = lists:filter(fun(T) ->
blockchain_txn:type(T) == blockchain_txn_poc_receipts_v1
end,
Txns),
TaggedReceipts = lists:map(fun(R) ->
{Height, R}
end,
Receipts),
TaggedReceipts ++ Acc
end,
[],
maps:to_list(Blocks))).
challenger_receipts_map(Receipts) ->
ReceiptMap = lists:foldl(
fun({_Height, Receipt}=R, Acc) ->
{ok, Challenger} = erl_angry_purple_tiger:animal_name(libp2p_crypto:bin_to_b58(blockchain_txn_poc_receipts_v1:challenger(Receipt))),
case maps:get(Challenger, Acc, undefined) of
undefined ->
maps:put(Challenger, [R], Acc);
List ->
maps:put(Challenger, lists:keysort(1, [R | List]), Acc)
end
end,
#{},
Receipts),
ct:pal("ReceiptMap: ~p", [ReceiptMap]),
ReceiptMap.
request_counter(TotalRequests) ->
lists:foldl(fun(Req, Acc) ->
{ok, Challenger} = erl_angry_purple_tiger:animal_name(libp2p_crypto:bin_to_b58(blockchain_txn_poc_request_v1:challenger(Req))),
case maps:get(Challenger, Acc, undefined) of
undefined ->
maps:put(Challenger, 1, Acc);
N when N > 0 ->
maps:put(Challenger, N + 1, Acc);
_ ->
maps:put(Challenger, 1, Acc)
end
end,
#{},
TotalRequests).
check_all_miners_can_challenge(Miners) ->
N = length(Miners),
RequestCounter = request_counter(find_requests(Miners)),
ct:pal("RequestCounter: ~p~n", [RequestCounter]),
N == maps:size(RequestCounter).
check_eventual_path_growth(TestCase, Miners) ->
ReceiptMap = challenger_receipts_map(find_receipts(Miners)),
ct:pal("ReceiptMap: ~p", [ReceiptMap]),
check_growing_paths(TestCase, ReceiptMap, active_gateways(Miners), false).
check_partitioned_path_growth(_TestCase, Miners) ->
ReceiptMap = challenger_receipts_map(find_receipts(Miners)),
ct:pal("ReceiptMap: ~p", [ReceiptMap]),
check_subsequent_path_growth(ReceiptMap).
check_partitioned_lying_path_growth(_TestCase, Miners) ->
ReceiptMap = challenger_receipts_map(find_receipts(Miners)),
ct:pal("ReceiptMap: ~p", [ReceiptMap]),
not check_subsequent_path_growth(ReceiptMap).
check_growing_paths(TestCase, ReceiptMap, ActiveGateways, PartitionFlag) ->
Results = lists:foldl(fun({_Challenger, TaggedReceipts}, Acc) ->
[{_, FirstReceipt} | Rest] = TaggedReceipts,
It 's possible that the first receipt itself has multiple elements path , I think
RemainingGrowthCond = case PartitionFlag of
true ->
check_remaining_partitioned_grow(TestCase, Rest, ActiveGateways);
false ->
check_remaining_grow(Rest)
end,
Res = length(blockchain_txn_poc_receipts_v1:path(FirstReceipt)) >= 1 andalso RemainingGrowthCond,
[Res | Acc]
end,
[],
maps:to_list(ReceiptMap)),
lists:all(fun(R) -> R == true end, Results) andalso maps:size(ReceiptMap) == maps:size(ActiveGateways).
check_remaining_grow([]) ->
false;
check_remaining_grow(TaggedReceipts) ->
Res = lists:map(fun({_, Receipt}) ->
length(blockchain_txn_poc_receipts_v1:path(Receipt)) > 1
end,
TaggedReceipts),
lists:any(fun(R) -> R == true end, Res).
check_remaining_partitioned_grow(_TestCase, [], _ActiveGateways) ->
false;
check_remaining_partitioned_grow(TestCase, TaggedReceipts, ActiveGateways) ->
Res = lists:map(fun({_, Receipt}) ->
Path = blockchain_txn_poc_receipts_v1:path(Receipt),
PathLength = length(Path),
ct:pal("PathLength: ~p", [PathLength]),
PathLength > 1 andalso PathLength =< 4 andalso check_partitions(TestCase, Path, ActiveGateways)
end,
TaggedReceipts),
ct:pal("Res: ~p", [Res]),
lists:any(fun(R) -> R == true end, Res).
check_partitions(TestCase, Path, ActiveGateways) ->
PathLocs = sets:from_list(lists:foldl(fun(Element, Acc) ->
Challengee = blockchain_poc_path_element_v1:challengee(Element),
ChallengeeGw = maps:get(Challengee, ActiveGateways),
ChallengeeLoc = blockchain_ledger_gateway_v2:location(ChallengeeGw),
[ChallengeeLoc | Acc]
end,
[],
Path)),
{LocSet1, LocSet2} = location_sets(TestCase),
case sets:is_subset(PathLocs, LocSet1) of
true ->
sets:is_disjoint(PathLocs, LocSet2);
false ->
Path is not in LocSet1 , check that it 's only in LocSet2
sets:is_subset(PathLocs, LocSet2) andalso sets:is_disjoint(PathLocs, LocSet1)
end.
check_multiple_requests(Miners) ->
RequestCounter = request_counter(find_requests(Miners)),
ct:pal("RequestCounter: ~p", [RequestCounter]),
lists:sum(maps:values(RequestCounter)) > length(Miners).
check_atleast_k_receipts(Miners, K) ->
ReceiptMap = challenger_receipts_map(find_receipts(Miners)),
TotalReceipts = lists:foldl(fun(ReceiptList, Acc) ->
length(ReceiptList) + Acc
end,
0,
maps:values(ReceiptMap)),
ct:pal("TotalReceipts: ~p", [TotalReceipts]),
TotalReceipts >= K.
receipt_counter(ReceiptMap) ->
lists:foldl(fun({Name, ReceiptList}, Acc) ->
Counts = lists:map(fun({Height, ReceiptTxn}) ->
{Height, length(blockchain_txn_poc_receipts_v1:path(ReceiptTxn))}
end,
ReceiptList),
maps:put(Name, Counts, Acc)
end,
#{},
maps:to_list(ReceiptMap)).
active_gateways([Miner | _]=_Miners) ->
Chain = ct_rpc:call(Miner, blockchain_worker, blockchain, []),
Ledger = ct_rpc:call(Miner, blockchain, ledger, [Chain]),
ct_rpc:call(Miner, blockchain_ledger_v1, active_gateways, [Ledger]).
gateway_scores(Config) ->
[Miner | _] = ?config(miners, Config),
Addresses = ?config(addresses, Config),
Chain = ct_rpc:call(Miner, blockchain_worker, blockchain, []),
Ledger = ct_rpc:call(Miner, blockchain, ledger, [Chain]),
lists:foldl(fun(Address, Acc) ->
{ok, S} = ct_rpc:call(Miner, blockchain_ledger_v1, gateway_score, [Address, Ledger]),
{ok, Name} = erl_angry_purple_tiger:animal_name(libp2p_crypto:bin_to_b58(Address)),
maps:put(Name, S, Acc)
end,
#{},
Addresses).
common_poc_vars(Config) ->
N = ?config(num_consensus_members, Config),
BlockTime = ?config(block_time, Config),
Interval = ?config(election_interval, Config),
BatchSize = ?config(batch_size, Config),
Curve = ?config(dkg_curve, Config),
Do n't put the poc version here
#{?block_time => BlockTime,
?election_interval => Interval,
?num_consensus_members => N,
?batch_size => BatchSize,
?dkg_curve => Curve,
TODO validators
?poc_challenge_interval => 15,
?poc_v4_exclusion_cells => 10,
?poc_v4_parent_res => 11,
?poc_v4_prob_bad_rssi => 0.01,
?poc_v4_prob_count_wt => 0.3,
?poc_v4_prob_good_rssi => 1.0,
?poc_v4_prob_no_rssi => 0.5,
?poc_v4_prob_rssi_wt => 0.3,
?poc_v4_prob_time_wt => 0.3,
?poc_v4_randomness_wt => 0.1,
?poc_v4_target_challenge_age => 300,
?poc_v4_target_exclusion_cells => 6000,
?poc_v4_target_prob_edge_wt => 0.2,
?poc_v4_target_prob_score_wt => 0.8,
?poc_v4_target_score_curve => 5,
?poc_target_hex_parent_res => 5,
?poc_v5_target_prob_randomness_wt => 0.0,
?poc_witness_consideration_limit => 20}.
do_common_partition_checks(TestCase, Config, VarMap) ->
Miners = ?config(miners, Config),
InitialScores = gateway_scores(Config),
ct:pal("InitialScores: ~p", [InitialScores]),
true = miner_ct_utils:wait_until(
fun() ->
case maps:get(poc_version, VarMap, 1) of
V when V >= 10 ->
There is no path to check , so do both poc - v10 and poc - v11 checks here
C1 = check_all_miners_can_challenge(Miners),
If we have only one request , there 's no guarantee
C2 = check_multiple_requests(Miners),
Check there are some poc rewards
RewardsMD = get_rewards_md(Config),
ct:pal("RewardsMD: ~p", [RewardsMD]),
C3 = check_non_empty_poc_rewards(take_poc_challengee_and_witness_rewards(RewardsMD)),
ct:pal("C1: ~p, C2: ~p, C3: ~p", [C1, C2, C3]),
C1 andalso C2 andalso C3;
_ ->
C1 = check_all_miners_can_challenge(Miners),
If we have only one request , there 's no guarantee
C2 = check_multiple_requests(Miners),
Since we have two static location partitioned networks , we
C3 = check_partitioned_path_growth(TestCase, Miners),
Check there are some poc rewards
C4 = check_poc_rewards(get_rewards(Config)),
ct:pal("all can challenge: ~p, multiple requests: ~p, paths grow: ~p, rewards given: ~p", [C1, C2, C3, C4]),
C1 andalso C2 andalso C3 andalso C4
end
end, 60, 5000),
FinalScores = gateway_scores(Config),
ct:pal("FinalScores: ~p", [FinalScores]),
FinalRewards = get_rewards(Config),
ct:pal("FinalRewards: ~p", [FinalRewards]),
ok.
balances(Config) ->
[Miner | _] = ?config(miners, Config),
Addresses = ?config(addresses, Config),
[miner_ct_utils:get_balance(Miner, Addr) || Addr <- Addresses].
take_poc_challengee_and_witness_rewards(RewardsMD) ->
only take poc_challengee and poc_witness rewards
POCRewards = lists:foldl(
fun({Ht, MDMap}, Acc) ->
[{Ht, maps:with([poc_challengee, poc_witness], MDMap)} | Acc]
end,
[],
RewardsMD),
ct:pal("POCRewards: ~p", [POCRewards]),
POCRewards.
check_non_empty_poc_rewards(POCRewards) ->
lists:any(
fun({_Ht, #{poc_challengee := R1, poc_witness := R2}}) ->
maps:size(R1) > 0 andalso maps:size(R2) > 0
end,
POCRewards).
get_rewards_md(Config) ->
[Miner | _] = ?config(miners, Config),
Chain = ct_rpc:call(Miner, blockchain_worker, blockchain, []),
{ok, Head} = ct_rpc:call(Miner, blockchain, head_block, [Chain]),
Filter = fun(T) -> blockchain_txn:type(T) == blockchain_txn_rewards_v2 end,
Fun = fun(Block, Acc) ->
case blockchain_utils:find_txn(Block, Filter) of
[T] ->
Start = blockchain_txn_rewards_v2:start_epoch(T),
End = blockchain_txn_rewards_v2:end_epoch(T),
MDRes = ct_rpc:call(Miner, blockchain_txn_rewards_v2, calculate_rewards_metadata, [
Start,
End,
Chain
]),
case MDRes of
{ok, MD} ->
[{blockchain_block:height(Block), MD} | Acc];
_ ->
Acc
end;
_ ->
Acc
end
end,
Res = ct_rpc:call(Miner, blockchain, fold_chain, [Fun, [], Head, Chain]),
Res.
get_rewards(Config) ->
get_rewards(Config, blockchain_txn_rewards_v1).
get_rewards(Config, RewardType) ->
[Miner | _] = ?config(miners, Config),
Chain = ct_rpc:call(Miner, blockchain_worker, blockchain, []),
Blocks = ct_rpc:call(Miner, blockchain, blocks, [Chain]),
maps:fold(fun(_, Block, Acc) ->
case blockchain_block:transactions(Block) of
[] ->
Acc;
Ts ->
Rewards = lists:filter(fun(T) ->
blockchain_txn:type(T) == RewardType
end,
Ts),
lists:flatten([Rewards | Acc])
end
end,
[],
Blocks).
check_poc_rewards(RewardsTxns) ->
RewardTypes = lists:foldl(fun(RewardTxn, Acc) ->
Types = [blockchain_txn_reward_v1:type(R) || R <- blockchain_txn_rewards_v1:rewards(RewardTxn)],
lists:flatten([Types | Acc])
end,
[],
RewardsTxns),
lists:any(fun(T) ->
T == poc_challengees orelse T == poc_witnesses
end,
RewardTypes).
do_common_partition_lying_checks(TestCase, Config, VarMap) ->
Miners = ?config(miners, Config),
InitialScores = gateway_scores(Config),
ct:pal("InitialScores: ~p", [InitialScores]),
InitialBalances = balances(Config),
ct:pal("InitialBalances: ~p", [InitialBalances]),
true = miner_ct_utils:wait_until(
fun() ->
case maps:get(poc_version, VarMap, 1) of
V when V > 10 ->
C1 = check_all_miners_can_challenge(Miners),
If we have only one request , there 's no guarantee
C2 = check_multiple_requests(Miners),
C1 andalso C2;
_ ->
C1 = check_all_miners_can_challenge(Miners),
If we have only one request , there 's no guarantee
C2 = check_multiple_requests(Miners),
Since we have two static location partitioned networks , where
never get longer than 1
C3 = check_partitioned_lying_path_growth(TestCase, Miners),
C1 andalso C2 andalso C3
end
end,
40, 5000),
FinalScores = gateway_scores(Config),
ct:pal("FinalScores: ~p", [FinalScores]),
Rewards = get_rewards(Config),
ct:pal("Rewards: ~p", [Rewards]),
FinalBalances = balances(Config),
ct:pal("FinalBalances: ~p", [FinalBalances]),
?assert(not check_poc_rewards(Rewards)),
ok.
extra_vars(poc_v11) ->
POCVars = maps:merge(extra_vars(poc_v10), miner_poc_test_utils:poc_v11_vars()),
RewardVars = #{reward_version => 5, rewards_txn_version => 2, poc_witness_consideration_limit => 20},
maps:merge(POCVars, RewardVars);
extra_vars(poc_v10) ->
maps:merge(extra_poc_vars(),
#{?poc_version => 10,
?data_aggregation_version => 2,
?consensus_percent => 0.06,
?dc_percent => 0.325,
?poc_challengees_percent => 0.18,
?poc_challengers_percent => 0.0095,
?poc_witnesses_percent => 0.0855,
?securities_percent => 0.34,
?reward_version => 5,
?rewards_txn_version => 2
});
extra_vars(poc_v8) ->
maps:merge(extra_poc_vars(), #{?poc_version => 8});
extra_vars(_) ->
{error, poc_v8_and_above_only}.
location_sets(poc_dist_v11_partitioned_test) ->
{sets:from_list(?AUSTINLOCS1), sets:from_list(?LALOCS)};
location_sets(poc_dist_v10_partitioned_test) ->
{sets:from_list(?AUSTINLOCS1), sets:from_list(?LALOCS)};
location_sets(poc_dist_v8_partitioned_test) ->
{sets:from_list(?AUSTINLOCS1), sets:from_list(?LALOCS)};
location_sets(_TestCase) ->
{sets:from_list(?SFLOCS), sets:from_list(?NYLOCS)}.
extra_poc_vars() ->
#{?poc_good_bucket_low => -132,
?poc_good_bucket_high => -80,
?poc_v5_target_prob_randomness_wt => 1.0,
?poc_v4_target_prob_edge_wt => 0.0,
?poc_v4_target_prob_score_wt => 0.0,
?poc_v4_prob_rssi_wt => 0.0,
?poc_v4_prob_time_wt => 0.0,
?poc_v4_randomness_wt => 0.5,
?poc_v4_prob_count_wt => 0.0,
?poc_centrality_wt => 0.5,
?poc_max_hop_cells => 2000}.
check_subsequent_path_growth(ReceiptMap) ->
PathLengths = [ length(blockchain_txn_poc_receipts_v1:path(Txn)) || {_, Txn} <- lists:flatten(maps:values(ReceiptMap)) ],
ct:pal("PathLengths: ~p", [PathLengths]),
lists:any(fun(L) -> L > 1 end, PathLengths).
|
6a4883b003da346c24f46bf5ad84ef176e5d679082d25a50edf1276b769e3143 | fossas/fossa-cli | PipenvSpec.hs | # LANGUAGE DataKinds #
# LANGUAGE QuasiQuotes #
module Analysis.Python.PipenvSpec (spec) where
import Analysis.FixtureExpectationUtils
import Analysis.FixtureUtils
import Path
import Strategy.Python.Pipenv qualified as Pipenv
import Test.Hspec
import Types (DiscoveredProjectType (PipenvProjectType), GraphBreadth (Complete))
pipenv :: AnalysisTestFixture (Pipenv.PipenvProject)
pipenv =
AnalysisTestFixture
"pipenv"
Pipenv.discover
LocalEnvironment
Nothing
$ FixtureArtifact
""
[reldir|python/pipenv/pipenv/|]
[reldir|pipenv-2021.11.23/|]
spec :: Spec
spec = do
testSuiteDepResultSummary pipenv PipenvProjectType (DependencyResultsSummary 90 90 0 1 Complete)
| null | https://raw.githubusercontent.com/fossas/fossa-cli/e1edac30265888f0252c58598f63b9e475813128/integration-test/Analysis/Python/PipenvSpec.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE QuasiQuotes #
module Analysis.Python.PipenvSpec (spec) where
import Analysis.FixtureExpectationUtils
import Analysis.FixtureUtils
import Path
import Strategy.Python.Pipenv qualified as Pipenv
import Test.Hspec
import Types (DiscoveredProjectType (PipenvProjectType), GraphBreadth (Complete))
pipenv :: AnalysisTestFixture (Pipenv.PipenvProject)
pipenv =
AnalysisTestFixture
"pipenv"
Pipenv.discover
LocalEnvironment
Nothing
$ FixtureArtifact
""
[reldir|python/pipenv/pipenv/|]
[reldir|pipenv-2021.11.23/|]
spec :: Spec
spec = do
testSuiteDepResultSummary pipenv PipenvProjectType (DependencyResultsSummary 90 90 0 1 Complete)
| |
f47b8d3feb2b7150663d82145b908f13cc9a4e2a7ad959f7d5d186d1af7e98b8 | clojure/core.rrb-vector | rrb_vector.cljs | Copyright ( c ) and contributors . All rights reserved .
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (-1.0.php)
; which can be found in the file epl-v10.html at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
(ns clojure.core.rrb-vector
"An implementation of the confluently persistent vector data
structure introduced in Bagwell, Rompf, \"RRB-Trees: Efficient
Immutable Vectors\", EPFL-REPORT-169879, September, 2011.
RRB-Trees build upon Clojure's PersistentVectors, adding logarithmic
time concatenation and slicing.
The main API entry points are clojure.core.rrb-vector/catvec,
performing vector concatenation, and clojure.core.rrb-vector/subvec,
which produces a new vector containing the appropriate subrange of
the input vector (in contrast to cljs.core/subvec, which returns a
view on the input vector).
The implementation allows for seamless interoperability with
cljs.core/PersistentVector and cljs.core.Subvec instances:
clojure.core.rrb-vector/catvec and clojure.core.rrb-vector/subvec
convert their inputs to clojure.core.rrb-vector.rrbt/Vector
instances whenever necessary (this is a very fast constant time
for it is O(log n ) , where n
is the size of the underlying vector).
clojure.core.rrb-vector also exports its own versions of vector and
vec which always produce clojure.core.rrb-vector.rrbt.Vector
instances."
{:author "Michał Marczyk"}
(:refer-clojure :exclude [vector vec subvec])
(:require [clojure.core.rrb-vector.protocols :refer [-slicev -splicev]]
[clojure.core.rrb-vector.rrbt :refer [-as-rrbt]]
clojure.core.rrb-vector.interop)
(:require-macros [clojure.core.rrb-vector.macros :refer [gen-vector-method]]))
(defn catvec
"Concatenates the given vectors in logarithmic time."
([]
[])
([v1]
v1)
([v1 v2]
(-splicev v1 v2))
([v1 v2 v3]
(-splicev (-splicev v1 v2) v3))
([v1 v2 v3 v4]
(-splicev (-splicev v1 v2) (-splicev v3 v4)))
([v1 v2 v3 v4 & vn]
(-splicev (-splicev (-splicev v1 v2) (-splicev v3 v4))
(apply catvec vn))))
(defn subvec
"Returns a new vector containing the elements of the given vector v
lying between the start (inclusive) and end (exclusive) indices in
logarithmic time. end defaults to end of vector. The resulting
vector shares structure with the original, but does not hold on to
any elements of the original vector lying outside the given index
range."
([v start]
(-slicev v start (count v)))
([v start end]
(-slicev v start end)))
(defn vector
"Creates a new vector containing the args."
([]
(gen-vector-method))
([x1]
(gen-vector-method x1))
([x1 x2]
(gen-vector-method x1 x2))
([x1 x2 x3]
(gen-vector-method x1 x2 x3))
([x1 x2 x3 x4]
(gen-vector-method x1 x2 x3 x4))
([x1 x2 x3 x4 & xn]
(into (vector x1 x2 x3 x4) xn)
#_
(loop [v (vector x1 x2 x3 x4)
xn xn]
(if xn
(recur (-conj ^not-native v (first xn))
(next xn))
v))))
(defn vec
"Returns a vector containing the contents of coll.
If coll is a vector, returns an RRB vector using the internal tree
of coll."
[coll]
(if (vector? coll)
(-as-rrbt coll)
(apply vector coll)))
| null | https://raw.githubusercontent.com/clojure/core.rrb-vector/88c2f814b47c0bbc4092dad82be2ec783ed2961f/src/main/cljs/clojure/core/rrb_vector.cljs | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software. | Copyright ( c ) and contributors . All rights reserved .
(ns clojure.core.rrb-vector
"An implementation of the confluently persistent vector data
structure introduced in Bagwell, Rompf, \"RRB-Trees: Efficient
Immutable Vectors\", EPFL-REPORT-169879, September, 2011.
RRB-Trees build upon Clojure's PersistentVectors, adding logarithmic
time concatenation and slicing.
The main API entry points are clojure.core.rrb-vector/catvec,
performing vector concatenation, and clojure.core.rrb-vector/subvec,
which produces a new vector containing the appropriate subrange of
the input vector (in contrast to cljs.core/subvec, which returns a
view on the input vector).
The implementation allows for seamless interoperability with
cljs.core/PersistentVector and cljs.core.Subvec instances:
clojure.core.rrb-vector/catvec and clojure.core.rrb-vector/subvec
convert their inputs to clojure.core.rrb-vector.rrbt/Vector
instances whenever necessary (this is a very fast constant time
for it is O(log n ) , where n
is the size of the underlying vector).
clojure.core.rrb-vector also exports its own versions of vector and
vec which always produce clojure.core.rrb-vector.rrbt.Vector
instances."
{:author "Michał Marczyk"}
(:refer-clojure :exclude [vector vec subvec])
(:require [clojure.core.rrb-vector.protocols :refer [-slicev -splicev]]
[clojure.core.rrb-vector.rrbt :refer [-as-rrbt]]
clojure.core.rrb-vector.interop)
(:require-macros [clojure.core.rrb-vector.macros :refer [gen-vector-method]]))
(defn catvec
"Concatenates the given vectors in logarithmic time."
([]
[])
([v1]
v1)
([v1 v2]
(-splicev v1 v2))
([v1 v2 v3]
(-splicev (-splicev v1 v2) v3))
([v1 v2 v3 v4]
(-splicev (-splicev v1 v2) (-splicev v3 v4)))
([v1 v2 v3 v4 & vn]
(-splicev (-splicev (-splicev v1 v2) (-splicev v3 v4))
(apply catvec vn))))
(defn subvec
"Returns a new vector containing the elements of the given vector v
lying between the start (inclusive) and end (exclusive) indices in
logarithmic time. end defaults to end of vector. The resulting
vector shares structure with the original, but does not hold on to
any elements of the original vector lying outside the given index
range."
([v start]
(-slicev v start (count v)))
([v start end]
(-slicev v start end)))
(defn vector
"Creates a new vector containing the args."
([]
(gen-vector-method))
([x1]
(gen-vector-method x1))
([x1 x2]
(gen-vector-method x1 x2))
([x1 x2 x3]
(gen-vector-method x1 x2 x3))
([x1 x2 x3 x4]
(gen-vector-method x1 x2 x3 x4))
([x1 x2 x3 x4 & xn]
(into (vector x1 x2 x3 x4) xn)
#_
(loop [v (vector x1 x2 x3 x4)
xn xn]
(if xn
(recur (-conj ^not-native v (first xn))
(next xn))
v))))
(defn vec
"Returns a vector containing the contents of coll.
If coll is a vector, returns an RRB vector using the internal tree
of coll."
[coll]
(if (vector? coll)
(-as-rrbt coll)
(apply vector coll)))
|
8a43e3a3ce012f9bb24f2cc84983322f17f310c76989bb737735cc0a5cb17bd1 | binsec/haunted | path_predicate_env.ml | (**************************************************************************)
This file is part of BINSEC .
(* *)
Copyright ( C ) 2016 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
let to_stringmap set =
let open Formula in
Formula.VarSet.fold
(fun elt map ->
match elt with
| BlVar bl -> Basic_types.String.Set.add bl.bl_name map
| BvVar bv -> Basic_types.String.Set.add bv.bv_name map
| AxVar ax -> Basic_types.String.Set.add ax.ax_name map)
set Basic_types.String.Set.empty
module SymbVar = Basic_types.String.Map
type path_t = Formula.formula
type input_t = Formula.VarSet.t
type hybrid_mem_chunk = {
base: Formula.bv_term;
name: string;
mapping: Formula.bv_term Basic_types.Addr64.Map.t;
}
type hybrid_mem_t = hybrid_mem_chunk list
type formula = {
vars : (int * int * Formula.bv_term) SymbVar.t; (* Symbolic variables *)
varsindex: int SymbVar.t;
path : path_t; (* list of smt_expr representing constraints on the path *)
memory : Formula.ax_term; (* current memory *)
inputs : input_t; (* Free variables of the formula *)
addr_size : int; (* Size of addresses *)
(* Statistic fields *)
nb_input: int;
nb_load: int;
nb_store: int;
nb_let: int;
nb_op: int;
nb_constraint: int;
(* Optimisation fields *)
global_counter: int; (* Used to identify in a uniq manner every entries of the formula *)
optim_cst_prop: bool;
optim_rebase: bool;
aux_optim_rebase : bool; (* Rebase var1=var2 by var2 (not wished in some cases) *)
optim_row: bool; (* Read over Write (for memory) *)
optim_row_k : int;
optim_rowplus: bool;
hybrid_memory: hybrid_mem_t;
optim_eq_prop: bool;
optim_map: (int * (Formula.def * Formula.bl_term list)) SymbVar.t;
(* Map for optimisation (quick access to some variable expression) (int allow to keep the order of elements) *)
pushed_variable: Basic_types.String.Set.t; (* Hold variable+inputs already sent to the solver (use for incremental solving) *)
last_constraint: Formula.bl_term;
}
let empty_formula ?(cst_pro=false) ?(rebase=false) ?(row=false)
?(aux_rebase=true) ?(row_plus=false) ?(eq_prop=true) addr_size =
let inputs = Formula.(VarSet.singleton (AxVar (ax_var "memory" addr_size 8))) in
{vars = SymbVar.empty;
varsindex = SymbVar.add "memory" 0 SymbVar.empty;
path = Formula.empty;
inputs;
addr_size;
memory = Formula.(mk_ax_var (ax_var "memory" addr_size 8));
nb_input = 0;
nb_load = 0;
nb_store = 0;
nb_let = 0;
nb_op = 0;
nb_constraint = 0;
global_counter = 0;
optim_cst_prop = cst_pro;
optim_rebase = rebase;
aux_optim_rebase = aux_rebase;
optim_rowplus = row_plus;
optim_row = row;
optim_row_k = 150;
optim_eq_prop = eq_prop;
optim_map = SymbVar.empty;
pushed_variable = Basic_types.String.Set.empty;
hybrid_memory = [];
last_constraint = Formula.mk_bl_true;
}
type t = {
mutable formula : formula;
mutable toplevel: bool;
mutable config: Config_piqi.configuration;
mutable analysis: dse_analysis_sig_t;
}
and dse_analysis_sig_t =
< get_taint : unit -> Tainting.tainting_engine;
is_taint_computed : unit -> bool;
get_current_dbacodeaddress : unit -> Dba.address;
get_current_concrete_infos : unit -> Trace_type.trace_concrete_infos list;
concretize_expr_bv : Dba.Expr.t -> ?is_lhs:bool -> t -> Bitvector.t;
concretize_cond : Dba.Expr.t -> t -> bool;
expr_to_smt :
Dba.Expr.t -> ?apply_cs:bool -> t ->
Formula.bv_term * Formula.bl_term list;
compute : int;
solve_predicate :
Formula.bl_term ->
?print_stat:bool -> ?name:string -> ?push:bool -> ?pop:bool -> ?prek:int ->
?pruning:bool -> ?get_model:bool -> t ->
Formula.status * Smt_model.t * float;
exec : Dba_types.Statement.t -> t -> unit
>
let new_env analysis config ?(cst_pro=false) ?(rebase=false) ?(row=false)
?(row_plus=false) ?(eq_prop=false) (addr_size:int) =
let f = empty_formula ~cst_pro ~rebase ~row ~row_plus ~eq_prop addr_size in
{formula=f; toplevel=true; config; analysis=(analysis :> dse_analysis_sig_t)}
| null | https://raw.githubusercontent.com/binsec/haunted/7ffc5f4072950fe138f53fe953ace98fff181c73/src/dynamic/base/path_predicate_env.ml | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
Symbolic variables
list of smt_expr representing constraints on the path
current memory
Free variables of the formula
Size of addresses
Statistic fields
Optimisation fields
Used to identify in a uniq manner every entries of the formula
Rebase var1=var2 by var2 (not wished in some cases)
Read over Write (for memory)
Map for optimisation (quick access to some variable expression) (int allow to keep the order of elements)
Hold variable+inputs already sent to the solver (use for incremental solving) | This file is part of BINSEC .
Copyright ( C ) 2016 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
let to_stringmap set =
let open Formula in
Formula.VarSet.fold
(fun elt map ->
match elt with
| BlVar bl -> Basic_types.String.Set.add bl.bl_name map
| BvVar bv -> Basic_types.String.Set.add bv.bv_name map
| AxVar ax -> Basic_types.String.Set.add ax.ax_name map)
set Basic_types.String.Set.empty
module SymbVar = Basic_types.String.Map
type path_t = Formula.formula
type input_t = Formula.VarSet.t
type hybrid_mem_chunk = {
base: Formula.bv_term;
name: string;
mapping: Formula.bv_term Basic_types.Addr64.Map.t;
}
type hybrid_mem_t = hybrid_mem_chunk list
type formula = {
varsindex: int SymbVar.t;
nb_input: int;
nb_load: int;
nb_store: int;
nb_let: int;
nb_op: int;
nb_constraint: int;
optim_cst_prop: bool;
optim_rebase: bool;
optim_row_k : int;
optim_rowplus: bool;
hybrid_memory: hybrid_mem_t;
optim_eq_prop: bool;
optim_map: (int * (Formula.def * Formula.bl_term list)) SymbVar.t;
last_constraint: Formula.bl_term;
}
let empty_formula ?(cst_pro=false) ?(rebase=false) ?(row=false)
?(aux_rebase=true) ?(row_plus=false) ?(eq_prop=true) addr_size =
let inputs = Formula.(VarSet.singleton (AxVar (ax_var "memory" addr_size 8))) in
{vars = SymbVar.empty;
varsindex = SymbVar.add "memory" 0 SymbVar.empty;
path = Formula.empty;
inputs;
addr_size;
memory = Formula.(mk_ax_var (ax_var "memory" addr_size 8));
nb_input = 0;
nb_load = 0;
nb_store = 0;
nb_let = 0;
nb_op = 0;
nb_constraint = 0;
global_counter = 0;
optim_cst_prop = cst_pro;
optim_rebase = rebase;
aux_optim_rebase = aux_rebase;
optim_rowplus = row_plus;
optim_row = row;
optim_row_k = 150;
optim_eq_prop = eq_prop;
optim_map = SymbVar.empty;
pushed_variable = Basic_types.String.Set.empty;
hybrid_memory = [];
last_constraint = Formula.mk_bl_true;
}
type t = {
mutable formula : formula;
mutable toplevel: bool;
mutable config: Config_piqi.configuration;
mutable analysis: dse_analysis_sig_t;
}
and dse_analysis_sig_t =
< get_taint : unit -> Tainting.tainting_engine;
is_taint_computed : unit -> bool;
get_current_dbacodeaddress : unit -> Dba.address;
get_current_concrete_infos : unit -> Trace_type.trace_concrete_infos list;
concretize_expr_bv : Dba.Expr.t -> ?is_lhs:bool -> t -> Bitvector.t;
concretize_cond : Dba.Expr.t -> t -> bool;
expr_to_smt :
Dba.Expr.t -> ?apply_cs:bool -> t ->
Formula.bv_term * Formula.bl_term list;
compute : int;
solve_predicate :
Formula.bl_term ->
?print_stat:bool -> ?name:string -> ?push:bool -> ?pop:bool -> ?prek:int ->
?pruning:bool -> ?get_model:bool -> t ->
Formula.status * Smt_model.t * float;
exec : Dba_types.Statement.t -> t -> unit
>
let new_env analysis config ?(cst_pro=false) ?(rebase=false) ?(row=false)
?(row_plus=false) ?(eq_prop=false) (addr_size:int) =
let f = empty_formula ~cst_pro ~rebase ~row ~row_plus ~eq_prop addr_size in
{formula=f; toplevel=true; config; analysis=(analysis :> dse_analysis_sig_t)}
|
16867a9a49ad3304c6a1ffefbf88894f19ea8330baba3e853228f635115c04fe | wireapp/wire-server | Env.hs | # LANGUAGE TemplateHaskell #
-- This file is part of the Wire Server implementation.
--
Copyright ( C ) 2022 Wire Swiss GmbH < >
--
-- This program is free software: you can redistribute it and/or modify it under
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
-- later version.
--
-- This program is distributed in the hope that it will be useful, but WITHOUT
-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
-- details.
--
You should have received a copy of the GNU Affero General Public License along
-- with this program. If not, see </>.
module Galley.Env where
import Cassandra
import Control.Lens hiding ((.=))
import Data.ByteString.Conversion (toByteString')
import Data.Id
import Data.Metrics.Middleware
import Data.Misc (Fingerprint, Rsa)
import Data.Range
import qualified Galley.Aws as Aws
import Galley.Options
import qualified Galley.Queue as Q
import Imports
import Network.HTTP.Client
import Network.HTTP.Client.OpenSSL
import OpenSSL.EVP.Digest
import OpenSSL.Session as Ssl
import Ssl.Util
import System.Logger
import Util.Options
import Wire.API.MLS.Credential
import Wire.API.MLS.Keys
import Wire.API.Team.Member
data DeleteItem = TeamItem TeamId UserId (Maybe ConnId)
deriving (Eq, Ord, Show)
-- | Main application environment.
data Env = Env
{ _reqId :: RequestId,
_monitor :: Metrics,
_options :: Opts,
_applog :: Logger,
_manager :: Manager,
_federator :: Maybe Endpoint, -- FUTUREWORK: should we use a better type here? E.g. to avoid fresh connections all the time?
_brig :: Endpoint, -- FUTUREWORK: see _federator
_cstate :: ClientState,
_deleteQueue :: Q.Queue DeleteItem,
_extEnv :: ExtEnv,
_aEnv :: Maybe Aws.Env,
_mlsKeys :: SignaturePurpose -> MLSKeys
}
-- | Environment specific to the communication with external
-- service providers.
data ExtEnv = ExtEnv
{ _extGetManager :: (Manager, [Fingerprint Rsa] -> Ssl.SSL -> IO ())
}
makeLenses ''Env
makeLenses ''ExtEnv
TODO : somewhat duplicates Brig . App.initExtGetManager
initExtEnv :: IO ExtEnv
initExtEnv = do
ctx <- Ssl.context
Ssl.contextSetVerificationMode ctx Ssl.VerifyNone
Ssl.contextAddOption ctx SSL_OP_NO_SSLv2
Ssl.contextAddOption ctx SSL_OP_NO_SSLv3
Ssl.contextAddOption ctx SSL_OP_NO_TLSv1
Ssl.contextSetCiphers ctx rsaCiphers
Ssl.contextSetDefaultVerifyPaths ctx
mgr <-
newManager
(opensslManagerSettings (pure ctx))
{ managerResponseTimeout = responseTimeoutMicro 10000000,
managerConnCount = 100
}
Just sha <- getDigestByName "SHA256"
pure $ ExtEnv (mgr, mkVerify sha)
where
mkVerify sha fprs =
let pinset = map toByteString' fprs
in verifyRsaFingerprint sha pinset
reqIdMsg :: RequestId -> Msg -> Msg
reqIdMsg = ("request" .=) . unRequestId
# INLINE reqIdMsg #
currentFanoutLimit :: Opts -> Range 1 HardTruncationLimit Int32
currentFanoutLimit o = do
let optFanoutLimit = fromIntegral . fromRange $ fromMaybe defFanoutLimit (o ^. (optSettings . setMaxFanoutSize))
let maxTeamSize = fromIntegral (o ^. (optSettings . setMaxTeamSize))
unsafeRange (min maxTeamSize optFanoutLimit)
| null | https://raw.githubusercontent.com/wireapp/wire-server/f72b09756102a5c66169cca0343aa7b7e6e54491/services/galley/src/Galley/Env.hs | haskell | This file is part of the Wire Server implementation.
This program is free software: you can redistribute it and/or modify it under
later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
details.
with this program. If not, see </>.
| Main application environment.
FUTUREWORK: should we use a better type here? E.g. to avoid fresh connections all the time?
FUTUREWORK: see _federator
| Environment specific to the communication with external
service providers. | # LANGUAGE TemplateHaskell #
Copyright ( C ) 2022 Wire Swiss GmbH < >
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
You should have received a copy of the GNU Affero General Public License along
module Galley.Env where
import Cassandra
import Control.Lens hiding ((.=))
import Data.ByteString.Conversion (toByteString')
import Data.Id
import Data.Metrics.Middleware
import Data.Misc (Fingerprint, Rsa)
import Data.Range
import qualified Galley.Aws as Aws
import Galley.Options
import qualified Galley.Queue as Q
import Imports
import Network.HTTP.Client
import Network.HTTP.Client.OpenSSL
import OpenSSL.EVP.Digest
import OpenSSL.Session as Ssl
import Ssl.Util
import System.Logger
import Util.Options
import Wire.API.MLS.Credential
import Wire.API.MLS.Keys
import Wire.API.Team.Member
data DeleteItem = TeamItem TeamId UserId (Maybe ConnId)
deriving (Eq, Ord, Show)
data Env = Env
{ _reqId :: RequestId,
_monitor :: Metrics,
_options :: Opts,
_applog :: Logger,
_manager :: Manager,
_cstate :: ClientState,
_deleteQueue :: Q.Queue DeleteItem,
_extEnv :: ExtEnv,
_aEnv :: Maybe Aws.Env,
_mlsKeys :: SignaturePurpose -> MLSKeys
}
data ExtEnv = ExtEnv
{ _extGetManager :: (Manager, [Fingerprint Rsa] -> Ssl.SSL -> IO ())
}
makeLenses ''Env
makeLenses ''ExtEnv
TODO : somewhat duplicates Brig . App.initExtGetManager
initExtEnv :: IO ExtEnv
initExtEnv = do
ctx <- Ssl.context
Ssl.contextSetVerificationMode ctx Ssl.VerifyNone
Ssl.contextAddOption ctx SSL_OP_NO_SSLv2
Ssl.contextAddOption ctx SSL_OP_NO_SSLv3
Ssl.contextAddOption ctx SSL_OP_NO_TLSv1
Ssl.contextSetCiphers ctx rsaCiphers
Ssl.contextSetDefaultVerifyPaths ctx
mgr <-
newManager
(opensslManagerSettings (pure ctx))
{ managerResponseTimeout = responseTimeoutMicro 10000000,
managerConnCount = 100
}
Just sha <- getDigestByName "SHA256"
pure $ ExtEnv (mgr, mkVerify sha)
where
mkVerify sha fprs =
let pinset = map toByteString' fprs
in verifyRsaFingerprint sha pinset
reqIdMsg :: RequestId -> Msg -> Msg
reqIdMsg = ("request" .=) . unRequestId
# INLINE reqIdMsg #
currentFanoutLimit :: Opts -> Range 1 HardTruncationLimit Int32
currentFanoutLimit o = do
let optFanoutLimit = fromIntegral . fromRange $ fromMaybe defFanoutLimit (o ^. (optSettings . setMaxFanoutSize))
let maxTeamSize = fromIntegral (o ^. (optSettings . setMaxTeamSize))
unsafeRange (min maxTeamSize optFanoutLimit)
|
3a326d1d978e6cde866e80531223f6d3ec38c2a078e28554ba7e4ae3e1927d05 | s3rius/brainbreak | AST.hs | module Compiler.Backends.C.AST where
data CType
= CTypeInt
| CTypeMap CType CType
deriving (Eq, Show)
data CVar
= CVar String CType
| MapElement CVar CVar
deriving (Eq, Show)
data CConst
= CConstString String
| CConstInt Int
| CEmptyMap CType CType
deriving (Eq, Show)
data CValue
= CValue CVar
| CValueConst CConst
deriving (Show, Eq)
data COperation
= CPrint CVar
| CRead CVar
| CDeclare CVar
| CSet CVar CValue
| CAdd CVar CVar CValue
| CDecrease CVar CVar CValue
| CLoop CVar [COperation]
deriving (Eq, Show)
data CHeader
= CInclude String
| CUsingNamespace String
deriving (Eq, Show)
data CModule =
CModule [CHeader] [COperation]
deriving (Eq, Show)
| null | https://raw.githubusercontent.com/s3rius/brainbreak/bee6439ce4f30318980c3d19cc8cfdc3b8eefbce/src/Compiler/Backends/C/AST.hs | haskell | module Compiler.Backends.C.AST where
data CType
= CTypeInt
| CTypeMap CType CType
deriving (Eq, Show)
data CVar
= CVar String CType
| MapElement CVar CVar
deriving (Eq, Show)
data CConst
= CConstString String
| CConstInt Int
| CEmptyMap CType CType
deriving (Eq, Show)
data CValue
= CValue CVar
| CValueConst CConst
deriving (Show, Eq)
data COperation
= CPrint CVar
| CRead CVar
| CDeclare CVar
| CSet CVar CValue
| CAdd CVar CVar CValue
| CDecrease CVar CVar CValue
| CLoop CVar [COperation]
deriving (Eq, Show)
data CHeader
= CInclude String
| CUsingNamespace String
deriving (Eq, Show)
data CModule =
CModule [CHeader] [COperation]
deriving (Eq, Show)
| |
45bf53a5010a9f83864679f858d118664e2198a4067b5058e8a29775b4ad78fe | bennn/dissertation | world.rkt | #lang typed/racket
(require "base-types.rkt")
(require require-typed-check)
(require/typed/check "bset.rkt"
[blocks-union (-> BSet BSet BSet)]
[blocks-max-x (-> BSet Real)]
[blocks-min-x (-> BSet Real)]
[blocks-max-y (-> BSet Real)])
(require/typed/check "tetras.rkt"
[tetra-move (-> Real Real Tetra Tetra)]
[tetra-rotate-ccw (-> Tetra Tetra)]
[tetra-rotate-cw (-> Tetra Tetra)]
[tetra-overlaps-blocks? (-> Tetra BSet Boolean)]
[tetra-change-color (-> Tetra Color Tetra)])
(require/typed/check "aux.rkt"
[list-pick-random (-> (Listof Tetra) Tetra)]
[neg-1 Negative-Fixnum]
[tetras (Listof Tetra)])
(require/typed/check "elim.rkt"
[eliminate-full-rows (-> BSet BSet)])
(require/typed/check "consts.rkt"
[board-height Integer]
[board-width Integer])
(provide world-key-move
next-world
ghost-blocks)
Add the current tetra 's blocks onto the world 's block list ,
;; and create a new tetra.
(: touchdown (-> World World))
(define (touchdown w)
(world (list-pick-random tetras)
(eliminate-full-rows (blocks-union (tetra-blocks (world-tetra w))
(world-blocks w)))))
;; Take the current tetra and move it down until it lands.
(: world-jump-down (-> World World))
(define (world-jump-down w)
(cond [(landed? w) w]
[else (world-jump-down (world (tetra-move 0 1 (world-tetra w))
(world-blocks w)))]))
;; Has the current tetra landed on blocks?
I.e. , if we move the tetra down 1 , will it touch any existing blocks ?
(: landed-on-blocks? (-> World Boolean))
(define (landed-on-blocks? w)
(tetra-overlaps-blocks? (tetra-move 0 1 (world-tetra w))
(world-blocks w)))
;; Has the current tetra landed on the floor?
(: landed-on-floor? (-> World Boolean))
(define (landed-on-floor? w)
(= (blocks-max-y (tetra-blocks (world-tetra w)))
(sub1 board-height)))
;; Has the current tetra landed?
(: landed? (-> World Boolean))
(define (landed? w)
(or (landed-on-blocks? w)
(landed-on-floor? w)))
;; Step the world, either touchdown or move the tetra down on step.
(: next-world (-> World World))
(define (next-world w)
(cond [(landed? w) (touchdown w)]
[else (world (tetra-move 0 1 (world-tetra w))
(world-blocks w))]))
;; Make a world with the new tetra *IF* if doesn't lie on top of some other
;; block or lie off the board. Otherwise, no change.
(: try-new-tetra (-> World Tetra World))
(define (try-new-tetra w new-tetra)
(cond [(or (< (blocks-min-x (tetra-blocks new-tetra)) 0)
(>= (blocks-max-x (tetra-blocks new-tetra)) board-width)
(tetra-overlaps-blocks? new-tetra (world-blocks w)))
w]
[else (world new-tetra (world-blocks w))]))
Move the Tetra by the given X & Y displacement , but only if you can .
;; Otherwise stay put.
(: world-move (-> Real Real World World))
(define (world-move dx dy w)
(try-new-tetra w (tetra-move dx dy (world-tetra w))))
Rotate the Tetra 90 degrees counterclockwise , but only if you can .
;; Otherwise stay put.
(: world-rotate-ccw (-> World World))
(define (world-rotate-ccw w)
(try-new-tetra w (tetra-rotate-ccw (world-tetra w))))
Rotate the Tetra 90 degrees clockwise , but only if you can .
;; Otherwise stay put.
(: world-rotate-cw (-> World World))
(define (world-rotate-cw w)
(try-new-tetra w (tetra-rotate-cw (world-tetra w))))
;; Gray blocks representing where the current tetra would land.
(: ghost-blocks (-> World BSet))
(define (ghost-blocks w)
(tetra-blocks (tetra-change-color (world-tetra (world-jump-down w))
'gray)))
;; Move the world according to the given key event.
(: world-key-move (-> World String World))
(define (world-key-move w k)
(cond [(equal? k "left") (world-move neg-1 0 w)]
[(equal? k "right") (world-move 1 0 w)]
[(equal? k "down") (world-jump-down w)]
[(equal? k "a") (world-rotate-ccw w)]
[(equal? k "s") (world-rotate-cw w)]
[else w]))
| null | https://raw.githubusercontent.com/bennn/dissertation/779bfe6f8fee19092849b7e2cfc476df33e9357b/dissertation/scrbl/jfp-2019/benchmarks/tetris/typed/world.rkt | racket | and create a new tetra.
Take the current tetra and move it down until it lands.
Has the current tetra landed on blocks?
Has the current tetra landed on the floor?
Has the current tetra landed?
Step the world, either touchdown or move the tetra down on step.
Make a world with the new tetra *IF* if doesn't lie on top of some other
block or lie off the board. Otherwise, no change.
Otherwise stay put.
Otherwise stay put.
Otherwise stay put.
Gray blocks representing where the current tetra would land.
Move the world according to the given key event. | #lang typed/racket
(require "base-types.rkt")
(require require-typed-check)
(require/typed/check "bset.rkt"
[blocks-union (-> BSet BSet BSet)]
[blocks-max-x (-> BSet Real)]
[blocks-min-x (-> BSet Real)]
[blocks-max-y (-> BSet Real)])
(require/typed/check "tetras.rkt"
[tetra-move (-> Real Real Tetra Tetra)]
[tetra-rotate-ccw (-> Tetra Tetra)]
[tetra-rotate-cw (-> Tetra Tetra)]
[tetra-overlaps-blocks? (-> Tetra BSet Boolean)]
[tetra-change-color (-> Tetra Color Tetra)])
(require/typed/check "aux.rkt"
[list-pick-random (-> (Listof Tetra) Tetra)]
[neg-1 Negative-Fixnum]
[tetras (Listof Tetra)])
(require/typed/check "elim.rkt"
[eliminate-full-rows (-> BSet BSet)])
(require/typed/check "consts.rkt"
[board-height Integer]
[board-width Integer])
(provide world-key-move
next-world
ghost-blocks)
Add the current tetra 's blocks onto the world 's block list ,
(: touchdown (-> World World))
(define (touchdown w)
(world (list-pick-random tetras)
(eliminate-full-rows (blocks-union (tetra-blocks (world-tetra w))
(world-blocks w)))))
(: world-jump-down (-> World World))
(define (world-jump-down w)
(cond [(landed? w) w]
[else (world-jump-down (world (tetra-move 0 1 (world-tetra w))
(world-blocks w)))]))
I.e. , if we move the tetra down 1 , will it touch any existing blocks ?
(: landed-on-blocks? (-> World Boolean))
(define (landed-on-blocks? w)
(tetra-overlaps-blocks? (tetra-move 0 1 (world-tetra w))
(world-blocks w)))
(: landed-on-floor? (-> World Boolean))
(define (landed-on-floor? w)
(= (blocks-max-y (tetra-blocks (world-tetra w)))
(sub1 board-height)))
(: landed? (-> World Boolean))
(define (landed? w)
(or (landed-on-blocks? w)
(landed-on-floor? w)))
(: next-world (-> World World))
(define (next-world w)
(cond [(landed? w) (touchdown w)]
[else (world (tetra-move 0 1 (world-tetra w))
(world-blocks w))]))
(: try-new-tetra (-> World Tetra World))
(define (try-new-tetra w new-tetra)
(cond [(or (< (blocks-min-x (tetra-blocks new-tetra)) 0)
(>= (blocks-max-x (tetra-blocks new-tetra)) board-width)
(tetra-overlaps-blocks? new-tetra (world-blocks w)))
w]
[else (world new-tetra (world-blocks w))]))
Move the Tetra by the given X & Y displacement , but only if you can .
(: world-move (-> Real Real World World))
(define (world-move dx dy w)
(try-new-tetra w (tetra-move dx dy (world-tetra w))))
Rotate the Tetra 90 degrees counterclockwise , but only if you can .
(: world-rotate-ccw (-> World World))
(define (world-rotate-ccw w)
(try-new-tetra w (tetra-rotate-ccw (world-tetra w))))
Rotate the Tetra 90 degrees clockwise , but only if you can .
(: world-rotate-cw (-> World World))
(define (world-rotate-cw w)
(try-new-tetra w (tetra-rotate-cw (world-tetra w))))
(: ghost-blocks (-> World BSet))
(define (ghost-blocks w)
(tetra-blocks (tetra-change-color (world-tetra (world-jump-down w))
'gray)))
(: world-key-move (-> World String World))
(define (world-key-move w k)
(cond [(equal? k "left") (world-move neg-1 0 w)]
[(equal? k "right") (world-move 1 0 w)]
[(equal? k "down") (world-jump-down w)]
[(equal? k "a") (world-rotate-ccw w)]
[(equal? k "s") (world-rotate-cw w)]
[else w]))
|
781b0ab20e15bbab2aa4d5d4d8872a8b8336ea59f85e3e536b13f6adeb4e7340 | pascal-knodel/haskell-craft | E'12'42.hs | --
--
--
------------------
Exercise 12.42 .
------------------
--
--
--
module E'12'42 where
| null | https://raw.githubusercontent.com/pascal-knodel/haskell-craft/c03d6eb857abd8b4785b6de075b094ec3653c968/_/links/E'12'42.hs | haskell |
----------------
----------------
| Exercise 12.42 .
module E'12'42 where
|
9d3d013de60db6df93c5e31be06fffa75c7b4727a710f1d995f3148a01e862cc | artyom-poptsov/guile-png | PLTE.scm | ;;; PLTE.scm -- PLTE chunk.
Copyright ( C ) 2022 < >
;;
;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;; (at your option) any later version.
;;
;; The program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;;
You should have received a copy of the GNU General Public License
;; along with the program. If not, see </>.
;;; Commentary:
PNG image palette ( PLTE ) chunk . The chunk contains from 1 to 256 palette
entries , , each a three - byte series of the form :
;;
Red : 1 byte ( 0 = black , 255 = red )
Green : 1 byte ( 0 = black , 255 = green )
Blue : 1 byte ( 0 = black , 255 = blue )
;;; Code:
(define-module (png core chunk PLTE)
#:use-module (ice-9 format)
#:use-module (rnrs bytevectors)
#:use-module (oop goops)
#:use-module (png core common)
#:use-module (png core chunk)
#:export (<png-chunk:PLTE>
png-chunk:PLTE-palette-entries-count
png-chunk:PLTE-palette-entries
png-chunk:PLTE-palette-entry
palette:red
palette:green
palette:blue
vector->PLTE-palette-entries
png-chunk-decode-PLTE))
(define-class <png-chunk:PLTE> (<png-chunk>)
This field contains a palette entries , each a three - byte vector of the form :
;;
;; #vu8(red green blue)
;;
;; <vector> of <bytevector>
(palette-entries
#:init-thunk (lambda () (make-vector 0))
#:init-keyword #:palette-entries
#:getter png-chunk:PLTE-palette-entries))
(define-method (initialize (chunk <png-chunk:PLTE>) initargs)
(next-method)
(slot-set! chunk 'type 'PLTE))
(define-method (%display (chunk <png-chunk:PLTE>) (port <port>))
(let ((type (png-chunk-type-info chunk)))
(format port "#<png-chunk:PLTE ~a (~a entr~:@p) ~a>"
(list-ref type 2)
(png-chunk:PLTE-palette-entries-count chunk)
(object-address/hex-string chunk))))
(define-method (display (chunk <png-chunk:PLTE>) (port <port>))
(%display chunk port))
(define-method (write (chunk <png-chunk:PLTE>) (port <port>))
(%display chunk port))
(define-method (png-chunk:PLTE-palette-entry (chunk <png-chunk:PLTE>)
(index <number>))
(vector-ref (png-chunk:PLTE-palette-entries chunk) index))
(define-method (png-chunk:PLTE-palette-entries-count (chunk <png-chunk:PLTE>))
(vector-length (png-chunk:PLTE-palette-entries chunk)))
(define-method (palette:red (plte <bytevector>))
(bytevector-u8-ref plte 0))
(define-method (palette:green (plte <bytevector>))
(bytevector-u8-ref plte 1))
(define-method (palette:blue (plte <bytevector>))
(bytevector-u8-ref plte 2))
(define-method (vector->PLTE-palette-entries (vec <bytevector>))
"Return a vector of palette entries, each of which is represented as a
three-byte bytevector of the following format:
Red: 1 byte (0 = black, 255 = red)
Green: 1 byte (0 = black, 255 = green)
Blue: 1 byte (0 = black, 255 = blue)
"
(let ((vlen (bytevector-length vec)))
(let loop ((offset 0)
(result '()))
(if (< offset vlen)
(loop (+ offset 3)
(cons (u8-list->bytevector
(list (bytevector-u8-ref vec (+ 0 offset))
(bytevector-u8-ref vec (+ 1 offset))
(bytevector-u8-ref vec (+ 2 offset))))
result))
(list->vector (reverse result))))))
(define-method (PLTE-palette-entries->vector (vec <vector>))
"Convert a vector VEC to a PLTE chunk data."
(let* ((data-length (vector-length vec))
(result (make-bytevector (* data-length 3) 0)))
(let loop ((index 0))
(if (< index data-length)
(let* ((v (vector-ref vec index))
(r (bytevector-u8-ref v 0))
(g (bytevector-u8-ref v 1))
(b (bytevector-u8-ref v 2)))
(bytevector-u8-set! result (+ (* index 3) 0) r)
(bytevector-u8-set! result (+ (* index 3) 1) g)
(bytevector-u8-set! result (+ (* index 3) 2) b)
(loop (+ index 1)))
result))))
(define-method (png-chunk-decode-PLTE (chunk <png-chunk>))
(let ((length (png-chunk-length chunk))
(type (png-chunk-type chunk))
(data (png-chunk-data chunk))
(crc (png-chunk-crc chunk)))
(unless (zero? (remainder (bytevector-length data) 3))
(error "Invalid PLTE chunk: data length not divisible by 3" data))
(make <png-chunk:PLTE>
#:length length
#:type type
#:data data
#:crc crc
#:palette-entries (vector->PLTE-palette-entries data))))
(define-method (png-chunk-encode (chunk <png-chunk:PLTE>))
(let* ((entries (png-chunk:PLTE-palette-entries chunk))
(count (png-chunk:PLTE-palette-entries-count chunk))
(length (* count 3))
(encoded-chunk (make <png-chunk>
#:type 'PLTE
#:length length
#:data (PLTE-palette-entries->vector entries))))
(png-chunk-crc-update! encoded-chunk)
encoded-chunk))
(define-method (png-chunk-clone (chunk <png-chunk:PLTE>))
(make <png-chunk:PLTE>
#:type (png-chunk-type chunk)
#:data (bytevector-copy (png-chunk-data chunk))
#:length (png-chunk-length chunk)
#:crc (png-chunk-crc chunk)
#:palette-entries (bytevector-copy (png-chunk:PLTE-palette-entries))))
;;; PLTE.scm ends here.
| null | https://raw.githubusercontent.com/artyom-poptsov/guile-png/04368784524d536c07e842d07199816c2b645ef7/modules/png/core/chunk/PLTE.scm | scheme | PLTE.scm -- PLTE chunk.
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
The program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with the program. If not, see </>.
Commentary:
Code:
#vu8(red green blue)
<vector> of <bytevector>
PLTE.scm ends here. |
Copyright ( C ) 2022 < >
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
PNG image palette ( PLTE ) chunk . The chunk contains from 1 to 256 palette
entries , , each a three - byte series of the form :
Red : 1 byte ( 0 = black , 255 = red )
Green : 1 byte ( 0 = black , 255 = green )
Blue : 1 byte ( 0 = black , 255 = blue )
(define-module (png core chunk PLTE)
#:use-module (ice-9 format)
#:use-module (rnrs bytevectors)
#:use-module (oop goops)
#:use-module (png core common)
#:use-module (png core chunk)
#:export (<png-chunk:PLTE>
png-chunk:PLTE-palette-entries-count
png-chunk:PLTE-palette-entries
png-chunk:PLTE-palette-entry
palette:red
palette:green
palette:blue
vector->PLTE-palette-entries
png-chunk-decode-PLTE))
(define-class <png-chunk:PLTE> (<png-chunk>)
This field contains a palette entries , each a three - byte vector of the form :
(palette-entries
#:init-thunk (lambda () (make-vector 0))
#:init-keyword #:palette-entries
#:getter png-chunk:PLTE-palette-entries))
(define-method (initialize (chunk <png-chunk:PLTE>) initargs)
(next-method)
(slot-set! chunk 'type 'PLTE))
(define-method (%display (chunk <png-chunk:PLTE>) (port <port>))
(let ((type (png-chunk-type-info chunk)))
(format port "#<png-chunk:PLTE ~a (~a entr~:@p) ~a>"
(list-ref type 2)
(png-chunk:PLTE-palette-entries-count chunk)
(object-address/hex-string chunk))))
(define-method (display (chunk <png-chunk:PLTE>) (port <port>))
(%display chunk port))
(define-method (write (chunk <png-chunk:PLTE>) (port <port>))
(%display chunk port))
(define-method (png-chunk:PLTE-palette-entry (chunk <png-chunk:PLTE>)
(index <number>))
(vector-ref (png-chunk:PLTE-palette-entries chunk) index))
(define-method (png-chunk:PLTE-palette-entries-count (chunk <png-chunk:PLTE>))
(vector-length (png-chunk:PLTE-palette-entries chunk)))
(define-method (palette:red (plte <bytevector>))
(bytevector-u8-ref plte 0))
(define-method (palette:green (plte <bytevector>))
(bytevector-u8-ref plte 1))
(define-method (palette:blue (plte <bytevector>))
(bytevector-u8-ref plte 2))
(define-method (vector->PLTE-palette-entries (vec <bytevector>))
"Return a vector of palette entries, each of which is represented as a
three-byte bytevector of the following format:
Red: 1 byte (0 = black, 255 = red)
Green: 1 byte (0 = black, 255 = green)
Blue: 1 byte (0 = black, 255 = blue)
"
(let ((vlen (bytevector-length vec)))
(let loop ((offset 0)
(result '()))
(if (< offset vlen)
(loop (+ offset 3)
(cons (u8-list->bytevector
(list (bytevector-u8-ref vec (+ 0 offset))
(bytevector-u8-ref vec (+ 1 offset))
(bytevector-u8-ref vec (+ 2 offset))))
result))
(list->vector (reverse result))))))
(define-method (PLTE-palette-entries->vector (vec <vector>))
"Convert a vector VEC to a PLTE chunk data."
(let* ((data-length (vector-length vec))
(result (make-bytevector (* data-length 3) 0)))
(let loop ((index 0))
(if (< index data-length)
(let* ((v (vector-ref vec index))
(r (bytevector-u8-ref v 0))
(g (bytevector-u8-ref v 1))
(b (bytevector-u8-ref v 2)))
(bytevector-u8-set! result (+ (* index 3) 0) r)
(bytevector-u8-set! result (+ (* index 3) 1) g)
(bytevector-u8-set! result (+ (* index 3) 2) b)
(loop (+ index 1)))
result))))
(define-method (png-chunk-decode-PLTE (chunk <png-chunk>))
(let ((length (png-chunk-length chunk))
(type (png-chunk-type chunk))
(data (png-chunk-data chunk))
(crc (png-chunk-crc chunk)))
(unless (zero? (remainder (bytevector-length data) 3))
(error "Invalid PLTE chunk: data length not divisible by 3" data))
(make <png-chunk:PLTE>
#:length length
#:type type
#:data data
#:crc crc
#:palette-entries (vector->PLTE-palette-entries data))))
(define-method (png-chunk-encode (chunk <png-chunk:PLTE>))
(let* ((entries (png-chunk:PLTE-palette-entries chunk))
(count (png-chunk:PLTE-palette-entries-count chunk))
(length (* count 3))
(encoded-chunk (make <png-chunk>
#:type 'PLTE
#:length length
#:data (PLTE-palette-entries->vector entries))))
(png-chunk-crc-update! encoded-chunk)
encoded-chunk))
(define-method (png-chunk-clone (chunk <png-chunk:PLTE>))
(make <png-chunk:PLTE>
#:type (png-chunk-type chunk)
#:data (bytevector-copy (png-chunk-data chunk))
#:length (png-chunk-length chunk)
#:crc (png-chunk-crc chunk)
#:palette-entries (bytevector-copy (png-chunk:PLTE-palette-entries))))
|
88e687cd9c2e7a9885fd2de51283304f4d98fb8fe3830d7b929328cb2947d085 | GumTreeDiff/cgum | flag_cocci.ml |
* Copyright 2014 , INRIA
*
* This file is part of Cgen . Much of it comes from Coccinelle , which is
* also available under the GPLv2 license
*
* Cgen is free software : you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation , according to version 2 of the License .
*
* Cgen is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with Cgen . If not , see < / > .
*
* The authors reserve the right to distribute this or future versions of
* Cgen under other licenses .
* Copyright 2014, INRIA
* Julia Lawall
* This file is part of Cgen. Much of it comes from Coccinelle, which is
* also available under the GPLv2 license
*
* Cgen is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, according to version 2 of the License.
*
* Cgen is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Cgen. If not, see </>.
*
* The authors reserve the right to distribute this or future versions of
* Cgen under other licenses.
*)
# 0 "./flag_cocci.ml"
* Copyright 2013 , * Suman , ,
* This file is part of .
*
* Cgum is free software : you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation , according to version 2 of the License .
*
* is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with . If not , see < / > .
*
* The authors reserve the right to distribute this or future versions of
* Cgum under other licenses .
* Copyright 2013, Inria
* Suman Saha, Julia Lawall, Gilles Muller
* This file is part of Cgum.
*
* Cgum is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, according to version 2 of the License.
*
* Cgum is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Cgum. If not, see </>.
*
* The authors reserve the right to distribute this or future versions of
* Cgum under other licenses.
*)
(* the inputs *)
let show_c = ref false
let show_cocci = ref false
(* the output *)
let show_diff = ref true
(* the derived inputs *)
let show_flow = ref false
let show_before_fixed_flow = ref false
let show_ctl_tex = ref false
let show_ctl_text = ref false
let inline_let_ctl = ref false
let show_mcodekind_in_ctl = ref false
(* the "underived" outputs *)
let show_binding_in_out = ref false
let show_dependencies = ref false
let verbose_cocci = ref true
let windows = ref false
let popl = ref false
let ifdef_to_if = ref true(*false*)
type include_options =
I_UNSPECIFIED | I_NO_INCLUDES | I_NORMAL_INCLUDES | I_ALL_INCLUDES
let include_options = ref I_UNSPECIFIED
let include_path = ref (None : string option)
(* if true then when have a #include "../../xx.h", we look also for xx.h in
* current directory. This is because of how works extract_c_and_res
*)
let relax_include_path = ref false
let timeout = ref (None : int option)
let worth_trying_opt = ref true
| null | https://raw.githubusercontent.com/GumTreeDiff/cgum/8521aa80fcf4873a19e60ce8c846c886aaefb41b/flag_cocci.ml | ocaml | the inputs
the output
the derived inputs
the "underived" outputs
false
if true then when have a #include "../../xx.h", we look also for xx.h in
* current directory. This is because of how works extract_c_and_res
|
* Copyright 2014 , INRIA
*
* This file is part of Cgen . Much of it comes from Coccinelle , which is
* also available under the GPLv2 license
*
* Cgen is free software : you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation , according to version 2 of the License .
*
* Cgen is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with Cgen . If not , see < / > .
*
* The authors reserve the right to distribute this or future versions of
* Cgen under other licenses .
* Copyright 2014, INRIA
* Julia Lawall
* This file is part of Cgen. Much of it comes from Coccinelle, which is
* also available under the GPLv2 license
*
* Cgen is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, according to version 2 of the License.
*
* Cgen is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Cgen. If not, see </>.
*
* The authors reserve the right to distribute this or future versions of
* Cgen under other licenses.
*)
# 0 "./flag_cocci.ml"
* Copyright 2013 , * Suman , ,
* This file is part of .
*
* Cgum is free software : you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation , according to version 2 of the License .
*
* is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with . If not , see < / > .
*
* The authors reserve the right to distribute this or future versions of
* Cgum under other licenses .
* Copyright 2013, Inria
* Suman Saha, Julia Lawall, Gilles Muller
* This file is part of Cgum.
*
* Cgum is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, according to version 2 of the License.
*
* Cgum is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Cgum. If not, see </>.
*
* The authors reserve the right to distribute this or future versions of
* Cgum under other licenses.
*)
let show_c = ref false
let show_cocci = ref false
let show_diff = ref true
let show_flow = ref false
let show_before_fixed_flow = ref false
let show_ctl_tex = ref false
let show_ctl_text = ref false
let inline_let_ctl = ref false
let show_mcodekind_in_ctl = ref false
let show_binding_in_out = ref false
let show_dependencies = ref false
let verbose_cocci = ref true
let windows = ref false
let popl = ref false
type include_options =
I_UNSPECIFIED | I_NO_INCLUDES | I_NORMAL_INCLUDES | I_ALL_INCLUDES
let include_options = ref I_UNSPECIFIED
let include_path = ref (None : string option)
let relax_include_path = ref false
let timeout = ref (None : int option)
let worth_trying_opt = ref true
|
7d90a2f0429baeec8d73d646fd87f10d930ce57d680102d289558d465f432a51 | FranklinChen/hugs98-plus-Sep2006 | IO.hs | # OPTIONS_GHC -fno - implicit - prelude #
-----------------------------------------------------------------------------
-- |
-- Module : System.IO
Copyright : ( c ) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer :
-- Stability : stable
-- Portability : portable
--
The standard IO library .
--
-----------------------------------------------------------------------------
module System.IO (
* The IO monad
instance MonadFix
fixIO, -- :: (a -> IO a) -> IO a
-- * Files and handles
FilePath, -- :: String
Handle, -- abstract, instance of: Eq, Show.
-- ** Standard handles
-- | Three handles are allocated during program initialisation,
-- and are initially open.
stdin, stdout, stderr, -- :: Handle
-- * Opening and closing files
-- ** Opening files
openFile, -- :: FilePath -> IOMode -> IO Handle
IOMode(ReadMode,WriteMode,AppendMode,ReadWriteMode),
-- ** Closing files
hClose, -- :: Handle -> IO ()
-- ** Special cases
| These functions are also exported by the " Prelude " .
readFile, -- :: FilePath -> IO String
writeFile, -- :: FilePath -> String -> IO ()
appendFile, -- :: FilePath -> String -> IO ()
-- ** File locking
-- $locking
-- * Operations on handles
-- ** Determining and changing the size of a file
hFileSize, -- :: Handle -> IO Integer
#ifdef __GLASGOW_HASKELL__
hSetFileSize, -- :: Handle -> Integer -> IO ()
#endif
-- ** Detecting the end of input
hIsEOF, -- :: Handle -> IO Bool
isEOF, -- :: IO Bool
-- ** Buffering operations
BufferMode(NoBuffering,LineBuffering,BlockBuffering),
: : Handle - > BufferMode - > IO ( )
: : Handle - > IO BufferMode
hFlush, -- :: Handle -> IO ()
-- ** Repositioning handles
hGetPosn, -- :: Handle -> IO HandlePosn
hSetPosn, -- :: HandlePosn -> IO ()
HandlePosn, -- abstract, instance of: Eq, Show.
hSeek, -- :: Handle -> SeekMode -> Integer -> IO ()
SeekMode(AbsoluteSeek,RelativeSeek,SeekFromEnd),
#if !defined(__NHC__)
hTell, -- :: Handle -> IO Integer
#endif
-- ** Handle properties
hIsOpen, hIsClosed, -- :: Handle -> IO Bool
hIsReadable, hIsWritable, -- :: Handle -> IO Bool
hIsSeekable, -- :: Handle -> IO Bool
-- ** Terminal operations
#if !defined(__NHC__)
hIsTerminalDevice, -- :: Handle -> IO Bool
hSetEcho, -- :: Handle -> Bool -> IO ()
hGetEcho, -- :: Handle -> IO Bool
#endif
-- ** Showing handle state
#ifdef __GLASGOW_HASKELL__
hShow, -- :: Handle -> IO String
#endif
-- * Text input and output
-- ** Text input
hWaitForInput, -- :: Handle -> Int -> IO Bool
hReady, -- :: Handle -> IO Bool
: : Handle - > IO
hGetLine, -- :: Handle -> IO [Char]
: : Handle - > IO
hGetContents, -- :: Handle -> IO [Char]
-- ** Text output
hPutChar, -- :: Handle -> Char -> IO ()
: : Handle - > [ IO ( )
: : Handle - > [ IO ( )
hPrint, -- :: Show a => Handle -> a -> IO ()
-- ** Special cases for standard input and output
| These functions are also exported by the " Prelude " .
interact, -- :: (String -> String) -> IO ()
: : IO ( )
putStr, -- :: String -> IO ()
putStrLn, -- :: String -> IO ()
print, -- :: Show a => a -> IO ()
: : IO
getLine, -- :: IO String
getContents, -- :: IO String
readIO, -- :: Read a => String -> IO a
readLn, -- :: Read a => IO a
-- * Binary input and output
openBinaryFile, -- :: FilePath -> IOMode -> IO Handle
hSetBinaryMode, -- :: Handle -> Bool -> IO ()
#if !defined(__NHC__)
hPutBuf, -- :: Handle -> Ptr a -> Int -> IO ()
hGetBuf, -- :: Handle -> Ptr a -> Int -> IO Int
#endif
#if !defined(__NHC__) && !defined(__HUGS__)
hPutBufNonBlocking, -- :: Handle -> Ptr a -> Int -> IO Int
hGetBufNonBlocking, -- :: Handle -> Ptr a -> Int -> IO Int
#endif
-- * Temporary files
#ifdef __GLASGOW_HASKELL__
openTempFile,
openBinaryTempFile,
#endif
) where
#ifdef __GLASGOW_HASKELL__
import GHC.Base
Together these four Prelude modules define
all the stuff exported by IO for the GHC version
import GHC.IO
import GHC.Exception
import GHC.Num
import GHC.Read
import GHC.Show
#endif
#ifdef __HUGS__
import Hugs.IO
import Hugs.IOExts
import Hugs.IORef
import Hugs.Prelude ( throw, Exception(NonTermination) )
import System.IO.Unsafe ( unsafeInterleaveIO )
#endif
#ifdef __NHC__
import IO
( Handle ()
, HandlePosn ()
, IOMode (ReadMode,WriteMode,AppendMode,ReadWriteMode)
, BufferMode (NoBuffering,LineBuffering,BlockBuffering)
, SeekMode (AbsoluteSeek,RelativeSeek,SeekFromEnd)
, stdin, stdout, stderr
, openFile -- :: FilePath -> IOMode -> IO Handle
, hClose -- :: Handle -> IO ()
, hFileSize -- :: Handle -> IO Integer
, hIsEOF -- :: Handle -> IO Bool
, isEOF -- :: IO Bool
: : Handle - > BufferMode - > IO ( )
: : Handle - > IO BufferMode
, hFlush -- :: Handle -> IO ()
, hGetPosn -- :: Handle -> IO HandlePosn
, hSetPosn -- :: HandlePosn -> IO ()
, hSeek -- :: Handle -> SeekMode -> Integer -> IO ()
, hWaitForInput -- :: Handle -> Int -> IO Bool
: : Handle - > IO
, hGetLine -- :: Handle -> IO [Char]
: : Handle - > IO
, hGetContents -- :: Handle -> IO [Char]
, hPutChar -- :: Handle -> Char -> IO ()
: : Handle - > [ IO ( )
: : Handle - > [ IO ( )
: : Handle - > [ IO ( )
: : Handle - > [ IO ( )
, hIsOpen, hIsClosed -- :: Handle -> IO Bool
, hIsReadable, hIsWritable -- :: Handle -> IO Bool
, hIsSeekable -- :: Handle -> IO Bool
, IO ()
, FilePath -- :: String
)
import NHC.IOExtras (fixIO)
#endif
-- -----------------------------------------------------------------------------
-- Standard IO
#ifdef __GLASGOW_HASKELL__
-- | Write a character to the standard output device
-- (same as 'hPutChar' 'stdout').
putChar :: Char -> IO ()
putChar c = hPutChar stdout c
-- | Write a string to the standard output device
-- (same as 'hPutStr' 'stdout').
putStr :: String -> IO ()
putStr s = hPutStr stdout s
-- | The same as 'putStr', but adds a newline character.
putStrLn :: String -> IO ()
putStrLn s = do putStr s
putChar '\n'
-- | The 'print' function outputs a value of any printable type to the
-- standard output device.
-- Printable types are those that are instances of class 'Show'; 'print'
-- converts values to strings for output using the 'show' operation and
-- adds a newline.
--
For example , a program to print the first 20 integers and their
powers of 2 could be written as :
--
> main = print ( [ ( n , 2^n ) | n < - [ 0 .. 19 ] ] )
print :: Show a => a -> IO ()
print x = putStrLn (show x)
-- | Read a character from the standard input device
( same as ' hGetChar ' ' stdin ' ) .
getChar :: IO Char
getChar = hGetChar stdin
-- | Read a line from the standard input device
( same as ' hGetLine ' ' stdin ' ) .
getLine :: IO String
getLine = hGetLine stdin
-- | The 'getContents' operation returns all user input as a single string,
-- which is read lazily as it is needed
( same as ' hGetContents ' ' stdin ' ) .
getContents :: IO String
getContents = hGetContents stdin
-- | The 'interact' function takes a function of type @String->String@
-- as its argument. The entire input from the standard input device is
-- passed to this function as its argument, and the resulting string is
-- output on the standard output device.
interact :: (String -> String) -> IO ()
interact f = do s <- getContents
putStr (f s)
-- | The 'readFile' function reads a file and
-- returns the contents of the file as a string.
The file is read lazily , on demand , as with ' ' .
readFile :: FilePath -> IO String
readFile name = openFile name ReadMode >>= hGetContents
-- | The computation 'writeFile' @file str@ function writes the string @str@,
-- to the file @file@.
writeFile :: FilePath -> String -> IO ()
writeFile f txt = bracket (openFile f WriteMode) hClose
(\hdl -> hPutStr hdl txt)
-- | The computation 'appendFile' @file str@ function appends the string @str@,
-- to the file @file@.
--
-- Note that 'writeFile' and 'appendFile' write a literal string
-- to a file. To write a value of any printable type, as with 'print',
use the ' show ' function to convert the value to a string first .
--
> main = appendFile " squares " ( show [ ( x , x*x ) | x < - [ 0,0.1 .. 2 ] ] )
appendFile :: FilePath -> String -> IO ()
appendFile f txt = bracket (openFile f AppendMode) hClose
(\hdl -> hPutStr hdl txt)
-- | The 'readLn' function combines 'getLine' and 'readIO'.
readLn :: Read a => IO a
readLn = do l <- getLine
r <- readIO l
return r
-- | The 'readIO' function is similar to 'read' except that it signals
-- parse failure to the 'IO' monad instead of terminating the program.
readIO :: Read a => String -> IO a
readIO s = case (do { (x,t) <- reads s ;
("","") <- lex t ;
return x }) of
[x] -> return x
[] -> ioError (userError "Prelude.readIO: no parse")
_ -> ioError (userError "Prelude.readIO: ambiguous parse")
#endif /* __GLASGOW_HASKELL__ */
#ifndef __NHC__
| Computation ' hReady ' @hdl@ indicates whether at least one item is
-- available for input from handle @hdl@.
--
-- This operation may fail with:
--
-- * 'System.IO.Error.isEOFError' if the end of file has been reached.
hReady :: Handle -> IO Bool
hReady h = hWaitForInput h 0
-- | The same as 'hPutStr', but adds a newline character.
hPutStrLn :: Handle -> String -> IO ()
hPutStrLn hndl str = do
hPutStr hndl str
hPutChar hndl '\n'
-- | Computation 'hPrint' @hdl t@ writes the string representation of @t@
given by the ' shows ' function to the file or channel managed by @hdl@
-- and appends a newline.
--
-- This operation may fail with:
--
* ' System . ' if the device is full ; or
--
-- * 'System.IO.Error.isPermissionError' if another system resource limit would be exceeded.
hPrint :: Show a => Handle -> a -> IO ()
hPrint hdl = hPutStrLn hdl . show
#endif /* !__NHC__ */
-- ---------------------------------------------------------------------------
-- fixIO
#if defined(__GLASGOW_HASKELL__) || defined(__HUGS__)
fixIO :: (a -> IO a) -> IO a
fixIO k = do
ref <- newIORef (throw NonTermination)
ans <- unsafeInterleaveIO (readIORef ref)
result <- k ans
writeIORef ref result
return result
NOTE : we do our own explicit black holing here , because GHC 's lazy
blackholing is n't enough . In an infinite loop , GHC may run the IO
-- computation a few times before it notices the loop, which is wrong.
#endif
#if defined(__NHC__)
-- Assume a unix platform, where text and binary I/O are identical.
openBinaryFile = openFile
hSetBinaryMode _ _ = return ()
#endif
-- $locking
-- Implementations should enforce as far as possible, at least locally to the
Haskell process , multiple - reader single - writer locking on files .
-- That is, /there may either be many handles on the same file which manage
-- input, or just one handle on the file which manages output/. If any
-- open or semi-closed handle is managing a file for output, no new
-- handle can be allocated for that file. If any open or semi-closed
-- handle is managing a file for input, new handles can only be allocated
if they do not manage output . Whether two files are the same is
-- implementation-dependent, but they should normally be the same if they
-- have the same absolute path name and neither has been renamed, for
-- example.
--
-- /Warning/: the 'readFile' operation holds a semi-closed handle on
-- the file until the entire contents of the file have been consumed.
-- It follows that an attempt to write to a file (using 'writeFile', for
-- example) that was earlier opened by 'readFile' will usually result in
failure with ' System . IO.Error.isAlreadyInUseError ' .
-- -----------------------------------------------------------------------------
Utils
#ifdef __GLASGOW_HASKELL__
Copied here to avoid recursive dependency with Control . Exception
bracket
^ computation to run first ( \"acquire resource\ " )
-> (a -> IO b) -- ^ computation to run last (\"release resource\")
-> (a -> IO c) -- ^ computation to run in-between
-> IO c -- returns the value from the in-between computation
bracket before after thing =
block (do
a <- before
r <- catchException
(unblock (thing a))
(\e -> do { after a; throw e })
after a
return r
)
#endif
| null | https://raw.githubusercontent.com/FranklinChen/hugs98-plus-Sep2006/54ab69bd6313adbbed1d790b46aca2a0305ea67e/packages/base/System/IO.hs | haskell | ---------------------------------------------------------------------------
|
Module : System.IO
License : BSD-style (see the file libraries/base/LICENSE)
Maintainer :
Stability : stable
Portability : portable
---------------------------------------------------------------------------
:: (a -> IO a) -> IO a
* Files and handles
:: String
abstract, instance of: Eq, Show.
** Standard handles
| Three handles are allocated during program initialisation,
and are initially open.
:: Handle
* Opening and closing files
** Opening files
:: FilePath -> IOMode -> IO Handle
** Closing files
:: Handle -> IO ()
** Special cases
:: FilePath -> IO String
:: FilePath -> String -> IO ()
:: FilePath -> String -> IO ()
** File locking
$locking
* Operations on handles
** Determining and changing the size of a file
:: Handle -> IO Integer
:: Handle -> Integer -> IO ()
** Detecting the end of input
:: Handle -> IO Bool
:: IO Bool
** Buffering operations
:: Handle -> IO ()
** Repositioning handles
:: Handle -> IO HandlePosn
:: HandlePosn -> IO ()
abstract, instance of: Eq, Show.
:: Handle -> SeekMode -> Integer -> IO ()
:: Handle -> IO Integer
** Handle properties
:: Handle -> IO Bool
:: Handle -> IO Bool
:: Handle -> IO Bool
** Terminal operations
:: Handle -> IO Bool
:: Handle -> Bool -> IO ()
:: Handle -> IO Bool
** Showing handle state
:: Handle -> IO String
* Text input and output
** Text input
:: Handle -> Int -> IO Bool
:: Handle -> IO Bool
:: Handle -> IO [Char]
:: Handle -> IO [Char]
** Text output
:: Handle -> Char -> IO ()
:: Show a => Handle -> a -> IO ()
** Special cases for standard input and output
:: (String -> String) -> IO ()
:: String -> IO ()
:: String -> IO ()
:: Show a => a -> IO ()
:: IO String
:: IO String
:: Read a => String -> IO a
:: Read a => IO a
* Binary input and output
:: FilePath -> IOMode -> IO Handle
:: Handle -> Bool -> IO ()
:: Handle -> Ptr a -> Int -> IO ()
:: Handle -> Ptr a -> Int -> IO Int
:: Handle -> Ptr a -> Int -> IO Int
:: Handle -> Ptr a -> Int -> IO Int
* Temporary files
:: FilePath -> IOMode -> IO Handle
:: Handle -> IO ()
:: Handle -> IO Integer
:: Handle -> IO Bool
:: IO Bool
:: Handle -> IO ()
:: Handle -> IO HandlePosn
:: HandlePosn -> IO ()
:: Handle -> SeekMode -> Integer -> IO ()
:: Handle -> Int -> IO Bool
:: Handle -> IO [Char]
:: Handle -> IO [Char]
:: Handle -> Char -> IO ()
:: Handle -> IO Bool
:: Handle -> IO Bool
:: Handle -> IO Bool
:: String
-----------------------------------------------------------------------------
Standard IO
| Write a character to the standard output device
(same as 'hPutChar' 'stdout').
| Write a string to the standard output device
(same as 'hPutStr' 'stdout').
| The same as 'putStr', but adds a newline character.
| The 'print' function outputs a value of any printable type to the
standard output device.
Printable types are those that are instances of class 'Show'; 'print'
converts values to strings for output using the 'show' operation and
adds a newline.
| Read a character from the standard input device
| Read a line from the standard input device
| The 'getContents' operation returns all user input as a single string,
which is read lazily as it is needed
| The 'interact' function takes a function of type @String->String@
as its argument. The entire input from the standard input device is
passed to this function as its argument, and the resulting string is
output on the standard output device.
| The 'readFile' function reads a file and
returns the contents of the file as a string.
| The computation 'writeFile' @file str@ function writes the string @str@,
to the file @file@.
| The computation 'appendFile' @file str@ function appends the string @str@,
to the file @file@.
Note that 'writeFile' and 'appendFile' write a literal string
to a file. To write a value of any printable type, as with 'print',
| The 'readLn' function combines 'getLine' and 'readIO'.
| The 'readIO' function is similar to 'read' except that it signals
parse failure to the 'IO' monad instead of terminating the program.
available for input from handle @hdl@.
This operation may fail with:
* 'System.IO.Error.isEOFError' if the end of file has been reached.
| The same as 'hPutStr', but adds a newline character.
| Computation 'hPrint' @hdl t@ writes the string representation of @t@
and appends a newline.
This operation may fail with:
* 'System.IO.Error.isPermissionError' if another system resource limit would be exceeded.
---------------------------------------------------------------------------
fixIO
computation a few times before it notices the loop, which is wrong.
Assume a unix platform, where text and binary I/O are identical.
$locking
Implementations should enforce as far as possible, at least locally to the
That is, /there may either be many handles on the same file which manage
input, or just one handle on the file which manages output/. If any
open or semi-closed handle is managing a file for output, no new
handle can be allocated for that file. If any open or semi-closed
handle is managing a file for input, new handles can only be allocated
implementation-dependent, but they should normally be the same if they
have the same absolute path name and neither has been renamed, for
example.
/Warning/: the 'readFile' operation holds a semi-closed handle on
the file until the entire contents of the file have been consumed.
It follows that an attempt to write to a file (using 'writeFile', for
example) that was earlier opened by 'readFile' will usually result in
-----------------------------------------------------------------------------
^ computation to run last (\"release resource\")
^ computation to run in-between
returns the value from the in-between computation | # OPTIONS_GHC -fno - implicit - prelude #
Copyright : ( c ) The University of Glasgow 2001
The standard IO library .
module System.IO (
* The IO monad
instance MonadFix
IOMode(ReadMode,WriteMode,AppendMode,ReadWriteMode),
| These functions are also exported by the " Prelude " .
#ifdef __GLASGOW_HASKELL__
#endif
BufferMode(NoBuffering,LineBuffering,BlockBuffering),
: : Handle - > BufferMode - > IO ( )
: : Handle - > IO BufferMode
SeekMode(AbsoluteSeek,RelativeSeek,SeekFromEnd),
#if !defined(__NHC__)
#endif
#if !defined(__NHC__)
#endif
#ifdef __GLASGOW_HASKELL__
#endif
: : Handle - > IO
: : Handle - > IO
: : Handle - > [ IO ( )
: : Handle - > [ IO ( )
| These functions are also exported by the " Prelude " .
: : IO ( )
: : IO
#if !defined(__NHC__)
#endif
#if !defined(__NHC__) && !defined(__HUGS__)
#endif
#ifdef __GLASGOW_HASKELL__
openTempFile,
openBinaryTempFile,
#endif
) where
#ifdef __GLASGOW_HASKELL__
import GHC.Base
Together these four Prelude modules define
all the stuff exported by IO for the GHC version
import GHC.IO
import GHC.Exception
import GHC.Num
import GHC.Read
import GHC.Show
#endif
#ifdef __HUGS__
import Hugs.IO
import Hugs.IOExts
import Hugs.IORef
import Hugs.Prelude ( throw, Exception(NonTermination) )
import System.IO.Unsafe ( unsafeInterleaveIO )
#endif
#ifdef __NHC__
import IO
( Handle ()
, HandlePosn ()
, IOMode (ReadMode,WriteMode,AppendMode,ReadWriteMode)
, BufferMode (NoBuffering,LineBuffering,BlockBuffering)
, SeekMode (AbsoluteSeek,RelativeSeek,SeekFromEnd)
, stdin, stdout, stderr
: : Handle - > BufferMode - > IO ( )
: : Handle - > IO BufferMode
: : Handle - > IO
: : Handle - > IO
: : Handle - > [ IO ( )
: : Handle - > [ IO ( )
: : Handle - > [ IO ( )
: : Handle - > [ IO ( )
, IO ()
)
import NHC.IOExtras (fixIO)
#endif
#ifdef __GLASGOW_HASKELL__
putChar :: Char -> IO ()
putChar c = hPutChar stdout c
putStr :: String -> IO ()
putStr s = hPutStr stdout s
putStrLn :: String -> IO ()
putStrLn s = do putStr s
putChar '\n'
For example , a program to print the first 20 integers and their
powers of 2 could be written as :
> main = print ( [ ( n , 2^n ) | n < - [ 0 .. 19 ] ] )
print :: Show a => a -> IO ()
print x = putStrLn (show x)
( same as ' hGetChar ' ' stdin ' ) .
getChar :: IO Char
getChar = hGetChar stdin
( same as ' hGetLine ' ' stdin ' ) .
getLine :: IO String
getLine = hGetLine stdin
( same as ' hGetContents ' ' stdin ' ) .
getContents :: IO String
getContents = hGetContents stdin
interact :: (String -> String) -> IO ()
interact f = do s <- getContents
putStr (f s)
The file is read lazily , on demand , as with ' ' .
readFile :: FilePath -> IO String
readFile name = openFile name ReadMode >>= hGetContents
writeFile :: FilePath -> String -> IO ()
writeFile f txt = bracket (openFile f WriteMode) hClose
(\hdl -> hPutStr hdl txt)
use the ' show ' function to convert the value to a string first .
> main = appendFile " squares " ( show [ ( x , x*x ) | x < - [ 0,0.1 .. 2 ] ] )
appendFile :: FilePath -> String -> IO ()
appendFile f txt = bracket (openFile f AppendMode) hClose
(\hdl -> hPutStr hdl txt)
readLn :: Read a => IO a
readLn = do l <- getLine
r <- readIO l
return r
readIO :: Read a => String -> IO a
readIO s = case (do { (x,t) <- reads s ;
("","") <- lex t ;
return x }) of
[x] -> return x
[] -> ioError (userError "Prelude.readIO: no parse")
_ -> ioError (userError "Prelude.readIO: ambiguous parse")
#endif /* __GLASGOW_HASKELL__ */
#ifndef __NHC__
| Computation ' hReady ' @hdl@ indicates whether at least one item is
hReady :: Handle -> IO Bool
hReady h = hWaitForInput h 0
hPutStrLn :: Handle -> String -> IO ()
hPutStrLn hndl str = do
hPutStr hndl str
hPutChar hndl '\n'
given by the ' shows ' function to the file or channel managed by @hdl@
* ' System . ' if the device is full ; or
hPrint :: Show a => Handle -> a -> IO ()
hPrint hdl = hPutStrLn hdl . show
#endif /* !__NHC__ */
#if defined(__GLASGOW_HASKELL__) || defined(__HUGS__)
fixIO :: (a -> IO a) -> IO a
fixIO k = do
ref <- newIORef (throw NonTermination)
ans <- unsafeInterleaveIO (readIORef ref)
result <- k ans
writeIORef ref result
return result
NOTE : we do our own explicit black holing here , because GHC 's lazy
blackholing is n't enough . In an infinite loop , GHC may run the IO
#endif
#if defined(__NHC__)
openBinaryFile = openFile
hSetBinaryMode _ _ = return ()
#endif
Haskell process , multiple - reader single - writer locking on files .
if they do not manage output . Whether two files are the same is
failure with ' System . IO.Error.isAlreadyInUseError ' .
Utils
#ifdef __GLASGOW_HASKELL__
Copied here to avoid recursive dependency with Control . Exception
bracket
^ computation to run first ( \"acquire resource\ " )
bracket before after thing =
block (do
a <- before
r <- catchException
(unblock (thing a))
(\e -> do { after a; throw e })
after a
return r
)
#endif
|
fbfc8be8fd0a46a40c959553f4b6284b0d46c1361a44ff547b0b38a7d142d4da | input-output-hk/cardano-transactions | Encoding.hs | # LANGUAGE DataKinds #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
{-# OPTIONS_HADDOCK prune #-}
module Codec.Binary.Encoding
(
-- * Converting From Bases
fromBase16
, fromBech32
, fromBase58
, fromBase64
-- * Conversion To Bases
, base16
, bech32
, base58
, base64
) where
import Codec.Binary.Bech32
( HumanReadablePart )
import Data.ByteArray.Encoding
( Base (..), convertFromBase, convertToBase )
import Data.ByteString
( ByteString )
import Data.ByteString.Base58
( bitcoinAlphabet, decodeBase58, encodeBase58 )
import Data.Either.Extra
( eitherToMaybe )
import Data.Text
( Text )
import qualified Codec.Binary.Bech32 as Bech32
import qualified Data.Text.Encoding as T
| Convert a base16 encoded ' Text ' into a raw ' ByteString '
--
-- @since 2.0.0
fromBase16 :: Text -> Maybe ByteString
fromBase16 = eitherToMaybe . convertFromBase Base16 . T.encodeUtf8
| Convert a raw ' ByteString ' into base16 encoded ' Text '
--
-- @since 2.0.0
base16 :: ByteString -> Text
base16 = T.decodeUtf8 . convertToBase Base16
| Convert a Bech32 encoded ' Text ' into a raw ' ByteString '
--
-- @since 2.0.0
fromBech32 :: Text -> Maybe ByteString
fromBech32 txt = do
(_, dp) <- either (const Nothing) Just (Bech32.decodeLenient txt)
Bech32.dataPartToBytes dp
| Convert a raw ' ByteString ' into a bech32 encoded ' Text '
--
-- @since 2.0.0
bech32 :: HumanReadablePart -> ByteString -> Text
bech32 hrp bytes =
Bech32.encodeLenient hrp (Bech32.dataPartFromBytes bytes)
| Convert a base58 encoded ' Text ' into a raw ' ByteString '
--
-- @since 2.0.0
fromBase58 :: Text -> Maybe ByteString
fromBase58 = decodeBase58 bitcoinAlphabet . T.encodeUtf8
| Convert a raw ' ByteString ' into a base58 encoded ' Text '
base58 :: ByteString -> Text
base58 = T.decodeUtf8 . encodeBase58 bitcoinAlphabet
| Convert a base64 encoded ' Text ' into a raw ' ByteString '
--
-- @since 2.0.0
fromBase64 :: Text -> Maybe ByteString
fromBase64 = eitherToMaybe . convertFromBase Base64 . T.encodeUtf8
| Convert a raw ' ByteString ' into a base64 encoded ' Text ' , with padding .
--
-- @since 2.0.0
base64 :: ByteString -> Text
base64 = T.decodeUtf8 . convertToBase Base64
| null | https://raw.githubusercontent.com/input-output-hk/cardano-transactions/efc3914f950438b70ebe20919bf6c0ecf93b3bbc/shared/Codec/Binary/Encoding.hs | haskell | # OPTIONS_HADDOCK prune #
* Converting From Bases
* Conversion To Bases
@since 2.0.0
@since 2.0.0
@since 2.0.0
@since 2.0.0
@since 2.0.0
@since 2.0.0
@since 2.0.0 | # LANGUAGE DataKinds #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
module Codec.Binary.Encoding
(
fromBase16
, fromBech32
, fromBase58
, fromBase64
, base16
, bech32
, base58
, base64
) where
import Codec.Binary.Bech32
( HumanReadablePart )
import Data.ByteArray.Encoding
( Base (..), convertFromBase, convertToBase )
import Data.ByteString
( ByteString )
import Data.ByteString.Base58
( bitcoinAlphabet, decodeBase58, encodeBase58 )
import Data.Either.Extra
( eitherToMaybe )
import Data.Text
( Text )
import qualified Codec.Binary.Bech32 as Bech32
import qualified Data.Text.Encoding as T
| Convert a base16 encoded ' Text ' into a raw ' ByteString '
fromBase16 :: Text -> Maybe ByteString
fromBase16 = eitherToMaybe . convertFromBase Base16 . T.encodeUtf8
| Convert a raw ' ByteString ' into base16 encoded ' Text '
base16 :: ByteString -> Text
base16 = T.decodeUtf8 . convertToBase Base16
| Convert a Bech32 encoded ' Text ' into a raw ' ByteString '
fromBech32 :: Text -> Maybe ByteString
fromBech32 txt = do
(_, dp) <- either (const Nothing) Just (Bech32.decodeLenient txt)
Bech32.dataPartToBytes dp
| Convert a raw ' ByteString ' into a bech32 encoded ' Text '
bech32 :: HumanReadablePart -> ByteString -> Text
bech32 hrp bytes =
Bech32.encodeLenient hrp (Bech32.dataPartFromBytes bytes)
| Convert a base58 encoded ' Text ' into a raw ' ByteString '
fromBase58 :: Text -> Maybe ByteString
fromBase58 = decodeBase58 bitcoinAlphabet . T.encodeUtf8
| Convert a raw ' ByteString ' into a base58 encoded ' Text '
base58 :: ByteString -> Text
base58 = T.decodeUtf8 . encodeBase58 bitcoinAlphabet
| Convert a base64 encoded ' Text ' into a raw ' ByteString '
fromBase64 :: Text -> Maybe ByteString
fromBase64 = eitherToMaybe . convertFromBase Base64 . T.encodeUtf8
| Convert a raw ' ByteString ' into a base64 encoded ' Text ' , with padding .
base64 :: ByteString -> Text
base64 = T.decodeUtf8 . convertToBase Base64
|
d2dff01bc513d6e4df2b91b9698b63f2480b5d6bc1a4dc52ad5ce10b6c9cdeff | exoscale/clojure-kubernetes-client | v1_env_from_source.clj | (ns clojure-kubernetes-client.specs.v1-env-from-source
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-config-map-env-source :refer :all]
[clojure-kubernetes-client.specs.v1-secret-env-source :refer :all]
)
(:import (java.io File)))
(declare v1-env-from-source-data v1-env-from-source)
(def v1-env-from-source-data
{
(ds/opt :configMapRef) v1-config-map-env-source
(ds/opt :prefix) string?
(ds/opt :secretRef) v1-secret-env-source
})
(def v1-env-from-source
(ds/spec
{:name ::v1-env-from-source
:spec v1-env-from-source-data}))
| null | https://raw.githubusercontent.com/exoscale/clojure-kubernetes-client/79d84417f28d048c5ac015c17e3926c73e6ac668/src/clojure_kubernetes_client/specs/v1_env_from_source.clj | clojure | (ns clojure-kubernetes-client.specs.v1-env-from-source
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-config-map-env-source :refer :all]
[clojure-kubernetes-client.specs.v1-secret-env-source :refer :all]
)
(:import (java.io File)))
(declare v1-env-from-source-data v1-env-from-source)
(def v1-env-from-source-data
{
(ds/opt :configMapRef) v1-config-map-env-source
(ds/opt :prefix) string?
(ds/opt :secretRef) v1-secret-env-source
})
(def v1-env-from-source
(ds/spec
{:name ::v1-env-from-source
:spec v1-env-from-source-data}))
| |
981eeff8679be526bd510a7199f22a984e9d3241e390a453c8916929c160b429 | mvaldesdeleon/aoc18 | Day3Main.hs | module Day3Main where
import Day3 (day3)
main :: IO ()
main = day3
| null | https://raw.githubusercontent.com/mvaldesdeleon/aoc18/1a6f6de7c482e5de264360e36f97a3c7487e2457/app/Day3Main.hs | haskell | module Day3Main where
import Day3 (day3)
main :: IO ()
main = day3
| |
14b6ef87bf3ac1b7f71c4e6780799dd9d374c5a9747759a46fee1ad4f048a0ac | dnadales/sandbox | ExercisesFoldableSpec.hs | -- |
module ExercisesFoldableSpec where
import Data.Foldable
import Data.Functor.Compose
import Data.Monoid
import Data.Semigroup
import Test.Hspec
spec :: Spec
spec = do
describe "foldl" $ do
it "folds a list of numbers with +" $ do
foldl (+) 0 [0, 1, 2, 3] `shouldBe` 6
it "folds a list of strings with ++" $ do
foldl (++) "" ["a", "b", "c"] `shouldBe` "abc"
describe "fold" $ do
it "folds a list of monoids (String is a Monoid)" $ do
fold ["a", "b", "c"] `shouldBe` "abc"
it "folds a list of monoids (Integer with + is a Monoid)" $ do
fold [0, 1, 2, 3] `shouldBe` (6 :: Sum Int)
it "folds a list of monoids (Integer with * is a Monoid)" $ do
fold [0, 1, 2, 3] `shouldBe` (0 :: Product Int)
it "extracts the value of Maybe (Just)" $ do
fold (Just 15) `shouldBe` (15 :: Sum Int)
it "gives the zero element of the monoid when the maybe value is Nothing" $ do
fold Nothing `shouldBe` (0 :: Sum Int)
it ("gives the zero element of the monoid when the maybe value is Nothing"
++ " (using product as the monoid operation)") $ do
fold Nothing `shouldBe` (1 :: Product Int)
describe "foldMap" $ do
it "sums the string lengths" $ do
foldMap (Sum . length) ["a", "b", "c"] `shouldBe` 3
it "concatenates the string representation of numbers" $ do
foldMap show [0, 1, 2, 3] `shouldBe` "0123"
it "gives the minimum and maximum values of a list (*new*)" $ do
let minMax :: Int -> (Min Int, Max Int)
minMax x = (Min x, Max x)
foldMap minMax [0, 1, 2, 3] `shouldBe` (Min 0, Max 3)
describe "foldK" $ do
it "is not necessary in the case of lists" $ do
fold [[0, 1, 2], [3, 4, 5]] `shouldBe` [0, 1, 2, 3, 4, 5]
it ("requires to define a different instance of Maybe to " ++
" have the same behavior as in Cats") $ do
fold [Nothing, Just "one", Just "two"] `shouldBe` Just "onetwo"
describe "find" $ do
it "returns Just when an element that satisfies the predicate is found" $ do
find (2 <) [0, 1, 2, 3] `shouldBe` Just 3
it "returns Nothing when no element that satisfies the predicate is found" $ do
find (5 <) [0, 1, 2, 3] `shouldBe` Nothing
describe "toList" $ do
it "behaves as the identity function for lists" $ do
toList [0, 1, 2] `shouldBe` [0, 1, 2]
it "extracts the value inside a Maybe value" $ do
toList (Just (Sum 15)) `shouldBe` [15]
it "returns an empty list when Nothing is passed" $ do
toList Nothing `shouldBe` ([] :: [()])
describe "composition of foldables" $ do
it "composes lists, maybes, and ints (with sum)" $ do
let xs :: Compose [] Maybe (Sum Int)
xs = Compose [Just 0, Just 1, Just 2]
fold xs `shouldBe` 3
-- In this case the fold is applied to the list, and to the maybe values.
-- A fold on the maybe values extracts their values.
Note how this is different from ` fold [ Just 0 , Just 1 , Just 2 ] = = Just 3 `
it "is different from not using composition" $ do
let xs = [Just 0, Just 1, Just 2]
fold xs `shouldBe` Just (3 :: Sum Int)
it "composes lists, maybes, and strings" $ do
let xs :: Compose [] Maybe String
xs = Compose [Just "0", Nothing, Just "2"]
fold xs `shouldBe` "02"
| null | https://raw.githubusercontent.com/dnadales/sandbox/401c4f0fac5f8044fb6e2e443bacddce6f135b4b/my-typeclassopedia/my-typeclassopedia-haskell/test/ExercisesFoldableSpec.hs | haskell | |
In this case the fold is applied to the list, and to the maybe values.
A fold on the maybe values extracts their values. |
module ExercisesFoldableSpec where
import Data.Foldable
import Data.Functor.Compose
import Data.Monoid
import Data.Semigroup
import Test.Hspec
spec :: Spec
spec = do
describe "foldl" $ do
it "folds a list of numbers with +" $ do
foldl (+) 0 [0, 1, 2, 3] `shouldBe` 6
it "folds a list of strings with ++" $ do
foldl (++) "" ["a", "b", "c"] `shouldBe` "abc"
describe "fold" $ do
it "folds a list of monoids (String is a Monoid)" $ do
fold ["a", "b", "c"] `shouldBe` "abc"
it "folds a list of monoids (Integer with + is a Monoid)" $ do
fold [0, 1, 2, 3] `shouldBe` (6 :: Sum Int)
it "folds a list of monoids (Integer with * is a Monoid)" $ do
fold [0, 1, 2, 3] `shouldBe` (0 :: Product Int)
it "extracts the value of Maybe (Just)" $ do
fold (Just 15) `shouldBe` (15 :: Sum Int)
it "gives the zero element of the monoid when the maybe value is Nothing" $ do
fold Nothing `shouldBe` (0 :: Sum Int)
it ("gives the zero element of the monoid when the maybe value is Nothing"
++ " (using product as the monoid operation)") $ do
fold Nothing `shouldBe` (1 :: Product Int)
describe "foldMap" $ do
it "sums the string lengths" $ do
foldMap (Sum . length) ["a", "b", "c"] `shouldBe` 3
it "concatenates the string representation of numbers" $ do
foldMap show [0, 1, 2, 3] `shouldBe` "0123"
it "gives the minimum and maximum values of a list (*new*)" $ do
let minMax :: Int -> (Min Int, Max Int)
minMax x = (Min x, Max x)
foldMap minMax [0, 1, 2, 3] `shouldBe` (Min 0, Max 3)
describe "foldK" $ do
it "is not necessary in the case of lists" $ do
fold [[0, 1, 2], [3, 4, 5]] `shouldBe` [0, 1, 2, 3, 4, 5]
it ("requires to define a different instance of Maybe to " ++
" have the same behavior as in Cats") $ do
fold [Nothing, Just "one", Just "two"] `shouldBe` Just "onetwo"
describe "find" $ do
it "returns Just when an element that satisfies the predicate is found" $ do
find (2 <) [0, 1, 2, 3] `shouldBe` Just 3
it "returns Nothing when no element that satisfies the predicate is found" $ do
find (5 <) [0, 1, 2, 3] `shouldBe` Nothing
describe "toList" $ do
it "behaves as the identity function for lists" $ do
toList [0, 1, 2] `shouldBe` [0, 1, 2]
it "extracts the value inside a Maybe value" $ do
toList (Just (Sum 15)) `shouldBe` [15]
it "returns an empty list when Nothing is passed" $ do
toList Nothing `shouldBe` ([] :: [()])
describe "composition of foldables" $ do
it "composes lists, maybes, and ints (with sum)" $ do
let xs :: Compose [] Maybe (Sum Int)
xs = Compose [Just 0, Just 1, Just 2]
fold xs `shouldBe` 3
Note how this is different from ` fold [ Just 0 , Just 1 , Just 2 ] = = Just 3 `
it "is different from not using composition" $ do
let xs = [Just 0, Just 1, Just 2]
fold xs `shouldBe` Just (3 :: Sum Int)
it "composes lists, maybes, and strings" $ do
let xs :: Compose [] Maybe String
xs = Compose [Just "0", Nothing, Just "2"]
fold xs `shouldBe` "02"
|
5988cb0d0484f2cf39893d284866578d8aba6810954c0f4a07cfa0d91a579bee | danieljharvey/mimsa | IRSpec.hs | {-# LANGUAGE OverloadedStrings #-}
module Test.IR.IRSpec (spec) where
import Data.Foldable (traverse_)
import Data.Functor
import Data.Text (Text)
import qualified Data.Text.IO as T
import qualified LLVM.AST as LLVM
import qualified Smol.Core.Compile.RunLLVM as Run
import Smol.Core.IR.FromExpr.Expr
import Smol.Core.IR.FromResolvedExpr
import Smol.Core.IR.IRExpr
import Smol.Core.IR.ToLLVM.ToLLVM
import Smol.Core.Typecheck
import Smol.Core.Types
import System.IO.Unsafe
import Test.Helpers
import Test.Hspec
import Test.IR.Samples
-- run the code, get the output, die
run :: LLVM.Module -> IO Text
run = fmap Run.rrResult . Run.run
evalExpr :: Text -> ResolvedExpr (Type ResolvedDep Annotation)
evalExpr input =
case elaborate (unsafeParseTypedExpr input $> mempty) of
Right typedExpr -> typedExpr
Left e -> error (show e)
createModule :: Text -> LLVM.Module
createModule input = do
let expr = evalExpr input
irModule = irFromExpr (fromResolvedType <$> fromResolvedExpr expr)
irToLLVM irModule
_printModule :: IRModule -> IRModule
_printModule irModule =
unsafePerformIO (T.putStrLn (prettyModule irModule) >> pure irModule)
testCompileIR :: (Text, Text) -> Spec
testCompileIR (input, result) = it ("Via IR " <> show input) $ do
resp <- run (createModule input)
resp `shouldBe` result
spec :: Spec
spec = do
describe "Compile via IR" $ do
describe "IR" $ do
it "print 42" $ do
resp <- run (irToLLVM irPrint42)
resp `shouldBe` "42"
it "use id function" $ do
resp <- run (irToLLVM irId42)
resp `shouldBe` "42"
it "creates and destructures tuple" $ do
resp <- run (irToLLVM irTwoTuple42)
resp `shouldBe` "42"
it "does an if statement" $ do
resp <- run (irToLLVM irBasicIf)
resp `shouldBe` "42"
it "does a pattern match" $ do
resp <- run (irToLLVM irPatternMatch)
resp `shouldBe` "42"
it "recursive function" $ do
resp <- run (irToLLVM irRecursive)
resp `shouldBe` "49995000"
it "curried function (no closure)" $ do
resp <- run (irToLLVM irCurriedNoClosure)
resp `shouldBe` "22"
it "curried function" $ do
resp <- run (irToLLVM irCurried)
resp `shouldBe` "42"
describe "From expressions" $ do
describe "Basic" $ do
let testVals =
[ ("42", "42"),
("True", "True"),
("False", "False"),
("(1 + 1 : Int)", "2"),
("(1 + 2 + 3 + 4 + 5 + 6 : Int)", "21"),
("(if True then 1 else 2 : Nat)", "1"),
("(if False then 1 else 2 : Nat)", "2")
]
describe "IR compile" $ do
traverse_ testCompileIR testVals
describe "Functions" $ do
let testVals =
[ ("(\\a -> a + 1 : Nat -> Nat) 2", "3"),
("(\\b -> if b then 42 else 41 : Bool -> Nat) True", "42"),
("(\\b -> if b then 1 else 42 : Bool -> Nat) False", "42"),
("(\\a -> a + 1: Nat -> Nat) 41", "42"),
("(\\a -> 42 : Nat -> Nat) 21", "42"),
("(\\a -> \\b -> a + b : Nat -> Nat -> Nat) 20 22", "42"),
("let a = (1 : Nat); let useA = (\\b -> b + a : Nat -> Nat); useA (41 : Nat)", "42"),
("let add = (\\a -> \\b -> a + b : Nat -> Nat -> Nat); add (1 : Nat) (2 : Nat)", "3"),
("let f = (\\i -> i + 1 : Nat -> Nat) in f (1 : Nat)", "2"), -- single arity function that return prim
("let f = (\\i -> (i,i) : Nat -> (Nat,Nat)); let b = f (1 : Nat); 42", "42"), -- single arity function that returns struct
("let f = (\\i -> (i,10) : Nat -> (Nat,Nat)) in (case f (100 : Nat) of (a,b) -> a + b : Nat)", "110"), -- single arity function that returns struct
("let flipConst = (\\a -> \\b -> b : Nat -> Nat -> Nat); flipConst (1 : Nat) (2 : Nat)", "2") -- oh fuck
( " let sum = ( \\a - > if a = = 10 then 0 else let a2 = a + 1 in a + sum a2 : ) ; sum ( 0 : ) " , " 1783293664 " ) ,
( " let add3 = ( \\a - > \\b - > \\c - > a + b + c : ) ; ( 1 : ) ( 2 : ) ( 3 : ) " , " 6 " ) ,
]
describe "IR compile" $ do
traverse_ testCompileIR testVals
describe "Tuples and matching" $ do
let testVals =
[ ("let pair = (20,22); (case pair of (a,b) -> a + b : Nat)", "42"),
("(\\pair -> case pair of (a,b) -> a + b : (Nat,Nat) -> Nat) (20,22)", "42"),
("(\\triple -> case triple of (a,b,c) -> a + b + c : (Nat,Nat,Nat) -> Nat) (20,11,11)", "42"),
("(\\bool -> case bool of True -> 0 | False -> 1 : Bool -> Nat) False", "1"),
("(\\bools -> case bools of (True,_) -> 0 | (False,_) -> 1 : (Bool,Bool) -> Nat) (False,False)", "1")
]
describe "IR compile" $ do
traverse_ testCompileIR testVals
describe "Datatypes" $ do
let testVals =
[ ("(\\ord -> case ord of GT -> 21 | EQ -> 23 | LT -> 42 : Ord -> Nat) LT", "42"), -- constructor with no args
("(\\maybe -> case maybe of _ -> 42 : Maybe Nat -> Nat) (Just 41)", "42"),
("(\\maybe -> case maybe of Just a -> a + 1 | Nothing -> 0 : Maybe Nat -> Nat) (Just 41)", "42"),
("(\\maybe -> case maybe of Just 40 -> 100 | Just a -> a + 1 | Nothing -> 0 : Maybe Nat -> Nat) (Just 41)", "42"), -- predicates in constructor
("(\\maybe -> case maybe of Just 40 -> 100 | Just a -> a + 1 | Nothing -> 0 : Maybe Nat -> Nat) (Nothing : Maybe Nat)", "0"), -- predicates in constructor
("(\\these -> case these of This aa -> aa | That 27 -> 0 | These a b -> a + b : These Nat Nat -> Nat) (This 42 : These Nat Nat)", "42"), -- data shapes are wrong
("(\\these -> case these of This aa -> aa | That 60 -> 0 | These a b -> a + b : These Nat Nat -> Nat) (These 20 22 : These Nat Nat)", "42"),
( " ( \\these - > case these of This a - > a | That _ - > 1000 | These a b - > a + b : These ) ( That 42 : These ) " , " 1000"),--wildcards fuck it up for some reason
("(case (This 42 : These Nat Nat) of This a -> a : Nat)", "42")
]
describe "IR compile" $ do
traverse_ testCompileIR testVals
xdescribe "Nested datatypes (manually split cases)" $ do
let testVals =
[ ("let maybe = Just (Just 41) in 42", "42"),
("let oneList = Cons 1 Nil in 42", "42"),
("let twoList = Cons 1 (Cons 2 Nil) in 42", "42"),
("(\\maybe -> case maybe of Just a -> (case a of Just aa -> aa + 1 | _ -> 0) | _ -> 0 : Maybe (Maybe Nat) -> Nat) (Just (Just 41))", "42") -- ,
( " let nested = ( 20 , ( 11,11 ) ) in 42 " , " 42 " ) ,
( " ( \\nested - > case nested of ( a,(b , c ) ) - > a + b + c : ( , ( , ) ) - > Nat ) ( 20,(11,11 ) ) " , " 42 " ) ,
( " ( \\maybe - > case maybe of Just ( a , b , c ) - > a + b + c | Nothing - > 0 : Maybe ( , , ) - > Nat ) ( Just ( 1,2,3 ) ) " , " 6 " )
]
describe "IR compile" $ do
traverse_ testCompileIR testVals
xdescribe "Nested datatypes (currently broken)" $ do
let testVals =
[ ("let maybe = Just (Just 41) in 42", "42"),
("(\\maybe -> case maybe of Just (Just a) -> a + 1 | _ -> 0 : Maybe (Maybe Nat) -> Nat) (Just (Just 41))", "42"),
("let nested = (20, (11,11)) in 42", "42"),
("(\\nested -> case nested of (a,(b,c)) -> a + b + c : (Nat, (Nat, Nat)) -> Nat) (20,(11,11))", "42"),
("(\\maybe -> case maybe of Just (a,b,c) -> a + b + c | Nothing -> 0 : Maybe (Nat,Nat,Nat) -> Nat) (Just (1,2,3))", "6")
]
describe "IR compile" $ do
traverse_ testCompileIR testVals
| null | https://raw.githubusercontent.com/danieljharvey/mimsa/296ab9bcbdbaf682fa76921ce3c80d4bbafb52ae/smol-core/test/Test/IR/IRSpec.hs | haskell | # LANGUAGE OverloadedStrings #
run the code, get the output, die
single arity function that return prim
single arity function that returns struct
single arity function that returns struct
oh fuck
constructor with no args
predicates in constructor
predicates in constructor
data shapes are wrong
wildcards fuck it up for some reason
, |
module Test.IR.IRSpec (spec) where
import Data.Foldable (traverse_)
import Data.Functor
import Data.Text (Text)
import qualified Data.Text.IO as T
import qualified LLVM.AST as LLVM
import qualified Smol.Core.Compile.RunLLVM as Run
import Smol.Core.IR.FromExpr.Expr
import Smol.Core.IR.FromResolvedExpr
import Smol.Core.IR.IRExpr
import Smol.Core.IR.ToLLVM.ToLLVM
import Smol.Core.Typecheck
import Smol.Core.Types
import System.IO.Unsafe
import Test.Helpers
import Test.Hspec
import Test.IR.Samples
run :: LLVM.Module -> IO Text
run = fmap Run.rrResult . Run.run
evalExpr :: Text -> ResolvedExpr (Type ResolvedDep Annotation)
evalExpr input =
case elaborate (unsafeParseTypedExpr input $> mempty) of
Right typedExpr -> typedExpr
Left e -> error (show e)
createModule :: Text -> LLVM.Module
createModule input = do
let expr = evalExpr input
irModule = irFromExpr (fromResolvedType <$> fromResolvedExpr expr)
irToLLVM irModule
_printModule :: IRModule -> IRModule
_printModule irModule =
unsafePerformIO (T.putStrLn (prettyModule irModule) >> pure irModule)
testCompileIR :: (Text, Text) -> Spec
testCompileIR (input, result) = it ("Via IR " <> show input) $ do
resp <- run (createModule input)
resp `shouldBe` result
spec :: Spec
spec = do
describe "Compile via IR" $ do
describe "IR" $ do
it "print 42" $ do
resp <- run (irToLLVM irPrint42)
resp `shouldBe` "42"
it "use id function" $ do
resp <- run (irToLLVM irId42)
resp `shouldBe` "42"
it "creates and destructures tuple" $ do
resp <- run (irToLLVM irTwoTuple42)
resp `shouldBe` "42"
it "does an if statement" $ do
resp <- run (irToLLVM irBasicIf)
resp `shouldBe` "42"
it "does a pattern match" $ do
resp <- run (irToLLVM irPatternMatch)
resp `shouldBe` "42"
it "recursive function" $ do
resp <- run (irToLLVM irRecursive)
resp `shouldBe` "49995000"
it "curried function (no closure)" $ do
resp <- run (irToLLVM irCurriedNoClosure)
resp `shouldBe` "22"
it "curried function" $ do
resp <- run (irToLLVM irCurried)
resp `shouldBe` "42"
describe "From expressions" $ do
describe "Basic" $ do
let testVals =
[ ("42", "42"),
("True", "True"),
("False", "False"),
("(1 + 1 : Int)", "2"),
("(1 + 2 + 3 + 4 + 5 + 6 : Int)", "21"),
("(if True then 1 else 2 : Nat)", "1"),
("(if False then 1 else 2 : Nat)", "2")
]
describe "IR compile" $ do
traverse_ testCompileIR testVals
describe "Functions" $ do
let testVals =
[ ("(\\a -> a + 1 : Nat -> Nat) 2", "3"),
("(\\b -> if b then 42 else 41 : Bool -> Nat) True", "42"),
("(\\b -> if b then 1 else 42 : Bool -> Nat) False", "42"),
("(\\a -> a + 1: Nat -> Nat) 41", "42"),
("(\\a -> 42 : Nat -> Nat) 21", "42"),
("(\\a -> \\b -> a + b : Nat -> Nat -> Nat) 20 22", "42"),
("let a = (1 : Nat); let useA = (\\b -> b + a : Nat -> Nat); useA (41 : Nat)", "42"),
("let add = (\\a -> \\b -> a + b : Nat -> Nat -> Nat); add (1 : Nat) (2 : Nat)", "3"),
( " let sum = ( \\a - > if a = = 10 then 0 else let a2 = a + 1 in a + sum a2 : ) ; sum ( 0 : ) " , " 1783293664 " ) ,
( " let add3 = ( \\a - > \\b - > \\c - > a + b + c : ) ; ( 1 : ) ( 2 : ) ( 3 : ) " , " 6 " ) ,
]
describe "IR compile" $ do
traverse_ testCompileIR testVals
describe "Tuples and matching" $ do
let testVals =
[ ("let pair = (20,22); (case pair of (a,b) -> a + b : Nat)", "42"),
("(\\pair -> case pair of (a,b) -> a + b : (Nat,Nat) -> Nat) (20,22)", "42"),
("(\\triple -> case triple of (a,b,c) -> a + b + c : (Nat,Nat,Nat) -> Nat) (20,11,11)", "42"),
("(\\bool -> case bool of True -> 0 | False -> 1 : Bool -> Nat) False", "1"),
("(\\bools -> case bools of (True,_) -> 0 | (False,_) -> 1 : (Bool,Bool) -> Nat) (False,False)", "1")
]
describe "IR compile" $ do
traverse_ testCompileIR testVals
describe "Datatypes" $ do
let testVals =
("(\\maybe -> case maybe of _ -> 42 : Maybe Nat -> Nat) (Just 41)", "42"),
("(\\maybe -> case maybe of Just a -> a + 1 | Nothing -> 0 : Maybe Nat -> Nat) (Just 41)", "42"),
("(\\these -> case these of This aa -> aa | That 60 -> 0 | These a b -> a + b : These Nat Nat -> Nat) (These 20 22 : These Nat Nat)", "42"),
("(case (This 42 : These Nat Nat) of This a -> a : Nat)", "42")
]
describe "IR compile" $ do
traverse_ testCompileIR testVals
xdescribe "Nested datatypes (manually split cases)" $ do
let testVals =
[ ("let maybe = Just (Just 41) in 42", "42"),
("let oneList = Cons 1 Nil in 42", "42"),
("let twoList = Cons 1 (Cons 2 Nil) in 42", "42"),
( " let nested = ( 20 , ( 11,11 ) ) in 42 " , " 42 " ) ,
( " ( \\nested - > case nested of ( a,(b , c ) ) - > a + b + c : ( , ( , ) ) - > Nat ) ( 20,(11,11 ) ) " , " 42 " ) ,
( " ( \\maybe - > case maybe of Just ( a , b , c ) - > a + b + c | Nothing - > 0 : Maybe ( , , ) - > Nat ) ( Just ( 1,2,3 ) ) " , " 6 " )
]
describe "IR compile" $ do
traverse_ testCompileIR testVals
xdescribe "Nested datatypes (currently broken)" $ do
let testVals =
[ ("let maybe = Just (Just 41) in 42", "42"),
("(\\maybe -> case maybe of Just (Just a) -> a + 1 | _ -> 0 : Maybe (Maybe Nat) -> Nat) (Just (Just 41))", "42"),
("let nested = (20, (11,11)) in 42", "42"),
("(\\nested -> case nested of (a,(b,c)) -> a + b + c : (Nat, (Nat, Nat)) -> Nat) (20,(11,11))", "42"),
("(\\maybe -> case maybe of Just (a,b,c) -> a + b + c | Nothing -> 0 : Maybe (Nat,Nat,Nat) -> Nat) (Just (1,2,3))", "6")
]
describe "IR compile" $ do
traverse_ testCompileIR testVals
|
77f9bc652b78f97ba83c5a0e0cdb0c8887c1faa57479f87668a8cbbf60f53d3f | samply/blaze | health_test.clj | (ns blaze.handler.health-test
(:require
[blaze.handler.health]
[blaze.test-util :as tu :refer [with-system]]
[blaze.test-util.ring :refer [call]]
[clojure.spec.test.alpha :as st]
[clojure.test :as test :refer [deftest]]
[juxt.iota :refer [given]]
[taoensso.timbre :as log]))
(st/instrument)
(log/set-level! :trace)
(test/use-fixtures :each tu/fixture)
(def system
{:blaze.handler/health {}})
(deftest handler-test
(with-system [{handler :blaze.handler/health} system]
(given (call handler {})
:status := 200
:body := "OK")))
| null | https://raw.githubusercontent.com/samply/blaze/6441a0a2f988b8784ed555c1d20f634ef2df7e4a/test/blaze/handler/health_test.clj | clojure | (ns blaze.handler.health-test
(:require
[blaze.handler.health]
[blaze.test-util :as tu :refer [with-system]]
[blaze.test-util.ring :refer [call]]
[clojure.spec.test.alpha :as st]
[clojure.test :as test :refer [deftest]]
[juxt.iota :refer [given]]
[taoensso.timbre :as log]))
(st/instrument)
(log/set-level! :trace)
(test/use-fixtures :each tu/fixture)
(def system
{:blaze.handler/health {}})
(deftest handler-test
(with-system [{handler :blaze.handler/health} system]
(given (call handler {})
:status := 200
:body := "OK")))
| |
3899cb3b996c213ac7ce70eee28a7c599c95a87cb052b72f5f0552be7ad8aead | 3b/3bgl-misc | textures.lisp | (in-package 3bgl-sg2)
(defclass sampler ()
((sampler :reader sampler :initarg :sampler)
(spec :reader spec :initarg :spec)))
(defclass texture ()
((texture :reader texture :initarg :texture)
(source :reader source :initarg :source)
(target :reader target :initarg :target :initform :texture-2d)))
(defclass handle ()
((texture :reader texture :initarg :texture)
(sampler :reader sampler :initarg :sampler)
(handle :reader handle :initarg :handle)
(resident :accessor resident :initform nil)))
(defun get-handle (texture sampler &key resident)
(let* ((tx (get-texture texture))
(s (get-sampler sampler))
(h (or (gethash (list tx s) (handles *resource-manager*))
(setf (gethash (list tx s) (handles *resource-manager*))
(make-instance
'handle
:texture tx :sampler s
:handle
(if s
(%gl:get-texture-sampler-handle-arb (texture tx)
(sampler s))
(%gl:get-texture-handle-arb (texture tx))))))))
(when (and resident (not (resident h)))
(%gl:make-texture-handle-resident-arb (handle h))
(setf (resident h) t))
h))
(defun get-sampler (name
&key
(min-filter :linear-mipmap-linear)
(mag-filter :linear)
(max-anisotropy 1.0)
(min-lod -1000)
(max-lod 1000)
;;(swizzle-r :red)
;;(swizzle-g :green)
;;(swizzle-b :blue)
;;(swizzle-a :alpha)
(wrap-s :repeat)
(wrap-t :repeat)
(wrap-r :repeat)
(border-color #(0.0 0.0 0.0 0.0))
(compare-mode :none)
(compare-func :lequal))
(when (typep name '(or sampler null))
(return-from get-sampler name))
(when (gethash name (samplers *resource-manager*))
(return-from get-sampler (gethash name (samplers *resource-manager*))))
(macrolet ((floats (&rest vars)
`(progn
,@(loop for v in vars
collect `(setf ,v (coerce ,v 'single-float))))))
(setf border-color (map 'vector (lambda (x) (coerce x 'single-float))
border-color))
(floats max-anisotropy min-lod max-lod)
(assert (member border-color '(#(0 0 0 0) #(0 0 0 1)
#(1 1 1 0) #(1 1 1 1))
:test 'equalp))
(let ((spec (list :min-filter min-filter
:mag-filter mag-filter
:max-anisotropy max-anisotropy
:min-lod min-lod
:max-lod max-lod
;;:swizzle-r swizzle-r
;;:swizzle-g swizzle-g
;;:swizzle-b swizzle-b
;;:swizzle-a swizzle-a
:wrap-s wrap-s
:wrap-t wrap-t
:wrap-r wrap-r
:border-color border-color
:compare-mode compare-mode
:compare-func compare-func)))
(flet ((make-sampler ()
(let ((s (gl:create-sampler)))
(gl:sampler-parameter s :texture-min-filter min-filter)
(gl:sampler-parameter s :texture-mag-filter mag-filter)
(gl:sampler-parameter s :texture-max-anisotropy-ext
max-anisotropy)
(gl:sampler-parameter s :texture-min-lod min-lod)
(gl:sampler-parameter s :texture-max-lod max-lod)
;;(gl:sampler-parameter s :texture-swizzle-r swizzle-r)
;;(gl:sampler-parameter s :texture-swizzle-g swizzle-g)
;;(gl:sampler-parameter s :texture-swizzle-b swizzle-b)
;;(gl:sampler-parameter s :texture-swizzle-a swizzle-a)
(gl:sampler-parameter s :texture-wrap-s wrap-s)
(gl:sampler-parameter s :texture-wrap-t wrap-t)
(gl:sampler-parameter s :texture-wrap-r wrap-r)
(gl:sampler-parameter s :texture-border-color border-color)
(gl:sampler-parameter s :texture-compare-mode compare-mode)
(gl:sampler-parameter s :texture-compare-func compare-func)
(gl:sampler-parameter s :texture-cube-map-seamless t)
(make-instance 'sampler :sampler s :spec spec))))
(setf (gethash name (samplers *resource-manager*))
(make-sampler))))))
(defmethod load-texture ((type (eql :builtin)) name &key)
;; todo: default textures
(cerror "continue" "builtin textures not done yet"))
(defparameter *compress-loaded-textures* nil)
;; fixme: SRGB as a global parameter like this isn't quite right, need
;; to include it in the identifiert used for reusing previously loaded
;; textures
(defparameter *load-textures-as-srgb* nil)
(defun get-internal-format (channels bytes)
(cond
no compression or srgb for 16 - bit
(ecase channels (1 :r16) (2 :rg16) (3 :rgb16) (4 :rgb16)))
((and *compress-loaded-textures* *load-textures-as-srgb*)
(ecase channels
(1 :compressed-red) ;; no srgb for r,rg
(2 :compressed-rg)
(3 :compressed-srgb)
(4 :compressed-srgb-alpha)))
(*load-textures-as-srgb*
(ecase channels
(1 :red) (2 :rg) (3 :srgb) (4 :srgb-alpha)))
(t (ecase channels (1 :r8) (2 :rg8) (3 :rgb8) (4 :rgb8)))))
(defparameter *internal-formats*
todo : support signed / un - normalized formats as well ?
#2a((nil nil nil)
technically grey in png files
(nil :rg8 :rg16) ;; grey+alpha
(nil :rgb8 :rgb16)
(nil :rgba8 :rgba16)))
(defparameter *pngload-colortypes*
'(:greyscale 1 :grayscale 1
:greyscale-alpha 2 :grayscale-alpha 2
:truecolor 3 :truecolour 3
:truecolor-alpha 4 :truecolour-alpha 4))
(defparameter *layouts*
(alexandria:plist-hash-table
index by layer # (= + x -x + y -y + z -z ) , = column , row
'(:horizontal #((2 1) (0 1) (1 0) (1 2) (1 3) (1 1))
:vertical #((2 1) (0 1) (1 0) (1 2) (1 1) (1 3)))))
(defun flip-region-x (d x1 y1 w h stride)
(declare (type (simple-array (unsigned-byte 8) (*)) d))
(loop for y from y1 below (+ y1 h)
for row = (* y stride)
do (loop with w/2 = (/ w 2)
with end = (+ x1 row w)
with start = (+ x1 row)
for x below w/2
do (rotatef (aref d (+ x start )) (aref d (- end x 1))))))
(defun flip-region-y (d x1 y1 w h stride)
(declare (type (simple-array (unsigned-byte 8) (*)) d))
(loop with h/2 = (/ h 2)
for y below h/2
for r1 = (+ x1 (* (+ y1 y) stride))
for r2 = (+ x1 (* (+ y1 (- h y 1)) stride))
do (rotatef (subseq d r1 (+ r1 w))
(subseq d r2 (+ r2 w)))))
(defvar *cube-faces* '(:texture-cube-map-positive-x
:texture-cube-map-negative-x
:texture-cube-map-positive-y
:texture-cube-map-negative-y
:texture-cube-map-positive-z
:texture-cube-map-negative-z))
(defun map-cube-faces (data fun width height pixel-bytes
&key (layout :guess) data-is-static)
(when (eq layout :guess)
(unless (or (= (* width 3/4) height)
(= (* width 4/3) height))
(cerror "continue"
"failed to guess orientation of cube map, size = ~s x ~s?"
width height))
(if (>= height width)
(setf layout :vertical)
(setf layout :horizontal)))
(flet ((body (p)
(loop
with cw = (floor
(if (eq layout :vertical) (/ width 3) (/ width 4)))
with ch = (floor
(if (eq layout :vertical) (/ height 4) (/ height 3)))
;; hack to make it load something reasonable if it gets default
;; (square) texture
with wh = (min cw ch)
with stride-bytes = (* pixel-bytes width)
for layer in *cube-faces*
for (i j) across (gethash layout *layouts*)
for x = (* i cw)
for y = (* j ch)
when (eq layer :texture-cube-map-negative-z)
do (flip-region-y p x y wh wh stride-bytes)
(flip-region-x p x y wh wh stride-bytes)
do (funcall fun layer
(cffi:inc-pointer
(static-vectors:static-vector-pointer p)
(+ (* pixel-bytes x) (* stride-bytes y)))
wh wh width))))
(if data-is-static
(body data)
;; copy to foreign memory once so we don't copy whole thing for
;; each face in tex-image-2d
(static-vectors:with-static-vector (p (length data)
:element-type
'(unsigned-byte 8)
:initial-contents data)
(body p)))))
(defun load-texture-pngload (name &key cube)
(pngload:with-png-in-static-vector (png name :flip-y t)
(when png
(let* ((tex (gl:create-texture (if cube :texture-cube-map :texture-2d)))
(w (pngload:width png))
(h (pngload:height png))
(channels (/ (array-total-size (pngload:data png))
w h))
(bytes (ceiling (/ (pngload:bit-depth png) 8)))
(internal-format (aref *internal-formats* channels bytes)))
(declare (ignorable internal-format))
#++(progn
(%gl:texture-storage-2d tex
(floor (max (log w 2) (log h 2)))
internal-format
w h)
(%gl:texture-sub-image-2d tex 0 0 0 w h
(ecase channels
(1 :red) (2 :rg) (3 :rgb) (4 :rgba))
(ecase bytes
(1 :unsigned-byte)
(2 :unsigned-short))
(static-vectors:static-vector-pointer
(pngload:data png))))
(if cube
(progn
(gl:bind-texture :texture-cube-map tex)
(map-cube-faces
(pngload:data png)
(lambda (face pointer w h pixel-stride)
(gl:pixel-store :unpack-row-length pixel-stride)
(%gl:tex-image-2d face 0
(cffi:foreign-enum-value
'%gl:enum
(get-internal-format channels 1))
w h
0
(ecase channels
(1 :red) (2 :rg) (3 :rgb) (4 :rgba))
(ecase bytes
(1 :unsigned-byte)
(2 :unsigned-short))
pointer))
w h (* channels bytes)
:data-is-static t)
(gl:pixel-store :unpack-row-length 0)
(gl:bind-texture :texture-cube-map 0))
(progn
(gl:bind-texture :texture-2d tex)
(%gl:tex-image-2d :texture-2d 0
(cffi:foreign-enum-value
'%gl:enum
(get-internal-format channels 1))
w h
0
(ecase channels
(1 :red) (2 :rg) (3 :rgb) (4 :rgba))
(ecase bytes
(1 :unsigned-byte)
(2 :unsigned-short))
(static-vectors:static-vector-pointer
(pngload:data png)))
(gl:bind-texture :texture-2d 0)))
(gl:enable :texture-cube-map-seamless)
(gl:generate-texture-mipmap tex)
tex))))
(defun flip (image)
(check-type image (simple-array (unsigned-byte 8) 3))
(locally (declare (type (simple-array (unsigned-byte 8) 3) image)
(optimize speed))
(opticl:with-image-bounds (wy wx c) image
(assert (< wx 65536))
(assert (< wy 65536))
(assert (<= c 4))
(locally (declare (type (unsigned-byte 16) wx wy)
(type (unsigned-byte 3) c))
(loop with stride = (* wx (or c 1))
for y below (floor wy 2)
for y1 = (* y stride)
for y2 = (* (- wy y 1) stride)
do (loop for i below stride
do (rotatef (row-major-aref image (+ y1 i))
(row-major-aref image (+ y2 i)))))))))
(defun equirectangular-to-cube (src-tex dest-tex format w)
(let ((program (get-program
:compute '3bgl-sg2-shaders-common::equirectangular-to-cube)))
(gl:bind-texture :texture-2d src-tex)
(%gl:bind-image-texture 1 dest-tex 0 t 0 :read-write format)
(setf (3bgl-shaders::uniform program "e2c-in") 0)
(setf (3bgl-shaders::uniform program "e2c-out") 1)
(3bgl-shaders::use-program program)
(%gl:dispatch-compute (floor w 8) (floor w 8) 6))
(%gl:bind-image-texture 1 0 0 t 0 :read-write format))
(defun load-texture-hdr (name &key cube)
(let ((hdr (3bgl-radiance-hdr::read-hdr-file name)))
(when hdr
(let* ((tex (gl:create-texture (if cube :texture-cube-map :texture-2d)))
(w (3bgl-radiance-hdr::width hdr))
(h (3bgl-radiance-hdr::height hdr)))
;; fixme: figure out how to do this with immutable textures...
(cond
assume 3/4 or 4/3 aspect ratio is cube cross
((and cube (or (= w (* 4/3 h))
(= h (* 4/3 w))))
(gl:bind-texture :texture-cube-map tex)
(3bgl-radiance-hdr::map-cube-faces
hdr
(lambda (face pointer w h pixel-stride)
(gl:pixel-store :unpack-row-length pixel-stride)
(gl:tex-image-2d face 0
:rgb9-e5
w h
0
:rgb
:unsigned-int-5-9-9-9-rev
pointer)))
(gl:pixel-store :unpack-row-length 0)
(gl:bind-texture :texture-cube-map 0))
(cube
;; for any other aspect ratio load it as equirectangular and
;; convert to cube manually
(let ((temp-tex (gl:create-texture :texture-2d))
;; fixme: decide how big cube map should be?
( for now , just using width / 4 )
(cw (/ w 4)))
make cube faces a multiple of 8 so we can use 8x8
;; compute shader more easily
(setf cw (* 8 (ceiling cw 8)))
(unwind-protect
(progn
(gl:bind-texture :texture-2d temp-tex)
(gl:tex-image-2d :texture-2d 0
:rgb9-e5
w h
0
:rgb
:unsigned-int-5-9-9-9-rev
(3bgl-radiance-hdr::data hdr))
(gl:generate-texture-mipmap temp-tex)
(%gl:texture-storage-2d tex
(max 1 (floor (log cw 2)))
:rgba16f
cw cw)
(equirectangular-to-cube temp-tex tex :rgba16f cw))
(gl:delete-texture temp-tex))
(gl:bind-texture :texture-2d 0)))
(t
(gl:bind-texture :texture-2d tex)
(gl:tex-image-2d :texture-2d 0
:rgb9-e5
w h
0
:rgb
:unsigned-int-5-9-9-9-rev
(3bgl-radiance-hdr::data hdr))
(gl:bind-texture :texture-2d 0)))
(gl:enable :texture-cube-map-seamless)
(gl:generate-texture-mipmap tex)
tex))))
(defun load-texture-opticl (name)
(let ((img (opticl:read-image-file name)))
(when img
(let* ((tex (gl:create-texture :texture-2d))
(w (array-dimension img 1))
(h (array-dimension img 0))
(channels (array-dimension img 2))
todo : 16bit images ?
(internal-format (get-internal-format channels bytes))
(ats (array-total-size img)))
(%gl:texture-storage-2d tex
(floor (max (log w 2) (log h 2)))
internal-format
w h)
(static-vectors:with-static-vector (s ats)
(check-type img (simple-array (unsigned-byte 8) 3))
(locally (declare (type (simple-array (unsigned-byte 8) 3) img)
(optimize speed))
(flip img)
(loop for i below ats
do (setf (aref s i) (row-major-aref img i))))
(%gl:texture-sub-image-2d tex 0 0 0 w h
(ecase channels
(1 :red) (2 :rg) (3 :rgb) (4 :rgba))
(ecase bytes
(1 :unsigned-byte)
(2 :unsigned-short))
(static-vectors:static-vector-pointer s)))
(gl:generate-texture-mipmap tex)
tex))))
(defun load-texture-file (name &key &allow-other-keys)
;; todo: support more than :texture-2d
(let* ((f (probe-file (merge-pathnames name))))
(when f
(cond
((alexandria:ends-with-subseq ".png" (namestring f) :test 'char-equal)
(load-texture-pngload f))
((alexandria:ends-with-subseq ".hdr" (namestring f) :test 'char-equal)
(load-texture-hdr f))
(t
(load-texture-opticl f))))))
(defun load-cube-texture-file (name &key &allow-other-keys)
(let* ((f (probe-file (merge-pathnames name)))
(ns (namestring f)))
(when f
(cond
((alexandria:ends-with-subseq ".png" ns :test 'char-equal)
(load-texture-pngload f :cube t))
((alexandria:ends-with-subseq ".hdr" ns :test 'char-equal)
(load-texture-hdr f :cube t))
(t
(error "cubemap loading not implemented for opticl yet~%(loading ~s)"
name))))))
(defmethod load-texture ((type (eql :file)) name &key target)
(if (eql target :texture-cube-map)
(load-cube-texture-file name :target target)
(load-texture-file name :target target)))
#++ ;; todo
(defmethod load-texture ((type (eql :stream)) name &key)
(load-texture-file name))
#++
(defparameter *texture-load-queue*
#+sbcl (sb-concurrency:make-queue :name "texture-load-queue")
#-sbcl nil)
#++
(defun enqueue-texture-load (target type name)
#+sbcl (sb-concurrency:enqueue (list :texture target type name) *texture-load-queue*)
#-sbcl (push (list :texture target type name) *texture-load-queue*))
(defun get-texture (name &key (type :file) (target :texture-2d))
(when (typep name '(or texture null))
(return-from get-texture name))
;; todo: load files on another thread, return a debug texture until
;; actually loaded
(let ((s (list type name target)))
(or (gethash s (textures *resource-manager*))
(let ((tx (load-texture type name :target target)))
(when tx
(setf (gethash s (textures *resource-manager*))
(make-instance 'texture
:texture (if (typep tx 'texture)
(texture tx)
tx)
:target target
:source s)))))))
(defun reset-texture (tex)
(when (texture tex)
(gl:delete-texture (shiftf (slot-value tex 'texture) nil))))
(defun reset-sampler (sampler)
(when (sampler sampler)
(gl:delete-sampler (shiftf (slot-value sampler 'sampler) nil))))
(defun reset-handle (handle)
(when (and (resident handle) (handle handle))
(when (%gl:is-texture-handle-resident-arb (handle handle))
(%gl:make-texture-handle-non-resident-arb
(shiftf (slot-value handle 'handle) nil)))))
| null | https://raw.githubusercontent.com/3b/3bgl-misc/e3bf2781d603feb6b44e5c4ec20f06225648ffd9/scenegraph2/textures.lisp | lisp | (swizzle-r :red)
(swizzle-g :green)
(swizzle-b :blue)
(swizzle-a :alpha)
:swizzle-r swizzle-r
:swizzle-g swizzle-g
:swizzle-b swizzle-b
:swizzle-a swizzle-a
(gl:sampler-parameter s :texture-swizzle-r swizzle-r)
(gl:sampler-parameter s :texture-swizzle-g swizzle-g)
(gl:sampler-parameter s :texture-swizzle-b swizzle-b)
(gl:sampler-parameter s :texture-swizzle-a swizzle-a)
todo: default textures
fixme: SRGB as a global parameter like this isn't quite right, need
to include it in the identifiert used for reusing previously loaded
textures
no srgb for r,rg
grey+alpha
hack to make it load something reasonable if it gets default
(square) texture
copy to foreign memory once so we don't copy whole thing for
each face in tex-image-2d
fixme: figure out how to do this with immutable textures...
for any other aspect ratio load it as equirectangular and
convert to cube manually
fixme: decide how big cube map should be?
compute shader more easily
todo: support more than :texture-2d
todo
todo: load files on another thread, return a debug texture until
actually loaded | (in-package 3bgl-sg2)
(defclass sampler ()
((sampler :reader sampler :initarg :sampler)
(spec :reader spec :initarg :spec)))
(defclass texture ()
((texture :reader texture :initarg :texture)
(source :reader source :initarg :source)
(target :reader target :initarg :target :initform :texture-2d)))
(defclass handle ()
((texture :reader texture :initarg :texture)
(sampler :reader sampler :initarg :sampler)
(handle :reader handle :initarg :handle)
(resident :accessor resident :initform nil)))
(defun get-handle (texture sampler &key resident)
(let* ((tx (get-texture texture))
(s (get-sampler sampler))
(h (or (gethash (list tx s) (handles *resource-manager*))
(setf (gethash (list tx s) (handles *resource-manager*))
(make-instance
'handle
:texture tx :sampler s
:handle
(if s
(%gl:get-texture-sampler-handle-arb (texture tx)
(sampler s))
(%gl:get-texture-handle-arb (texture tx))))))))
(when (and resident (not (resident h)))
(%gl:make-texture-handle-resident-arb (handle h))
(setf (resident h) t))
h))
(defun get-sampler (name
&key
(min-filter :linear-mipmap-linear)
(mag-filter :linear)
(max-anisotropy 1.0)
(min-lod -1000)
(max-lod 1000)
(wrap-s :repeat)
(wrap-t :repeat)
(wrap-r :repeat)
(border-color #(0.0 0.0 0.0 0.0))
(compare-mode :none)
(compare-func :lequal))
(when (typep name '(or sampler null))
(return-from get-sampler name))
(when (gethash name (samplers *resource-manager*))
(return-from get-sampler (gethash name (samplers *resource-manager*))))
(macrolet ((floats (&rest vars)
`(progn
,@(loop for v in vars
collect `(setf ,v (coerce ,v 'single-float))))))
(setf border-color (map 'vector (lambda (x) (coerce x 'single-float))
border-color))
(floats max-anisotropy min-lod max-lod)
(assert (member border-color '(#(0 0 0 0) #(0 0 0 1)
#(1 1 1 0) #(1 1 1 1))
:test 'equalp))
(let ((spec (list :min-filter min-filter
:mag-filter mag-filter
:max-anisotropy max-anisotropy
:min-lod min-lod
:max-lod max-lod
:wrap-s wrap-s
:wrap-t wrap-t
:wrap-r wrap-r
:border-color border-color
:compare-mode compare-mode
:compare-func compare-func)))
(flet ((make-sampler ()
(let ((s (gl:create-sampler)))
(gl:sampler-parameter s :texture-min-filter min-filter)
(gl:sampler-parameter s :texture-mag-filter mag-filter)
(gl:sampler-parameter s :texture-max-anisotropy-ext
max-anisotropy)
(gl:sampler-parameter s :texture-min-lod min-lod)
(gl:sampler-parameter s :texture-max-lod max-lod)
(gl:sampler-parameter s :texture-wrap-s wrap-s)
(gl:sampler-parameter s :texture-wrap-t wrap-t)
(gl:sampler-parameter s :texture-wrap-r wrap-r)
(gl:sampler-parameter s :texture-border-color border-color)
(gl:sampler-parameter s :texture-compare-mode compare-mode)
(gl:sampler-parameter s :texture-compare-func compare-func)
(gl:sampler-parameter s :texture-cube-map-seamless t)
(make-instance 'sampler :sampler s :spec spec))))
(setf (gethash name (samplers *resource-manager*))
(make-sampler))))))
(defmethod load-texture ((type (eql :builtin)) name &key)
(cerror "continue" "builtin textures not done yet"))
(defparameter *compress-loaded-textures* nil)
(defparameter *load-textures-as-srgb* nil)
(defun get-internal-format (channels bytes)
(cond
no compression or srgb for 16 - bit
(ecase channels (1 :r16) (2 :rg16) (3 :rgb16) (4 :rgb16)))
((and *compress-loaded-textures* *load-textures-as-srgb*)
(ecase channels
(2 :compressed-rg)
(3 :compressed-srgb)
(4 :compressed-srgb-alpha)))
(*load-textures-as-srgb*
(ecase channels
(1 :red) (2 :rg) (3 :srgb) (4 :srgb-alpha)))
(t (ecase channels (1 :r8) (2 :rg8) (3 :rgb8) (4 :rgb8)))))
(defparameter *internal-formats*
todo : support signed / un - normalized formats as well ?
#2a((nil nil nil)
technically grey in png files
(nil :rgb8 :rgb16)
(nil :rgba8 :rgba16)))
(defparameter *pngload-colortypes*
'(:greyscale 1 :grayscale 1
:greyscale-alpha 2 :grayscale-alpha 2
:truecolor 3 :truecolour 3
:truecolor-alpha 4 :truecolour-alpha 4))
(defparameter *layouts*
(alexandria:plist-hash-table
index by layer # (= + x -x + y -y + z -z ) , = column , row
'(:horizontal #((2 1) (0 1) (1 0) (1 2) (1 3) (1 1))
:vertical #((2 1) (0 1) (1 0) (1 2) (1 1) (1 3)))))
(defun flip-region-x (d x1 y1 w h stride)
(declare (type (simple-array (unsigned-byte 8) (*)) d))
(loop for y from y1 below (+ y1 h)
for row = (* y stride)
do (loop with w/2 = (/ w 2)
with end = (+ x1 row w)
with start = (+ x1 row)
for x below w/2
do (rotatef (aref d (+ x start )) (aref d (- end x 1))))))
(defun flip-region-y (d x1 y1 w h stride)
(declare (type (simple-array (unsigned-byte 8) (*)) d))
(loop with h/2 = (/ h 2)
for y below h/2
for r1 = (+ x1 (* (+ y1 y) stride))
for r2 = (+ x1 (* (+ y1 (- h y 1)) stride))
do (rotatef (subseq d r1 (+ r1 w))
(subseq d r2 (+ r2 w)))))
(defvar *cube-faces* '(:texture-cube-map-positive-x
:texture-cube-map-negative-x
:texture-cube-map-positive-y
:texture-cube-map-negative-y
:texture-cube-map-positive-z
:texture-cube-map-negative-z))
(defun map-cube-faces (data fun width height pixel-bytes
&key (layout :guess) data-is-static)
(when (eq layout :guess)
(unless (or (= (* width 3/4) height)
(= (* width 4/3) height))
(cerror "continue"
"failed to guess orientation of cube map, size = ~s x ~s?"
width height))
(if (>= height width)
(setf layout :vertical)
(setf layout :horizontal)))
(flet ((body (p)
(loop
with cw = (floor
(if (eq layout :vertical) (/ width 3) (/ width 4)))
with ch = (floor
(if (eq layout :vertical) (/ height 4) (/ height 3)))
with wh = (min cw ch)
with stride-bytes = (* pixel-bytes width)
for layer in *cube-faces*
for (i j) across (gethash layout *layouts*)
for x = (* i cw)
for y = (* j ch)
when (eq layer :texture-cube-map-negative-z)
do (flip-region-y p x y wh wh stride-bytes)
(flip-region-x p x y wh wh stride-bytes)
do (funcall fun layer
(cffi:inc-pointer
(static-vectors:static-vector-pointer p)
(+ (* pixel-bytes x) (* stride-bytes y)))
wh wh width))))
(if data-is-static
(body data)
(static-vectors:with-static-vector (p (length data)
:element-type
'(unsigned-byte 8)
:initial-contents data)
(body p)))))
(defun load-texture-pngload (name &key cube)
(pngload:with-png-in-static-vector (png name :flip-y t)
(when png
(let* ((tex (gl:create-texture (if cube :texture-cube-map :texture-2d)))
(w (pngload:width png))
(h (pngload:height png))
(channels (/ (array-total-size (pngload:data png))
w h))
(bytes (ceiling (/ (pngload:bit-depth png) 8)))
(internal-format (aref *internal-formats* channels bytes)))
(declare (ignorable internal-format))
#++(progn
(%gl:texture-storage-2d tex
(floor (max (log w 2) (log h 2)))
internal-format
w h)
(%gl:texture-sub-image-2d tex 0 0 0 w h
(ecase channels
(1 :red) (2 :rg) (3 :rgb) (4 :rgba))
(ecase bytes
(1 :unsigned-byte)
(2 :unsigned-short))
(static-vectors:static-vector-pointer
(pngload:data png))))
(if cube
(progn
(gl:bind-texture :texture-cube-map tex)
(map-cube-faces
(pngload:data png)
(lambda (face pointer w h pixel-stride)
(gl:pixel-store :unpack-row-length pixel-stride)
(%gl:tex-image-2d face 0
(cffi:foreign-enum-value
'%gl:enum
(get-internal-format channels 1))
w h
0
(ecase channels
(1 :red) (2 :rg) (3 :rgb) (4 :rgba))
(ecase bytes
(1 :unsigned-byte)
(2 :unsigned-short))
pointer))
w h (* channels bytes)
:data-is-static t)
(gl:pixel-store :unpack-row-length 0)
(gl:bind-texture :texture-cube-map 0))
(progn
(gl:bind-texture :texture-2d tex)
(%gl:tex-image-2d :texture-2d 0
(cffi:foreign-enum-value
'%gl:enum
(get-internal-format channels 1))
w h
0
(ecase channels
(1 :red) (2 :rg) (3 :rgb) (4 :rgba))
(ecase bytes
(1 :unsigned-byte)
(2 :unsigned-short))
(static-vectors:static-vector-pointer
(pngload:data png)))
(gl:bind-texture :texture-2d 0)))
(gl:enable :texture-cube-map-seamless)
(gl:generate-texture-mipmap tex)
tex))))
(defun flip (image)
(check-type image (simple-array (unsigned-byte 8) 3))
(locally (declare (type (simple-array (unsigned-byte 8) 3) image)
(optimize speed))
(opticl:with-image-bounds (wy wx c) image
(assert (< wx 65536))
(assert (< wy 65536))
(assert (<= c 4))
(locally (declare (type (unsigned-byte 16) wx wy)
(type (unsigned-byte 3) c))
(loop with stride = (* wx (or c 1))
for y below (floor wy 2)
for y1 = (* y stride)
for y2 = (* (- wy y 1) stride)
do (loop for i below stride
do (rotatef (row-major-aref image (+ y1 i))
(row-major-aref image (+ y2 i)))))))))
(defun equirectangular-to-cube (src-tex dest-tex format w)
(let ((program (get-program
:compute '3bgl-sg2-shaders-common::equirectangular-to-cube)))
(gl:bind-texture :texture-2d src-tex)
(%gl:bind-image-texture 1 dest-tex 0 t 0 :read-write format)
(setf (3bgl-shaders::uniform program "e2c-in") 0)
(setf (3bgl-shaders::uniform program "e2c-out") 1)
(3bgl-shaders::use-program program)
(%gl:dispatch-compute (floor w 8) (floor w 8) 6))
(%gl:bind-image-texture 1 0 0 t 0 :read-write format))
(defun load-texture-hdr (name &key cube)
(let ((hdr (3bgl-radiance-hdr::read-hdr-file name)))
(when hdr
(let* ((tex (gl:create-texture (if cube :texture-cube-map :texture-2d)))
(w (3bgl-radiance-hdr::width hdr))
(h (3bgl-radiance-hdr::height hdr)))
(cond
assume 3/4 or 4/3 aspect ratio is cube cross
((and cube (or (= w (* 4/3 h))
(= h (* 4/3 w))))
(gl:bind-texture :texture-cube-map tex)
(3bgl-radiance-hdr::map-cube-faces
hdr
(lambda (face pointer w h pixel-stride)
(gl:pixel-store :unpack-row-length pixel-stride)
(gl:tex-image-2d face 0
:rgb9-e5
w h
0
:rgb
:unsigned-int-5-9-9-9-rev
pointer)))
(gl:pixel-store :unpack-row-length 0)
(gl:bind-texture :texture-cube-map 0))
(cube
(let ((temp-tex (gl:create-texture :texture-2d))
( for now , just using width / 4 )
(cw (/ w 4)))
make cube faces a multiple of 8 so we can use 8x8
(setf cw (* 8 (ceiling cw 8)))
(unwind-protect
(progn
(gl:bind-texture :texture-2d temp-tex)
(gl:tex-image-2d :texture-2d 0
:rgb9-e5
w h
0
:rgb
:unsigned-int-5-9-9-9-rev
(3bgl-radiance-hdr::data hdr))
(gl:generate-texture-mipmap temp-tex)
(%gl:texture-storage-2d tex
(max 1 (floor (log cw 2)))
:rgba16f
cw cw)
(equirectangular-to-cube temp-tex tex :rgba16f cw))
(gl:delete-texture temp-tex))
(gl:bind-texture :texture-2d 0)))
(t
(gl:bind-texture :texture-2d tex)
(gl:tex-image-2d :texture-2d 0
:rgb9-e5
w h
0
:rgb
:unsigned-int-5-9-9-9-rev
(3bgl-radiance-hdr::data hdr))
(gl:bind-texture :texture-2d 0)))
(gl:enable :texture-cube-map-seamless)
(gl:generate-texture-mipmap tex)
tex))))
(defun load-texture-opticl (name)
  "Load image file NAME with opticl into a GL texture, returning the
texture id, or NIL when the file cannot be read.  Only 8-bit 3d arrays
(h x w x channels) are currently supported (enforced by CHECK-TYPE)."
  (let ((img (opticl:read-image-file name)))
    (when img
      (let* ((tex (gl:create-texture :texture-2d))
             (w (array-dimension img 1))
             (h (array-dimension img 0))
             (channels (array-dimension img 2))
             ;; todo: 16bit images?
             ;; NOTE(review): the original binding for BYTES was lost in
             ;; transit; 1 matches the (unsigned-byte 8) CHECK-TYPE below.
             ;; Confirm against upstream before supporting 16bit images.
             (bytes 1)
             (internal-format (get-internal-format channels bytes))
             (ats (array-total-size img)))
        (%gl:texture-storage-2d tex
                                (floor (max (log w 2) (log h 2)))
                                internal-format
                                w h)
        (static-vectors:with-static-vector (s ats)
          ;; copy the row-major lisp array into foreign memory, flipped
          ;; vertically for GL's bottom-up row order
          (check-type img (simple-array (unsigned-byte 8) 3))
          (locally (declare (type (simple-array (unsigned-byte 8) 3) img)
                            (optimize speed))
            (flip img)
            (loop for i below ats
                  do (setf (aref s i) (row-major-aref img i))))
          (%gl:texture-sub-image-2d tex 0 0 0 w h
                                    (ecase channels
                                      (1 :red) (2 :rg) (3 :rgb) (4 :rgba))
                                    (ecase bytes
                                      (1 :unsigned-byte)
                                      (2 :unsigned-short))
                                    (static-vectors:static-vector-pointer s)))
        (gl:generate-texture-mipmap tex)
        tex))))
(defun load-texture-file (name &key &allow-other-keys)
  ;; Dispatch on the file extension: .png -> pngload, .hdr -> radiance
  ;; hdr loader, anything else -> opticl.  NIL when NAME doesn't exist.
  (let ((file (probe-file (merge-pathnames name))))
    (when file
      (let ((ns (namestring file)))
        (cond
          ((alexandria:ends-with-subseq ".png" ns :test 'char-equal)
           (load-texture-pngload file))
          ((alexandria:ends-with-subseq ".hdr" ns :test 'char-equal)
           (load-texture-hdr file))
          (t
           (load-texture-opticl file)))))))
(defun load-cube-texture-file (name &key &allow-other-keys)
  "Load NAME as a cube-map texture, dispatching on file extension.
Returns NIL when the file does not exist; signals an error for formats
that have no cube-map loader yet (opticl)."
  (let ((f (probe-file (merge-pathnames name))))
    (when f
      ;; Compute the namestring only after F is known to be non-NIL; the
      ;; previous version called NAMESTRING before the check and errored
      ;; on missing files instead of returning NIL.
      (let ((ns (namestring f)))
        (cond
          ((alexandria:ends-with-subseq ".png" ns :test 'char-equal)
           (load-texture-pngload f :cube t))
          ((alexandria:ends-with-subseq ".hdr" ns :test 'char-equal)
           (load-texture-hdr f :cube t))
          (t
           (error "cubemap loading not implemented for opticl yet~%(loading ~s)"
                  name)))))))
;; Texture loaders keyed by source TYPE (:file / :stream).
(defmethod load-texture ((type (eql :file)) name &key target)
  ;; cube-map targets go through the cube loader, everything else 2d
  (if (eql target :texture-cube-map)
      (load-cube-texture-file name :target target)
      (load-texture-file name :target target)))
(defmethod load-texture ((type (eql :stream)) name &key)
  ;; NOTE(review): streams are currently handled like file names -- confirm.
  (load-texture-file name))
;; Disabled code (#++ = read-time suppressed): sketch of an asynchronous
;; texture-load queue.  Only has a real implementation on sbcl, via
;; sb-concurrency; other lisps fall back to a plain list.
#++
(defparameter *texture-load-queue*
  #+sbcl (sb-concurrency:make-queue :name "texture-load-queue")
  #-sbcl nil)
#++
(defun enqueue-texture-load (target type name)
  #+sbcl (sb-concurrency:enqueue (list :texture target type name) *texture-load-queue*)
  #-sbcl (push (list :texture target type name) *texture-load-queue*))
;; Return a (possibly cached) TEXTURE instance for NAME.  NAME may already
;; be a TEXTURE (or NIL), in which case it is returned unchanged.  Loaded
;; textures are cached in *RESOURCE-MANAGER*, keyed by (TYPE NAME TARGET).
(defun get-texture (name &key (type :file) (target :texture-2d))
  (when (typep name '(or texture null))
    (return-from get-texture name))
  (let ((s (list type name target)))
    (or (gethash s (textures *resource-manager*))
        (let ((tx (load-texture type name :target target)))
          (when tx
            ;; wrap the raw GL id (or unwrap an existing TEXTURE) and
            ;; remember the source key so the texture can be reloaded
            (setf (gethash s (textures *resource-manager*))
                  (make-instance 'texture
                                 :texture (if (typep tx 'texture)
                                              (texture tx)
                                              tx)
                                 :target target
                                 :source s)))))))
(defun reset-texture (tex)
  ;; Delete the GL texture owned by TEX (if any) and clear the slot.
  (let ((id (texture tex)))
    (when id
      (setf (slot-value tex 'texture) nil)
      (gl:delete-texture id))))
(defun reset-sampler (sampler)
  ;; Delete the GL sampler owned by SAMPLER (if any) and clear the slot.
  (let ((id (sampler sampler)))
    (when id
      (setf (slot-value sampler 'sampler) nil)
      (gl:delete-sampler id))))
(defun reset-handle (handle)
  ;; Drop residency of a bindless texture handle (ARB_bindless_texture)
  ;; and clear the slot.  Only acts when both the RESIDENT flag and the
  ;; handle itself are set, and GL still reports the handle as resident.
  (when (and (resident handle) (handle handle))
    (when (%gl:is-texture-handle-resident-arb (handle handle))
      (%gl:make-texture-handle-non-resident-arb
       (shiftf (slot-value handle 'handle) nil)))))
|
7a9ef97fd96c10dff90815064b36f4385ad3f9c11907357bba5212205a11bd49 | tezos/tezos-mirror | p2p.ml | (*****************************************************************************)
(*                                                                           *)
(* Open Source License                                                       *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com>     *)
(* Copyright (c) 2019-2022 Nomadic Labs, <contact@nomadic-labs.com>          *)
(*                                                                           *)
(* Permission is hereby granted, free of charge, to any person obtaining a   *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense,  *)
(* and/or sell copies of the Software, and to permit persons to whom the     *)
(* Software is furnished to do so, subject to the following conditions:      *)
(*                                                                           *)
(* The above copyright notice and this permission notice shall be included   *)
(* in all copies or substantial portions of the Software.                    *)
(*                                                                           *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,  *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL   *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING   *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER       *)
(* DEALINGS IN THE SOFTWARE.                                                 *)
(*                                                                           *)
(*****************************************************************************)
module Events = P2p_events.P2p

(* Static parameters of the p2p layer, supplied at creation time. *)
type config = {
  listening_port : P2p_addr.port option;
  listening_addr : P2p_addr.t option;
  advertised_port : P2p_addr.port option;
  (* [discovery_port], [discovery_addr] and [listening_port] must all be
     set for the local discovery worker to start (see
     [may_create_discovery_worker]). *)
  discovery_port : P2p_addr.port option;
  discovery_addr : Ipaddr.V4.t option;
  trusted_points : (P2p_point.Id.t * P2p_peer.Id.t option) list;
  peers_file : string;
  private_mode : bool;
  identity : P2p_identity.t;
  proof_of_work_target : Tezos_crypto.Crypto_box.pow_target;
  trust_discovered_peers : bool;
  reconnection_config : Point_reconnection_config.t;
}
(* Build the IO scheduler; bandwidth limits are configured in kB/s and
   converted here to B/s. *)
let create_scheduler limits =
  let open P2p_limits in
  let in_bytes = Option.map (fun kb -> kb * 1024) in
  P2p_io_scheduler.create
    ~read_buffer_size:limits.read_buffer_size
    ?max_upload_speed:(in_bytes limits.max_upload_speed)
    ?max_download_speed:(in_bytes limits.max_download_speed)
    ?read_queue_size:limits.read_queue_size
    ?write_queue_size:limits.write_queue_size
    ()
(* Build the connection pool (known points/peers, greylists) from the
   relevant subset of [config] and [limits]. *)
let create_connection_pool config limits meta_cfg log triggers =
  let open P2p_limits in
  let pool_cfg =
    {
      P2p_pool.identity = config.identity;
      trusted_points = config.trusted_points;
      peers_file = config.peers_file;
      private_mode = config.private_mode;
      max_known_points = limits.max_known_points;
      max_known_peer_ids = limits.max_known_peer_ids;
      peer_greylist_size = limits.peer_greylist_size;
      ip_greylist_size_in_kilobytes = limits.ip_greylist_size_in_kilobytes;
      ip_greylist_cleanup_delay = limits.ip_greylist_cleanup_delay;
    }
  in
  P2p_pool.create pool_cfg meta_cfg ~log triggers
(* Build the connect-handler configuration from [config] and [limits]
   and create the handler.  [answerer] provides the message handler for
   established connections. *)
let create_connect_handler config limits pool msg_cfg conn_meta_cfg io_sched
    triggers log answerer =
  let open P2p_limits in
  let connect_handler_cfg =
    {
      P2p_connect_handler.identity = config.identity;
      proof_of_work_target = config.proof_of_work_target;
      listening_port = config.listening_port;
      advertised_port = config.advertised_port;
      private_mode = config.private_mode;
      reconnection_config = config.reconnection_config;
      min_connections = limits.min_connections;
      max_connections = limits.max_connections;
      max_incoming_connections = limits.max_incoming_connections;
      connection_timeout = limits.connection_timeout;
      authentication_timeout = limits.authentication_timeout;
      incoming_app_message_queue_size = limits.incoming_app_message_queue_size;
      incoming_message_queue_size = limits.incoming_message_queue_size;
      outgoing_message_queue_size = limits.outgoing_message_queue_size;
      binary_chunks_size = limits.binary_chunks_size;
    }
  in
  P2p_connect_handler.create
    connect_handler_cfg
    pool
    msg_cfg
    conn_meta_cfg
    io_sched
    triggers
    ~log
    ~answerer
(* Start the local-network discovery worker iff a listening port, a
   discovery port and a discovery address are all configured. *)
let may_create_discovery_worker _limits config pool =
  match
    (config.listening_port, config.discovery_port, config.discovery_addr)
  with
  | Some listening_port, Some discovery_port, Some discovery_addr ->
      let worker =
        P2p_discovery.create
          pool
          config.identity.peer_id
          ~listening_port
          ~discovery_port
          ~discovery_addr
          ~trust_discovered_peers:config.trust_discovered_peers
      in
      Some worker
  | _ -> None
(* Start the maintenance worker, unless [maintenance_idle_time] is unset
   in the limits.  Also starts the discovery worker when discovery is
   fully configured (see [may_create_discovery_worker]). *)
let create_maintenance_worker limits pool connect_handler config triggers log =
  let open P2p_limits in
  let open Lwt_syntax in
  match limits.maintenance_idle_time with
  | None ->
      let* () = Events.(emit maintenance_disabled) () in
      return_none
  | Some maintenance_idle_time ->
      let maintenance_config =
        {
          P2p_maintenance.maintenance_idle_time;
          private_mode = config.private_mode;
          min_connections = limits.min_connections;
          max_connections = limits.max_connections;
          expected_connections = limits.expected_connections;
          time_between_looking_for_peers =
            Ptime.Span.of_int_s 5
            (* Empirical value. Enough to observe changes in the network,
               and not too long to discover new peers quickly. *)
            (* TODO: https://gitlab.com/tezos/tezos/-/issues/1655
               Check whether the value is optimal or not through integration tests
            *);
        }
      in
      let discovery = may_create_discovery_worker limits config pool in
      return_some
        (P2p_maintenance.create
           ?discovery
           maintenance_config
           pool
           connect_handler
           triggers
           ~log)
(* Start the welcome (accept) worker iff a listening port is configured. *)
let may_create_welcome_worker config limits connect_handler =
  config.listening_port
  |> Option.map_es (fun port ->
         P2p_welcome.create
           ~backlog:limits.P2p_limits.backlog
           connect_handler
           ?addr:config.listening_addr
           port)
(* An established connection to a peer. *)
type ('msg, 'peer_meta, 'conn_meta) connection =
  ('msg, 'peer_meta, 'conn_meta) P2p_conn.t
(* The real (network-backed) implementation of the p2p layer.  [create]
   wires together the io scheduler, the connection pool, the connect
   handler and the optional maintenance/welcome/discovery workers; the
   other functions are exported through the record of closures in
   [create] below (outside this module).  Several comment blocks in this
   module had lost their delimiters and were restored. *)
module Real = struct
  type ('msg, 'peer_meta, 'conn_meta) net = {
    config : config;
    limits : P2p_limits.t;
    io_sched : P2p_io_scheduler.t;
    pool : ('msg, 'peer_meta, 'conn_meta) P2p_pool.t;
    connect_handler : ('msg, 'peer_meta, 'conn_meta) P2p_connect_handler.t;
    maintenance : ('msg, 'peer_meta, 'conn_meta) P2p_maintenance.t option;
    welcome : P2p_welcome.t option;
    watcher : P2p_connection.P2p_event.t Lwt_watcher.input;
    triggers : P2p_trigger.t;
  }

  let create ~config ~limits meta_cfg msg_cfg conn_meta_cfg =
    let open Lwt_result_syntax in
    let io_sched = create_scheduler limits in
    let watcher = Lwt_watcher.create_input () in
    let log event = Lwt_watcher.notify watcher event in
    let triggers = P2p_trigger.create () in
    let*! pool = create_connection_pool config limits meta_cfg log triggers in
    (* There is a mutual recursion between an answerer and connect_handler,
       for the default answerer. Because of the swap request mechanism, the
       default answerer needs to initiate new connections using the
       [P2p_connect_handler.connect] callback. *)
    let rec answerer =
      lazy
        (if config.private_mode then P2p_protocol.create_private ()
         else
           let connect =
             P2p_connect_handler.connect (Lazy.force connect_handler)
           in
           let proto_conf =
             {
               P2p_protocol.swap_linger = limits.P2p_limits.swap_linger;
               pool;
               log;
               connect;
               latest_accepted_swap = Ptime.epoch;
               latest_successful_swap = Ptime.epoch;
             }
           in
           P2p_protocol.create_default proto_conf)
    and connect_handler =
      lazy
        (create_connect_handler
           config
           limits
           pool
           msg_cfg
           conn_meta_cfg
           io_sched
           triggers
           log
           answerer)
    in
    let connect_handler = Lazy.force connect_handler in
    let*! maintenance =
      create_maintenance_worker limits pool connect_handler config triggers log
    in
    let* welcome = may_create_welcome_worker config limits connect_handler in
    P2p_metrics.collect pool ;
    return
      {
        config;
        limits;
        io_sched;
        pool;
        connect_handler;
        maintenance;
        welcome;
        watcher;
        triggers;
      }

  let peer_id {config; _} = config.identity.peer_id

  (* Trigger one maintenance step; fails when maintenance is disabled. *)
  let maintain {maintenance; _} () =
    let open Lwt_result_syntax in
    match maintenance with
    | Some maintenance ->
        let*! () = P2p_maintenance.maintain maintenance in
        return_unit
    | None -> tzfail P2p_errors.Maintenance_disabled

  (* Start the welcome and maintenance workers, if present. *)
  let activate t () =
    Events.(emit__dont_wait__use_with_care activate_network)
      t.config.identity.peer_id ;
    (match t.welcome with None -> () | Some w -> P2p_welcome.activate w) ;
    match t.maintenance with
    | Some maintenance -> P2p_maintenance.activate maintenance
    | None -> ()

  (* TODO: https://gitlab.com/tezos/tezos/-/issues/4597
     Implement [roll] function. *)
  let roll _net () = Lwt.return_unit

  (* returns when all workers have shut down in the opposite
     creation order. *)
  let shutdown net () =
    let open Lwt_syntax in
    let* () = Events.(emit shutdown_welcome_worker) () in
    let* () = Option.iter_s P2p_welcome.shutdown net.welcome in
    let* () = Events.(emit shutdown_maintenance_worker) () in
    let* () =
      Option.iter_s
        (fun maintenance -> P2p_maintenance.shutdown maintenance)
        net.maintenance
    in
    let* () = Events.(emit shutdown_connection_pool) () in
    let* () = P2p_pool.destroy net.pool in
    let* () = Events.(emit shutdown_connection_handler) () in
    let* () = P2p_connect_handler.destroy net.connect_handler in
    let* () = Events.(emit shutdown_scheduler) () in
    P2p_io_scheduler.shutdown ~timeout:3.0 net.io_sched

  let connections {pool; _} () =
    P2p_pool.Connection.fold pool ~init:[] ~f:(fun _peer_id c acc -> c :: acc)

  let find_connection_by_peer_id {pool; _} peer_id =
    P2p_pool.Connection.find_by_peer_id pool peer_id

  let find_connection_by_point {pool; _} point =
    P2p_pool.Connection.find_by_point pool point

  let disconnect ?wait conn = P2p_conn.disconnect ?wait conn

  let connection_info _net conn = P2p_conn.info conn

  let connection_local_metadata _net conn = P2p_conn.local_metadata conn

  let connection_remote_metadata _net conn = P2p_conn.remote_metadata conn

  let connection_stat _net conn = P2p_conn.stat conn

  let global_stat {connect_handler; _} () =
    P2p_connect_handler.stat connect_handler

  let set_peer_metadata {pool; _} conn meta =
    P2p_pool.Peers.set_peer_metadata pool conn meta

  let get_peer_metadata {pool; _} conn =
    P2p_pool.Peers.get_peer_metadata pool conn

  let connect ?timeout net point =
    P2p_connect_handler.connect ?timeout net.connect_handler point

  (* Read one message from [conn], updating metrics and emitting an
     event on both success and failure. *)
  let recv _net conn =
    let open Lwt_syntax in
    let* msg = P2p_conn.read conn in
    let peer_id = (P2p_conn.info conn).peer_id in
    let* () =
      match msg with
      | Ok _ ->
          (* TODO: https://gitlab.com/tezos/tezos/-/issues/4874
             the counter should be moved to P2p_conn instead *)
          Prometheus.Counter.inc_one P2p_metrics.Messages.user_message_received ;
          Events.(emit message_read) peer_id
      | Error _ ->
          Prometheus.Counter.inc_one
            P2p_metrics.Messages.user_message_received_error ;
          Events.(emit message_read_error) peer_id
    in
    return msg

  (* Wait until some connection has a readable message (or a new
     connection arrives), then read from it; retries on failure. *)
  let rec recv_any net () =
    let open Lwt_syntax in
    let pipes =
      P2p_pool.Connection.fold net.pool ~init:[] ~f:(fun _peer_id conn acc ->
          (let* r = P2p_conn.is_readable conn in
           match r with
           | Ok () -> Lwt.return_some conn
           | Error _ -> Lwt_utils.never_ending ())
          :: acc)
    in
    let new_connection =
      let* () = P2p_trigger.wait_new_connection net.triggers in
      Lwt.return_none
    in
    let* o = Lwt.pick (new_connection :: pipes) in
    match o with
    | None -> recv_any net ()
    | Some conn -> (
        let* r = recv net conn in
        match r with
        | Ok msg -> Lwt.return (conn, msg)
        | Error _ ->
            let* () = Lwt.pause () in
            recv_any net ())

  let send _net conn m =
    let open Lwt_result_syntax in
    let*! r = P2p_conn.write conn m in
    let*! () =
      match r with
      | Ok () ->
          let peer_id = (P2p_conn.info conn).peer_id in
          (* TODO: https://gitlab.com/tezos/tezos/-/issues/4874
             the counter should be moved to P2p_conn instead *)
          Prometheus.Counter.inc_one P2p_metrics.Messages.user_message_sent ;
          Events.(emit message_sent) peer_id
      | Error trace ->
          Events.(emit sending_message_error)
            ((P2p_conn.info conn).peer_id, trace)
    in
    Lwt.return r

  (* Non-blocking send; returns whether the message was queued. *)
  let try_send _net conn v =
    match P2p_conn.write_now conn v with
    | Ok v ->
        (* TODO: https://gitlab.com/tezos/tezos/-/issues/4874
           the counter should be moved to P2p_conn instead *)
        Prometheus.Counter.inc_one P2p_metrics.Messages.user_message_sent ;
        Events.(emit__dont_wait__use_with_care message_trysent)
          (P2p_conn.info conn).peer_id ;
        v
    | Error err ->
        Events.(emit__dont_wait__use_with_care trysending_message_error)
          ((P2p_conn.info conn).peer_id, err) ;
        false

  (* Memoization of broadcast encoded [msg] inside a buffer [buf]. *)
  (* For generalisation purposes, each connection has a `writer` that
     defines a specific encoding for messages. Currently we use the
     same encoding for every connection. It makes this simple
     memoization possible but will need modifications if connections
     have specialised encodings. *)
  let broadcast_encode conn buff msg =
    let open Result_syntax in
    match !buff with
    | None ->
        let* encoded_msg = P2p_conn.encode conn msg in
        buff := Some encoded_msg ;
        return encoded_msg
    | Some em -> return em

  let send_conn ?alt conn buf alt_buf msg =
    let open Result_syntax in
    (* Silently discards Error P2p_errors.Connection_closed in case
       the pipe is closed. Shouldn't happen because
       - no race conditions (no Lwt)
       - the peer state is Running.
       Also ignore if the message is dropped instead of being added
       to the write queue. *)
    (* TODO: https://gitlab.com/tezos/tezos/-/issues/4205
       Ensure sent messages are actually sent. *)
    ignore
    @@ let* encoded_msg =
         match alt with
         | None -> broadcast_encode conn buf msg
         | Some (if_conn, then_msg) ->
             if if_conn conn then broadcast_encode conn alt_buf then_msg
             else broadcast_encode conn buf msg
       in
       Prometheus.Counter.inc_one P2p_metrics.Messages.broadcast_message_sent ;
       P2p_conn.write_encoded_now
         conn
         (P2p_socket.copy_encoded_message encoded_msg)

  (* Send [msg] to every connection in [connections], except those for
     which [except] holds; [alt] optionally substitutes an alternative
     message for selected connections. *)
  let broadcast connections ?except ?alt msg =
    let buf = ref None in
    let alt_buf = ref None in
    let send conn = send_conn ?alt conn buf alt_buf msg in
    P2p_peer.Table.iter
      (fun _peer_id conn ->
        match except with
        | None -> send conn
        | Some f when not (f conn) -> send conn
        | _ -> ())
      connections ;
    Events.(emit__dont_wait__use_with_care broadcast) ()

  let fold_connections {pool; _} ~init ~f =
    P2p_pool.Connection.fold pool ~init ~f

  let iter_connections {pool; _} f =
    P2p_pool.Connection.fold pool ~init:() ~f:(fun gid conn () -> f gid conn)

  let on_new_connection {connect_handler; _} f =
    P2p_connect_handler.on_new_connection connect_handler f

  let negotiated_version _ conn = P2p_conn.negotiated_version conn
end
(* Canned values used by [faked_network] below. *)
module Fake = struct
  (* Identity generated with a zero-difficulty proof-of-work target. *)
  let id = P2p_identity.generate_with_pow_target_0 ()

  (* Statistics of a network that never sends nor receives anything. *)
  let empty_stat =
    {
      P2p_stat.total_sent = 0L;
      total_recv = 0L;
      current_inflow = 0;
      current_outflow = 0;
    }

  (* Connection info for a connection that does not really exist; the
     metadata is supplied by the caller. *)
  let connection_info announced_version faked_metadata =
    {
      P2p_connection.Info.incoming = false;
      peer_id = id.peer_id;
      id_point = (Ipaddr.V6.unspecified, None);
      remote_socket_port = 0;
      announced_version;
      local_metadata = faked_metadata;
      remote_metadata = faked_metadata;
      private_node = false;
    }
end
(* A p2p layer instance, represented as a record of closures so that the
   real implementation (built by [create] from [Real]) and the stub
   implementation ([faked_network]) share the same type. *)
type ('msg, 'peer_meta, 'conn_meta) t = {
  announced_version : Network_version.t;
  peer_id : P2p_peer.Id.t;
  maintain : unit -> unit tzresult Lwt.t;
  roll : unit -> unit Lwt.t;
  shutdown : unit -> unit Lwt.t;
  connections : unit -> ('msg, 'peer_meta, 'conn_meta) connection list;
  find_connection_by_peer_id :
    P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection option;
  find_connection_by_point :
    P2p_point.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection option;
  disconnect :
    ?wait:bool -> ('msg, 'peer_meta, 'conn_meta) connection -> unit Lwt.t;
  connection_info :
    ('msg, 'peer_meta, 'conn_meta) connection ->
    'conn_meta P2p_connection.Info.t;
  connection_local_metadata :
    ('msg, 'peer_meta, 'conn_meta) connection -> 'conn_meta;
  connection_remote_metadata :
    ('msg, 'peer_meta, 'conn_meta) connection -> 'conn_meta;
  connection_stat : ('msg, 'peer_meta, 'conn_meta) connection -> P2p_stat.t;
  global_stat : unit -> P2p_stat.t;
  get_peer_metadata : P2p_peer.Id.t -> 'peer_meta;
  set_peer_metadata : P2p_peer.Id.t -> 'peer_meta -> unit;
  connect :
    ?timeout:Ptime.span ->
    P2p_point.Id.t ->
    ('msg, 'peer_meta, 'conn_meta) connection tzresult Lwt.t;
  recv : ('msg, 'peer_meta, 'conn_meta) connection -> 'msg tzresult Lwt.t;
  recv_any : unit -> (('msg, 'peer_meta, 'conn_meta) connection * 'msg) Lwt.t;
  send :
    ('msg, 'peer_meta, 'conn_meta) connection -> 'msg -> unit tzresult Lwt.t;
  try_send : ('msg, 'peer_meta, 'conn_meta) connection -> 'msg -> bool;
  (* [pool] and [connect_handler] are [None] for the faked network. *)
  pool : ('msg, 'peer_meta, 'conn_meta) P2p_pool.t option;
  connect_handler : ('msg, 'peer_meta, 'conn_meta) P2p_connect_handler.t option;
  fold_connections :
    'a.
    init:'a ->
    f:(P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection -> 'a -> 'a) ->
    'a;
  iter_connections :
    (P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection -> unit) -> unit;
  on_new_connection :
    (P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection -> unit) -> unit;
  negotiated_version :
    ('msg, 'peer_meta, 'conn_meta) connection -> Network_version.t;
  activate : unit -> unit;
  watcher : P2p_connection.P2p_event.t Lwt_watcher.input;
}
(* [net] is a synonym of [t]. *)
type ('msg, 'peer_meta, 'conn_meta) net = ('msg, 'peer_meta, 'conn_meta) t

let announced_version net = net.announced_version

let pool net = net.pool

let connect_handler net = net.connect_handler
(* Validate user-supplied limits: time spans must be strictly positive,
   counts/sizes must be non-negative, and the binary chunk size is
   validated by [P2p_socket.check_binary_chunks_size]. *)
let check_limits =
  let open Result_syntax in
  let open P2p_limits in
  let positive span name =
    if Ptime.Span.compare span Ptime.Span.zero > 0 then return_unit
    else
      Error_monad.error_with
        "value of option %S cannot be negative or null@."
        name
  in
  let non_negative n name =
    if n >= 0 then return_unit
    else Error_monad.error_with "value of option %S cannot be negative@." name
  in
  fun c ->
    let* () = positive c.authentication_timeout "authentication-timeout" in
    let* () = non_negative c.min_connections "min-connections" in
    let* () = non_negative c.expected_connections "expected-connections" in
    let* () = non_negative c.max_connections "max-connections" in
    let* () =
      non_negative c.max_incoming_connections "max-incoming-connections"
    in
    let* () = non_negative c.read_buffer_size "read-buffer-size" in
    let* () = positive c.swap_linger "swap-linger" in
    let* () =
      match c.binary_chunks_size with
      | None -> return_unit
      | Some size -> P2p_socket.check_binary_chunks_size size
    in
    return_unit
(* Build a full (real) p2p layer after validating [limits], exposing the
   [Real] implementation through the record of closures. *)
let create ~config ~limits peer_cfg conn_cfg msg_cfg =
  let open Lwt_result_syntax in
  let*? () = check_limits limits in
  let* net = Real.create ~config ~limits peer_cfg msg_cfg conn_cfg in
  return
    {
      announced_version =
        Network_version.announced
          ~chain_name:msg_cfg.chain_name
          ~distributed_db_versions:msg_cfg.distributed_db_versions
          ~p2p_versions:P2p_version.supported;
      peer_id = Real.peer_id net;
      maintain = Real.maintain net;
      roll = Real.roll net;
      shutdown = Real.shutdown net;
      connections = Real.connections net;
      find_connection_by_peer_id = Real.find_connection_by_peer_id net;
      find_connection_by_point = Real.find_connection_by_point net;
      disconnect = Real.disconnect;
      connection_info = Real.connection_info net;
      connection_local_metadata = Real.connection_local_metadata net;
      connection_remote_metadata = Real.connection_remote_metadata net;
      connection_stat = Real.connection_stat net;
      global_stat = Real.global_stat net;
      get_peer_metadata = Real.get_peer_metadata net;
      set_peer_metadata = Real.set_peer_metadata net;
      connect = (fun ?timeout -> Real.connect ?timeout net);
      recv = Real.recv net;
      recv_any = Real.recv_any net;
      send = Real.send net;
      try_send = Real.try_send net;
      pool = Some net.pool;
      connect_handler = Some net.connect_handler;
      fold_connections = (fun ~init ~f -> Real.fold_connections net ~init ~f);
      iter_connections = Real.iter_connections net;
      on_new_connection = Real.on_new_connection net;
      negotiated_version = Real.negotiated_version net;
      activate = Real.activate net;
      watcher = net.Real.watcher;
    }
(* Emit the activation event, then activate the underlying network. *)
let activate t =
  Events.(emit__dont_wait__use_with_care activate_layer) () ;
  t.activate ()
(* A p2p layer that is connected to nothing: [connect]/[send] fail,
   [recv]/[recv_any] never resolve, the connection list is empty and
   [pool]/[connect_handler] are [None].  Useful wherever a network-less
   instance of [t] is required. *)
let faked_network (msg_cfg : 'msg P2p_params.message_config) peer_cfg
    faked_metadata =
  let announced_version =
    Network_version.announced
      ~chain_name:msg_cfg.chain_name
      ~distributed_db_versions:msg_cfg.distributed_db_versions
      ~p2p_versions:P2p_version.supported
  in
  {
    announced_version;
    peer_id = Fake.id.peer_id;
    maintain = Lwt_result_syntax.return;
    roll = Lwt.return;
    shutdown = Lwt.return;
    connections = (fun () -> []);
    find_connection_by_peer_id = (fun _ -> None);
    find_connection_by_point = (fun _ -> None);
    disconnect = (fun ?wait:_ _ -> Lwt.return_unit);
    connection_info =
      (fun _ -> Fake.connection_info announced_version faked_metadata);
    connection_local_metadata = (fun _ -> faked_metadata);
    connection_remote_metadata = (fun _ -> faked_metadata);
    connection_stat = (fun _ -> Fake.empty_stat);
    global_stat = (fun () -> Fake.empty_stat);
    get_peer_metadata = (fun _ -> peer_cfg.P2p_params.peer_meta_initial ());
    set_peer_metadata = (fun _ _ -> ());
    connect =
      (fun ?timeout:_ _ ->
        Lwt_result_syntax.tzfail P2p_errors.Connection_refused);
    recv = (fun _ -> Lwt_utils.never_ending ());
    recv_any = (fun () -> Lwt_utils.never_ending ());
    send = (fun _ _ -> Lwt_result_syntax.tzfail P2p_errors.Connection_closed);
    try_send = (fun _ _ -> false);
    fold_connections = (fun ~init ~f:_ -> init);
    iter_connections = (fun _f -> ());
    on_new_connection = (fun _f -> ());
    negotiated_version = (fun _ -> announced_version);
    pool = None;
    connect_handler = None;
    activate = (fun _ -> ());
    watcher = Lwt_watcher.create_input ();
  }
(* The rest of the public API forwards to the closures stored in the [t]
   record, so it works for both the real and the faked network. *)
let peer_id net = net.peer_id

let maintain net = net.maintain ()

let roll net = net.roll ()

let shutdown net = net.shutdown ()

let connections net = net.connections ()

let disconnect net = net.disconnect

let find_connection_by_peer_id net = net.find_connection_by_peer_id

let find_connection_by_point net = net.find_connection_by_point

let connection_info net = net.connection_info

let connection_local_metadata net = net.connection_local_metadata

let connection_remote_metadata net = net.connection_remote_metadata

let connection_stat net = net.connection_stat

let global_stat net = net.global_stat ()

let get_peer_metadata net = net.get_peer_metadata

let set_peer_metadata net = net.set_peer_metadata

let connect net = net.connect

let recv net = net.recv

let recv_any net = net.recv_any ()

let send net = net.send

let try_send net = net.try_send

(* [broadcast] operates directly on a connection table, not on [t]. *)
let broadcast connections ?except ?alt msg =
  Real.broadcast connections ?except ?alt msg

let fold_connections net = net.fold_connections

let iter_connections net = net.iter_connections

let on_new_connection net = net.on_new_connection

(* Greylisting is a no-op on the faked network (no pool). *)
let greylist_addr net addr =
  Option.iter (fun pool -> P2p_pool.greylist_addr pool addr) net.pool

let greylist_peer net peer_id =
  Option.iter (fun pool -> P2p_pool.greylist_peer pool peer_id) net.pool

let watcher net = Lwt_watcher.create_stream net.watcher

let negotiated_version net = net.negotiated_version
module Internal_for_tests = struct
  (* Re-export [broadcast] with an explicit connection-table type, for
     use in tests. *)
  let broadcast_conns (connections : ('a, 'b, 'c) P2p_conn.t P2p_peer.Table.t)
      ?except ?alt msg =
    broadcast connections ?except ?alt msg
end
| null | https://raw.githubusercontent.com/tezos/tezos-mirror/7f2b112463a4c5de2a96aada8d07c0f9e142d4f1/src/lib_p2p/p2p.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
Empirical value. Enough to observe changes in the network,
and not too long to discover new peers quickly.
TODO: /-/issues/1655
Check whether the value is optimal or not through integration tests
TODO: /-/issues/4597
Implement [roll] function.
returns when all workers have shut down in the opposite
creation order.
For generalisation purposes, each connection has a `writer` that
defines a specific encoding for messages. Currently we use the
same encoding for every connection. It makes this simple
memoization possible but will need modifications if connections
have specialised encodings.
Silently discards Error P2p_errors.Connection_closed in case
the pipe is closed. Shouldn't happen because
- no race conditions (no Lwt)
- the peer state is Running.
Also ignore if the message is dropped instead of being added
to the write queue. | Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < >
Copyright ( c ) 2019 - 2022 Nomadic Labs , < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
module Events = P2p_events.P2p
type config = {
listening_port : P2p_addr.port option;
listening_addr : P2p_addr.t option;
advertised_port : P2p_addr.port option;
discovery_port : P2p_addr.port option;
discovery_addr : Ipaddr.V4.t option;
trusted_points : (P2p_point.Id.t * P2p_peer.Id.t option) list;
peers_file : string;
private_mode : bool;
identity : P2p_identity.t;
proof_of_work_target : Tezos_crypto.Crypto_box.pow_target;
trust_discovered_peers : bool;
reconnection_config : Point_reconnection_config.t;
}
let create_scheduler limits =
let open P2p_limits in
let max_upload_speed = Option.map (( * ) 1024) limits.max_upload_speed in
let max_download_speed = Option.map (( * ) 1024) limits.max_download_speed in
P2p_io_scheduler.create
~read_buffer_size:limits.read_buffer_size
?max_upload_speed
?max_download_speed
?read_queue_size:limits.read_queue_size
?write_queue_size:limits.write_queue_size
()
let create_connection_pool config limits meta_cfg log triggers =
let open P2p_limits in
let pool_cfg =
{
P2p_pool.identity = config.identity;
trusted_points = config.trusted_points;
peers_file = config.peers_file;
private_mode = config.private_mode;
max_known_points = limits.max_known_points;
max_known_peer_ids = limits.max_known_peer_ids;
peer_greylist_size = limits.peer_greylist_size;
ip_greylist_size_in_kilobytes = limits.ip_greylist_size_in_kilobytes;
ip_greylist_cleanup_delay = limits.ip_greylist_cleanup_delay;
}
in
P2p_pool.create pool_cfg meta_cfg ~log triggers
let create_connect_handler config limits pool msg_cfg conn_meta_cfg io_sched
triggers log answerer =
let open P2p_limits in
let connect_handler_cfg =
{
P2p_connect_handler.identity = config.identity;
proof_of_work_target = config.proof_of_work_target;
listening_port = config.listening_port;
advertised_port = config.advertised_port;
private_mode = config.private_mode;
reconnection_config = config.reconnection_config;
min_connections = limits.min_connections;
max_connections = limits.max_connections;
max_incoming_connections = limits.max_incoming_connections;
connection_timeout = limits.connection_timeout;
authentication_timeout = limits.authentication_timeout;
incoming_app_message_queue_size = limits.incoming_app_message_queue_size;
incoming_message_queue_size = limits.incoming_message_queue_size;
outgoing_message_queue_size = limits.outgoing_message_queue_size;
binary_chunks_size = limits.binary_chunks_size;
}
in
P2p_connect_handler.create
connect_handler_cfg
pool
msg_cfg
conn_meta_cfg
io_sched
triggers
~log
~answerer
let may_create_discovery_worker _limits config pool =
match
(config.listening_port, config.discovery_port, config.discovery_addr)
with
| Some listening_port, Some discovery_port, Some discovery_addr ->
Some
(P2p_discovery.create
pool
config.identity.peer_id
~listening_port
~discovery_port
~discovery_addr
~trust_discovered_peers:config.trust_discovered_peers)
| _, _, _ -> None
let create_maintenance_worker limits pool connect_handler config triggers log =
let open P2p_limits in
let open Lwt_syntax in
match limits.maintenance_idle_time with
| None ->
let* () = Events.(emit maintenance_disabled) () in
return_none
| Some maintenance_idle_time ->
let maintenance_config =
{
P2p_maintenance.maintenance_idle_time;
private_mode = config.private_mode;
min_connections = limits.min_connections;
max_connections = limits.max_connections;
expected_connections = limits.expected_connections;
time_between_looking_for_peers =
Ptime.Span.of_int_s 5
}
in
let discovery = may_create_discovery_worker limits config pool in
return_some
(P2p_maintenance.create
?discovery
maintenance_config
pool
connect_handler
triggers
~log)
let may_create_welcome_worker config limits connect_handler =
config.listening_port
|> Option.map_es (fun port ->
P2p_welcome.create
~backlog:limits.P2p_limits.backlog
connect_handler
?addr:config.listening_addr
port)
type ('msg, 'peer_meta, 'conn_meta) connection =
('msg, 'peer_meta, 'conn_meta) P2p_conn.t
module Real = struct
type ('msg, 'peer_meta, 'conn_meta) net = {
config : config;
limits : P2p_limits.t;
io_sched : P2p_io_scheduler.t;
pool : ('msg, 'peer_meta, 'conn_meta) P2p_pool.t;
connect_handler : ('msg, 'peer_meta, 'conn_meta) P2p_connect_handler.t;
maintenance : ('msg, 'peer_meta, 'conn_meta) P2p_maintenance.t option;
welcome : P2p_welcome.t option;
watcher : P2p_connection.P2p_event.t Lwt_watcher.input;
triggers : P2p_trigger.t;
}
let create ~config ~limits meta_cfg msg_cfg conn_meta_cfg =
let open Lwt_result_syntax in
let io_sched = create_scheduler limits in
let watcher = Lwt_watcher.create_input () in
let log event = Lwt_watcher.notify watcher event in
let triggers = P2p_trigger.create () in
let*! pool = create_connection_pool config limits meta_cfg log triggers in
    (* There is a mutual recursion between an answerer and connect_handler,
       for the default answerer. Because of the swap request mechanism, the
       default answerer needs to initiate new connections using the
       [P2p_connect_handler.connect] callback. *)
let rec answerer =
lazy
(if config.private_mode then P2p_protocol.create_private ()
else
let connect =
P2p_connect_handler.connect (Lazy.force connect_handler)
in
let proto_conf =
{
P2p_protocol.swap_linger = limits.P2p_limits.swap_linger;
pool;
log;
connect;
latest_accepted_swap = Ptime.epoch;
latest_successful_swap = Ptime.epoch;
}
in
P2p_protocol.create_default proto_conf)
and connect_handler =
lazy
(create_connect_handler
config
limits
pool
msg_cfg
conn_meta_cfg
io_sched
triggers
log
answerer)
in
let connect_handler = Lazy.force connect_handler in
let*! maintenance =
create_maintenance_worker limits pool connect_handler config triggers log
in
let* welcome = may_create_welcome_worker config limits connect_handler in
P2p_metrics.collect pool ;
return
{
config;
limits;
io_sched;
pool;
connect_handler;
maintenance;
welcome;
watcher;
triggers;
}
let peer_id {config; _} = config.identity.peer_id
let maintain {maintenance; _} () =
let open Lwt_result_syntax in
match maintenance with
| Some maintenance ->
let*! () = P2p_maintenance.maintain maintenance in
return_unit
| None -> tzfail P2p_errors.Maintenance_disabled
let activate t () =
Events.(emit__dont_wait__use_with_care activate_network)
t.config.identity.peer_id ;
(match t.welcome with None -> () | Some w -> P2p_welcome.activate w) ;
match t.maintenance with
| Some maintenance -> P2p_maintenance.activate maintenance
| None -> ()
let roll _net () = Lwt.return_unit
let shutdown net () =
let open Lwt_syntax in
let* () = Events.(emit shutdown_welcome_worker) () in
let* () = Option.iter_s P2p_welcome.shutdown net.welcome in
let* () = Events.(emit shutdown_maintenance_worker) () in
let* () =
Option.iter_s
(fun maintenance -> P2p_maintenance.shutdown maintenance)
net.maintenance
in
let* () = Events.(emit shutdown_connection_pool) () in
let* () = P2p_pool.destroy net.pool in
let* () = Events.(emit shutdown_connection_handler) () in
let* () = P2p_connect_handler.destroy net.connect_handler in
let* () = Events.(emit shutdown_scheduler) () in
P2p_io_scheduler.shutdown ~timeout:3.0 net.io_sched
let connections {pool; _} () =
P2p_pool.Connection.fold pool ~init:[] ~f:(fun _peer_id c acc -> c :: acc)
let find_connection_by_peer_id {pool; _} peer_id =
P2p_pool.Connection.find_by_peer_id pool peer_id
let find_connection_by_point {pool; _} point =
P2p_pool.Connection.find_by_point pool point
let disconnect ?wait conn = P2p_conn.disconnect ?wait conn
let connection_info _net conn = P2p_conn.info conn
let connection_local_metadata _net conn = P2p_conn.local_metadata conn
let connection_remote_metadata _net conn = P2p_conn.remote_metadata conn
let connection_stat _net conn = P2p_conn.stat conn
let global_stat {connect_handler; _} () =
P2p_connect_handler.stat connect_handler
let set_peer_metadata {pool; _} conn meta =
P2p_pool.Peers.set_peer_metadata pool conn meta
let get_peer_metadata {pool; _} conn =
P2p_pool.Peers.get_peer_metadata pool conn
let connect ?timeout net point =
P2p_connect_handler.connect ?timeout net.connect_handler point
let recv _net conn =
let open Lwt_syntax in
let* msg = P2p_conn.read conn in
let peer_id = (P2p_conn.info conn).peer_id in
let* () =
match msg with
| Ok _ ->
            (* TODO: /-/issues/4874
               the counter should be moved to P2p_conn instead *)
Prometheus.Counter.inc_one P2p_metrics.Messages.user_message_received ;
Events.(emit message_read) peer_id
| Error _ ->
Prometheus.Counter.inc_one
P2p_metrics.Messages.user_message_received_error ;
Events.(emit message_read_error) peer_id
in
return msg
let rec recv_any net () =
let open Lwt_syntax in
let pipes =
P2p_pool.Connection.fold net.pool ~init:[] ~f:(fun _peer_id conn acc ->
(let* r = P2p_conn.is_readable conn in
match r with
| Ok () -> Lwt.return_some conn
| Error _ -> Lwt_utils.never_ending ())
:: acc)
in
let new_connection =
let* () = P2p_trigger.wait_new_connection net.triggers in
Lwt.return_none
in
let* o = Lwt.pick (new_connection :: pipes) in
match o with
| None -> recv_any net ()
| Some conn -> (
let* r = recv net conn in
match r with
| Ok msg -> Lwt.return (conn, msg)
| Error _ ->
let* () = Lwt.pause () in
recv_any net ())
let send _net conn m =
let open Lwt_result_syntax in
let*! r = P2p_conn.write conn m in
let*! () =
match r with
| Ok () ->
let peer_id = (P2p_conn.info conn).peer_id in
            (* TODO: /-/issues/4874
               the counter should be moved to P2p_conn instead *)
Prometheus.Counter.inc_one P2p_metrics.Messages.user_message_sent ;
Events.(emit message_sent) peer_id
| Error trace ->
Events.(emit sending_message_error)
((P2p_conn.info conn).peer_id, trace)
in
Lwt.return r
let try_send _net conn v =
match P2p_conn.write_now conn v with
| Ok v ->
        (* TODO: /-/issues/4874
           the counter should be moved to P2p_conn instead *)
Prometheus.Counter.inc_one P2p_metrics.Messages.user_message_sent ;
Events.(emit__dont_wait__use_with_care message_trysent)
(P2p_conn.info conn).peer_id ;
v
| Error err ->
Events.(emit__dont_wait__use_with_care trysending_message_error)
((P2p_conn.info conn).peer_id, err) ;
false
  (* Memoization of broadcast encoded [msg] inside a buffer [buf]. *)
let broadcast_encode conn buff msg =
let open Result_syntax in
match !buff with
| None ->
let* encoded_msg = P2p_conn.encode conn msg in
buff := Some encoded_msg ;
return encoded_msg
| Some em -> return em
let send_conn ?alt conn buf alt_buf msg =
let open Result_syntax in
    (* TODO: /-/issues/4205
       Ensure sent messages are actually sent.
    *)
ignore
@@ let* encoded_msg =
match alt with
| None -> broadcast_encode conn buf msg
| Some (if_conn, then_msg) ->
if if_conn conn then broadcast_encode conn alt_buf then_msg
else broadcast_encode conn buf msg
in
Prometheus.Counter.inc_one P2p_metrics.Messages.broadcast_message_sent ;
P2p_conn.write_encoded_now
conn
(P2p_socket.copy_encoded_message encoded_msg)
let broadcast connections ?except ?alt msg =
let buf = ref None in
let alt_buf = ref None in
let send conn = send_conn ?alt conn buf alt_buf msg in
P2p_peer.Table.iter
(fun _peer_id conn ->
match except with
| None -> send conn
| Some f when not (f conn) -> send conn
| _ -> ())
connections ;
Events.(emit__dont_wait__use_with_care broadcast) ()
let fold_connections {pool; _} ~init ~f =
P2p_pool.Connection.fold pool ~init ~f
let iter_connections {pool; _} f =
P2p_pool.Connection.fold pool ~init:() ~f:(fun gid conn () -> f gid conn)
let on_new_connection {connect_handler; _} f =
P2p_connect_handler.on_new_connection connect_handler f
let negotiated_version _ conn = P2p_conn.negotiated_version conn
end
(* Stub values used by [faked_network]: a fresh identity, zeroed stats and a
   synthetic (outgoing, unspecified-address) connection info record. *)
module Fake = struct
  let id = P2p_identity.generate_with_pow_target_0 ()
  let empty_stat =
    {
      P2p_stat.total_sent = 0L;
      total_recv = 0L;
      current_inflow = 0;
      current_outflow = 0;
    }
  let connection_info announced_version faked_metadata =
    {
      P2p_connection.Info.incoming = false;
      peer_id = id.peer_id;
      id_point = (Ipaddr.V6.unspecified, None);
      remote_socket_port = 0;
      announced_version;
      local_metadata = faked_metadata;
      remote_metadata = faked_metadata;
      private_node = false;
    }
end
type ('msg, 'peer_meta, 'conn_meta) t = {
announced_version : Network_version.t;
peer_id : P2p_peer.Id.t;
maintain : unit -> unit tzresult Lwt.t;
roll : unit -> unit Lwt.t;
shutdown : unit -> unit Lwt.t;
connections : unit -> ('msg, 'peer_meta, 'conn_meta) connection list;
find_connection_by_peer_id :
P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection option;
find_connection_by_point :
P2p_point.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection option;
disconnect :
?wait:bool -> ('msg, 'peer_meta, 'conn_meta) connection -> unit Lwt.t;
connection_info :
('msg, 'peer_meta, 'conn_meta) connection ->
'conn_meta P2p_connection.Info.t;
connection_local_metadata :
('msg, 'peer_meta, 'conn_meta) connection -> 'conn_meta;
connection_remote_metadata :
('msg, 'peer_meta, 'conn_meta) connection -> 'conn_meta;
connection_stat : ('msg, 'peer_meta, 'conn_meta) connection -> P2p_stat.t;
global_stat : unit -> P2p_stat.t;
get_peer_metadata : P2p_peer.Id.t -> 'peer_meta;
set_peer_metadata : P2p_peer.Id.t -> 'peer_meta -> unit;
connect :
?timeout:Ptime.span ->
P2p_point.Id.t ->
('msg, 'peer_meta, 'conn_meta) connection tzresult Lwt.t;
recv : ('msg, 'peer_meta, 'conn_meta) connection -> 'msg tzresult Lwt.t;
recv_any : unit -> (('msg, 'peer_meta, 'conn_meta) connection * 'msg) Lwt.t;
send :
('msg, 'peer_meta, 'conn_meta) connection -> 'msg -> unit tzresult Lwt.t;
try_send : ('msg, 'peer_meta, 'conn_meta) connection -> 'msg -> bool;
pool : ('msg, 'peer_meta, 'conn_meta) P2p_pool.t option;
connect_handler : ('msg, 'peer_meta, 'conn_meta) P2p_connect_handler.t option;
fold_connections :
'a.
init:'a ->
f:(P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection -> 'a -> 'a) ->
'a;
iter_connections :
(P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection -> unit) -> unit;
on_new_connection :
(P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection -> unit) -> unit;
negotiated_version :
('msg, 'peer_meta, 'conn_meta) connection -> Network_version.t;
activate : unit -> unit;
watcher : P2p_connection.P2p_event.t Lwt_watcher.input;
}
type ('msg, 'peer_meta, 'conn_meta) net = ('msg, 'peer_meta, 'conn_meta) t
let announced_version net = net.announced_version
let pool net = net.pool
let connect_handler net = net.connect_handler
(* Validate a [P2p_limits.t] before use: time spans must be strictly
   positive, connection/buffer counts must be non-negative, and the optional
   binary chunk size must pass [P2p_socket.check_binary_chunks_size]. *)
let check_limits =
  let open Result_syntax in
  let open P2p_limits in
  (* Error when the time span [v] is non-positive (null or negative). *)
  let fail_1 v orig =
    if not (Ptime.Span.compare v Ptime.Span.zero <= 0) then return_unit
    else
      Error_monad.error_with
        "value of option %S cannot be negative or null@."
        orig
  in
  (* Error when the integer [v] is negative. *)
  let fail_2 v orig =
    if not (v < 0) then return_unit
    else Error_monad.error_with "value of option %S cannot be negative@." orig
  in
  fun c ->
    let* () = fail_1 c.authentication_timeout "authentication-timeout" in
    let* () = fail_2 c.min_connections "min-connections" in
    let* () = fail_2 c.expected_connections "expected-connections" in
    let* () = fail_2 c.max_connections "max-connections" in
    let* () = fail_2 c.max_incoming_connections "max-incoming-connections" in
    let* () = fail_2 c.read_buffer_size "read-buffer-size" in
    let* () = fail_1 c.swap_linger "swap-linger" in
    let* () =
      match c.binary_chunks_size with
      | None -> return_unit
      | Some size -> P2p_socket.check_binary_chunks_size size
    in
    return_unit
(* Build a full p2p layer: validate [limits], start the real implementation
   and wrap every operation of [Real] into the record of closures that forms
   the public [t]. *)
let create ~config ~limits peer_cfg conn_cfg msg_cfg =
  let open Lwt_result_syntax in
  let*? () = check_limits limits in
  let* net = Real.create ~config ~limits peer_cfg msg_cfg conn_cfg in
  return
    {
      announced_version =
        Network_version.announced
          ~chain_name:msg_cfg.chain_name
          ~distributed_db_versions:msg_cfg.distributed_db_versions
          ~p2p_versions:P2p_version.supported;
      peer_id = Real.peer_id net;
      maintain = Real.maintain net;
      roll = Real.roll net;
      shutdown = Real.shutdown net;
      connections = Real.connections net;
      find_connection_by_peer_id = Real.find_connection_by_peer_id net;
      find_connection_by_point = Real.find_connection_by_point net;
      disconnect = Real.disconnect;
      connection_info = Real.connection_info net;
      connection_local_metadata = Real.connection_local_metadata net;
      connection_remote_metadata = Real.connection_remote_metadata net;
      connection_stat = Real.connection_stat net;
      global_stat = Real.global_stat net;
      get_peer_metadata = Real.get_peer_metadata net;
      set_peer_metadata = Real.set_peer_metadata net;
      connect = (fun ?timeout -> Real.connect ?timeout net);
      recv = Real.recv net;
      recv_any = Real.recv_any net;
      send = Real.send net;
      try_send = Real.try_send net;
      pool = Some net.pool;
      connect_handler = Some net.connect_handler;
      fold_connections = (fun ~init ~f -> Real.fold_connections net ~init ~f);
      iter_connections = Real.iter_connections net;
      on_new_connection = Real.on_new_connection net;
      negotiated_version = Real.negotiated_version net;
      activate = Real.activate net;
      watcher = net.Real.watcher;
    }
(* Activate the p2p layer: emit the activation event, then delegate to the
   implementation's [activate] closure. *)
let activate t =
  Events.(emit__dont_wait__use_with_care activate_layer) () ;
  t.activate ()
(* Build a mock p2p layer that performs no networking: it has no pool, no
   connect handler, accepts no connections, never delivers messages
   ([recv]/[recv_any] never resolve) and fails every [connect]/[send]. Used
   where a [t] is required without real networking. *)
let faked_network (msg_cfg : 'msg P2p_params.message_config) peer_cfg
    faked_metadata =
  let announced_version =
    Network_version.announced
      ~chain_name:msg_cfg.chain_name
      ~distributed_db_versions:msg_cfg.distributed_db_versions
      ~p2p_versions:P2p_version.supported
  in
  {
    announced_version;
    peer_id = Fake.id.peer_id;
    maintain = Lwt_result_syntax.return;
    roll = Lwt.return;
    shutdown = Lwt.return;
    connections = (fun () -> []);
    find_connection_by_peer_id = (fun _ -> None);
    find_connection_by_point = (fun _ -> None);
    disconnect = (fun ?wait:_ _ -> Lwt.return_unit);
    connection_info =
      (fun _ -> Fake.connection_info announced_version faked_metadata);
    connection_local_metadata = (fun _ -> faked_metadata);
    connection_remote_metadata = (fun _ -> faked_metadata);
    connection_stat = (fun _ -> Fake.empty_stat);
    global_stat = (fun () -> Fake.empty_stat);
    get_peer_metadata = (fun _ -> peer_cfg.P2p_params.peer_meta_initial ());
    set_peer_metadata = (fun _ _ -> ());
    connect =
      (fun ?timeout:_ _ ->
        Lwt_result_syntax.tzfail P2p_errors.Connection_refused);
    recv = (fun _ -> Lwt_utils.never_ending ());
    recv_any = (fun () -> Lwt_utils.never_ending ());
    send = (fun _ _ -> Lwt_result_syntax.tzfail P2p_errors.Connection_closed);
    try_send = (fun _ _ -> false);
    fold_connections = (fun ~init ~f:_ -> init);
    iter_connections = (fun _f -> ());
    on_new_connection = (fun _f -> ());
    negotiated_version = (fun _ -> announced_version);
    pool = None;
    connect_handler = None;
    activate = (fun _ -> ());
    watcher = Lwt_watcher.create_input ();
  }
(* The remaining public API simply projects the corresponding closure (or
   value) out of the [t] record, so it behaves uniformly for real and faked
   networks. *)
let peer_id net = net.peer_id
let maintain net = net.maintain ()
let roll net = net.roll ()
let shutdown net = net.shutdown ()
let connections net = net.connections ()
let disconnect net = net.disconnect
let find_connection_by_peer_id net = net.find_connection_by_peer_id
let find_connection_by_point net = net.find_connection_by_point
let connection_info net = net.connection_info
let connection_local_metadata net = net.connection_local_metadata
let connection_remote_metadata net = net.connection_remote_metadata
let connection_stat net = net.connection_stat
let global_stat net = net.global_stat ()
let get_peer_metadata net = net.get_peer_metadata
let set_peer_metadata net = net.set_peer_metadata
let connect net = net.connect
let recv net = net.recv
let recv_any net = net.recv_any ()
let send net = net.send
let try_send net = net.try_send
(* [broadcast] does not go through the record: it operates directly on a
   connection table via [Real.broadcast]. *)
let broadcast connections ?except ?alt msg =
  Real.broadcast connections ?except ?alt msg
let fold_connections net = net.fold_connections
let iter_connections net = net.iter_connections
let on_new_connection net = net.on_new_connection
(* Greylisting is a no-op on faked networks (whose [pool] is [None]). *)
let greylist_addr net addr =
  Option.iter (fun pool -> P2p_pool.greylist_addr pool addr) net.pool
let greylist_peer net peer_id =
  Option.iter (fun pool -> P2p_pool.greylist_peer pool peer_id) net.pool
let watcher net = Lwt_watcher.create_stream net.watcher
let negotiated_version net = net.negotiated_version
(* Re-export [broadcast] with an explicit connection-table type, for tests. *)
module Internal_for_tests = struct
  let broadcast_conns (connections : ('a, 'b, 'c) P2p_conn.t P2p_peer.Table.t)
      ?except ?alt msg =
    broadcast connections ?except ?alt msg
end
|
b6603874dcac3fae2b1fe9d0d9ae119963cff3c50259536526b1f10d035b0443 | aphyr/gretchen | flatzinc_test.clj | (ns gretchen.constraint.flatzinc-test
(:require [clojure.test :refer :all]
[clojure.test.check :as tc]
[clojure.test.check.generators :as gen]
[clojure.test.check.properties :as prop]
[clojure.test.check.clojure-test :refer [defspec]]
[clojure.walk :refer [postwalk-replace]]
[clojure.set :as set]
[clojure.math.numeric-tower :refer [expt]]
[clojure.pprint :refer [pprint]]
[gretchen.constraint-test :as ct]
[gretchen.constraint.flatzinc :refer :all]))
(def n 100) ; test.spec iters
;; Golden tests for the FlatZinc text emitted by `flatzinc-str`:
;; a single boolean variable, a strict order over two bounded ints, and a
;; disjunction reified through introduced _fz0/_fz1 booleans.
(deftest flatzinc-test
  (is (= (str "\nvar bool: x :: output_var;\n"
              "\n\n"
              "constraint bool_eq(x, true);\n"
              "\n"
              "solve satisfy;\n")
         (flatzinc-str '(and (bool :x)
                             :x))))
  (is (= (str "var 0..2: a :: output_var;\n"
              "var 0..2: b :: output_var;\n"
              "\n\n\n"
              "constraint int_lt(a, b);\n"
              "\n"
              "solve :: int_search([a, b], input_order, indomain_split, complete) satisfy;\n")
         (flatzinc-str '(and (in :a 0 2)
                             (in :b 0 2)
                             (< :a :b)))))
  (is (= (str "var 0..2: a :: output_var;\n"
              "var 0..2: b :: output_var;\n"
              "\n\n"
              "var bool: _fz0 :: var_is_introduced :: is_defined_var;\n"
              "var bool: _fz1 :: var_is_introduced :: is_defined_var;\n"
              "\n"
              "constraint int_lt_reif(a, b, _fz0) :: defines_var(_fz0);\n"
              "constraint int_lt_reif(b, a, _fz1) :: defines_var(_fz1);\n"
              "constraint bool_or(_fz0, _fz1, true);\n"
              "\n"
              "solve :: int_search([a, b], input_order, indomain_split, complete) "
              "satisfy;\n")
         (flatzinc-str '(and (in :a 0 2)
                             (in :b 0 2)
                             (or (< :a :b)
                                 (< :b :a)))))))
;; `solutions` must enumerate exactly the six assignments with distinct
;; :a and :b over the domain {0,1,2} x {0,1,2}.
(deftest solutions-test
  (is (= #{{:a 0 :b 1}
           {:a 0 :b 2}
           {:a 1 :b 2}
           {:a 1 :b 0}
           {:a 2 :b 0}
           {:a 2 :b 1}}
         (set (solutions '(and (in :a 0 2)
                               (in :b 0 2)
                               (or (< :a :b)
                                   (< :b :a))))))))
; TODO: verify that all_different works
;; Property (n iterations): for random constraint expressions the FlatZinc
;; backend agrees with the brute-force solver.  On mismatch the `or` falls
;; through the println forms (each returns nil), printing both solution
;; sets for debugging and failing the property with nil.
(defspec tseitin-spec
  n
  (prop/for-all [e ct/gen-full-expr]
    (let [brute-solutions (ct/solutions e)
          fz-solutions (solutions e)]
      (or (= (set brute-solutions)
             (set fz-solutions))
          (println)
          (println "----------------------------------------------")
          (println "Expression to solve")
          (pprint e)
          (println)
          (println "Brute-force solutions")
          (pprint (set brute-solutions))
          (println)
          (println "Flatzinc-gecode solutions")
          (pprint (set fz-solutions))))))
| null | https://raw.githubusercontent.com/aphyr/gretchen/bb7c4439884494a218561bc08a164ff0da84af36/test/gretchen/constraint/flatzinc_test.clj | clojure | test.spec iters
TODO: verify that all_different works | (ns gretchen.constraint.flatzinc-test
(:require [clojure.test :refer :all]
[clojure.test.check :as tc]
[clojure.test.check.generators :as gen]
[clojure.test.check.properties :as prop]
[clojure.test.check.clojure-test :refer [defspec]]
[clojure.walk :refer [postwalk-replace]]
[clojure.set :as set]
[clojure.math.numeric-tower :refer [expt]]
[clojure.pprint :refer [pprint]]
[gretchen.constraint-test :as ct]
[gretchen.constraint.flatzinc :refer :all]))
(deftest flatzinc-test
(is (= (str "\nvar bool: x :: output_var;\n"
"\n\n"
"constraint bool_eq(x, true);\n"
"\n"
"solve satisfy;\n")
(flatzinc-str '(and (bool :x)
:x))))
(is (= (str "var 0..2: a :: output_var;\n"
"var 0..2: b :: output_var;\n"
"\n\n\n"
"constraint int_lt(a, b);\n"
"\n"
"solve :: int_search([a, b], input_order, indomain_split, complete) satisfy;\n")
(flatzinc-str '(and (in :a 0 2)
(in :b 0 2)
(< :a :b)))))
(is (= (str "var 0..2: a :: output_var;\n"
"var 0..2: b :: output_var;\n"
"\n\n"
"var bool: _fz0 :: var_is_introduced :: is_defined_var;\n"
"var bool: _fz1 :: var_is_introduced :: is_defined_var;\n"
"\n"
"constraint int_lt_reif(a, b, _fz0) :: defines_var(_fz0);\n"
"constraint int_lt_reif(b, a, _fz1) :: defines_var(_fz1);\n"
"constraint bool_or(_fz0, _fz1, true);\n"
"\n"
"solve :: int_search([a, b], input_order, indomain_split, complete) "
"satisfy;\n")
(flatzinc-str '(and (in :a 0 2)
(in :b 0 2)
(or (< :a :b)
(< :b :a)))))))
(deftest solutions-test
(is (= #{{:a 0 :b 1}
{:a 0 :b 2}
{:a 1 :b 2}
{:a 1 :b 0}
{:a 2 :b 0}
{:a 2 :b 1}}
(set (solutions '(and (in :a 0 2)
(in :b 0 2)
(or (< :a :b)
(< :b :a))))))))
(defspec tseitin-spec
n
(prop/for-all [e ct/gen-full-expr]
(let [brute-solutions (ct/solutions e)
fz-solutions (solutions e)]
(or (= (set brute-solutions)
(set fz-solutions))
(println)
(println "----------------------------------------------")
(println "Expression to solve")
(pprint e)
(println)
(println "Brute-force solutions")
(pprint (set brute-solutions))
(println)
(println "Flatzinc-gecode solutions")
(pprint (set fz-solutions))))))
|
3bd1716076572d971773aca913d7cd322fc04ccaacd5deef900c9ff4fe83f104 | kutyel/haskell-book | Chapter14Spec.hs | module Test.Chapter14Spec where
import Chapter14
import Data.List (sort)
import Test.Hspec (Spec, describe, it)
import Test.QuickCheck
( Gen,
NonZero (NonZero),
Positive (Positive),
Testable (property),
elements,
expectFailure,
frequency,
)
-- | Generator choosing 'Fulse' and 'Frue' with equal probability.
genFoolEqual :: Gen Fool
genFoolEqual = elements [Fulse, Frue]
-- | Generator biased 3:1 towards 'Fulse'.
genFool :: Gen Fool
genFool = frequency [(3, return Fulse), (1, return Frue)]
-- | Law relating quot and rem: @quot x y * y + rem x y == x@ (y /= 0).
prop_quotAndRem :: Integral a => NonZero a -> NonZero a -> Bool
prop_quotAndRem (NonZero x) (NonZero y) = quot x y * y + rem x y == x
-- | Law relating div and mod: @div x y * y + mod x y == x@ (y /= 0).
prop_divAndMod :: Integral a => NonZero a -> NonZero a -> Bool
prop_divAndMod (NonZero x) (NonZero y) = div x y * y + mod x y == x
-- | Identity check via 'squareIdentity' (defined in Chapter14); used below
-- to demonstrate that floating-point arithmetic breaks it.
prop_squareId :: (Eq a, Floating a) => a -> Bool
prop_squareId x = squareIdentity x == x
-- | Associativity of (^) restricted to positive operands (expected false).
assocExp :: Integral a => Positive a -> Positive a -> Positive a -> Bool
assocExp (Positive x) (Positive y) (Positive z) = associative (^) x y z
-- | Commutativity of (^) restricted to positive operands (expected false).
commuExp :: Integral a => Positive a -> Positive a -> Bool
commuExp (Positive x) (Positive y) = commutative (^) x y
-- | Hspec suite for the Chapter14 QuickCheck properties.  Properties that
-- are deliberately false (exponentiation laws, foldr-cons-as-concat,
-- take/length, floating-point square identity) are wrapped in
-- 'expectFailure'.
spec :: Spec
spec =
  describe "Property Testing" $ do
    it "half of n should work for fractional numbers" $
      property (prop_half :: Double -> Bool)
    it "the half identity should hold" $
      property (prop_halfIdentity :: Double -> Bool)
    it "for any list you apply sort to" $
      property ((listOrdered . sort) :: [Int] -> Bool)
    it "addition should be associative" $
      property (associative (+) :: Int -> Int -> Int -> Bool)
    it "addition should be commutative" $
      property (commutative (+) :: Int -> Int -> Bool)
    it "multiplication should be associative" $
      property (associative (*) :: Int -> Int -> Int -> Bool)
    it "multiplication should be commutative" $
      property (commutative (*) :: Int -> Int -> Bool)
    it "quot and rem should be related" $
      property (prop_quotAndRem :: NonZero Int -> NonZero Int -> Bool)
    it "div and mod should be related" $
      property (prop_divAndMod :: NonZero Int -> NonZero Int -> Bool)
    it "exponentiation should *not* be associative" $
      expectFailure $
        property
          (assocExp :: Positive Int -> Positive Int -> Positive Int -> Bool)
    it "exponentiation should *not* be commutative" $
      expectFailure $
        property (commuExp :: Positive Int -> Positive Int -> Bool)
    it "reversing a list twice is the identity of the list" $
      property (prop_reverseTwice :: [Int] -> Bool)
    it "apply operator ($) should work correctly" $
      property (prop_applyOperator :: Int -> Bool)
    it "composition operator (.) should work correctly" $
      property (prop_composition :: String -> Bool)
    it "read is the inverse of show" $
      property (prop_roundTrip :: String -> Bool)
    it "folding by cons shoud *not* concat" $
      expectFailure $
        property (prop_foldrPlusPlus :: [Int] -> [Int] -> Bool)
    it "folding by concat with empty list should equal concat" $
      property (prop_foldrConcat :: [[Int]] -> Bool)
    it "take and length of n should not hold" $
      expectFailure $
        property (prop_takeLength :: Int -> [Int] -> Bool)
    it "floating point arithmetic should fail -.-" $
      expectFailure $
        property (prop_squareId :: Double -> Bool)
    it "idempotence should work for capitalizing" $
      property (prop_idemCapitalize :: String -> Bool)
    it "idempotence should work for sorting" $
      property (prop_idemSort :: [Int] -> Bool)
| null | https://raw.githubusercontent.com/kutyel/haskell-book/fd4dc0332b67575cfaf5e3fb0e26687dc01772a0/test/Test/Chapter14Spec.hs | haskell | module Test.Chapter14Spec where
import Chapter14
import Data.List (sort)
import Test.Hspec (Spec, describe, it)
import Test.QuickCheck
( Gen,
NonZero (NonZero),
Positive (Positive),
Testable (property),
elements,
expectFailure,
frequency,
)
genFoolEqual :: Gen Fool
genFoolEqual = elements [Fulse, Frue]
genFool :: Gen Fool
genFool = frequency [(3, return Fulse), (1, return Frue)]
prop_quotAndRem :: Integral a => NonZero a -> NonZero a -> Bool
prop_quotAndRem (NonZero x) (NonZero y) = quot x y * y + rem x y == x
prop_divAndMod :: Integral a => NonZero a -> NonZero a -> Bool
prop_divAndMod (NonZero x) (NonZero y) = div x y * y + mod x y == x
prop_squareId :: (Eq a, Floating a) => a -> Bool
prop_squareId x = squareIdentity x == x
assocExp :: Integral a => Positive a -> Positive a -> Positive a -> Bool
assocExp (Positive x) (Positive y) (Positive z) = associative (^) x y z
commuExp :: Integral a => Positive a -> Positive a -> Bool
commuExp (Positive x) (Positive y) = commutative (^) x y
spec :: Spec
spec =
describe "Property Testing" $ do
it "half of n should work for fractional numbers" $
property (prop_half :: Double -> Bool)
it "the half identity should hold" $
property (prop_halfIdentity :: Double -> Bool)
it "for any list you apply sort to" $
property ((listOrdered . sort) :: [Int] -> Bool)
it "addition should be associative" $
property (associative (+) :: Int -> Int -> Int -> Bool)
it "addition should be commutative" $
property (commutative (+) :: Int -> Int -> Bool)
it "multiplication should be associative" $
property (associative (*) :: Int -> Int -> Int -> Bool)
it "multiplication should be commutative" $
property (commutative (*) :: Int -> Int -> Bool)
it "quot and rem should be related" $
property (prop_quotAndRem :: NonZero Int -> NonZero Int -> Bool)
it "div and mod should be related" $
property (prop_divAndMod :: NonZero Int -> NonZero Int -> Bool)
it "exponentiation should *not* be associative" $
expectFailure $
property
(assocExp :: Positive Int -> Positive Int -> Positive Int -> Bool)
it "exponentiation should *not* be commutative" $
expectFailure $
property (commuExp :: Positive Int -> Positive Int -> Bool)
it "reversing a list twice is the identity of the list" $
property (prop_reverseTwice :: [Int] -> Bool)
it "apply operator ($) should work correctly" $
property (prop_applyOperator :: Int -> Bool)
it "composition operator (.) should work correctly" $
property (prop_composition :: String -> Bool)
it "read is the inverse of show" $
property (prop_roundTrip :: String -> Bool)
it "folding by cons shoud *not* concat" $
expectFailure $
property (prop_foldrPlusPlus :: [Int] -> [Int] -> Bool)
it "folding by concat with empty list should equal concat" $
property (prop_foldrConcat :: [[Int]] -> Bool)
it "take and length of n should not hold" $
expectFailure $
property (prop_takeLength :: Int -> [Int] -> Bool)
it "floating point arithmetic should fail -.-" $
expectFailure $
property (prop_squareId :: Double -> Bool)
it "idempotence should work for capitalizing" $
property (prop_idemCapitalize :: String -> Bool)
it "idempotence should work for sorting" $
property (prop_idemSort :: [Int] -> Bool)
| |
fdf35d7c1aa09c3a5805c8137c185342daaf0449469864d0a6b155a3fa18c445 | dcastro/haskell-flatbuffers | Parser.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TypeApplications #
{- HLINT ignore structField "Reduce duplication" -}
{- HLINT ignore typeRef "Use <$>" -}
module FlatBuffers.Internal.Compiler.Parser where
import Control.Monad ( when )
import qualified Control.Monad.Combinators.NonEmpty as NE
import qualified Data.ByteString as BS
import Data.Coerce ( coerce )
import Data.Functor ( (<&>), void )
import Data.List.NonEmpty ( NonEmpty((:|)) )
import qualified Data.List.NonEmpty as NE
import qualified Data.Map.Strict as Map
import Data.Maybe ( catMaybes )
import Data.Scientific ( Scientific )
import Data.Text ( Text )
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Void ( Void )
import Data.Word ( Word8 )
import FlatBuffers.Internal.Compiler.SyntaxTree
import FlatBuffers.Internal.Constants ( fileIdentifierSize )
import Text.Megaparsec
import Text.Megaparsec.Char
import qualified Text.Megaparsec.Char.Lexer as L
import Text.Read ( readMaybe )
type Parser = Parsec Void String
-- | Roughly based on: .
-- Differences between this parser and the above grammar:
--
-- * Unions members now support aliases.
* An enum 's underlying type used to be optional ( defaulting to ) , but now it 's mandatory .
-- * Attributes can be reffered to either as an identifier or as a string literal (e.g. @attr@ or @"attr"@).
-- * Struct fields can't have default values.
-- * The grammar states that table/struct field defaults can only be scalars (integer/floating point constants),
-- when in reality, it could be also be a boolean or an enum identifier.
-- * The grammar says attribute values can be integers, floats or string literals.
Flatc only allows integers and string literals . To make things simpler , we decided to go with flatc 's
-- approach and disallow floats.
* The grammar says namespaces must include at least one fragment , but an empty namespace
( i.e. @namespace ; @ ) is perfectly valid .
* This supports @native_include@ statements
-- (see: #flatbuffers_cpp_object_based_api)
schema :: Parser Schema
schema = do
  sc
  -- `include` / `native_include` statements may only appear before any
  -- declaration; `native_include` is parsed and discarded (Nothing).
  includes <- catMaybes <$> many (Just <$> include <|> Nothing <$ nativeInclude)
  decls <- many (decl <|> failOnInclude)
  eof
  pure $ Schema includes (catMaybes decls)
  where
    -- Turn a misplaced (non-leading) include into a helpful parse error.
    failOnInclude =
      rword "include" *> fail "\"include\" statements must be at the beginning of the file."
        <|> (rword "native_include" *> fail "\"native_include\" statements must be at the beginning of the file.")
-- | One top-level declaration.  Declarations the compiler does not use
-- (file_extension, JSON objects, rpc services) are parsed and discarded
-- ('Nothing').
decl :: Parser (Maybe Decl)
decl =
  choice
    [ Just . DeclN <$> namespaceDecl
    , Just . DeclT <$> tableDecl
    , Just . DeclS <$> structDecl
    , Just . DeclE <$> enumDecl
    , Just . DeclU <$> unionDecl
    , Just . DeclR <$> rootDecl
    , Just . DeclFI <$> fileIdentifierDecl
    , Just . DeclA <$> attributeDecl
    , Nothing <$ fileExtensionDecl
    , Nothing <$ jsonObj
    , Nothing <$ rpcDecl
    ]
-- | space consumer - this consumes and ignores any whitespace + comments
sc :: Parser ()
sc = L.space space1 lineCmnt blockCmnt
where
lineCmnt = L.skipLineComment "//"
blockCmnt = L.skipBlockComment "/*" "*/"
lexeme :: Parser a -> Parser a
lexeme = L.lexeme sc
symbol :: String -> Parser String
symbol = L.symbol sc
rword :: String -> Parser ()
rword w = (lexeme . try) (string w *> notFollowedBy (alphaNumChar <|> char '_'))
curly, square, parens :: Parser a -> Parser a
curly = between (symbol "{") (symbol "}")
square = between (symbol "[") (symbol "]")
parens = between (symbol "(") (symbol ")")
commaSep :: Parser a -> Parser [a]
commaSep p = sepBy p (symbol ",")
commaSep1 :: Parser a -> Parser (NonEmpty a)
commaSep1 p = NE.sepBy1 p (symbol ",")
commaSepEndBy1 :: Parser a -> Parser (NonEmpty a)
commaSepEndBy1 p = NE.sepEndBy1 p (symbol ",")
semi, colon :: Parser ()
semi = void $ symbol ";"
colon = void $ symbol ":"
ident :: Parser Ident
ident = label "identifier" $ (lexeme . try) identifier
where
identifier = fmap (Ident . T.pack) $ (:) <$> letterChar <*> many (alphaNumChar <|> char '_')
typ :: Parser Type
typ =
TInt8 <$ (rword "int8" <|> rword "byte") <|>
TInt16 <$ (rword "int16" <|> rword "short") <|>
TInt32 <$ (rword "int32" <|> rword "int") <|>
TInt64 <$ (rword "int64" <|> rword "long") <|>
TWord8 <$ (rword "uint8" <|> rword "ubyte") <|>
TWord16 <$ (rword "uint16" <|> rword "ushort") <|>
TWord32 <$ (rword "uint32" <|> rword "uint") <|>
TWord64 <$ (rword "uint64" <|> rword "ulong") <|>
TFloat <$ (rword "float32" <|> rword "float") <|>
TDouble <$ (rword "float64" <|> rword "double") <|>
TBool <$ rword "bool" <|>
TString <$ rword "string" <|>
label "type identifier" (TRef <$> typeRef) <|>
label "vector type" vector
where
vector = TVector <$> between
(symbol "[" *> (notFollowedBy (symbol "[") <|> fail "nested vector types not supported" ))
(symbol "]")
typ
-- | A possibly-qualified type reference: zero or more namespace fragments
-- (each terminated by a dot) followed by the final identifier.
typeRef :: Parser TypeRef
typeRef = mkRef <$> many (try (ident <* symbol ".")) <*> ident
  where
    mkRef idents i = TypeRef (Namespace (coerce idents)) i
tableField :: Parser TableField
tableField = do
i <- ident
colon
t <- typ
def <- optional (symbol "=" *> defaultVal)
md <- metadata
semi
pure $ TableField i t def md
structField :: Parser StructField
structField = do
i <- ident
colon
t <- typ
md <- metadata
semi
pure $ StructField i t md
tableDecl :: Parser TableDecl
tableDecl = do
rword "table"
i <- ident
md <- metadata
fs <- curly (many tableField)
pure $ TableDecl i md fs
structDecl :: Parser StructDecl
structDecl = do
rword "struct"
i <- ident
md <- metadata
fs <- curly (NE.some structField)
pure $ StructDecl i md fs
enumDecl :: Parser EnumDecl
enumDecl = do
rword "enum"
i <- ident
colon
t <- typ
md <- metadata
v <- curly (commaSepEndBy1 enumVal)
pure $ EnumDecl i t md v
enumVal :: Parser EnumVal
enumVal = EnumVal <$> ident <*> optional (symbol "=" *> intLiteral)
unionDecl :: Parser UnionDecl
unionDecl = do
rword "union"
i <- ident
md <- metadata
v <- curly (commaSepEndBy1 unionVal)
pure $ UnionDecl i md v
unionVal :: Parser UnionVal
unionVal = UnionVal <$> optional (try (ident <* colon)) <*> typeRef
namespaceDecl :: Parser NamespaceDecl
namespaceDecl =
NamespaceDecl . Namespace . coerce <$>
(rword "namespace" *> sepBy ident (symbol ".") <* semi)
stringLiteral :: Parser StringLiteral
stringLiteral =
label "string literal" $
fmap (StringLiteral . T.pack) . lexeme $
char '"' >> manyTill L.charLiteral (char '"')
intLiteral :: Parser IntLiteral
intLiteral =
label "integer literal" . lexeme $
L.signed sc L.decimal
attributeVal :: Parser AttributeVal
attributeVal =
choice
[ AttrI . unIntLiteral <$> intLiteral
, AttrS . unStringLiteral <$> stringLiteral
]
defaultVal :: Parser DefaultVal
defaultVal =
choice
[ DefaultBool True <$ rword "true"
, DefaultBool False <$ rword "false"
, DefaultNum <$> label "number literal" (lexeme (L.signed sc L.scientific))
, ident <&> \(Ident ref) -> DefaultRef (ref :| [])
, stringLiteral >>= \(StringLiteral str) ->
case T.strip str of
"true" -> pure $ DefaultBool True
"false" -> pure $ DefaultBool False
other ->
case readMaybe @Scientific (T.unpack other) of
Just n -> pure $ DefaultNum n
Nothing ->
case NE.nonEmpty (T.words str) of
Just refs -> pure $ DefaultRef refs
Nothing -> fail "Expected 'true', 'false', a number, or one or more identifiers"
]
metadata :: Parser Metadata
metadata =
label "metadata"
. fmap (Metadata . Map.fromList . maybe [] NE.toList)
. optional
. parens
. commaSep1 $
(,) <$> attributeName <*> optional (colon *> attributeVal)
include :: Parser Include
include = Include <$> (rword "include" *> stringLiteral <* semi)
-- | See: #flatbuffers_cpp_object_based_api
nativeInclude :: Parser ()
nativeInclude = void (rword "native_include" >> stringLiteral >> semi)
rootDecl :: Parser RootDecl
rootDecl = RootDecl <$> (rword "root_type" *> typeRef <* semi)
fileExtensionDecl :: Parser ()
fileExtensionDecl = void (rword "file_extension" *> stringLiteral <* semi)
fileIdentifierDecl :: Parser FileIdentifierDecl
fileIdentifierDecl = do
rword "file_identifier"
fi <- coerce stringLiteral
let byteCount = BS.length (T.encodeUtf8 fi)
let codePointCount = T.length fi
when (byteCount /= fileIdentifierSize) $
if codePointCount == byteCount
-- if the user is using ASCII characters
then fail $ "file_identifier must be exactly " <> show (fileIdentifierSize @Word8) <> " characters"
if the user is using multi UTF-8 code unit characters , show a more detailed error message
else fail $ "file_identifier must be exactly " <> show (fileIdentifierSize @Word8) <> " UTF-8 code units"
semi
pure (FileIdentifierDecl fi)
attributeDecl :: Parser AttributeDecl
attributeDecl = AttributeDecl <$> (rword "attribute" *> attributeName <* semi)
attributeName :: Parser Text
attributeName = coerce stringLiteral <|> coerce ident
jsonObj :: Parser ()
jsonObj =
label "JSON object" (void jobject)
where
json = choice [void jstring, void jnumber, jbool, jnull, void jarray, void jobject]
jnull = rword "null"
jbool = rword "true" <|> rword "false"
jstring = stringLiteral
jnumber = lexeme $ L.signed sc L.scientific
jarray = square (commaSep json)
jobject = curly (commaSep keyValuePair)
keyValuePair = do
void stringLiteral <|> void ident
colon
json
rpcDecl :: Parser ()
rpcDecl = void $ rword "rpc_service" >> ident >> curly (NE.some rpcMethod)
rpcMethod :: Parser ()
rpcMethod = ident >> parens ident >> colon >> ident >> metadata >> void semi
| null | https://raw.githubusercontent.com/dcastro/haskell-flatbuffers/cea6a75109de109ae906741ee73cbb0f356a8e0d/src/FlatBuffers/Internal/Compiler/Parser.hs | haskell | # LANGUAGE OverloadedStrings #
| Roughly based on: .
Differences between this parser and the above grammar:
* Unions members now support aliases.
* Attributes can be reffered to either as an identifier or as a string literal (e.g. @attr@ or @"attr"@).
* Struct fields can't have default values.
* The grammar states that table/struct field defaults can only be scalars (integer/floating point constants),
when in reality, it could be also be a boolean or an enum identifier.
* The grammar says attribute values can be integers, floats or string literals.
approach and disallow floats.
(see: #flatbuffers_cpp_object_based_api)
| space consumer - this consumes and ignores any whitespace + comments
| See: #flatbuffers_cpp_object_based_api
if the user is using ASCII characters | # LANGUAGE TypeApplications #
HLINT ignore structField " Reduce duplication "
HLINT ignore typeRef " Use < $ > "
module FlatBuffers.Internal.Compiler.Parser where
import Control.Monad ( when )
import qualified Control.Monad.Combinators.NonEmpty as NE
import qualified Data.ByteString as BS
import Data.Coerce ( coerce )
import Data.Functor ( (<&>), void )
import Data.List.NonEmpty ( NonEmpty((:|)) )
import qualified Data.List.NonEmpty as NE
import qualified Data.Map.Strict as Map
import Data.Maybe ( catMaybes )
import Data.Scientific ( Scientific )
import Data.Text ( Text )
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Void ( Void )
import Data.Word ( Word8 )
import FlatBuffers.Internal.Compiler.SyntaxTree
import FlatBuffers.Internal.Constants ( fileIdentifierSize )
import Text.Megaparsec
import Text.Megaparsec.Char
import qualified Text.Megaparsec.Char.Lexer as L
import Text.Read ( readMaybe )
type Parser = Parsec Void String
* An enum 's underlying type used to be optional ( defaulting to ) , but now it 's mandatory .
Flatc only allows integers and string literals . To make things simpler , we decided to go with flatc 's
* The grammar says namespaces must include at least one fragment , but an empty namespace
( i.e. @namespace ; @ ) is perfectly valid .
* This supports @native_include@ statements
schema :: Parser Schema
schema = do
sc
includes <- catMaybes <$> many (Just <$> include <|> Nothing <$ nativeInclude)
decls <- many (decl <|> failOnInclude)
eof
pure $ Schema includes (catMaybes decls)
where
failOnInclude =
rword "include" *> fail "\"include\" statements must be at the beginning of the file."
<|> (rword "native_include" *> fail "\"native_include\" statements must be at the beginning of the file.")
decl :: Parser (Maybe Decl)
decl =
choice
[ Just . DeclN <$> namespaceDecl
, Just . DeclT <$> tableDecl
, Just . DeclS <$> structDecl
, Just . DeclE <$> enumDecl
, Just . DeclU <$> unionDecl
, Just . DeclR <$> rootDecl
, Just . DeclFI <$> fileIdentifierDecl
, Just . DeclA <$> attributeDecl
, Nothing <$ fileExtensionDecl
, Nothing <$ jsonObj
, Nothing <$ rpcDecl
]
sc :: Parser ()
sc = L.space space1 lineCmnt blockCmnt
where
lineCmnt = L.skipLineComment "//"
blockCmnt = L.skipBlockComment "/*" "*/"
lexeme :: Parser a -> Parser a
lexeme = L.lexeme sc
symbol :: String -> Parser String
symbol = L.symbol sc
rword :: String -> Parser ()
rword w = (lexeme . try) (string w *> notFollowedBy (alphaNumChar <|> char '_'))
curly, square, parens :: Parser a -> Parser a
curly = between (symbol "{") (symbol "}")
square = between (symbol "[") (symbol "]")
parens = between (symbol "(") (symbol ")")
commaSep :: Parser a -> Parser [a]
commaSep p = sepBy p (symbol ",")
commaSep1 :: Parser a -> Parser (NonEmpty a)
commaSep1 p = NE.sepBy1 p (symbol ",")
commaSepEndBy1 :: Parser a -> Parser (NonEmpty a)
commaSepEndBy1 p = NE.sepEndBy1 p (symbol ",")
semi, colon :: Parser ()
semi = void $ symbol ";"
colon = void $ symbol ":"
ident :: Parser Ident
ident = label "identifier" $ (lexeme . try) identifier
where
identifier = fmap (Ident . T.pack) $ (:) <$> letterChar <*> many (alphaNumChar <|> char '_')
typ :: Parser Type
typ =
TInt8 <$ (rword "int8" <|> rword "byte") <|>
TInt16 <$ (rword "int16" <|> rword "short") <|>
TInt32 <$ (rword "int32" <|> rword "int") <|>
TInt64 <$ (rword "int64" <|> rword "long") <|>
TWord8 <$ (rword "uint8" <|> rword "ubyte") <|>
TWord16 <$ (rword "uint16" <|> rword "ushort") <|>
TWord32 <$ (rword "uint32" <|> rword "uint") <|>
TWord64 <$ (rword "uint64" <|> rword "ulong") <|>
TFloat <$ (rword "float32" <|> rword "float") <|>
TDouble <$ (rword "float64" <|> rword "double") <|>
TBool <$ rword "bool" <|>
TString <$ rword "string" <|>
label "type identifier" (TRef <$> typeRef) <|>
label "vector type" vector
where
vector = TVector <$> between
(symbol "[" *> (notFollowedBy (symbol "[") <|> fail "nested vector types not supported" ))
(symbol "]")
typ
typeRef :: Parser TypeRef
typeRef = do
idents <- many (try (ident <* symbol "."))
i <- ident
pure $ TypeRef (Namespace (coerce idents)) i
tableField :: Parser TableField
tableField = do
i <- ident
colon
t <- typ
def <- optional (symbol "=" *> defaultVal)
md <- metadata
semi
pure $ TableField i t def md
structField :: Parser StructField
structField = do
i <- ident
colon
t <- typ
md <- metadata
semi
pure $ StructField i t md
tableDecl :: Parser TableDecl
tableDecl = do
rword "table"
i <- ident
md <- metadata
fs <- curly (many tableField)
pure $ TableDecl i md fs
structDecl :: Parser StructDecl
structDecl = do
rword "struct"
i <- ident
md <- metadata
fs <- curly (NE.some structField)
pure $ StructDecl i md fs
enumDecl :: Parser EnumDecl
enumDecl = do
rword "enum"
i <- ident
colon
t <- typ
md <- metadata
v <- curly (commaSepEndBy1 enumVal)
pure $ EnumDecl i t md v
enumVal :: Parser EnumVal
enumVal = EnumVal <$> ident <*> optional (symbol "=" *> intLiteral)
unionDecl :: Parser UnionDecl
unionDecl = do
rword "union"
i <- ident
md <- metadata
v <- curly (commaSepEndBy1 unionVal)
pure $ UnionDecl i md v
unionVal :: Parser UnionVal
unionVal = UnionVal <$> optional (try (ident <* colon)) <*> typeRef
namespaceDecl :: Parser NamespaceDecl
namespaceDecl =
NamespaceDecl . Namespace . coerce <$>
(rword "namespace" *> sepBy ident (symbol ".") <* semi)
stringLiteral :: Parser StringLiteral
stringLiteral =
label "string literal" $
fmap (StringLiteral . T.pack) . lexeme $
char '"' >> manyTill L.charLiteral (char '"')
intLiteral :: Parser IntLiteral
intLiteral =
label "integer literal" . lexeme $
L.signed sc L.decimal
attributeVal :: Parser AttributeVal
attributeVal =
choice
[ AttrI . unIntLiteral <$> intLiteral
, AttrS . unStringLiteral <$> stringLiteral
]
defaultVal :: Parser DefaultVal
defaultVal =
choice
[ DefaultBool True <$ rword "true"
, DefaultBool False <$ rword "false"
, DefaultNum <$> label "number literal" (lexeme (L.signed sc L.scientific))
, ident <&> \(Ident ref) -> DefaultRef (ref :| [])
, stringLiteral >>= \(StringLiteral str) ->
case T.strip str of
"true" -> pure $ DefaultBool True
"false" -> pure $ DefaultBool False
other ->
case readMaybe @Scientific (T.unpack other) of
Just n -> pure $ DefaultNum n
Nothing ->
case NE.nonEmpty (T.words str) of
Just refs -> pure $ DefaultRef refs
Nothing -> fail "Expected 'true', 'false', a number, or one or more identifiers"
]
metadata :: Parser Metadata
metadata =
label "metadata"
. fmap (Metadata . Map.fromList . maybe [] NE.toList)
. optional
. parens
. commaSep1 $
(,) <$> attributeName <*> optional (colon *> attributeVal)
include :: Parser Include
include = Include <$> (rword "include" *> stringLiteral <* semi)
nativeInclude :: Parser ()
nativeInclude = void (rword "native_include" >> stringLiteral >> semi)
rootDecl :: Parser RootDecl
rootDecl = RootDecl <$> (rword "root_type" *> typeRef <* semi)
fileExtensionDecl :: Parser ()
fileExtensionDecl = void (rword "file_extension" *> stringLiteral <* semi)
fileIdentifierDecl :: Parser FileIdentifierDecl
fileIdentifierDecl = do
rword "file_identifier"
fi <- coerce stringLiteral
let byteCount = BS.length (T.encodeUtf8 fi)
let codePointCount = T.length fi
when (byteCount /= fileIdentifierSize) $
if codePointCount == byteCount
then fail $ "file_identifier must be exactly " <> show (fileIdentifierSize @Word8) <> " characters"
if the user is using multi UTF-8 code unit characters , show a more detailed error message
else fail $ "file_identifier must be exactly " <> show (fileIdentifierSize @Word8) <> " UTF-8 code units"
semi
pure (FileIdentifierDecl fi)
attributeDecl :: Parser AttributeDecl
attributeDecl = AttributeDecl <$> (rword "attribute" *> attributeName <* semi)
attributeName :: Parser Text
attributeName = coerce stringLiteral <|> coerce ident
jsonObj :: Parser ()
jsonObj =
label "JSON object" (void jobject)
where
json = choice [void jstring, void jnumber, jbool, jnull, void jarray, void jobject]
jnull = rword "null"
jbool = rword "true" <|> rword "false"
jstring = stringLiteral
jnumber = lexeme $ L.signed sc L.scientific
jarray = square (commaSep json)
jobject = curly (commaSep keyValuePair)
keyValuePair = do
void stringLiteral <|> void ident
colon
json
rpcDecl :: Parser ()
rpcDecl = void $ rword "rpc_service" >> ident >> curly (NE.some rpcMethod)
rpcMethod :: Parser ()
rpcMethod = ident >> parens ident >> colon >> ident >> metadata >> void semi
|
5af99c5da7dfa09c6c61b4d8012a5ab7327f5f2253966b6ec2b39d559c70b0d6 | michalkonecny/aern2 | App.hs | |
Module : AERN2.RealFun . PlotService . App
Description : Serving data about a real function
Copyright : ( c ) : :
Stability : experimental
Portability : portable
Serving data about a real function
Module : AERN2.RealFun.PlotService.App
Description : Serving data about a real function
Copyright : (c) Michal Konecny
License : BSD3
Maintainer :
Stability : experimental
Portability : portable
Serving data about a real function
-}
module AERN2.RealFun.PlotService.App
(
startServer, app,
Functions, Function(..), functionUsingEval, intervalFunctionUsingEval
)
where
import MixedTypesNumPrelude
import qualified Prelude as P
-- import Text.Printf
import System.IO
import Control.Concurrent
import Control.Monad.IO.Class
import Control.Monad.Trans.Except
import qualified Data.Map as Map
import Network.Wai
import Network.Wai.MakeAssets
import Network.Wai.Handler.Warp
import Network.Wai.Middleware.RequestLogger
import Servant
import AERN2.MP.Dyadic (dyadic)
import AERN2.MP.Ball as MPBall
import AERN2.Interval as Interval
import AERN2.RealFun.Operations
import AERN2.RealFun.PlotService.API
startServer :: Functions -> Bool -> Port -> IO ()
startServer fns shouldLog port = do
runSettings settings =<< (fmap logger (app fns))
where
logger
| shouldLog = logStdoutDev
| otherwise = id
settings =
setPort port $
setBeforeMainLoop (hPutStrLn stderr
("listening on port " ++ show port ++ "...")) $
defaultSettings
type WithAssets = Api :<|> Raw
withAssets :: Proxy WithAssets
withAssets = Proxy
app ::
Functions -> IO Application
app fns =
serve withAssets <$> server fns
server ::
Functions -> IO (Server WithAssets)
server fns = do
assets <- serveAssets def
samplingsStore <- mkSamplingsStore
return $ apiServer fns samplingsStore :<|> assets
apiServer ::
Functions -> SamplingsStore -> Server Api
apiServer fns samplingsStore =
listSamplings samplingsStore :<|>
postSampling samplingsStore :<|>
getSampling samplingsStore :<|>
listFunctionIds fns :<|>
getFunctionDomain fns :<|>
getFunctionValues fns samplingsStore :<|>
getFunctionName fns :<|>
getFunctionColour fns
{- Functions processing -}
type Functions = [Function]
data Function =
Function
{ function_name :: FunctionName
, function_colour :: FunctionColour
, function_dom :: DyadicInterval
, function_getBounds :: DyadicInterval -> Interval MPBall MPBall
}
functionUsingEval ::
(HasDomain fn, Domain fn ~ DyadicInterval,
CanApply fn DyadicInterval, ApplyType fn DyadicInterval ~ MPBall)
=>
(FunctionName, fn) -> Function
functionUsingEval (name, fn) =
Function
{ function_name = name
, function_dom = getDomain fn
, function_colour = functionColour (0,0,0)
, function_getBounds = \ di -> let val = apply fn di in Interval val val
}
intervalFunctionUsingEval ::
(HasDomain fn, Domain fn ~ DyadicInterval,
CanApply fn DyadicInterval, ApplyType fn DyadicInterval ~ Interval MPBall MPBall)
=>
(FunctionName, fn) -> Function
intervalFunctionUsingEval (name, fn) =
Function
{ function_name = name
, function_dom = getDomain fn
, function_colour = functionColour (0,0,0)
, function_getBounds = apply fn
}
listFunctionIds :: Functions -> Handler [FunctionId]
listFunctionIds fns = return $ map int [0..n - 1]
where
n = integer $ length fns
getFunctionDomain ::
Functions -> FunctionId -> Handler FunctionDomain
getFunctionDomain fns fnId =
maybe (throwE err404) return =<< (return $ fmap getDom maybeFn)
where
getDom = dyadicIntervalAPI . function_dom
maybeFn = lookupFunction fns fnId
getFunctionName ::
Functions -> FunctionId -> Handler FunctionName
getFunctionName fns fnId =
maybe (throwE err404) return =<< (return $ fmap function_name maybeFn)
where
maybeFn = lookupFunction fns fnId
getFunctionColour ::
Functions -> FunctionId -> Handler FunctionColour
getFunctionColour fns fnId =
maybe (throwE err404) return =<< (return $ fmap function_colour maybeFn)
where
maybeFn = lookupFunction fns fnId
lookupFunction :: Functions -> FunctionId -> Maybe Function
lookupFunction fns fnId
| 0 <= fnId && fnId < length fns = Just (fns !! fnId)
| otherwise = Nothing
getFunctionValues ::
Functions ->
SamplingsStore ->
FunctionId ->
SamplingId ->
Handler [FunctionSegment]
getFunctionValues fns samplingsStore fnId samplingId =
do
maybeSampling <- liftIO $ lookupSampling samplingsStore samplingId
useSamplingAndFn maybeSampling (lookupFunction fns fnId)
where
useSamplingAndFn Nothing _ = throwE err404
useSamplingAndFn _ Nothing = throwE err404
useSamplingAndFn (Just sampling) (Just fn) =
case getMaybeValueCN intersectedDomCN of
Just intersectedDom -> return $ recursiveEval maxDepth intersectedDom
_ -> return []
where
intersectedDomCN = dom `Interval.intersect` samplingDom
dom = function_dom fn
samplingDom = sampling_dom sampling
maxStep = dyadic $ sampling_maxStep sampling
getBounds = function_getBounds fn
maxDepth = 20
recursiveEval maxD di
| boundsGoodEnough || maxD == 0 =
[FunctionSegment
(dyadicIntervalAPI di)
(mpBallIntervalAPI boundsL)
(mpBallIntervalAPI boundsR)]
| otherwise = (recursiveEval maxD' di1) ++ (recursiveEval maxD' di2)
where
maxD' = maxD - 1
(di1, di2) = Interval.split di
(Interval diL diR) = di
boundsL@(Interval lL rL) = getBounds (Interval diL diL)
boundsR@(Interval lR rR) = getBounds (Interval diR diR)
boundsGoodEnough =
(diR-diL <= maxStep && abs (lL-lR) !<=! maxStep) && (abs (rL-rR) !<=! maxStep)
storage
newtype SamplingsStore = SamplingsStore (MVar (Map.Map SamplingId Sampling))
mkSamplingsStore :: IO SamplingsStore
mkSamplingsStore = SamplingsStore <$> newMVar Map.empty
listSamplings :: SamplingsStore -> Handler [SamplingId]
listSamplings samplingsStore = liftIO $ allSamplingIds samplingsStore
allSamplingIds :: SamplingsStore -> IO [SamplingId]
allSamplingIds (SamplingsStore mvar) =
Map.keys <$> readMVar mvar
postSampling :: SamplingsStore -> Sampling -> Handler SamplingId
postSampling samplingsStore newSampling =
liftIO $ insertSampling samplingsStore newSampling
insertSampling :: SamplingsStore -> Sampling -> IO SamplingId
insertSampling (SamplingsStore mvar) newSampling = modifyMVar mvar $ \ m -> do
let newKey = case Map.keys m of
[] -> int 0
ks -> succ (maximum ks)
return (Map.insert newKey newSampling m, newKey)
getSampling :: SamplingsStore -> SamplingId -> Handler Sampling
getSampling samplingsStore i =
maybe (throwE err404) return =<< liftIO (lookupSampling samplingsStore i)
lookupSampling :: SamplingsStore -> SamplingId -> IO (Maybe Sampling)
lookupSampling (SamplingsStore mvar) i = do
Map.lookup i <$> readMVar mvar
| null | https://raw.githubusercontent.com/michalkonecny/aern2/1c8f12dfcb287bd8e3353802a94865d7c2c121ec/aern2-fun-plot/server/src/AERN2/RealFun/PlotService/App.hs | haskell | import Text.Printf
Functions processing | |
Module : AERN2.RealFun . PlotService . App
Description : Serving data about a real function
Copyright : ( c ) : :
Stability : experimental
Portability : portable
Serving data about a real function
Module : AERN2.RealFun.PlotService.App
Description : Serving data about a real function
Copyright : (c) Michal Konecny
License : BSD3
Maintainer :
Stability : experimental
Portability : portable
Serving data about a real function
-}
module AERN2.RealFun.PlotService.App
(
startServer, app,
Functions, Function(..), functionUsingEval, intervalFunctionUsingEval
)
where
import MixedTypesNumPrelude
import qualified Prelude as P
import System.IO
import Control.Concurrent
import Control.Monad.IO.Class
import Control.Monad.Trans.Except
import qualified Data.Map as Map
import Network.Wai
import Network.Wai.MakeAssets
import Network.Wai.Handler.Warp
import Network.Wai.Middleware.RequestLogger
import Servant
import AERN2.MP.Dyadic (dyadic)
import AERN2.MP.Ball as MPBall
import AERN2.Interval as Interval
import AERN2.RealFun.Operations
import AERN2.RealFun.PlotService.API
startServer :: Functions -> Bool -> Port -> IO ()
startServer fns shouldLog port = do
runSettings settings =<< (fmap logger (app fns))
where
logger
| shouldLog = logStdoutDev
| otherwise = id
settings =
setPort port $
setBeforeMainLoop (hPutStrLn stderr
("listening on port " ++ show port ++ "...")) $
defaultSettings
type WithAssets = Api :<|> Raw
withAssets :: Proxy WithAssets
withAssets = Proxy
app ::
Functions -> IO Application
app fns =
serve withAssets <$> server fns
server ::
Functions -> IO (Server WithAssets)
server fns = do
assets <- serveAssets def
samplingsStore <- mkSamplingsStore
return $ apiServer fns samplingsStore :<|> assets
apiServer ::
Functions -> SamplingsStore -> Server Api
apiServer fns samplingsStore =
listSamplings samplingsStore :<|>
postSampling samplingsStore :<|>
getSampling samplingsStore :<|>
listFunctionIds fns :<|>
getFunctionDomain fns :<|>
getFunctionValues fns samplingsStore :<|>
getFunctionName fns :<|>
getFunctionColour fns
type Functions = [Function]
data Function =
Function
{ function_name :: FunctionName
, function_colour :: FunctionColour
, function_dom :: DyadicInterval
, function_getBounds :: DyadicInterval -> Interval MPBall MPBall
}
functionUsingEval ::
(HasDomain fn, Domain fn ~ DyadicInterval,
CanApply fn DyadicInterval, ApplyType fn DyadicInterval ~ MPBall)
=>
(FunctionName, fn) -> Function
functionUsingEval (name, fn) =
Function
{ function_name = name
, function_dom = getDomain fn
, function_colour = functionColour (0,0,0)
, function_getBounds = \ di -> let val = apply fn di in Interval val val
}
intervalFunctionUsingEval ::
(HasDomain fn, Domain fn ~ DyadicInterval,
CanApply fn DyadicInterval, ApplyType fn DyadicInterval ~ Interval MPBall MPBall)
=>
(FunctionName, fn) -> Function
intervalFunctionUsingEval (name, fn) =
Function
{ function_name = name
, function_dom = getDomain fn
, function_colour = functionColour (0,0,0)
, function_getBounds = apply fn
}
listFunctionIds :: Functions -> Handler [FunctionId]
listFunctionIds fns = return $ map int [0..n - 1]
where
n = integer $ length fns
getFunctionDomain ::
Functions -> FunctionId -> Handler FunctionDomain
getFunctionDomain fns fnId =
maybe (throwE err404) return =<< (return $ fmap getDom maybeFn)
where
getDom = dyadicIntervalAPI . function_dom
maybeFn = lookupFunction fns fnId
getFunctionName ::
Functions -> FunctionId -> Handler FunctionName
getFunctionName fns fnId =
maybe (throwE err404) return =<< (return $ fmap function_name maybeFn)
where
maybeFn = lookupFunction fns fnId
getFunctionColour ::
Functions -> FunctionId -> Handler FunctionColour
getFunctionColour fns fnId =
maybe (throwE err404) return =<< (return $ fmap function_colour maybeFn)
where
maybeFn = lookupFunction fns fnId
lookupFunction :: Functions -> FunctionId -> Maybe Function
lookupFunction fns fnId
| 0 <= fnId && fnId < length fns = Just (fns !! fnId)
| otherwise = Nothing
getFunctionValues ::
Functions ->
SamplingsStore ->
FunctionId ->
SamplingId ->
Handler [FunctionSegment]
getFunctionValues fns samplingsStore fnId samplingId =
do
maybeSampling <- liftIO $ lookupSampling samplingsStore samplingId
useSamplingAndFn maybeSampling (lookupFunction fns fnId)
where
useSamplingAndFn Nothing _ = throwE err404
useSamplingAndFn _ Nothing = throwE err404
useSamplingAndFn (Just sampling) (Just fn) =
case getMaybeValueCN intersectedDomCN of
Just intersectedDom -> return $ recursiveEval maxDepth intersectedDom
_ -> return []
where
intersectedDomCN = dom `Interval.intersect` samplingDom
dom = function_dom fn
samplingDom = sampling_dom sampling
maxStep = dyadic $ sampling_maxStep sampling
getBounds = function_getBounds fn
maxDepth = 20
recursiveEval maxD di
| boundsGoodEnough || maxD == 0 =
[FunctionSegment
(dyadicIntervalAPI di)
(mpBallIntervalAPI boundsL)
(mpBallIntervalAPI boundsR)]
| otherwise = (recursiveEval maxD' di1) ++ (recursiveEval maxD' di2)
where
maxD' = maxD - 1
(di1, di2) = Interval.split di
(Interval diL diR) = di
boundsL@(Interval lL rL) = getBounds (Interval diL diL)
boundsR@(Interval lR rR) = getBounds (Interval diR diR)
boundsGoodEnough =
(diR-diL <= maxStep && abs (lL-lR) !<=! maxStep) && (abs (rL-rR) !<=! maxStep)
storage
newtype SamplingsStore = SamplingsStore (MVar (Map.Map SamplingId Sampling))
mkSamplingsStore :: IO SamplingsStore
mkSamplingsStore = SamplingsStore <$> newMVar Map.empty
listSamplings :: SamplingsStore -> Handler [SamplingId]
listSamplings samplingsStore = liftIO $ allSamplingIds samplingsStore
allSamplingIds :: SamplingsStore -> IO [SamplingId]
allSamplingIds (SamplingsStore mvar) =
Map.keys <$> readMVar mvar
postSampling :: SamplingsStore -> Sampling -> Handler SamplingId
postSampling samplingsStore newSampling =
liftIO $ insertSampling samplingsStore newSampling
insertSampling :: SamplingsStore -> Sampling -> IO SamplingId
insertSampling (SamplingsStore mvar) newSampling = modifyMVar mvar $ \ m -> do
let newKey = case Map.keys m of
[] -> int 0
ks -> succ (maximum ks)
return (Map.insert newKey newSampling m, newKey)
getSampling :: SamplingsStore -> SamplingId -> Handler Sampling
getSampling samplingsStore i =
maybe (throwE err404) return =<< liftIO (lookupSampling samplingsStore i)
lookupSampling :: SamplingsStore -> SamplingId -> IO (Maybe Sampling)
lookupSampling (SamplingsStore mvar) i = do
Map.lookup i <$> readMVar mvar
|
00d577c302950849d7570dcdf8d6ea46152604bb0e6df9fe24984ae020262ad6 | Apress/haskell-quick-syntax-reference | ch7.hs | add :: Integer -> Integer -> Integer
add x y = x + y
main = do
putStrLn "Adding two numbers:"
print(add 3 7)
main = putStrLn "Learning about Haskell main function"
main = do
putStrLn "Are you enjoying Haskell?"
answer <- getLine
putStrLn ("You answered: " ++ answer)
day :: (Integral a) => a -> String
day 1 = "Monday"
day 2 = "Tuesday"
day 3 = "Wednesday"
day 4 = "Thursday"
day 5 = "Friday"
day 6 = "Saturday"
day 7 = "Sunday"
day x = "The week has only 7 days!"
numbers :: (Integral a) => a -> String
numbers x =
if x < 0 then "negative"
else "positive"
day :: (Integral a) => a -> String
day x = case x of 1 -> "Monday"
2 -> "Tuesday"
3 -> "Wednesday"
4 -> "Thursday"
5 -> "Friday"
6 -> "Saturday"
7 -> "Sunday"
_ -> "The week has only 7 days!"
sign :: (RealFloat a) => a -> String
sign x
| x < 0 = "negative"
| x == 0 = "zero"
| otherwise = "positive"
quadraticEq :: (Float, Float, Float) -> (Float, Float)
quadraticEq (a, b, c) = (x1, x2)
where
x1 = (-b - sqrt delta) / (2 * a)
x2 = (-b + sqrt delta) / (2 * a)
delta = b * b - 4 * a * c
pyramivVol :: (RealFloat a) => a -> a -> a
pyramivVol l h =
let area = l^2
in (area * h)/3
5 * (let a = 2 in a^2) + 7
[let cube x = x^3 in (cube 6, cube 3)]
(let x = 100; y = 200 in x+y, let l="Anne "; f = "Scott" in l ++ f)
main = do
putStrLn "The square of 2 is:"
print ((\x -> x^2) 2)
let concatAndPrint a b = putStrLn $ (++) a b
concatAndPrint "abc" "def"
"abc" `concatAndPrint` "def"
multiplyList m [] = []
multiplyList m (y:ys) = m*y : multiplyList m ys
multiplyListBy3 = multiplyList 3
| null | https://raw.githubusercontent.com/Apress/haskell-quick-syntax-reference/8bcb2773532de752d6297a91a3aaf49fd92ed03b/ch7.hs | haskell | add :: Integer -> Integer -> Integer
add x y = x + y
main = do
putStrLn "Adding two numbers:"
print(add 3 7)
main = putStrLn "Learning about Haskell main function"
main = do
putStrLn "Are you enjoying Haskell?"
answer <- getLine
putStrLn ("You answered: " ++ answer)
day :: (Integral a) => a -> String
day 1 = "Monday"
day 2 = "Tuesday"
day 3 = "Wednesday"
day 4 = "Thursday"
day 5 = "Friday"
day 6 = "Saturday"
day 7 = "Sunday"
day x = "The week has only 7 days!"
numbers :: (Integral a) => a -> String
numbers x =
if x < 0 then "negative"
else "positive"
day :: (Integral a) => a -> String
day x = case x of 1 -> "Monday"
2 -> "Tuesday"
3 -> "Wednesday"
4 -> "Thursday"
5 -> "Friday"
6 -> "Saturday"
7 -> "Sunday"
_ -> "The week has only 7 days!"
sign :: (RealFloat a) => a -> String
sign x
| x < 0 = "negative"
| x == 0 = "zero"
| otherwise = "positive"
quadraticEq :: (Float, Float, Float) -> (Float, Float)
quadraticEq (a, b, c) = (x1, x2)
where
x1 = (-b - sqrt delta) / (2 * a)
x2 = (-b + sqrt delta) / (2 * a)
delta = b * b - 4 * a * c
pyramivVol :: (RealFloat a) => a -> a -> a
pyramivVol l h =
let area = l^2
in (area * h)/3
5 * (let a = 2 in a^2) + 7
[let cube x = x^3 in (cube 6, cube 3)]
(let x = 100; y = 200 in x+y, let l="Anne "; f = "Scott" in l ++ f)
main = do
putStrLn "The square of 2 is:"
print ((\x -> x^2) 2)
let concatAndPrint a b = putStrLn $ (++) a b
concatAndPrint "abc" "def"
"abc" `concatAndPrint` "def"
multiplyList m [] = []
multiplyList m (y:ys) = m*y : multiplyList m ys
multiplyListBy3 = multiplyList 3
| |
d56cc48e54b161c3836034ec8a6a6b80561dbd84d02610b61279067ac7b1a33e | gator1/jepsen | time.clj | (ns jepsen.nemesis.time
"Functions for messing with time and clocks."
(:require [jepsen.control :as c]
[jepsen.os.debian :as debian]
[clojure.java.io :as io])
(:import (java.io File)))
(defn compile!
"Takes a Reader to C source code and spits out a binary to /opt/jepsen/<bin>."
[reader bin]
(c/su
(let [tmp-file (File/createTempFile "jepsen-upload" ".c")]
(try
(io/copy reader tmp-file)
; Upload
(c/exec :mkdir :-p "/opt/jepsen")
(c/exec :chmod "a+rwx" "/opt/jepsen")
(c/upload (.getCanonicalPath tmp-file) (str "/opt/jepsen/" bin ".c"))
(c/cd "/opt/jepsen"
(c/exec :gcc (str bin ".c"))
(c/exec :mv "a.out" bin))
(finally
(.delete tmp-file)))))
bin)
(defn compile-resource!
"Given a resource name, spits out a binary to /opt/jepsen/<bin>."
[resource bin]
(with-open [r (io/reader (io/resource resource))]
(compile! r bin)))
(defn install!
"Uploads and compiles some C programs for messing with clocks."
[]
(c/su
(debian/install [:build-essential])
(compile-resource! "strobe-time.c" "strobe-time")
(compile-resource! "bump-time.c" "bump-time")))
(defn reset-time!
"Resets the local node's clock to NTP. If a test is given, resets time on all
nodes across the test."
([] (c/su (c/exec :ntpdate :-b "pool.ntp.org")))
([test] (c/with-test-nodes test (reset-time!))))
(defn bump-time!
"Adjusts the clock by delta milliseconds."
[delta]
(c/su (c/exec "/opt/jepsen/bump-time" delta)))
(defn strobe-time!
"Strobes the time back and forth by delta milliseconds, every period
milliseconds, for duration seconds."
[delta period duration]
(c/su (c/exec "/opt/jepsen/strobe-time" delta period duration)))
| null | https://raw.githubusercontent.com/gator1/jepsen/1932cbd72cbc1f6c2a27abe0fe347ea989f0cfbb/jepsen/src/jepsen/nemesis/time.clj | clojure | Upload | (ns jepsen.nemesis.time
"Functions for messing with time and clocks."
(:require [jepsen.control :as c]
[jepsen.os.debian :as debian]
[clojure.java.io :as io])
(:import (java.io File)))
(defn compile!
"Takes a Reader to C source code and spits out a binary to /opt/jepsen/<bin>."
[reader bin]
(c/su
(let [tmp-file (File/createTempFile "jepsen-upload" ".c")]
(try
(io/copy reader tmp-file)
(c/exec :mkdir :-p "/opt/jepsen")
(c/exec :chmod "a+rwx" "/opt/jepsen")
(c/upload (.getCanonicalPath tmp-file) (str "/opt/jepsen/" bin ".c"))
(c/cd "/opt/jepsen"
(c/exec :gcc (str bin ".c"))
(c/exec :mv "a.out" bin))
(finally
(.delete tmp-file)))))
bin)
(defn compile-resource!
"Given a resource name, spits out a binary to /opt/jepsen/<bin>."
[resource bin]
(with-open [r (io/reader (io/resource resource))]
(compile! r bin)))
(defn install!
"Uploads and compiles some C programs for messing with clocks."
[]
(c/su
(debian/install [:build-essential])
(compile-resource! "strobe-time.c" "strobe-time")
(compile-resource! "bump-time.c" "bump-time")))
(defn reset-time!
"Resets the local node's clock to NTP. If a test is given, resets time on all
nodes across the test."
([] (c/su (c/exec :ntpdate :-b "pool.ntp.org")))
([test] (c/with-test-nodes test (reset-time!))))
(defn bump-time!
"Adjusts the clock by delta milliseconds."
[delta]
(c/su (c/exec "/opt/jepsen/bump-time" delta)))
(defn strobe-time!
"Strobes the time back and forth by delta milliseconds, every period
milliseconds, for duration seconds."
[delta period duration]
(c/su (c/exec "/opt/jepsen/strobe-time" delta period duration)))
|
81dc4829431207da7d5c6edb5db074b634a5f181ce24e219a156597a4856b17e | nitrogen/NitrogenProject.com | tests_advancedcontrols2.erl | -module(tests_advancedcontrols2).
-compile(export_all).
-include_lib("nitrogen_core/include/wf.hrl").
main() ->
wf_test:start_other(demos_advancedcontrols2, fun tests/0).
tests() ->
timer:sleep(1000), %% give google charts a chance to respond
?wf_test_js(line_chart, chart_test(line_chart)),
?wf_test_js(bar_chart, chart_test(bar_chart)),
?wf_test_js(pie_chart, chart_test(pie_chart)).
chart_test(ID) ->
{
undefined,
wf:f("return objs('~s').width()", [ID]),
fun([Val]) -> Val==400 end
}.
| null | https://raw.githubusercontent.com/nitrogen/NitrogenProject.com/b4b3a0dbe17394608d2ee6eaa089e3ece1285075/src/tests/tests_advancedcontrols2.erl | erlang | give google charts a chance to respond | -module(tests_advancedcontrols2).
-compile(export_all).
-include_lib("nitrogen_core/include/wf.hrl").
main() ->
wf_test:start_other(demos_advancedcontrols2, fun tests/0).
tests() ->
?wf_test_js(line_chart, chart_test(line_chart)),
?wf_test_js(bar_chart, chart_test(bar_chart)),
?wf_test_js(pie_chart, chart_test(pie_chart)).
chart_test(ID) ->
{
undefined,
wf:f("return objs('~s').width()", [ID]),
fun([Val]) -> Val==400 end
}.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.