_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
3a4eb99ec51a19c5f7fc4e89dfd4e92785aa039f670b32eb37c402b1ca9e9500 | samuelrivas/moka | moka_history_tests.erl | Copyright ( c ) 2013 , < >
%%% All rights reserved.
%%% Redistribution and use in source and binary forms, with or without
%%% modification, are permitted provided that the following conditions are met:
%%% * Redistributions of source code must retain the above copyright
%%% notice, this list of conditions and the following disclaimer.
%%% * Redistributions in binary form must reproduce the above copyright
%%% notice, this list of conditions and the following disclaimer in the
%%% documentation and/or other materials provided with the distribution.
%%% * Neither the name the author nor the names of its contributors may
%%% be used to endorse or promote products derived from this software
%%% without specific prior written permission.
%%%
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
%%% AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
%%% ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES
( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ;
%%% LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
%%% (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
%%% THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
%%% @doc unit tests for the moka history server
-module(moka_history_tests).
-export([]).
%%%_* Includes =========================================================
-define(PROPER_NO_IMPORTS, true).
-include_lib("proper/include/proper.hrl").
-include_lib("eunit/include/eunit.hrl").
_ * Eunit wrapper = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
all_properties_test_() -> sel_test:props_to_eunit(?MODULE).
%%%_* Properties =======================================================
prop_get_history() ->
?FORALL(
History, history(),
try
crashfy:untuple(moka_history:start_link(test_server_name())),
replay_history(test_server_name(), History),
proper:equals(History, moka_history:get_calls(test_server_name()))
after
moka_history:stop(test_server_name())
end
).
%%%_* Generators =======================================================
history() -> proper_types:list(history_entry()).
history_entry() -> {description(), args_gen(), result_gen()}.
Variety does n't seem relevant for this test case , so we just generate a few
%% possibilities
description() -> {module_gen(), function_gen()}.
module_gen() -> proper_types:elements([mod1, mod2, mod3, mod4]).
function_gen() -> proper_types:elements([fun1, fun2, fun3, fun4]).
args_gen() -> proper_types:list(proper_types:integer()).
result_gen() -> proper_types:oneof([return_gen(), exception_gen()]).
return_gen() -> {return, proper_types:integer()}.
exception_gen() -> {exception, class(), proper_types:integer()}.
class() -> proper_types:oneof([throw, exit, error]).
%%%_* Private Functions ================================================
test_server_name() -> moka_history_test_server.
replay_history(Server, History) ->
lists:foreach(
fun({Description, Args, {return, Result}}) ->
moka_history:add_return(
Server, Description, Args, Result);
({Description, Args, {exception, Class, Reason}}) ->
moka_history:add_exception(
Server, Description, Args, Class, Reason)
end,
History).
%%%_* Emacs ============================================================
%%% Local Variables:
%%% allout-layout: t
erlang - indent - level : 4
%%% End:
| null | https://raw.githubusercontent.com/samuelrivas/moka/92521e43d1d685794f462ed49403c99baeae0226/test/unit/moka_history_tests.erl | erlang | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name the author nor the names of its contributors may
be used to endorse or promote products derived from this software
without specific prior written permission.
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@doc unit tests for the moka history server
_* Includes =========================================================
_* Properties =======================================================
_* Generators =======================================================
possibilities
_* Private Functions ================================================
_* Emacs ============================================================
Local Variables:
allout-layout: t
End: | Copyright ( c ) 2013 , < >
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES
( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ;
ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
-module(moka_history_tests).
-export([]).
-define(PROPER_NO_IMPORTS, true).
-include_lib("proper/include/proper.hrl").
-include_lib("eunit/include/eunit.hrl").
_ * Eunit wrapper = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
all_properties_test_() -> sel_test:props_to_eunit(?MODULE).
prop_get_history() ->
?FORALL(
History, history(),
try
crashfy:untuple(moka_history:start_link(test_server_name())),
replay_history(test_server_name(), History),
proper:equals(History, moka_history:get_calls(test_server_name()))
after
moka_history:stop(test_server_name())
end
).
history() -> proper_types:list(history_entry()).
history_entry() -> {description(), args_gen(), result_gen()}.
Variety does n't seem relevant for this test case , so we just generate a few
description() -> {module_gen(), function_gen()}.
module_gen() -> proper_types:elements([mod1, mod2, mod3, mod4]).
function_gen() -> proper_types:elements([fun1, fun2, fun3, fun4]).
args_gen() -> proper_types:list(proper_types:integer()).
result_gen() -> proper_types:oneof([return_gen(), exception_gen()]).
return_gen() -> {return, proper_types:integer()}.
exception_gen() -> {exception, class(), proper_types:integer()}.
class() -> proper_types:oneof([throw, exit, error]).
test_server_name() -> moka_history_test_server.
replay_history(Server, History) ->
lists:foreach(
fun({Description, Args, {return, Result}}) ->
moka_history:add_return(
Server, Description, Args, Result);
({Description, Args, {exception, Class, Reason}}) ->
moka_history:add_exception(
Server, Description, Args, Class, Reason)
end,
History).
erlang - indent - level : 4
|
7782fd82d1c36561086a1c3afb126101b6d47bc1d93c290acd19eca7a5a1891c | mokus0/junkbox | DeCasteljau.hs | module Math.DeCasteljau where
import Data.VectorSpace
interp a x y = lerp x y a
deCasteljau [] t = []
deCasteljau ps t = ps : deCasteljau (zipWith (interp t) ps (tail ps)) t
bezier ps = head . last . deCasteljau ps
split ps t = (map head pss, reverse (map last pss))
where pss = deCasteljau ps t
| null | https://raw.githubusercontent.com/mokus0/junkbox/151014bbef9db2b9205209df66c418d6d58b0d9e/Haskell/Math/DeCasteljau.hs | haskell | module Math.DeCasteljau where
import Data.VectorSpace
interp a x y = lerp x y a
deCasteljau [] t = []
deCasteljau ps t = ps : deCasteljau (zipWith (interp t) ps (tail ps)) t
bezier ps = head . last . deCasteljau ps
split ps t = (map head pss, reverse (map last pss))
where pss = deCasteljau ps t
| |
a4dfc99b50b0667ab31e63d67bbcc91d98eb78ce7e5617feddade246376c624e | Yume-Labs/prism | db_test.clj | (ns prism.db-test
(:require [clojure.test :refer :all]
[prism.db :refer :all]
[prism.helpers :refer [with-test-node
full-config
full-nft
second-nft
partial-config-twin-dedupe
partial-config-no-twin-dedupe]]
[xtdb.api :refer :all]))
(deftest test-storing-and-retrieving-config
(testing "We can store and retrieve config"
(-> (with-test-node)
(store-config "test" full-config)
(retrieve-config "test")
(= full-config)
(is "we can get our config back by collection name"))))
(deftest test-state-validation
(testing "validate-nft-state"
(is (= true (validate-nft-state :to-do)) "testing :to-do")
(is (= true (validate-nft-state :decisions-made)) "testing :decisions-made")
(is (= true (validate-nft-state :outcomes-resolved)) "testing :outcomes-resolved")
(is (= true (validate-nft-state :ready)) "testing :ready")
(is (= true (validate-nft-state :metadata-generated)) "testing :metadata-generated")
(is (= true (validate-nft-state :image-rendered)) "testing :image-rendered")
(is (= false (validate-nft-state :another-value)) "testing incorrect state"))
(testing "get-previous-state"
(is (= nil (get-previous-state :to-do)) "testing :to-do")
(is (= :to-do (get-previous-state :decisions-made)) "testing :decisions-made")
(is (= :decisions-made (get-previous-state :outcomes-resolved)) "testing :outcomes-resolved")
(is (= :outcomes-resolved (get-previous-state :ready)) "testing :ready")
(is (= :ready (get-previous-state :metadata-generated)) "testing :metadata-generated")
(is (= :metadata-generated (get-previous-state :image-rendered)) "testing :image-rendered")))
(defn check-states
[res & states]
(if (= (count res) (count states))
(loop [cur-res (first res)
cur-state (first states)
next-res (next res)
next-states (next states)
err []]
(if (and (nil? cur-res) (nil? cur-state))
err
(let [payload (second cur-res)
chk-state (:state payload)]
(recur (first next-res)
(first next-states)
(next next-res)
(next next-states)
(if (= cur-state chk-state)
err
(conj err "Mismatched."))))))
["Mismatched array sizes."]))
(deftest test-storing-and-retrieving-nft
(testing "We can store and retrieve config"
(-> (with-test-node)
(store-nft "test" full-nft full-config)
(second)
(retrieve-nft "test" (:id full-nft))
(= full-nft)
(is "we can get our nft back by collection name and id")))
(testing "get-nfts"
(-> (with-test-node)
(store-nft "test" full-nft full-config)
(second)
(store-nft "test" (assoc second-nft :state :to-do) full-config)
(second)
(store-nft "test" second-nft full-config)
(second)
(get-nfts-thawed "test" get-nfts)
(count)
(= 2)
(is "we get back two NFTs as expected")))
(testing "get-nfts returns right state"
(-> (with-test-node)
(store-nft "test" full-nft full-config)
(second)
(store-nft "test" (assoc second-nft :state :to-do) full-config)
(second)
(store-nft "test" second-nft full-config)
(second)
(get-nfts-thawed "test" get-nfts)
(check-states :to-do :decisions-made)
(is "we get back correct states on results"))))
(deftest test-digests
(testing "get-digest"
(is (= nil (get-digest full-nft partial-config-twin-dedupe)))
(is (= "88d4266fd4e6338d13b845fcf289579d209c897823b9217da3e161936f031589"
(get-digest second-nft partial-config-twin-dedupe)))
(is (= nil (get-digest second-nft partial-config-no-twin-dedupe)))))
(deftest test-counting-nfts-with-digests
(testing "count-nfts-with-digest"
(-> (with-test-node)
(store-nft "test" second-nft partial-config-twin-dedupe true)
(second)
(count-nfts-with-digest "test" "88d4266fd4e6338d13b845fcf289579d209c897823b9217da3e161936f031589"))))
| null | https://raw.githubusercontent.com/Yume-Labs/prism/1dd2c0f4eac8bbd32877a40617b36c1319d8f114/t/prism/db_test.clj | clojure | (ns prism.db-test
(:require [clojure.test :refer :all]
[prism.db :refer :all]
[prism.helpers :refer [with-test-node
full-config
full-nft
second-nft
partial-config-twin-dedupe
partial-config-no-twin-dedupe]]
[xtdb.api :refer :all]))
(deftest test-storing-and-retrieving-config
(testing "We can store and retrieve config"
(-> (with-test-node)
(store-config "test" full-config)
(retrieve-config "test")
(= full-config)
(is "we can get our config back by collection name"))))
(deftest test-state-validation
(testing "validate-nft-state"
(is (= true (validate-nft-state :to-do)) "testing :to-do")
(is (= true (validate-nft-state :decisions-made)) "testing :decisions-made")
(is (= true (validate-nft-state :outcomes-resolved)) "testing :outcomes-resolved")
(is (= true (validate-nft-state :ready)) "testing :ready")
(is (= true (validate-nft-state :metadata-generated)) "testing :metadata-generated")
(is (= true (validate-nft-state :image-rendered)) "testing :image-rendered")
(is (= false (validate-nft-state :another-value)) "testing incorrect state"))
(testing "get-previous-state"
(is (= nil (get-previous-state :to-do)) "testing :to-do")
(is (= :to-do (get-previous-state :decisions-made)) "testing :decisions-made")
(is (= :decisions-made (get-previous-state :outcomes-resolved)) "testing :outcomes-resolved")
(is (= :outcomes-resolved (get-previous-state :ready)) "testing :ready")
(is (= :ready (get-previous-state :metadata-generated)) "testing :metadata-generated")
(is (= :metadata-generated (get-previous-state :image-rendered)) "testing :image-rendered")))
(defn check-states
[res & states]
(if (= (count res) (count states))
(loop [cur-res (first res)
cur-state (first states)
next-res (next res)
next-states (next states)
err []]
(if (and (nil? cur-res) (nil? cur-state))
err
(let [payload (second cur-res)
chk-state (:state payload)]
(recur (first next-res)
(first next-states)
(next next-res)
(next next-states)
(if (= cur-state chk-state)
err
(conj err "Mismatched."))))))
["Mismatched array sizes."]))
(deftest test-storing-and-retrieving-nft
(testing "We can store and retrieve config"
(-> (with-test-node)
(store-nft "test" full-nft full-config)
(second)
(retrieve-nft "test" (:id full-nft))
(= full-nft)
(is "we can get our nft back by collection name and id")))
(testing "get-nfts"
(-> (with-test-node)
(store-nft "test" full-nft full-config)
(second)
(store-nft "test" (assoc second-nft :state :to-do) full-config)
(second)
(store-nft "test" second-nft full-config)
(second)
(get-nfts-thawed "test" get-nfts)
(count)
(= 2)
(is "we get back two NFTs as expected")))
(testing "get-nfts returns right state"
(-> (with-test-node)
(store-nft "test" full-nft full-config)
(second)
(store-nft "test" (assoc second-nft :state :to-do) full-config)
(second)
(store-nft "test" second-nft full-config)
(second)
(get-nfts-thawed "test" get-nfts)
(check-states :to-do :decisions-made)
(is "we get back correct states on results"))))
(deftest test-digests
(testing "get-digest"
(is (= nil (get-digest full-nft partial-config-twin-dedupe)))
(is (= "88d4266fd4e6338d13b845fcf289579d209c897823b9217da3e161936f031589"
(get-digest second-nft partial-config-twin-dedupe)))
(is (= nil (get-digest second-nft partial-config-no-twin-dedupe)))))
(deftest test-counting-nfts-with-digests
(testing "count-nfts-with-digest"
(-> (with-test-node)
(store-nft "test" second-nft partial-config-twin-dedupe true)
(second)
(count-nfts-with-digest "test" "88d4266fd4e6338d13b845fcf289579d209c897823b9217da3e161936f031589"))))
| |
e64f67c61ccbf626304cd11523e487591eebebc1851e0b2a8b8e61631672261b | leocadiotine/fred | image_saver.clj | (ns fred.image-saver
(:require [fs.core :as fs]))
(defn substitute [s substitution-map]
(reduce (fn [s [match replacement]]
(clojure.string/replace s match replacement))
s substitution-map))
(defn rename-droid
"Returns a string that matches Android's resource naming requirements:
all lowercase, replaces '-' and whitespaces with '_' and removes @2x."
[old-name]
(substitute (clojure.string/lower-case old-name) {" " "_"
"-" "_"
"@2x" ""}))
(defn copy-to-dir-and-rename
"Moves a list of files from the given folder to the given target directory.
Also, renames all the files to match Android's resource naming requirements:
all lowercase, replaces '-' and whitespaces with '_' and removes @2x."
[file-names files-path target-directory-name]
(doseq [filename file-names]
(fs/copy+ (str files-path filename)
(str target-directory-name
(rename-droid filename)))))
(defn clone-dir
"Makes several copies of original-path. root is the root folder and all the other
arguments as folders relative to it."
[root-folder original-path & new-paths]
(doseq [new-path new-paths]
(fs/copy-dir (str root-folder original-path) (str root-folder new-path))))
| null | https://raw.githubusercontent.com/leocadiotine/fred/8fa95f83f5fec49bc35676dc6261e0cbccb911fc/code/src/fred/image_saver.clj | clojure | (ns fred.image-saver
(:require [fs.core :as fs]))
(defn substitute [s substitution-map]
(reduce (fn [s [match replacement]]
(clojure.string/replace s match replacement))
s substitution-map))
(defn rename-droid
"Returns a string that matches Android's resource naming requirements:
all lowercase, replaces '-' and whitespaces with '_' and removes @2x."
[old-name]
(substitute (clojure.string/lower-case old-name) {" " "_"
"-" "_"
"@2x" ""}))
(defn copy-to-dir-and-rename
"Moves a list of files from the given folder to the given target directory.
Also, renames all the files to match Android's resource naming requirements:
all lowercase, replaces '-' and whitespaces with '_' and removes @2x."
[file-names files-path target-directory-name]
(doseq [filename file-names]
(fs/copy+ (str files-path filename)
(str target-directory-name
(rename-droid filename)))))
(defn clone-dir
"Makes several copies of original-path. root is the root folder and all the other
arguments as folders relative to it."
[root-folder original-path & new-paths]
(doseq [new-path new-paths]
(fs/copy-dir (str root-folder original-path) (str root-folder new-path))))
| |
b575f6c085330f6be9047c10ee3e4c8db0fc6ecf980e59176d431fcf13d8593d | AdRoll/rebar3_format | type_specs.erl | -module(type_specs).
-include_lib("syntax_tools/include/merl.hrl").
-export([f/1, b/0, c/2]).
-export_type([t/0, ot/2, ff2/0]).
-type aa() :: _.
-type t() :: integer().
-type ff(A) :: ot(A, A) | tuple() | 1..3 | map() | {}.
-type ff1() :: ff(bin()) | foo:bar().
-type ff2() :: {list(), [_], list(integer()),
nonempty_list(), nonempty_list(atom()), [ff1(), ...],
nil(), []}.
-type bin() :: <<>>
| <<_:(+4)>>
| <<_:_*8>>
| <<_:12, _:_*16>>
| <<_:16, _:_*(0)>>
| <<_:16, _:_*(+0)>>.
-callback cb() -> t().
-optional_callbacks([cb/0]).
-opaque ot(A, B) :: {A, B}.
-type f1() :: fun().
-type f2() :: fun((...) -> t()).
-type f3() :: fun(() -> t()).
-type f4() :: fun((t(), t()) -> t()).
-wild(attribute).
-record(par, {a :: undefined | ?MODULE}).
-record(r0, {}).
-record(r,
{f1 :: integer(),
f2 = a :: atom(),
f3 :: fun(),
f4 = 7}).
-type r0() :: #r0{} | #r{f1 :: 3} | #r{f1 :: 3, f2 :: 'sju'}.
-type m1() :: #{} | map().
-type m2() :: #{a := m1(), b => #{} | fy:m2()}.
-type ( ) : : # { ... } .
%-type m4() :: #{_ => _, ...}.
%-type m5() :: #{any() => any(), ...}.
-type m3() :: #{any() => any()}.
-type m4() :: #{_ => _, any() => any()}.
-type m5() :: #{any() => any(), any() => any()}.
-type b1() :: B1 :: binary() | (BitString :: bitstring()).
-type pair(A, B) :: {(A), (B)}.
-spec type_specs:f(pair(r0(), r0())) -> pair(t(), t()).
f({R, R}) ->
_ = ?MODULE_STRING ++ "hej",
_ = <<"foo">>,
_ = R#r.f1,
_ = R#r{f1 = 17, f2 = b},
{1, 1}.
-spec type_specs:b() -> pos_integer().
b() ->
case foo:bar() of
#{a := 2} -> 19
end.
-define(I, integer).
-spec c(Atom :: atom(), Integer :: ?I()) -> {atom(), integer()};
(X, Y) -> {atom(), float()} when X :: atom(),
is_subtype(Y, float());
(integer(), atom()) -> {integer(), atom()}.
c(A, B) ->
_ = ?I,
{A, B}.
| null | https://raw.githubusercontent.com/AdRoll/rebar3_format/5ffb11341796173317ae094d4e165b85fad6aa19/test_app/src/otp_samples/type_specs.erl | erlang | -type m4() :: #{_ => _, ...}.
-type m5() :: #{any() => any(), ...}. | -module(type_specs).
-include_lib("syntax_tools/include/merl.hrl").
-export([f/1, b/0, c/2]).
-export_type([t/0, ot/2, ff2/0]).
-type aa() :: _.
-type t() :: integer().
-type ff(A) :: ot(A, A) | tuple() | 1..3 | map() | {}.
-type ff1() :: ff(bin()) | foo:bar().
-type ff2() :: {list(), [_], list(integer()),
nonempty_list(), nonempty_list(atom()), [ff1(), ...],
nil(), []}.
-type bin() :: <<>>
| <<_:(+4)>>
| <<_:_*8>>
| <<_:12, _:_*16>>
| <<_:16, _:_*(0)>>
| <<_:16, _:_*(+0)>>.
-callback cb() -> t().
-optional_callbacks([cb/0]).
-opaque ot(A, B) :: {A, B}.
-type f1() :: fun().
-type f2() :: fun((...) -> t()).
-type f3() :: fun(() -> t()).
-type f4() :: fun((t(), t()) -> t()).
-wild(attribute).
-record(par, {a :: undefined | ?MODULE}).
-record(r0, {}).
-record(r,
{f1 :: integer(),
f2 = a :: atom(),
f3 :: fun(),
f4 = 7}).
-type r0() :: #r0{} | #r{f1 :: 3} | #r{f1 :: 3, f2 :: 'sju'}.
-type m1() :: #{} | map().
-type m2() :: #{a := m1(), b => #{} | fy:m2()}.
-type ( ) : : # { ... } .
-type m3() :: #{any() => any()}.
-type m4() :: #{_ => _, any() => any()}.
-type m5() :: #{any() => any(), any() => any()}.
-type b1() :: B1 :: binary() | (BitString :: bitstring()).
-type pair(A, B) :: {(A), (B)}.
-spec type_specs:f(pair(r0(), r0())) -> pair(t(), t()).
f({R, R}) ->
_ = ?MODULE_STRING ++ "hej",
_ = <<"foo">>,
_ = R#r.f1,
_ = R#r{f1 = 17, f2 = b},
{1, 1}.
-spec type_specs:b() -> pos_integer().
b() ->
case foo:bar() of
#{a := 2} -> 19
end.
-define(I, integer).
-spec c(Atom :: atom(), Integer :: ?I()) -> {atom(), integer()};
(X, Y) -> {atom(), float()} when X :: atom(),
is_subtype(Y, float());
(integer(), atom()) -> {integer(), atom()}.
c(A, B) ->
_ = ?I,
{A, B}.
|
fa904d15efbcca243ed8af9dd901709abccacf07e91c6fff76093944de0d67b3 | hidaris/thinking-dumps | steak.rkt | #lang typed/racket
(define-type meza
(U Shrimp
Calamari
Escargots
Hummus))
(struct Shrimp () #:transparent)
(struct Calamari () #:transparent)
(struct Escargots () #:transparent)
(struct Hummus () #:transparent)
(define-type main
(U Steak
Ravioli
Chicken
Eggplant))
(struct Steak () #:transparent)
(struct Ravioli () #:transparent)
(struct Chicken () #:transparent)
(struct Eggplant () #:transparent)
(define-type salad
(U Green
Cucumber
Greek))
(struct Green () #:transparent)
(struct Cucumber () #:transparent)
(struct Greek () #:transparent)
(define-type desserts
(U Sundae
Mousse
Torte))
(struct Sundae () #:transparent)
(struct Mousse () #:transparent)
(struct Torte () #:transparent)
(: add - a - steak ( - > meza ( ) ) )
;; we don't need a type denotation here.
(define (add-a-steak m)
(match m
[(Shrimp)
(cons (Shrimp)
(Steak))]
[(Calamari)
(cons (Calamari)
(Steak))]
[(Escargots)
(cons (Escargots)
(Steak))]
[(Hummus)
(cons (Hummus)
(Steak))]))
(: add-a-steak2
(-> meza (Pairof meza main)))
(define add-a-steak2
(lambda (m)
(cons m (Steak))))
;;; I'm confused on how to use pattern match here.
(: eq - main ( - > main main Boolean ) )
;; (define eq-main
;; (lambda (t m)
;; (cond
;; [(and (Steak? t) (Steak? m)) true]
[ ( and ( ? t ) ( ? m ) ) true ]
;; [(and (Chicken? t) (Chicken? m)) true]
;; [(and (Eggplant? t) (Eggplant? m)) true]
;; [else false])))
;;; ok, I find it.
(: eq - main ( - > main main Boolean ) )
;; (define (eq-main t m)
;; (match t
;; [(Steak)
;; (Steak? m)]
[ ( )
( ? m ) ]
;; [(Chicken)
;; (Chicken? m)]
;; [(Eggplant)
;; (Eggplant? m)]
;; [_ false]))
;; a better way
(: eq-main (-> main main Boolean))
(define (eq-main t m)
(match* (t m)
[((Steak) (Steak))
true]
[((Ravioli) (Ravioli))
true]
[((Chicken) (Chicken))
true]
[((Eggplant) (Eggplant))
true]
[(_ _) false]))
(: has-steak (-> meza
Any
desserts
Boolean))
(define (has-steak a b c)
(match b
[(Steak) true]
[_ false]))
| null | https://raw.githubusercontent.com/hidaris/thinking-dumps/3fceaf9e6195ab99c8315749814a7377ef8baf86/typed-racket/04-look-to-the-stars/steak.rkt | racket | we don't need a type denotation here.
I'm confused on how to use pattern match here.
(define eq-main
(lambda (t m)
(cond
[(and (Steak? t) (Steak? m)) true]
[(and (Chicken? t) (Chicken? m)) true]
[(and (Eggplant? t) (Eggplant? m)) true]
[else false])))
ok, I find it.
(define (eq-main t m)
(match t
[(Steak)
(Steak? m)]
[(Chicken)
(Chicken? m)]
[(Eggplant)
(Eggplant? m)]
[_ false]))
a better way | #lang typed/racket
(define-type meza
(U Shrimp
Calamari
Escargots
Hummus))
(struct Shrimp () #:transparent)
(struct Calamari () #:transparent)
(struct Escargots () #:transparent)
(struct Hummus () #:transparent)
(define-type main
(U Steak
Ravioli
Chicken
Eggplant))
(struct Steak () #:transparent)
(struct Ravioli () #:transparent)
(struct Chicken () #:transparent)
(struct Eggplant () #:transparent)
(define-type salad
(U Green
Cucumber
Greek))
(struct Green () #:transparent)
(struct Cucumber () #:transparent)
(struct Greek () #:transparent)
(define-type desserts
(U Sundae
Mousse
Torte))
(struct Sundae () #:transparent)
(struct Mousse () #:transparent)
(struct Torte () #:transparent)
(: add - a - steak ( - > meza ( ) ) )
(define (add-a-steak m)
(match m
[(Shrimp)
(cons (Shrimp)
(Steak))]
[(Calamari)
(cons (Calamari)
(Steak))]
[(Escargots)
(cons (Escargots)
(Steak))]
[(Hummus)
(cons (Hummus)
(Steak))]))
(: add-a-steak2
(-> meza (Pairof meza main)))
(define add-a-steak2
(lambda (m)
(cons m (Steak))))
(: eq - main ( - > main main Boolean ) )
[ ( and ( ? t ) ( ? m ) ) true ]
(: eq - main ( - > main main Boolean ) )
[ ( )
( ? m ) ]
(: eq-main (-> main main Boolean))
(define (eq-main t m)
(match* (t m)
[((Steak) (Steak))
true]
[((Ravioli) (Ravioli))
true]
[((Chicken) (Chicken))
true]
[((Eggplant) (Eggplant))
true]
[(_ _) false]))
(: has-steak (-> meza
Any
desserts
Boolean))
(define (has-steak a b c)
(match b
[(Steak) true]
[_ false]))
|
218c63d060d1e2dd145028c0e935f7aabef80c646527fe43d5a959999d240baa | lesguillemets/sicp-haskell | 1.45.hs | module OneFortyfive where
import NewtonsMethod
repeated :: (a -> a) -> Int -> a -> a
repeated _ 0 = id
repeated f n = f . repeated f (n-1)
nthRoot :: Int -> Double -> Double
nthRoot n x = fixP (repeated averaageDamp n' (\y -> x / y^(n-1))) 1.0
where n' = floor . logBase 2 $ fromIntegral n
-- |
-- >>> import Text.Printf
> > > printf " % .4f " $ ( nthRoot 23 19)^23
19.0000
| null | https://raw.githubusercontent.com/lesguillemets/sicp-haskell/df524a1e28c45fb16a56f539cad8babc881d0431/exercise/chap01/sect3/1.45.hs | haskell | |
>>> import Text.Printf | module OneFortyfive where
import NewtonsMethod
repeated :: (a -> a) -> Int -> a -> a
repeated _ 0 = id
repeated f n = f . repeated f (n-1)
nthRoot :: Int -> Double -> Double
nthRoot n x = fixP (repeated averaageDamp n' (\y -> x / y^(n-1))) 1.0
where n' = floor . logBase 2 $ fromIntegral n
> > > printf " % .4f " $ ( nthRoot 23 19)^23
19.0000
|
99e6809aeef66fbbf7ffa1c04058d04e275db0d101c506e3ae8052f4387fa829 | nd/bird | 3.6.2.hs | p = ( m + n ) div 2
-- if m + 1 < n
( m + n ) div 2 =
-- {def of div}
floor ( ( m + n ) / 2 ) =
( ( m + n ) / 2 ) < n and floor ( ( m + n ) / 2 ) < n = > ( m + n ) div 2 < n
also ( ( m + n ) / 2 ) > m { since m + 1 < n }
-- so we get m < p < n
--
-- if m + 1 = n
( m + n ) div 2 = ( m + m + 1 ) div 2 = ( 2 m + 1 ) div 2 = floor ( m + 0.5 ) = m | null | https://raw.githubusercontent.com/nd/bird/06dba97af7cfb11f558eaeb31a75bd04cacf7201/ch03/3.6.2.hs | haskell | if m + 1 < n
{def of div}
so we get m < p < n
if m + 1 = n | p = ( m + n ) div 2
( m + n ) div 2 =
floor ( ( m + n ) / 2 ) =
( ( m + n ) / 2 ) < n and floor ( ( m + n ) / 2 ) < n = > ( m + n ) div 2 < n
also ( ( m + n ) / 2 ) > m { since m + 1 < n }
( m + n ) div 2 = ( m + m + 1 ) div 2 = ( 2 m + 1 ) div 2 = floor ( m + 0.5 ) = m |
b839be61869ea1937b0e24dd2e423d3c2cae1cea6e074f0a44cbdfc844effe69 | hoon0612/Lisp-in-OCaml | parser.mli | type token =
| Int of (int)
| Float of (float)
| String of (string)
| Symbol of (string)
| Quote
| Opener
| Closer
val pmain :
(Lexing.lexbuf -> token) -> Lexing.lexbuf -> Types.s_exp
| null | https://raw.githubusercontent.com/hoon0612/Lisp-in-OCaml/498ace173018af8f7772b3db803872e76b5902cf/parser.mli | ocaml | type token =
| Int of (int)
| Float of (float)
| String of (string)
| Symbol of (string)
| Quote
| Opener
| Closer
val pmain :
(Lexing.lexbuf -> token) -> Lexing.lexbuf -> Types.s_exp
| |
0d915128379c7d83e8b8190728f4fa4463ce8ed0355dfa4b88aba76dbf18fcff | chiroptical/thinking-with-types | HList.hs | {-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ScopedTypeVariables #-}
# LANGUAGE TypeApplications #
{-# LANGUAGE TypeFamilies #-}
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
-- Heterogenous List
module HList where
import Data.Kind (Constraint, Type)
import GHC.TypeLits (TypeError)
import GHC.TypeLits (ErrorMessage (Text))
data HList (ts :: [Type]) where
HNil :: HList '[]
(:#) :: t -> HList ts -> HList (t ': ts)
infixr 5 :#
hLength :: HList ts -> Int
hLength HNil = 0
hLength (_ :# ts) = 1 + hLength ts
hHead :: HList (t ': ts) -> t
hHead (h :# _) = h
hTail :: HList (t ': ts) -> HList ts
hTail (_ :# ts) = ts
Original , non - ideal , implementation of hLast
-- data Any = forall a. Any a
hLast : : HList ( t ' : ts ) - > Any
hLast ( t : # HNil ) = Any t
hLast ( _ : # t : # ts ) = hLast ( t : # ts )
type family Last (ts :: [Type]) :: Type where
Last '[] = TypeError (Text "Empty lists don't have a Last type")
Last (t ': '[]) = t
Last (t ': ts) = Last ts
hLast :: HList ts -> Last ts
hLast HNil = error "hLast bottoms for empty list"
hLast (t :# HNil) = t
hLast (_ :# ts@(_ :# _)) = hLast ts
showBool :: HList '[_1, Bool, _2] -> String
showBool (_ :# b :# _ :# HNil) = show b
instance ( HList ' [ ] ) where
HNil = = HNil = True
instance ( Eq t , ( HList ts ) ) = > Eq ( HList ( t ' : ts ) ) where
( a : # as ) = = ( b : # bs ) = a = = b & & as = = bs
instance All Eq ts => Eq (HList ts) where
HNil == HNil = True
(a :# as) == (b :# bs) = a == b && as == bs
type family AllEq ( ts : : [ Type ] ) : : Constraint where
AllEq ' [ ] = ( )
AllEq ( t ' : ts ) = ( Eq t , AllEq ts )
type family All (c :: Type -> Constraint) (ts :: [Type]) :: Constraint where
All c '[] = ()
All c (t ': ts) = (c t, All c ts)
instance All Show ts => Show (HList ts) where
show HNil = "HNil"
show (t :# ts) = show t <> " :# " <> show ts
Due to the monomorphism restriction ,
-- -> one cannot use where (or let) to keep `t` and `ts`
-- -> as the same types in the signature
-- -> and therefore `go` (below) can't compile
-- -> **Need to learn a bit more about this**
show ( t : # ts ) = " [ " + + go ( t : # ts ) + + " ] "
-- where
-- go (t :# ts) = show t ++ "," ++ go ts
instance (All Eq ts, All Ord ts) => Ord (HList ts) where
HNil `compare` HNil = EQ
(t :# ts) `compare` (t' :# ts') = t `compare` t' <> ts `compare` ts' | null | https://raw.githubusercontent.com/chiroptical/thinking-with-types/781f90f1b08eb94ef3600c5b7da92dfaf9ea4285/Chapter5/src/HList.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE DataKinds #
# LANGUAGE GADTs #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
Heterogenous List
data Any = forall a. Any a
-> one cannot use where (or let) to keep `t` and `ts`
-> as the same types in the signature
-> and therefore `go` (below) can't compile
-> **Need to learn a bit more about this**
where
go (t :# ts) = show t ++ "," ++ go ts | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE TypeApplications #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
module HList where
import Data.Kind (Constraint, Type)
import GHC.TypeLits (TypeError)
import GHC.TypeLits (ErrorMessage (Text))
-- | A heterogeneous list: the type-level list @ts@ tracks the type of every
-- element, so each cons ('(:#)') prepends both a value and its type.
data HList (ts :: [Type]) where
  HNil :: HList '[]
  (:#) :: t -> HList ts -> HList (t ': ts)
-- Right-associative, mirroring ordinary list cons.
infixr 5 :#
-- | Runtime length of the list (number of '(:#)' cells).
hLength :: HList ts -> Int
hLength HNil = 0
hLength (_ :# rest) = succ (hLength rest)
-- | First element. Total despite the single clause: the GADT index
-- @t ': ts@ guarantees the argument is non-empty, so no 'HNil' case exists.
hHead :: HList (t ': ts) -> t
hHead (h :# _) = h
-- | Everything after the first element; the type-level list shrinks by one.
hTail :: HList (t ': ts) -> HList ts
hTail (_ :# ts) = ts
-- Original, non-ideal, implementation of hLast
-- hLast :: HList (t ': ts) -> Any
-- hLast (t :# HNil) = Any t
-- hLast (_ :# t :# ts) = hLast (t :# ts)
-- | Type of the final element of a type-level list. Asking for the last of
-- an empty list is rejected at compile time via 'TypeError'.
type family Last (ts :: [Type]) :: Type where
  Last '[] = TypeError (Text "Empty lists don't have a Last type")
  Last (t ': '[]) = t
  Last (t ': ts) = Last ts
-- | Last element of the list. The 'HNil' clause is unreachable from
-- well-typed call sites (@Last '[]@ is a 'TypeError') but is kept for
-- clause coverage; the as-pattern @ts\@(_ :# _)@ tells GHC the recursive
-- call is on a provably non-empty tail.
hLast :: HList ts -> Last ts
hLast HNil = error "hLast bottoms for empty list"
hLast (t :# HNil) = t
hLast (_ :# ts@(_ :# _)) = hLast ts
showBool :: HList '[_1, Bool, _2] -> String
showBool (_ :# b :# _ :# HNil) = show b
instance ( HList ' [ ] ) where
HNil = = HNil = True
instance ( Eq t , ( HList ts ) ) = > Eq ( HList ( t ' : ts ) ) where
( a : # as ) = = ( b : # bs ) = a = = b & & as = = bs
instance All Eq ts => Eq (HList ts) where
HNil == HNil = True
(a :# as) == (b :# bs) = a == b && as == bs
type family AllEq ( ts : : [ Type ] ) : : Constraint where
AllEq ' [ ] = ( )
AllEq ( t ' : ts ) = ( Eq t , AllEq ts )
type family All (c :: Type -> Constraint) (ts :: [Type]) :: Constraint where
All c '[] = ()
All c (t ': ts) = (c t, All c ts)
instance All Show ts => Show (HList ts) where
show HNil = "HNil"
show (t :# ts) = show t <> " :# " <> show ts
Due to the monomorphism restriction ,
show ( t : # ts ) = " [ " + + go ( t : # ts ) + + " ] "
instance (All Eq ts, All Ord ts) => Ord (HList ts) where
HNil `compare` HNil = EQ
(t :# ts) `compare` (t' :# ts') = t `compare` t' <> ts `compare` ts' |
50f2fa1bbd604898f378d41da71ab31a07f122fcaa29812839ba94a9d72f264c | gusbicalho/effects-toy | WaiHandler.hs | # LANGUAGE BlockArguments #
module Polysemy.EffectsToy.Interpreter.WaiHandler
( runWaiHandler
-- * Re-exports
, module Polysemy.EffectsToy.Effect.WaiHandler
) where
import Polysemy
import Polysemy.Reader
import Polysemy.State
import Polysemy.Writer
import Polysemy.EffectsToy.Effect.WaiHandler
import qualified Network.Wai as Wai
import qualified Network.HTTP.Types as HTTP
import qualified Polysemy.EffectsToy.Effect.ByteStream as ByteStream
-- | Interpret the 'WaiHandler' effect in terms of Reader\/State\/Writer:
--
--   * 'AskRequest'  reads the ambient 'Wai.Request'
--   * 'PutStatus'   stores the response status (initialised to 500)
--   * 'TellHeaders' accumulates response headers
--   * 'TellChunk'   forwards body chunks to the 'ByteStream.ByteStream' effect
--
-- Returns the accumulated headers together with the final status and the
-- handler's own result.
runWaiHandler' :: ( Member ByteStream.ByteStream r
                  ) => Wai.Request -> Sem (WaiHandler : r) a -> Sem r (HTTP.ResponseHeaders, (HTTP.Status, a))
runWaiHandler' request = runWriter @HTTP.ResponseHeaders
                       . runState HTTP.status500
                       . runReader request
                       . reinterpret3 \case
                           AskRequest -> ask
                           PutStatus status -> put status
                           TellHeaders headers -> tell headers
                           TellChunk chunk -> ByteStream.tellChunk chunk
-- | Run a complete WAI handler: interpret 'WaiHandler' via
-- 'runWaiHandler'', discard the handler's unit result, and return the
-- response headers and status.
runWaiHandler :: ( Member ByteStream.ByteStream r
                 ) => Wai.Request -> Sem (WaiHandler : r) () -> Sem r (HTTP.ResponseHeaders, HTTP.Status)
runWaiHandler request waiApp =
  fmap (\(headers, (status, ())) -> (headers, status)) (runWaiHandler' request waiApp)
| null | https://raw.githubusercontent.com/gusbicalho/effects-toy/e1905ced0f56b13448c1880285f77b957726f50d/toy-polysemy/src/Polysemy/EffectsToy/Interpreter/WaiHandler.hs | haskell | * Re-exports | # LANGUAGE BlockArguments #
module Polysemy.EffectsToy.Interpreter.WaiHandler
( runWaiHandler
, module Polysemy.EffectsToy.Effect.WaiHandler
) where
import Polysemy
import Polysemy.Reader
import Polysemy.State
import Polysemy.Writer
import Polysemy.EffectsToy.Effect.WaiHandler
import qualified Network.Wai as Wai
import qualified Network.HTTP.Types as HTTP
import qualified Polysemy.EffectsToy.Effect.ByteStream as ByteStream
runWaiHandler' :: ( Member ByteStream.ByteStream r
) => Wai.Request -> Sem (WaiHandler : r) a -> Sem r (HTTP.ResponseHeaders, (HTTP.Status, a))
runWaiHandler' request = runWriter @HTTP.ResponseHeaders
. runState HTTP.status500
. runReader request
. reinterpret3 \case
AskRequest -> ask
PutStatus status -> put status
TellHeaders headers -> tell headers
TellChunk chunk -> ByteStream.tellChunk chunk
runWaiHandler :: ( Member ByteStream.ByteStream r
) => Wai.Request -> Sem (WaiHandler : r) () -> Sem r (HTTP.ResponseHeaders, HTTP.Status)
runWaiHandler request waiApp = do
(headers, (status, ())) <- runWaiHandler' request waiApp
return (headers, status)
|
1b84b308ff4fb0614d21ab68097284996acec974e7ec851493c89adc7c41649e | fluree/ledger | performance.clj | (ns fluree.db.ledger.Performance.performance
(:require [clojure.java.io :as io]
[criterium.core :as criterium]
[fluree.db.api :as fdb]
[clojure.tools.logging :as log]
[clojure.core.async :as async]
[fluree.db.test-helpers :as test]
[clojure.edn :as edn]))
;; UTILITY FUNCTIONS - Time and Results Formatting
(defn time-return-data
  "Invokes (apply f args) once and returns the elapsed wall-clock time in
  milliseconds as a float. The value produced by f is discarded."
  [f & args]
  (let [started (System/nanoTime)]
    (apply f args)
    (float (/ (- (System/nanoTime) started) 1000000))))
(defn average
  "Arithmetic mean of a collection of numbers. Returns 0 for an empty (or
  nil) collection; integer inputs may produce a Ratio."
  [numbers]
  (if (seq numbers)
    (/ (reduce + numbers) (count numbers))
    0))
(defn format-res
  "Summarizes a criterium benchmark result map `res` as
  {:sample <sample-count>
   :mean <seconds>
   :mean-time <human-readable string>
   :type <query/txn keyword>}.
  criterium stores :mean as a vector whose first element is the point
  estimate, hence the `first`."
  [res type]
  (let [mean (-> res :mean first)
        ;; scale-time picks a display scale/unit (s/ms/us/ns) for the magnitude
        [scale unit] (criterium/scale-time (abs mean))]
    {:sample (-> res :sample-count)
     :mean mean
     :mean-time (criterium/format-value mean scale unit)
     :type type}))
(defn format-mean-time
  "Renders a mean time `mean` (seconds) as a human-readable string, letting
  criterium choose a display unit appropriate for its magnitude."
  [mean]
  (let [[scale unit] (criterium/scale-time (abs mean))]
    (criterium/format-value mean scale unit)))
;; TEST TRANSACTIONS
;; TODO - not working with 0.11.0
(defn add-and-delete-data
  "Transacts a new `person` entity with :favNums [1], then deletes it again.
  Returns the result of the delete transaction. Used as a benchmark body
  for add+delete round-trips."
  [conn ledger-id]
  (let [txn [{:_id "person" :favNums [1]}]
        res (async/<!! (fdb/transact-async conn ledger-id txn))
        ;; resolve the tempid assigned to the newly created entity
        _id (-> res :tempids (get "person$1"))
        deleteTxn [{:_id _id :_action "delete"}]
        deleteRes (async/<!! (fdb/transact-async conn ledger-id deleteTxn))]
    deleteRes))
(defn add-and-update-data
  "Transacts a new `person` entity with :favNums [1], then updates that
  entity's :favNums to [2]. Returns the result of the update transaction.
  Used as a benchmark body for add+update round-trips."
  [conn ledger-id]
  (let [txn [{:_id "person" :favNums [1]}]
        res (async/<!! (fdb/transact-async conn ledger-id txn))
        ;; resolve the tempid assigned to the newly created entity
        _id (-> res :tempids (get "person$1"))
        updateTxn [{:_id _id :favNums [2]}]
        updateRes (async/<!! (fdb/transact-async conn ledger-id updateTxn))]
    updateRes))
;; TEST QUERIES
;; Inclusive [start end] key ranges for each query/txn category inside the
;; numbered QueryTxnList EDN files; `get-query-type` slices a file's map by
;; these ranges (optionally shifted by an offset).
(def queryTxnRanges
  {:schemaTxns [1 3]
   :txns [4 50]
   :basic-query [51 100]
   :analytical-query [101 150]
   :block-query [151 200]
   :history-query [201 250]
   :graphql-query [251 300]
   :multi-query [301 350]
   :sparql-query [351 400]})
(defn get-query-type
  "Selects from `queryTxns` the entries whose integer keys fall inside the
  range registered for `type` in `queryTxnRanges` (inclusive of both
  endpoints). `offset` shifts both endpoints and is used for special query
  lists, i.e. PlaneQueryTxn, whose keys are numbered `offset` positions
  past the shared ranges."
  ([queryTxns type]
   (get-query-type queryTxns type 0))
  ([queryTxns type offset]
   ;; `range` excludes its end, hence the (+ 1 ...) on the upper bound
   (select-keys queryTxns (range (+ offset (first (get queryTxnRanges type)))
                                 (+ 1 offset (second (get queryTxnRanges type)))))))
(defn test-queries
  "Benchmarks each query in `query-map` by running (f db q) under criterium.
  `query-map` is a map in the format:
  { 1 [:query-type, QUERY ]
    2 [:query-type, QUERY ] }
  i.e { 1 [:basic-query, {:select [\"*\"], \"from\": \"_collection\" } ]
        2 [:basic-query, {:select [\"*\"], \"from\": \"_predicate\" } ] }
  Returns a map of query-number -> formatted benchmark summary (see
  `format-res`), or query-number -> {:issued q :error true} for queries
  that threw."
  [db f query-map]
  (reduce (fn [acc [q-num [type q]]]
            (try
              (let [res (criterium/benchmark (f db q) nil)]
                (assoc acc q-num (format-res res type)))
              ;; BUG FIX: previously the catch returned the bare error map as
              ;; the new reduce accumulator, discarding every result gathered
              ;; so far. Record the failure under its query number instead.
              (catch Exception e
                (assoc acc q-num {:issued q :error true}))))
          {}
          query-map))
(defn add-schema-performance-check
  "Transacts the chat collections, predicates, and sample data into
  `ledger-id`, timing each transaction with `time-return-data`.
  Returns {1 <collections timing> 2 <predicates timing> 3 <data timing>},
  where each timing map holds :mean (a \"<ms> ms\" string) and :mean-time
  (seconds).
  NOTE(review): :mean/:mean-time here are shaped differently from
  `format-res` results (string vs number, ms vs s) - confirm consumers
  expect this.
  NOTE(review): resource paths mix \"schemas/\" and \"schema/\" - verify
  both resources actually exist on the classpath."
  [conn ledger-id]
  (let [collections (-> "schemas/chat.edn" io/resource slurp edn/read-string)
        coll-txn (time-return-data (fn [conn ledger-id collections]
                                     (async/<!! (fdb/transact-async conn ledger-id collections)))
                                   conn ledger-id collections)
        predicates (-> "schema/chat-preds.edn" io/resource slurp edn/read-string)
        pred-txn (time-return-data (fn [conn ledger-id collections]
                                     (async/<!! (fdb/transact-async conn ledger-id collections))) conn ledger-id predicates)
        data (-> "data/chat.edn" io/resource slurp edn/read-string)
        data-txn (time-return-data (fn [conn ledger-id collections]
                                     (async/<!! (fdb/transact-async conn ledger-id collections))) conn ledger-id data)
        ;; For now, these are hard-coded
        keyCollTxn 1
        keyPredTxn 2
        keyDataTxn 3]
    {keyCollTxn {:mean (str coll-txn " ms") :mean-time (/ coll-txn 1000)}
     keyPredTxn {:mean (str pred-txn " ms") :mean-time (/ pred-txn 1000)}
     keyDataTxn {:mean (str data-txn " ms") :mean-time (/ data-txn 1000)}}))
;; (add-schema-performance-check)
;; TODO - recommend turning off transact and block-range logging beforehand
;; I didn't turn off either. IDK if results affected.
(defn performance-check
  "Benchmarks the query suites in `queryTxnFile` against `ledger-id` and
  returns a merged map of query-number -> benchmark summary (see
  `test-queries` / `format-res`). When `schema?` is true, the chat schema
  and data are transacted (and timed) first. Only the basic and analytical
  suites are currently active; the rest are disabled below.
  NOTE: This performance check will take more than an hour."
  ([conn ledger-id]
   (performance-check conn ledger-id "../test/fluree/db/ledger/Performance/QueryTxnList.edn" 0 true))
  ([conn ledger-id queryTxnFile offset schema?]
   (let [add-schema-res (when schema? (add-schema-performance-check conn ledger-id))
         _ (log/info "Schema timing results: " add-schema-res)
         queries (-> queryTxnFile io/resource slurp read-string)
         myDb (fdb/db conn ledger-id)
         basic-query-coll (get-query-type queries :basic-query offset)
         query-bench (test-queries myDb (fn [db q]
                                          (async/<!! (fdb/query-async db q))) basic-query-coll)
         _ (log/info "Basic query bench results: " query-bench)
         analytical-query-coll (get-query-type queries :analytical-query offset)
         analytical-query-bench (test-queries myDb (fn [db q]
                                                     (async/<!! (fdb/query-async db q))) analytical-query-coll)
         _ (log/info "Analytical query bench results: " analytical-query-bench)
         ;; The remaining benchmark groups are currently disabled:
         ;;block-query-coll (get-query-type queries :block-query offset)
         ;;block-query-bench (test-queries myDb (fn [db q]
         ;;                    (async/<!!
         ;;                      (fdb/block-query-async conn ledger-id q))) block-query-coll)
         ;;_ (log/info "Block query bench results: " block-query-bench)
         ;;history-query-bench (test-queries myDb (fn [db q]
         ;;                      (async/<!!
         ;;                        (fdb/history-query-async db q))) history-query-coll
         ;;                      :history-query)
         ;;_ (log/info "History query bench results" history-query-bench)
         ;;sparql-query-bench (test-queries myDb (fn [db q]
         ;;                     (async/<!!
         ;;                       (fdb/sparql-async db q))) sparql-query-coll :sparql-query)
         ;;_ (log/info "SPARQL query bench results: " sparql-query-bench)
         ;;graphql-query-bench (test-queries myDb (fn [db q]
         ;;                      (async/<!! (fdb/graphql-async conn ledger-id q nil))) :graphql-query graphql-query-coll)
         ;;_ (log/info "GraphQL query bench results:" graphql-query-bench)
         ;;multi-query-bench (test-queries myDb (fn [db q]
         ;;                    (async/<!!
         ;;                      (fdb/multi-query-async db q))) multi-query-coll :multi-query)
         ;;_ (log/info "Multi-query bench results: " multi-query-bench)
         ;;add-data-bench (->> (criterium/benchmark
         ;;                      (async/<!! (fdb/transact-async conn ledger-id [{:_id "person" :favNums [1]}])) nil)
         ;;                    (format-res :addData :txn))
         ;;_ (log/info "Add data bench: " add-data-bench)
         ;;add-update-bench (->> (criterium/benchmark (add-and-update-data) nil)
         ;;                      (format-res :addUpdateData :txn))
         ;;_ (log/info "Add and update data bench: " add-update-bench)
         ;;add-delete-bench (->> (criterium/benchmark (add-and-delete-data) nil)
         ;;                      (format-res :addDeleteData :txn))
         ;;_ (log/info "Add and delete data bench: " add-delete-bench)
         ]
     (merge
      ;;add-schema-res
      query-bench analytical-query-bench
      ;;block-query-bench
      ;;history-query-bench sparql-query-bench graphql-query-bench
      ;;multi-query-bench add-data-bench
      ;;add-update-bench add-delete-bench
      ))))
;; COMPARE TWO SETS OF RESULTS, i.e 0.10.4 and 0.11.0
;; TODO - reconfigure for new results formats.
( defn compare - results
; ([res1 res2]
( compare - results res1 res2 0.5 ) )
; ([res1 res2 percentChange]
; (reduce (fn [acc [res2Key res2Val]]
; (if-let [res1Time (-> (get res1 res2Key) :mean)]
; (let [res2Time (-> res2Val :mean)
; diff (- res2Time res1Time)
; percent (/ diff res1Time)]
; (if (>= (abs percent) percentChange)
; (let [[scale unit] (criterium/scale-time (abs diff))
; diff-formatted (criterium/format-value diff scale unit)
; txn? (= :txn (:type res2Val))
; key (if (neg? percent)
; (if txn? :decreased-txn :decreased-query)
; (if txn? :increased-txn :increased-query))
; typeV (:type res2Val)]
( update acc key { : query res2Key : oldTime : newTime res2Time
; :diff diff :percentDiff percent
; :diff-formatted diff-formatted :type typeV}))
; acc))
( update acc : no - match ) ) )
; {} res2)))
;
( defn format - results
; [compare-res diff-label diff-key]
; (let [diff (map #(-> % :diff abs) (diff-key compare-res))
; percent-diff (map #(-> % :percentDiff abs) (diff-key compare-res))
;
; old-time (map #(-> % :oldTime abs) (diff-key compare-res))
; new-time (map #(-> % :newTime abs) (diff-key compare-res))
;
; cnt (count (diff-key compare-res))]
( if (= 0 )
; [(str "There are no results for " diff-label)]
; (let [avg (format-mean-time (average diff))
mx ( format - mean - time ( apply diff ) )
; mn (format-mean-time (apply min diff))
str ( * ( average percent - diff ) 100 ) " % " )
( format - mean - time ( average old - time ) )
; newAvg (format-mean-time (average new-time))]
; [(str "Results for " diff-label)
; (str "There are: " cnt " samples.")
; (str "The average difference is: " avg)
( str " The difference is : " mx )
( str " The average old time is : " )
; (str "The average new time is: " newAvg)
; (str "The percent different is: " pDAvg)
; (str "--------------------------------")]))))
;
( defn compare - res - formatted
; ([compare-res]
; (compare-res-formatted compare-res nil))
; ([compare-res type]
; (let [type-vec [[(str "increased " (if type (str type " ")) "queries") :increased-query]
; [(str "decreased " (if type (str type " ")) "queries") :decreased-query]]
; type-vec' (if type type-vec
; (concat type-vec [["increased transactions" :increased-txn]
; ["decreased transactions" :decreased-txn]]))
; res (map (fn [[label res-key]]
; (format-results compare-res label res-key))
; type-vec')
; res' (if type res
; (concat res
; [(str "There are " (count (:no-match compare-res)) " no matches.")]))]
; (into [] res'))))
;
( defn filter - comparisons
; [compare-res type]
; (let [inc-q (filter #(= (:type %) type) (:increased-query compare-res))
; dec-q (filter #(= (:type %) type) (:decreased-query compare-res))]
; {:increased-query inc-q :decreased-query dec-q}))
;
( defn generate - full - report
; ([res1 res2]
; (generate-full-report res1 res2 0))
; ([res1 res2 percent-change]
; (let [compare-res (compare-results res1 res2 percent-change)
; all-res-formatted (-> compare-res
; compare-res-formatted)
; basic-res-formatted (-> (filter-comparisons compare-res :basic-query)
; (compare-res-formatted "basic"))
; block-res-formatted (-> (filter-comparisons compare-res :block-query)
; (compare-res-formatted "block"))
; analytical-res-formatted (-> (filter-comparisons compare-res :analytical-query)
; (compare-res-formatted "analytical"))
; history-res-formatted (-> (filter-comparisons compare-res :history-query)
; (compare-res-formatted "history"))
;
( - > ( filter - comparisons compare - res : graphql - query )
; (compare-res-formatted "graphql"))
;
formatted ( - > ( filter - comparisons compare - res : sparql - query )
; (compare-res-formatted "sparql"))
;
; multi-res-formatted (-> (filter-comparisons compare-res :multi-query)
; (compare-res-formatted "multi"))]
; [all-res-formatted basic-res-formatted block-res-formatted
; analytical-res-formatted history-res-formatted grapqhl-res-formatted
; sparql-res-formatted multi-res-formatted])))
(comment
(def conn (:conn user/system))
(def q {:select ["handle" {"person/follows" ["handle"]}], :from "person"})
(def db (fdb/db conn "fluree/test"))
(def queries (-> "../test/fluree/db/ledger/Performance/QueryTxnList.edn" io/resource slurp read-string))
(def query-test {53 (get (get-query-type queries :basic-query) 53)
54 (get (get-query-type queries :basic-query) 54)
55 (get (get-query-type queries :basic-query) 55)
56 (get (get-query-type queries :basic-query) 56)})
query-test
(test-queries db (fn [db q]
(async/<!! (fdb/query-async db q))) query-test)
(def res (criterium/bench (+ 1 1)))
(criterium/-bench ((fn [db q]
(async/<!! (fdb/query-async db q))) db q))
51 {:sample 60, :mean 9.138601788989441E-5, :mean-time " 91.386018 µs", :type :basic-query},
(criterium/report-result (criterium/quick-bench (+ 1 1)))
(test-queries myDb (fn [db q]
(async/<!! (fdb/query-async db q))) )
(def myPerformanceCheck (performance-check conn "fluree/test"))
(def myPerformanceCheck (performance-check conn "plane/demo" "../test/fluree/db/ledger/Performance/PlaneQueryList.edn" 1000
false))
(into (sorted-map) myPerformanceCheck)
(+ 1 1)
(def compare-res (compare-results res1 res2))
compare-res
(def res1 (-> "performanceMetrics/0-10-4.edn" io/resource slurp read-string))
(def res2 (-> "performanceMetrics/0-11-0-old-version.edn" io/resource slurp read-string))
(generate-full-report res1 res2)
(add-schema-performance-check conn "fluree/new")
(count (keys (-> "../test/fluree/db/ledger/Performance/QueryTxnList.edn" io/resource slurp read-string)))
(def queryTxns (-> "../test/fluree/db/ledger/Performance/QueryTxnList.edn" io/resource slurp read-string))
(select-keys queryTxns (range 1 50))
)
| null | https://raw.githubusercontent.com/fluree/ledger/2b7819f38c6d9f8347028e4458fb8a301a3d9a35/test/fluree/db/ledger/Performance/performance.clj | clojure | UTILITY FUNCTIONS - Time and Results Formatting
TEST TRANSACTIONS
TEST QUERIES
For now, these are hard-coded
block-query-coll (get-query-type queries :block-query offset)
block-query-bench (test-queries myDb (fn [db q]
(async/<!!
(fdb/block-query-async conn ledger-id q))) block-query-coll)
_ (log/info "Block query bench results: " block-query-bench)
history-query-bench (test-queries myDb (fn [db q]
(async/<!!
(fdb/history-query-async db q))) history-query-coll
:history-query)
_ (log/info "History query bench results" history-query-bench)
(async/<!!
(fdb/sparql-async db q))) sparql-query-coll :sparql-query)
_ (log/info "SPARQL query bench results: " sparql-query-bench)
graphql-query-bench (test-queries myDb (fn [db q]
_ (log/info "GraphQL query bench results:" graphql-query-bench)
multi-query-bench (test-queries myDb (fn [db q]
(async/<!!
(fdb/multi-query-async db q))) multi-query-coll :multi-query)
_ (log/info "Multi-query bench results: " multi-query-bench)
add-data-bench (->> (criterium/benchmark
(format-res :addData :txn))
_ (log/info "Add data bench: " add-data-bench)
(format-res :addUpdateData :txn))
_ (log/info "Add and update data bench: " add-update-bench)
(format-res :addDeleteData :txn))
_ (log/info "Add and delete data bench: " add-delete-bench)
add-schema-res
block-query-bench
history-query-bench sparql-query-bench graphql-query-bench
multi-query-bench add-data-bench
add-update-bench add-delete-bench
TODO - reconfigure for new results formats.
([res1 res2]
([res1 res2 percentChange]
(reduce (fn [acc [res2Key res2Val]]
(if-let [res1Time (-> (get res1 res2Key) :mean)]
(let [res2Time (-> res2Val :mean)
diff (- res2Time res1Time)
percent (/ diff res1Time)]
(if (>= (abs percent) percentChange)
(let [[scale unit] (criterium/scale-time (abs diff))
diff-formatted (criterium/format-value diff scale unit)
txn? (= :txn (:type res2Val))
key (if (neg? percent)
(if txn? :decreased-txn :decreased-query)
(if txn? :increased-txn :increased-query))
typeV (:type res2Val)]
:diff diff :percentDiff percent
:diff-formatted diff-formatted :type typeV}))
acc))
{} res2)))
[compare-res diff-label diff-key]
(let [diff (map #(-> % :diff abs) (diff-key compare-res))
percent-diff (map #(-> % :percentDiff abs) (diff-key compare-res))
old-time (map #(-> % :oldTime abs) (diff-key compare-res))
new-time (map #(-> % :newTime abs) (diff-key compare-res))
cnt (count (diff-key compare-res))]
[(str "There are no results for " diff-label)]
(let [avg (format-mean-time (average diff))
mn (format-mean-time (apply min diff))
newAvg (format-mean-time (average new-time))]
[(str "Results for " diff-label)
(str "There are: " cnt " samples.")
(str "The average difference is: " avg)
(str "The average new time is: " newAvg)
(str "The percent different is: " pDAvg)
(str "--------------------------------")]))))
([compare-res]
(compare-res-formatted compare-res nil))
([compare-res type]
(let [type-vec [[(str "increased " (if type (str type " ")) "queries") :increased-query]
[(str "decreased " (if type (str type " ")) "queries") :decreased-query]]
type-vec' (if type type-vec
(concat type-vec [["increased transactions" :increased-txn]
["decreased transactions" :decreased-txn]]))
res (map (fn [[label res-key]]
(format-results compare-res label res-key))
type-vec')
res' (if type res
(concat res
[(str "There are " (count (:no-match compare-res)) " no matches.")]))]
(into [] res'))))
[compare-res type]
(let [inc-q (filter #(= (:type %) type) (:increased-query compare-res))
dec-q (filter #(= (:type %) type) (:decreased-query compare-res))]
{:increased-query inc-q :decreased-query dec-q}))
([res1 res2]
(generate-full-report res1 res2 0))
([res1 res2 percent-change]
(let [compare-res (compare-results res1 res2 percent-change)
all-res-formatted (-> compare-res
compare-res-formatted)
basic-res-formatted (-> (filter-comparisons compare-res :basic-query)
(compare-res-formatted "basic"))
block-res-formatted (-> (filter-comparisons compare-res :block-query)
(compare-res-formatted "block"))
analytical-res-formatted (-> (filter-comparisons compare-res :analytical-query)
(compare-res-formatted "analytical"))
history-res-formatted (-> (filter-comparisons compare-res :history-query)
(compare-res-formatted "history"))
(compare-res-formatted "graphql"))
(compare-res-formatted "sparql"))
multi-res-formatted (-> (filter-comparisons compare-res :multi-query)
(compare-res-formatted "multi"))]
[all-res-formatted basic-res-formatted block-res-formatted
analytical-res-formatted history-res-formatted grapqhl-res-formatted
sparql-res-formatted multi-res-formatted]))) | (ns fluree.db.ledger.Performance.performance
(:require [clojure.java.io :as io]
[criterium.core :as criterium]
[fluree.db.api :as fdb]
[clojure.tools.logging :as log]
[clojure.core.async :as async]
[fluree.db.test-helpers :as test]
[clojure.edn :as edn]))
(defn time-return-data
[f & args]
(let [start-time (System/nanoTime)
_ (apply f args)
end-time (System/nanoTime)]
(float (/ (- end-time start-time) 1000000))))
(defn average
[numbers]
(if (empty? numbers)
0
(/ (reduce + numbers) (count numbers))))
(defn format-res
[res type]
(let [mean (-> res :mean first)
[scale unit] (criterium/scale-time (abs mean))]
{:sample (-> res :sample-count)
:mean mean
:mean-time (criterium/format-value mean scale unit)
:type type}))
(defn format-mean-time
[mean]
(let [[scale unit] (criterium/scale-time (abs mean))]
(criterium/format-value mean scale unit)))
TODO - not working with 0.11.0
(defn add-and-delete-data
[conn ledger-id]
(let [txn [{:_id "person" :favNums [1]}]
res (async/<!! (fdb/transact-async conn ledger-id txn))
_id (-> res :tempids (get "person$1"))
deleteTxn [{:_id _id :_action "delete"}]
deleteRes (async/<!! (fdb/transact-async conn ledger-id deleteTxn))]
deleteRes))
(defn add-and-update-data
[conn ledger-id]
(let [txn [{:_id "person" :favNums [1]}]
res (async/<!! (fdb/transact-async conn ledger-id txn))
_id (-> res :tempids (get "person$1"))
updateTxn [{:_id _id :favNums [2]}]
updateRes (async/<!! (fdb/transact-async conn ledger-id updateTxn))]
updateRes))
(def queryTxnRanges
{:schemaTxns [1 3]
:txns [4 50]
:basic-query [51 100]
:analytical-query [101 150]
:block-query [151 200]
:history-query [201 250]
:graphql-query [251 300]
:multi-query [301 350]
:sparql-query [351 400]})
(defn get-query-type
"Offset is used for special query lists, i.e. PlaneQueryTxn. Rather than"
([queryTxns type]
(get-query-type queryTxns type 0))
([queryTxns type offset]
(select-keys queryTxns (range (+ offset (first (get queryTxnRanges type)))
(+ 1 offset (second (get queryTxnRanges type)))))))
(defn test-queries
"'query-map is a map in the format:
{ 1 [:query-type, QUERY ]
2 [:query-type, QUERY ] }
i.e { 1 [:basic-query, {:select [\"*\"], \"from\": \"_collection\" } ]
2 [:basic-query, {:select [\"*\"], \"from\": \"_predicate\" } ] }"
[db f query-map]
(reduce (fn [acc [q-num [type q]]]
(try (let [res (criterium/benchmark (f db q) nil)]
(assoc acc q-num (format-res res type)))
(catch Exception e {:issued q :error true}))) {} query-map))
(defn add-schema-performance-check
[conn ledger-id]
(let [collections (-> "schemas/chat.edn" io/resource slurp edn/read-string)
coll-txn (time-return-data (fn [conn ledger-id collections]
(async/<!! (fdb/transact-async conn ledger-id collections)))
conn ledger-id collections)
predicates (-> "schema/chat-preds.edn" io/resource slurp edn/read-string)
pred-txn (time-return-data (fn [conn ledger-id collections]
(async/<!! (fdb/transact-async conn ledger-id collections))) conn ledger-id predicates)
data (-> "data/chat.edn" io/resource slurp edn/read-string)
data-txn (time-return-data (fn [conn ledger-id collections]
(async/<!! (fdb/transact-async conn ledger-id collections))) conn ledger-id data)
keyCollTxn 1
keyPredTxn 2
keyDataTxn 3]
{keyCollTxn {:mean (str coll-txn " ms") :mean-time (/ coll-txn 1000)}
keyPredTxn {:mean (str pred-txn " ms") :mean-time (/ pred-txn 1000)}
keyDataTxn {:mean (str data-txn " ms") :mean-time (/ data-txn 1000)}}))
( add - schema - performance - check )
TODO - recommend turning off transact and block - range logging beforehand
I did n't turn off either . IDK if results affected .
(defn performance-check
"NOTE: This performance check will take more than an hour."
([conn ledger-id]
(performance-check conn ledger-id "../test/fluree/db/ledger/Performance/QueryTxnList.edn" 0 true))
([conn ledger-id queryTxnFile offset schema?]
(let [add-schema-res (when schema? (add-schema-performance-check conn ledger-id))
_ (log/info "Schema timing results: " add-schema-res)
queries (-> queryTxnFile io/resource slurp read-string)
myDb (fdb/db conn ledger-id)
basic-query-coll (get-query-type queries :basic-query offset)
query-bench (test-queries myDb (fn [db q]
(async/<!! (fdb/query-async db q))) basic-query-coll)
_ (log/info "Basic query bench results: " query-bench)
analytical-query-coll (get-query-type queries :analytical-query offset)
analytical-query-bench (test-queries myDb (fn [db q]
(async/<!! (fdb/query-async db q))) analytical-query-coll)
_ (log/info "Analytical query bench results: " analytical-query-bench)
- bench ( test - queries myDb ( fn [ db q ]
( async/ < ! ! ( / graphql - async conn ledger - id q nil ) ) ) : graphql - query graphql - query - coll )
( async/ < ! ! ( / transact - async conn ledger - id [ { : _ i d " person " : favNums [ 1 ] } ] ) ) nil )
add - update - bench ( - > > ( criterium / benchmark ( add - and - update - data ) nil )
add - delete - bench ( - > > ( criterium / benchmark ( add - and - delete - data ) nil )
] (merge
query-bench analytical-query-bench
))))
COMPARE TWO SETS OF RESULTS , i.e 0.10.4 and 0.11.0
( defn compare - results
( compare - results res1 res2 0.5 ) )
( update acc key { : query res2Key : oldTime : newTime res2Time
( update acc : no - match ) ) )
( defn format - results
( if (= 0 )
mx ( format - mean - time ( apply diff ) )
str ( * ( average percent - diff ) 100 ) " % " )
( format - mean - time ( average old - time ) )
( str " The difference is : " mx )
( str " The average old time is : " )
( defn compare - res - formatted
( defn filter - comparisons
( defn generate - full - report
( - > ( filter - comparisons compare - res : graphql - query )
formatted ( - > ( filter - comparisons compare - res : sparql - query )
(comment
(def conn (:conn user/system))
(def q {:select ["handle" {"person/follows" ["handle"]}], :from "person"})
(def db (fdb/db conn "fluree/test"))
(def queries (-> "../test/fluree/db/ledger/Performance/QueryTxnList.edn" io/resource slurp read-string))
(def query-test {53 (get (get-query-type queries :basic-query) 53)
54 (get (get-query-type queries :basic-query) 54)
55 (get (get-query-type queries :basic-query) 55)
56 (get (get-query-type queries :basic-query) 56)})
query-test
(test-queries db (fn [db q]
(async/<!! (fdb/query-async db q))) query-test)
(def res (criterium/bench (+ 1 1)))
(criterium/-bench ((fn [db q]
(async/<!! (fdb/query-async db q))) db q))
51 {:sample 60, :mean 9.138601788989441E-5, :mean-time " 91.386018 µs", :type :basic-query},
(criterium/report-result (criterium/quick-bench (+ 1 1)))
(test-queries myDb (fn [db q]
(async/<!! (fdb/query-async db q))) )
(def myPerformanceCheck (performance-check conn "fluree/test"))
(def myPerformanceCheck (performance-check conn "plane/demo" "../test/fluree/db/ledger/Performance/PlaneQueryList.edn" 1000
false))
(into (sorted-map) myPerformanceCheck)
(+ 1 1)
(def compare-res (compare-results res1 res2))
compare-res
(def res1 (-> "performanceMetrics/0-10-4.edn" io/resource slurp read-string))
(def res2 (-> "performanceMetrics/0-11-0-old-version.edn" io/resource slurp read-string))
(generate-full-report res1 res2)
(add-schema-performance-check conn "fluree/new")
(count (keys (-> "../test/fluree/db/ledger/Performance/QueryTxnList.edn" io/resource slurp read-string)))
(def queryTxns (-> "../test/fluree/db/ledger/Performance/QueryTxnList.edn" io/resource slurp read-string))
(select-keys queryTxns (range 1 50))
)
|
76b08e305faaf9bcbd3750322cdc797f2398d898a28e1168c99ec581012b6e31 | MLstate/opalang | weakResArray.mli |
Copyright © 2011 MLstate
This file is part of .
is free software : you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License , version 3 , as published by
the Free Software Foundation .
is distributed in the hope that it will be useful , but WITHOUT ANY
WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU Affero General Public License for
more details .
You should have received a copy of the GNU Affero General Public License
along with . If not , see < / > .
Copyright © 2011 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
exception MaxSize
type 'a t =
{
mutable array : 'a Weak.t;
mutable length : int
}
val make : ?size:int -> int -> 'a t
val create : ?size:int -> int -> 'a t
val get : 'a t -> int -> 'a option
val set : 'a t -> int -> 'a option -> unit
val remove : 'a t -> int -> unit
val length : 'a t -> int
val real_length : 'a t -> int
val fold_left : ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a
val fold_left_i : ('a -> 'b -> i:int -> 'a) -> 'a -> 'b t -> 'a
| null | https://raw.githubusercontent.com/MLstate/opalang/424b369160ce693406cece6ac033d75d85f5df4f/ocamllib/libbase/weakResArray.mli | ocaml |
Copyright © 2011 MLstate
This file is part of .
is free software : you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License , version 3 , as published by
the Free Software Foundation .
is distributed in the hope that it will be useful , but WITHOUT ANY
WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU Affero General Public License for
more details .
You should have received a copy of the GNU Affero General Public License
along with . If not , see < / > .
Copyright © 2011 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
exception MaxSize
type 'a t =
{
mutable array : 'a Weak.t;
mutable length : int
}
val make : ?size:int -> int -> 'a t
val create : ?size:int -> int -> 'a t
val get : 'a t -> int -> 'a option
val set : 'a t -> int -> 'a option -> unit
val remove : 'a t -> int -> unit
val length : 'a t -> int
val real_length : 'a t -> int
val fold_left : ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a
val fold_left_i : ('a -> 'b -> i:int -> 'a) -> 'a -> 'b t -> 'a
| |
f67cdce67afa2c29c1c52c4f27da88db507c957d728c66c516a296a536621f8a | stchang/mlish | sweet-map.rkt | #lang sweet-exp mlish
define
sum [lst : (List Int)] → Int
match lst with
[] -> 0
x :: xs ->
{x + sum(xs)}
define
map [f : (→ X Y)] [lst : (List X)] → (List Y)
match lst with
[] -> nil
x :: xs ->
cons
f x
map f xs
sum
map string->number (list "1" "2" "3")
| null | https://raw.githubusercontent.com/stchang/mlish/1c79d71b686fc734b7994eb9d412f84d518a64b1/mlish-test/tests/mlish/sweet-map.rkt | racket | #lang sweet-exp mlish
define
sum [lst : (List Int)] → Int
match lst with
[] -> 0
x :: xs ->
{x + sum(xs)}
define
map [f : (→ X Y)] [lst : (List X)] → (List Y)
match lst with
[] -> nil
x :: xs ->
cons
f x
map f xs
sum
map string->number (list "1" "2" "3")
| |
0339ebb2ea3f38f466c0b8b1f0ac91414c99ba025d81abbdd7ec155dad25f7fd | fogus/minderbinder | solid_angle.clj | (ns minderbinder.solid-angle
(:require [minderbinder.core :refer (defunits-of)]))
(defunits-of solid-angle :steradian
""
;; Solid angle which cuts off an area of the surface
;; of the sphere equal to that of a square with
;; sides of length equal to the radius of the sphere.
:steradian #{:sr}) | null | https://raw.githubusercontent.com/fogus/minderbinder/35b10b279b241fbeab38f63af568e6302f40930a/src/minderbinder/solid_angle.clj | clojure | Solid angle which cuts off an area of the surface
of the sphere equal to that of a square with
sides of length equal to the radius of the sphere. | (ns minderbinder.solid-angle
(:require [minderbinder.core :refer (defunits-of)]))
(defunits-of solid-angle :steradian
""
:steradian #{:sr}) |
0146f706a2e7816a55f058ab7c07837191e3c3093626c0c3ca679af7f89aee17 | erlang/otp | erl_eval_SUITE.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 1998 - 2023 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
-module(erl_eval_SUITE).
-feature(maybe_expr, enable).
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_testcase/2, end_per_testcase/2,
init_per_group/2,end_per_group/2]).
-export([guard_1/1, guard_2/1,
match_pattern/1,
match_bin/1,
string_plusplus/1,
pattern_expr/1,
guard_3/1, guard_4/1, guard_5/1,
lc/1,
simple_cases/1,
unary_plus/1,
apply_atom/1,
otp_5269/1,
otp_6539/1,
otp_6543/1,
otp_6787/1,
otp_6977/1,
otp_7550/1,
otp_8133/1,
otp_10622/1,
otp_13228/1,
otp_14826/1,
funs/1,
custom_stacktrace/1,
try_catch/1,
eval_expr_5/1,
zero_width/1,
eep37/1,
eep43/1,
otp_15035/1,
otp_16439/1,
otp_14708/1,
otp_16545/1,
otp_16865/1,
eep49/1,
binary_and_map_aliases/1,
eep58/1]).
%%
%% Define to run outside of test server
%%
%%-define(STANDALONE,1).
-import(lists,[concat/1, sort/1]).
-export([count_down/2, count_down_fun/0, do_apply/2,
local_func/3, local_func_value/2]).
-export([simple/0]).
-ifdef(STANDALONE).
-define(config(A,B),config(A,B)).
-export([config/2]).
-define(line, noop, ).
config(priv_dir,_) ->
".".
-else.
-include_lib("common_test/include/ct.hrl").
-endif.
init_per_testcase(_Case, Config) ->
Config.
end_per_testcase(_Case, _Config) ->
ok.
suite() ->
[{ct_hooks,[ts_install_cth]},
{timetrap,{minutes,1}}].
all() ->
[guard_1, guard_2, match_pattern, string_plusplus,
pattern_expr, match_bin, guard_3, guard_4, guard_5, lc,
simple_cases, unary_plus, apply_atom, otp_5269,
otp_6539, otp_6543, otp_6787, otp_6977, otp_7550,
otp_8133, otp_10622, otp_13228, otp_14826,
funs, custom_stacktrace, try_catch, eval_expr_5, zero_width,
eep37, eep43, otp_15035, otp_16439, otp_14708, otp_16545, otp_16865,
eep49, binary_and_map_aliases, eep58].
groups() ->
[].
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
ok.
init_per_group(_GroupName, Config) ->
Config.
end_per_group(_GroupName, Config) ->
Config.
%% OTP-2405
guard_1(Config) when is_list(Config) ->
{ok,Tokens ,_} =
erl_scan:string("if a+4 == 4 -> yes; true -> no end. "),
{ok, [Expr]} = erl_parse:parse_exprs(Tokens),
no = guard_1_compiled(),
{value, no, []} = erl_eval:expr(Expr, []),
ok.
guard_1_compiled() ->
if a+4 == 4 -> yes; true -> no end.
%% Similar to guard_1, but type-correct.
guard_2(Config) when is_list(Config) ->
{ok,Tokens ,_} =
erl_scan:string("if 6+4 == 4 -> yes; true -> no end. "),
{ok, [Expr]} = erl_parse:parse_exprs(Tokens),
no = guard_2_compiled(),
{value, no, []} = erl_eval:expr(Expr, []),
ok.
guard_2_compiled() ->
if 6+4 == 4 -> yes; true -> no end.
%% OTP-3069: syntactic sugar string ++ ...
string_plusplus(Config) when is_list(Config) ->
check(fun() -> case "abc" of "ab" ++ L -> L end end,
"case \"abc\" of \"ab\" ++ L -> L end. ",
"c"),
check(fun() -> case "abcde" of "ab" ++ "cd" ++ L -> L end end,
"case \"abcde\" of \"ab\" ++ \"cd\" ++ L -> L end. ",
"e"),
check(fun() -> case "abc" of [97, 98] ++ L -> L end end,
"case \"abc\" of [97, 98] ++ L -> L end. ",
"c"),
ok.
%% OTP-2983: match operator in pattern.
match_pattern(Config) when is_list(Config) ->
check(fun() -> case {a, b} of {a, _X}=Y -> {x,Y} end end,
"case {a, b} of {a, X}=Y -> {x,Y} end. ",
{x, {a, b}}),
check(fun() -> case {a, b} of Y={a, _X} -> {x,Y} end end,
"case {a, b} of Y={a, X} -> {x,Y} end. ",
{x, {a, b}}),
check(fun() -> case {a, b} of Y={a, _X}=Z -> {Z,Y} end end,
"case {a, b} of Y={a, X}=Z -> {Z,Y} end. ",
{{a, b}, {a, b}}),
check(fun() -> A = 4, B = 28, <<13:(A+(X=B))>>, X end,
"begin A = 4, B = 28, <<13:(A+(X=B))>>, X end.",
28),
ok.
%% Binary match problems.
match_bin(Config) when is_list(Config) ->
check(fun() -> <<"abc">> = <<"abc">> end,
"<<\"abc\">> = <<\"abc\">>. ",
<<"abc">>),
check(fun() ->
<<Size,B:Size/binary,Rest/binary>> = <<2,"AB","CD">>,
{Size,B,Rest}
end,
"begin <<Size,B:Size/binary,Rest/binary>> = <<2,\"AB\",\"CD\">>, "
"{Size,B,Rest} end. ",
{2,<<"AB">>,<<"CD">>}),
ok.
%% OTP-3144: compile-time expressions in pattern.
pattern_expr(Config) when is_list(Config) ->
check(fun() -> case 4 of 2+2 -> ok end end,
"case 4 of 2+2 -> ok end. ",
ok),
check(fun() -> case 2 of +2 -> ok end end,
"case 2 of +2 -> ok end. ",
ok),
ok.
%% OTP-4518.
guard_3(Config) when is_list(Config) ->
check(fun() -> if false -> false; true -> true end end,
"if false -> false; true -> true end.",
true),
check(fun() -> if <<"hej">> == <<"hopp">> -> true;
true -> false end end,
"begin if <<\"hej\">> == <<\"hopp\">> -> true;
true -> false end end.",
false),
check(fun() -> if <<"hej">> == <<"hej">> -> true;
true -> false end end,
"begin if <<\"hej\">> == <<\"hej\">> -> true;
true -> false end end.",
true),
ok.
%% OTP-4885.
guard_4(Config) when is_list(Config) ->
check(fun() -> if erlang:'+'(3,a) -> true ; true -> false end end,
"if erlang:'+'(3,a) -> true ; true -> false end.",
false),
check(fun() -> if erlang:is_integer(3) -> true ; true -> false end
end,
"if erlang:is_integer(3) -> true ; true -> false end.",
true),
check(fun() -> [X || X <- [1,2,3], erlang:is_integer(X)] end,
"[X || X <- [1,2,3], erlang:is_integer(X)].",
[1,2,3]),
check(fun() -> if is_atom(is_integer(a)) -> true ; true -> false end
end,
"if is_atom(is_integer(a)) -> true ; true -> false end.",
true),
check(fun() -> if erlang:is_atom(erlang:is_integer(a)) -> true;
true -> false end end,
"if erlang:is_atom(erlang:is_integer(a)) -> true; "
"true -> false end.",
true),
check(fun() -> if is_atom(3+a) -> true ; true -> false end end,
"if is_atom(3+a) -> true ; true -> false end.",
false),
check(fun() -> if erlang:is_atom(3+a) -> true ; true -> false end
end,
"if erlang:is_atom(3+a) -> true ; true -> false end.",
false),
ok.
%% Guards with erlang:'=='/2.
guard_5(Config) when is_list(Config) ->
{ok,Tokens ,_} =
erl_scan:string("case 1 of A when erlang:'=='(A, 1) -> true end."),
{ok, [Expr]} = erl_parse:parse_exprs(Tokens),
true = guard_5_compiled(),
{value, true, [{'A',1}]} = erl_eval:expr(Expr, []),
ok.
guard_5_compiled() ->
case 1 of A when erlang:'=='(A, 1) -> true end.
%% OTP-4518.
lc(Config) when is_list(Config) ->
check(fun() -> X = 32, [X || X <- [1,2,3]] end,
"begin X = 32, [X || X <- [1,2,3]] end.",
[1,2,3]),
check(fun() -> X = 32,
[X || <<X:X>> <- [<<1:32>>,<<2:32>>,<<3:8>>]] end,
%% "binsize variable" ^
"begin X = 32,
[X || <<X:X>> <- [<<1:32>>,<<2:32>>,<<3:8>>]] end.",
[1,2]),
check(fun() -> Y = 13,[X || {X,Y} <- [{1,2}]] end,
"begin Y = 13,[X || {X,Y} <- [{1,2}]] end.",
[1]),
error_check("begin [A || X <- [{1,2}], 1 == A] end.",
{unbound_var,'A'}),
error_check("begin X = 32,
[{Y,W} || X <- [1,2,32,Y=4], Z <- [1,2,W=3]] end.",
{unbound_var,'Y'}),
error_check("begin X = 32,<<A:B>> = <<100:X>> end.",
{unbound_var,'B'}),
check(fun() -> [X || X <- [1,2,3,4], not (X < 2)] end,
"begin [X || X <- [1,2,3,4], not (X < 2)] end.",
[2,3,4]),
check(fun() -> [X || X <- [true,false], X] end,
"[X || X <- [true,false], X].", [true]),
ok.
%% Simple cases, just to cover some code.
simple_cases(Config) when is_list(Config) ->
check(fun() -> A = $C end, "A = $C.", $C),
check(fun ( ) - > A = 3.14 end , " A = 3.14 . " , 3.14 ) ,
check(fun() -> self() ! a, A = receive a -> true end end,
"begin self() ! a, A = receive a -> true end end.",
true),
check(fun() -> c:flush(), self() ! a, self() ! b, self() ! c,
receive b -> b end,
{messages, [a,c]} =
erlang:process_info(self(), messages),
c:flush() end,
"begin c:flush(), self() ! a, self() ! b, self() ! c,"
"receive b -> b end,"
"{messages, [a,c]} ="
" erlang:process_info(self(), messages), c:flush() end.",
ok),
check(fun() -> self() ! a, A = receive a -> true
after 0 -> false end end,
"begin self() ! a, A = receive a -> true"
" after 0 -> false end end.",
true),
check(fun() -> c:flush(), self() ! a, self() ! b, self() ! c,
receive b -> b after 0 -> true end,
{messages, [a,c]} =
erlang:process_info(self(), messages),
c:flush() end,
"begin c:flush(), self() ! a, self() ! b, self() ! c,"
"receive b -> b after 0 -> true end,"
"{messages, [a,c]} ="
" erlang:process_info(self(), messages), c:flush() end.",
ok),
check(fun() -> receive _ -> true after 10 -> false end end,
"receive _ -> true after 10 -> false end.",
false),
check(fun() -> F = fun(A) -> A end, true = 3 == F(3) end,
"begin F = fun(A) -> A end, true = 3 == F(3) end.",
true),
check(fun() -> F = fun(A) -> A end, true = 3 == apply(F, [3]) end,
"begin F = fun(A) -> A end, true = 3 == apply(F,[3]) end.",
true),
check(fun() -> catch throw(a) end, "catch throw(a).", a),
check(fun() -> catch a end, "catch a.", a),
check(fun() -> 4 == 3 end, "4 == 3.", false),
check(fun() -> not true end, "not true.", false),
check(fun() -> -3 end, "-3.", -3),
error_check("3.0 = 4.0.", {badmatch,4.0}),
check(fun() -> <<(3.0+2.0):32/float>> = <<5.0:32/float>> end,
"<<(3.0+2.0):32/float>> = <<5.0:32/float>>.",
<<5.0:32/float>>),
check(fun() -> false andalso kludd end, "false andalso kludd.",
false),
check(fun() -> true andalso true end, "true andalso true.",
true),
check(fun() -> true andalso false end, "true andalso false.",
false),
check(fun() -> true andalso kludd end, "true andalso kludd.",
kludd),
error_check("kladd andalso kludd.", {badarg,kladd}),
check(fun() -> if false andalso kludd -> a; true -> b end end,
"if false andalso kludd -> a; true -> b end.",
b),
check(fun() -> if true andalso true -> a; true -> b end end,
"if true andalso true -> a; true -> b end.",
a),
check(fun() -> if true andalso false -> a; true -> b end end,
"if true andalso false -> a; true -> b end.",
b),
check(fun() -> true orelse kludd end,
"true orelse kludd.", true),
check(fun() -> false orelse false end,
"false orelse false.", false),
check(fun() -> false orelse true end,
"false orelse true.", true),
check(fun() -> false orelse kludd end,
"false orelse kludd.", kludd),
error_check("kladd orelse kludd.", {badarg,kladd}),
error_check("[X || X <- [1,2,3], begin 1 end].",{bad_filter,1}),
error_check("[X || X <- a].",{bad_generator,a}),
check(fun() -> if true orelse kludd -> a; true -> b end end,
"if true orelse kludd -> a; true -> b end.", a),
check(fun() -> if false orelse false -> a; true -> b end end,
"if false orelse false -> a; true -> b end.", b),
check(fun() -> if false orelse true -> a; true -> b end end,
"if false orelse true -> a; true -> b end.", a),
check(fun() -> [X || X <- [1,2,3], X+2] end,
"[X || X <- [1,2,3], X+2].", []),
check(fun() -> [X || X <- [1,2,3], [X] == [X || X <- [2]]] end,
"[X || X <- [1,2,3], [X] == [X || X <- [2]]].",
[2]),
check(fun() -> F = fun(1) -> ett; (2) -> zwei end,
ett = F(1), zwei = F(2) end,
"begin F = fun(1) -> ett; (2) -> zwei end,
ett = F(1), zwei = F(2) end.",
zwei),
check(fun() -> F = fun(X) when X == 1 -> ett;
(X) when X == 2 -> zwei end,
ett = F(1), zwei = F(2) end,
"begin F = fun(X) when X == 1 -> ett;
(X) when X == 2 -> zwei end,
ett = F(1), zwei = F(2) end.",
zwei),
error_check("begin F = fun(1) -> ett end, zwei = F(2) end.",
function_clause),
check(fun() -> if length([1]) == 1 -> yes;
true -> no end end,
"if length([1]) == 1 -> yes;
true -> no end.",
yes),
check(fun() -> if is_integer(3) -> true; true -> false end end,
"if is_integer(3) -> true; true -> false end.", true),
check(fun() -> if integer(3) -> true; true -> false end end,
"if integer(3) -> true; true -> false end.", true),
check(fun() -> if is_float(3) -> true; true -> false end end,
"if is_float(3) -> true; true -> false end.", false),
check(fun() -> if float(3) -> true; true -> false end end,
"if float(3) -> true; true -> false end.", false),
check(fun() -> if is_number(3) -> true; true -> false end end,
"if is_number(3) -> true; true -> false end.", true),
check(fun() -> if number(3) -> true; true -> false end end,
"if number(3) -> true; true -> false end.", true),
check(fun() -> if is_atom(a) -> true; true -> false end end,
"if is_atom(a) -> true; true -> false end.", true),
check(fun() -> if atom(a) -> true; true -> false end end,
"if atom(a) -> true; true -> false end.", true),
check(fun() -> if is_list([]) -> true; true -> false end end,
"if is_list([]) -> true; true -> false end.", true),
check(fun() -> if list([]) -> true; true -> false end end,
"if list([]) -> true; true -> false end.", true),
check(fun() -> if is_tuple({}) -> true; true -> false end end,
"if is_tuple({}) -> true; true -> false end.", true),
check(fun() -> if tuple({}) -> true; true -> false end end,
"if tuple({}) -> true; true -> false end.", true),
check(fun() -> if is_pid(self()) -> true; true -> false end end,
"if is_pid(self()) -> true; true -> false end.", true),
check(fun() -> if pid(self()) -> true; true -> false end end,
"if pid(self()) -> true; true -> false end.", true),
check(fun() -> R = make_ref(), if is_reference(R) -> true;
true -> false end end,
"begin R = make_ref(), if is_reference(R) -> true;"
"true -> false end end.", true),
check(fun() -> R = make_ref(), if reference(R) -> true;
true -> false end end,
"begin R = make_ref(), if reference(R) -> true;"
"true -> false end end.", true),
check(fun() -> if is_port(a) -> true; true -> false end end,
"if is_port(a) -> true; true -> false end.", false),
check(fun() -> if port(a) -> true; true -> false end end,
"if port(a) -> true; true -> false end.", false),
check(fun() -> if is_function(a) -> true; true -> false end end,
"if is_function(a) -> true; true -> false end.", false),
check(fun() -> if function(a) -> true; true -> false end end,
"if function(a) -> true; true -> false end.", false),
check(fun() -> if is_binary(<<>>) -> true; true -> false end end,
"if is_binary(<<>>) -> true; true -> false end.", true),
check(fun() -> if binary(<<>>) -> true; true -> false end end,
"if binary(<<>>) -> true; true -> false end.", true),
check(fun() -> if is_integer(a) == true -> yes;
true -> no end end,
"if is_integer(a) == true -> yes;
true -> no end.",
no),
check(fun() -> if [] -> true; true -> false end end,
"if [] -> true; true -> false end.", false),
error_check("if lists:member(1,[1]) -> true; true -> false end.",
illegal_guard_expr),
error_check("if false -> true end.", if_clause),
check(fun() -> if a+b -> true; true -> false end end,
"if a + b -> true; true -> false end.", false),
check(fun() -> if + b -> true; true -> false end end,
"if + b -> true; true -> false end.", false),
error_check("case foo of bar -> true end.", {case_clause,foo}),
error_check("case 4 of 2+a -> true; _ -> false end.",
illegal_pattern),
error_check("case 4 of +a -> true; _ -> false end.",
illegal_pattern),
check(fun() -> case a of
X when X == b -> one;
X when X == a -> two
end end,
"begin case a of
X when X == b -> one;
X when X == a -> two
end end.", two),
error_check("3 = 4.", {badmatch,4}),
error_check("a = 3.", {badmatch,3}),
error_check("3.1 = 2.7.",{badmatch,2.7 } ) ,
error_check("$c = 4.", {badmatch,4}),
check(fun() -> $c = $c end, "$c = $c.", $c),
check(fun() -> _ = bar end, "_ = bar.", bar),
check(fun() -> A = 14, A = 14 end,
"begin A = 14, A = 14 end.", 14),
error_check("begin A = 14, A = 16 end.", {badmatch,16}),
error_check("\"hej\" = \"san\".", {badmatch,"san"}),
check(fun() -> "hej" = "hej" end,
"\"hej\" = \"hej\".", "hej"),
error_check("[] = [a].", {badmatch,[a]}),
check(fun() -> [] = [] end, "[] = [].", []),
error_check("[a] = [].", {badmatch,[]}),
error_check("{a,b} = 34.", {badmatch,34}),
check(fun() -> <<X:7>> = <<8:7>>, X end,
"begin <<X:7>> = <<8:7>>, X end.", 8),
error_check("<<34:32>> = \"hej\".", {badmatch,"hej"}),
check(fun() -> trunc((1 * 3 div 3 + 4 - 3) / 1) rem 2 end,
"begin trunc((1 * 3 div 3 + 4 - 3) / 1) rem 2 end.", 0),
check(fun() -> (2#101 band 2#10101) bor (2#110 bxor 2#010) end,
"(2#101 band 2#10101) bor (2#110 bxor 2#010).", 5),
check(fun() -> (2#1 bsl 4) + (2#10000 bsr 3) end,
"(2#1 bsl 4) + (2#10000 bsr 3).", 18),
check(fun() -> ((1<3) and ((1 =:= 2) or (1 =/= 2))) xor (1=<2) end,
"((1<3) and ((1 =:= 2) or (1 =/= 2))) xor (1=<2).", false),
check(fun() -> (a /= b) or (2 > 4) or (3 >= 3) end,
"(a /= b) or (2 > 4) or (3 >= 3).", true),
check(fun() -> "hej" ++ "san" =/= "hejsan" -- "san" end,
"\"hej\" ++ \"san\" =/= \"hejsan\" -- \"san\".", true),
check(fun() -> (bnot 1) < -0 end, "(bnot (+1)) < -0.", true),
ok.
%% OTP-4929. Unary plus rejects non-numbers.
unary_plus(Config) when is_list(Config) ->
check(fun() -> F = fun(X) -> + X end,
true = -1 == F(-1) end,
"begin F = fun(X) -> + X end,"
" true = -1 == F(-1) end.", true, ['F'], none, none),
error_check("+a.", badarith),
ok.
%% OTP-5064. Can no longer apply atoms.
apply_atom(Config) when is_list(Config) ->
error_check("[X || X <- [[1],[2]],
begin L = length, L(X) =:= 1 end].",
{badfun,length}),
ok.
%% OTP-5269. Bugs in the bit syntax.
otp_5269(Config) when is_list(Config) ->
check(fun() -> L = 8,
F = fun(<<A:L,B:A>>) -> B end,
F(<<16:8, 7:16>>)
end,
"begin
L = 8, F = fun(<<A:L,B:A>>) -> B end, F(<<16:8, 7:16>>)
end.",
7),
check(fun() -> L = 8,
F = fun(<<L:L,B:L>>) -> B end,
F(<<16:8, 7:16>>)
end,
"begin
L = 8, F = fun(<<L:L,B:L>>) -> B end, F(<<16:8, 7:16>>)
end.",
7),
check(fun() -> L = 8, <<A:L,B:A>> = <<16:8, 7:16>>, B end,
"begin L = 8, <<A:L,B:A>> = <<16:8, 7:16>>, B end.",
7),
error_check("begin L = 8, <<L:L,B:L>> = <<16:8, 7:16>> end.",
{badmatch,<<16:8,7:16>>}),
error_check("begin <<L:16,L:L>> = <<16:16,8:16>>, L end.",
{badmatch, <<16:16,8:16>>}),
check(fun() -> U = 8, (fun(<<U:U>>) -> U end)(<<32:8>>) end,
"begin U = 8, (fun(<<U:U>>) -> U end)(<<32:8>>) end.",
32),
check(fun() -> U = 8, [U || <<U:U>> <- [<<32:8>>]] end,
"begin U = 8, [U || <<U:U>> <- [<<32:8>>]] end.",
[32]),
error_check("(fun({3,<<A:32,A:32>>}) -> a end)
({3,<<17:32,19:32>>}).",
function_clause),
check(fun() -> [X || <<A:8,
B:A>> <- [<<16:8,19:16>>],
<<X:8>> <- [<<B:8>>]] end,
"[X || <<A:8,
B:A>> <- [<<16:8,19:16>>],
<<X:8>> <- [<<B:8>>]].",
[19]),
check(fun() ->
(fun (<<A:1/binary, B:8/integer, _C:B/binary>>) ->
case A of
B -> wrong;
_ -> ok
end
end)(<<1,2,3,4>>) end,
"(fun(<<A:1/binary, B:8/integer, _C:B/binary>>) ->"
" case A of B -> wrong; _ -> ok end"
" end)(<<1, 2, 3, 4>>).",
ok),
ok.
%% OTP-6539. try/catch bugs.
otp_6539(Config) when is_list(Config) ->
check(fun() ->
F = fun(A,B) ->
try A+B
catch _:_ -> dontthinkso
end
end,
lists:zipwith(F, [1,2], [2,3])
end,
"begin
F = fun(A,B) ->
try A+B
catch _:_ -> dontthinkso
end
end,
lists:zipwith(F, [1,2], [2,3])
end.",
[3, 5]),
ok.
OTP-6543 . bitlevel binaries .
otp_6543(Config) when is_list(Config) ->
check(fun() ->
<< <<X>> || <<X>> <- [1,2,3] >>
end,
"<< <<X>> || <<X>> <- [1,2,3] >>.",
<<>>),
check(fun() ->
<< <<X>> || X <- [1,2,3] >>
end,
"<< <<X>> || X <- [1,2,3] >>.",
<<1,2,3>>),
check(fun() ->
<< <<X:8>> || <<X:2>> <= <<"hej">> >>
end,
"<< <<X:8>> || <<X:2>> <= <<\"hej\">> >>.",
<<1,2,2,0,1,2,1,1,1,2,2,2>>),
check(fun() ->
<< <<X:8>> ||
<<65,X:4>> <= <<65,7:4,65,3:4,66,8:4>> >>
end,
"<< <<X:8>> ||
<<65,X:4>> <= <<65,7:4,65,3:4,66,8:4>> >>.",
<<7,3>>),
check(fun() -> <<34:18/big>> end,
"<<34:18/big>>.",
<<0,8,2:2>>),
check(fun() -> <<34:18/big-unit:2>> end,
"<<34:18/big-unit:2>>.",
<<0,0,0,2,2:4>>),
check(fun() -> <<34:18/little>> end,
"<<34:18/little>>.",
<<34,0,0:2>>),
case eval_string("<<34:18/native>>.") of
<<0,8,2:2>> -> ok;
<<34,0,0:2>> -> ok
end,
check(fun() -> <<34:18/big-signed>> end,
"<<34:18/big-signed>>.",
<<0,8,2:2>>),
check(fun() -> <<34:18/little-signed>> end,
"<<34:18/little-signed>>.",
<<34,0,0:2>>),
case eval_string("<<34:18/native-signed>>.") of
<<0,8,2:2>> -> ok;
<<34,0,0:2>> -> ok
end,
check(fun() -> <<34:18/big-unsigned>> end,
"<<34:18/big-unsigned>>.",
<<0,8,2:2>>),
check(fun() -> <<34:18/little-unsigned>> end,
"<<34:18/little-unsigned>>.",
<<34,0,0:2>>),
case eval_string("<<34:18/native-unsigned>>.") of
<<0,8,2:2>> -> ok;
<<34,0,0:2>> -> ok
end,
check(fun() -> <<3.14:32/float-big>> end,
"<<3.14:32/float-big>>.",
<<64,72,245,195>>),
check(fun() -> <<3.14:32/float-little>> end,
"<<3.14:32/float-little>>.",
<<195,245,72,64>>),
case eval_string("<<3.14:32/float-native>>.") of
<<64,72,245,195>> -> ok;
<<195,245,72,64>> -> ok
end,
error_check("<<(<<17,3:2>>)/binary>>.", badarg),
check(fun() -> <<(<<17,3:2>>)/bitstring>> end,
"<<(<<17,3:2>>)/bitstring>>.",
<<17,3:2>>),
check(fun() -> <<(<<17,3:2>>):10/bitstring>> end,
"<<(<<17,3:2>>):10/bitstring>>.",
<<17,3:2>>),
check(fun() -> <<<<344:17>>/binary-unit:17>> end,
"<<<<344:17>>/binary-unit:17>>.",
<<344:17>>),
check(fun() -> <<X:18/big>> = <<34:18/big>>, X end,
"begin <<X:18/big>> = <<34:18/big>>, X end.",
34),
check(fun() -> <<X:18/big-unit:2>> = <<34:18/big-unit:2>>, X end,
"begin <<X:18/big-unit:2>> = <<34:18/big-unit:2>>, X end.",
34),
check(fun() -> <<X:18/little>> = <<34:18/little>>, X end,
"begin <<X:18/little>> = <<34:18/little>>, X end.",
34),
check(fun() -> <<X:18/native>> = <<34:18/native>>, X end,
"begin <<X:18/native>> = <<34:18/native>>, X end.",
34),
check(fun() -> <<X:18/big-signed>> = <<34:18/big-signed>>, X end,
"begin <<X:18/big-signed>> = <<34:18/big-signed>>, X end.",
34),
check(fun() -> <<X:18/little-signed>> = <<34:18/little-signed>>,
X end,
"begin <<X:18/little-signed>> = <<34:18/little-signed>>,
X end.",
34),
check(fun() -> <<X:18/native-signed>> = <<34:18/native-signed>>,
X end,
"begin <<X:18/native-signed>> = <<34:18/native-signed>>,
X end.",
34),
check(fun() -> <<X:18/big-unsigned>> = <<34:18/big-unsigned>>,
X end,
"begin <<X:18/big-unsigned>> = <<34:18/big-unsigned>>,
X end.",
34),
check(fun() ->
<<X:18/little-unsigned>> = <<34:18/little-unsigned>>,
X end,
"begin <<X:18/little-unsigned>> = <<34:18/little-unsigned>>,
X end.",
34),
check(fun() ->
<<X:18/native-unsigned>> = <<34:18/native-unsigned>>,
X end,
"begin <<X:18/native-unsigned>> = <<34:18/native-unsigned>>,
X end.",
34),
check(fun() -> <<X:32/float-big>> = <<2.0:32/float-big>>, X end,
"begin <<X:32/float-big>> = <<2.0:32/float-big>>,
X end.",
2.0),
check(fun() -> <<X:32/float-little>> = <<2.0:32/float-little>>,
X end,
"begin <<X:32/float-little>> = <<2.0:32/float-little>>,
X end.",
2.0),
check(fun() -> <<X:32/float-native>> = <<2.0:32/float-native>>,
X end,
"begin <<X:32/float-native>> = <<2.0:32/float-native>>,
X end.",
2.0),
check(
fun() ->
[X || <<"hej",X:8>> <= <<"hej",8,"san",9,"hej",17,"hej">>]
end,
"[X || <<\"hej\",X:8>> <=
<<\"hej\",8,\"san\",9,\"hej\",17,\"hej\">>].",
[8,17]),
check(
fun() ->
L = 8, << <<B:32>> || <<L:L,B:L>> <= <<16:8, 7:16>> >>
end,
"begin L = 8, << <<B:32>> || <<L:L,B:L>> <= <<16:8, 7:16>> >>
end.",
<<0,0,0,7>>),
%% Test the Value part of a binary segment.
%% "Old" bugs have been fixed (partial_eval is called on Value).
check(fun() -> [ 3 || <<17/float>> <= <<17.0/float>>] end,
"[ 3 || <<17/float>> <= <<17.0/float>>].",
[3]),
check(fun() -> [ 3 || <<17/float>> <- [<<17.0/float>>]] end,
"[ 3 || <<17/float>> <- [<<17.0/float>>]].",
[3]),
check(fun() -> [ X || <<17/float,X:3>> <= <<17.0/float,2:3>>] end,
"[ X || <<17/float,X:3>> <= <<17.0/float,2:3>>].",
[2]),
check(fun() ->
[ foo || <<(1 bsl 1023)/float>> <= <<(1 bsl 1023)/float>>]
end,
"[ foo || <<(1 bsl 1023)/float>> <= <<(1 bsl 1023)/float>>].",
[foo]),
check(fun() ->
[ foo || <<(1 bsl 1023)/float>> <- [<<(1 bsl 1023)/float>>]]
end,
"[ foo || <<(1 bsl 1023)/float>> <- [<<(1 bsl 1023)/float>>]].",
[foo]),
error_check("[ foo || <<(1 bsl 1024)/float>> <-
[<<(1 bsl 1024)/float>>]].",
badarg),
check(fun() ->
[ foo || <<(1 bsl 1024)/float>> <- [<<(1 bsl 1023)/float>>]]
end,
"[ foo || <<(1 bsl 1024)/float>> <-
[<<(1 bsl 1023)/float>>]].",
[]),
check(fun() ->
[ foo || <<(1 bsl 1024)/float>> <= <<(1 bsl 1023)/float>>]
end,
"[ foo || <<(1 bsl 1024)/float>> <=
<<(1 bsl 1023)/float>>].",
[]),
check(fun() ->
L = 8,
[{L,B} || <<L:L,B:L/float>> <= <<32:8,7:32/float>>]
end,
"begin L = 8,
[{L,B} || <<L:L,B:L/float>> <= <<32:8,7:32/float>>]
end.",
[{32,7.0}]),
check(fun() ->
L = 8,
[{L,B} || <<L:L,B:L/float>> <- [<<32:8,7:32/float>>]]
end,
"begin L = 8,
[{L,B} || <<L:L,B:L/float>> <- [<<32:8,7:32/float>>]]
end.",
[{32,7.0}]),
check(fun() ->
[foo || <<"s">> <= <<"st">>]
end,
"[foo || <<\"s\">> <= <<\"st\">>].",
[foo]),
check(fun() -> <<_:32>> = <<17:32>> end,
"<<_:32>> = <<17:32>>.",
<<17:32>>),
check(fun() -> [foo || <<_:32>> <= <<17:32,20:32>>] end,
"[foo || <<_:32>> <= <<17:32,20:32>>].",
[foo,foo]),
check(fun() -> << <<X:32>> || X <- [1,2,3], X > 1 >> end,
"<< <<X:32>> || X <- [1,2,3], X > 1 >>.",
<<0,0,0,2,0,0,0,3>>),
error_check("[X || <<X>> <= [a,b]].",{bad_generator,[a,b]}),
ok.
%% OTP-6787. bitlevel binaries.
otp_6787(Config) when is_list(Config) ->
check(
fun() -> <<16:(1024*1024)>> = <<16:(1024*1024)>> end,
"<<16:(1024*1024)>> = <<16:(1024*1024)>>.",
<<16:1048576>>),
ok.
%% OTP-6977. ++ bug.
otp_6977(Config) when is_list(Config) ->
check(
fun() -> (fun([$X] ++ _) -> ok end)("X") end,
"(fun([$X] ++ _) -> ok end)(\"X\").",
ok),
ok.
OTP-7550 . Support for UTF-8 , UTF-16 , UTF-32 .
otp_7550(Config) when is_list(Config) ->
UTF-8 .
check(
fun() -> <<65>> = <<65/utf8>> end,
"<<65>> = <<65/utf8>>.",
<<65>>),
check(
fun() -> <<350/utf8>> = <<197,158>> end,
"<<350/utf8>> = <<197,158>>.",
<<197,158>>),
check(
fun() -> <<$b,$j,$\303,$\266,$r,$n>> = <<"bj\366rn"/utf8>> end,
"<<$b,$j,$\303,$\266,$r,$n>> = <<\"bj\366rn\"/utf8>>.",
<<$b,$j,$\303,$\266,$r,$n>>),
%% UTF-16.
check(
fun() -> <<0,65>> = <<65/utf16>> end,
"<<0,65>> = <<65/utf16>>.",
<<0,65>>),
check(
fun() -> <<16#D8,16#08,16#DF,16#45>> = <<16#12345/utf16>> end,
"<<16#D8,16#08,16#DF,16#45>> = <<16#12345/utf16>>.",
<<16#D8,16#08,16#DF,16#45>>),
check(
fun() -> <<16#08,16#D8,16#45,16#DF>> = <<16#12345/little-utf16>> end,
"<<16#08,16#D8,16#45,16#DF>> = <<16#12345/little-utf16>>.",
<<16#08,16#D8,16#45,16#DF>>),
check(
fun() -> <<350/utf16>> = <<1,94>> end,
"<<350/utf16>> = <<1,94>>.",
<<1,94>>),
check(
fun() -> <<350/little-utf16>> = <<94,1>> end,
"<<350/little-utf16>> = <<94,1>>.",
<<94,1>>),
check(
fun() -> <<16#12345/utf16>> = <<16#D8,16#08,16#DF,16#45>> end,
"<<16#12345/utf16>> = <<16#D8,16#08,16#DF,16#45>>.",
<<16#D8,16#08,16#DF,16#45>>),
check(
fun() -> <<16#12345/little-utf16>> = <<16#08,16#D8,16#45,16#DF>> end,
"<<16#12345/little-utf16>> = <<16#08,16#D8,16#45,16#DF>>.",
<<16#08,16#D8,16#45,16#DF>>),
%% UTF-32.
check(
fun() -> <<16#12345/utf32>> = <<16#0,16#01,16#23,16#45>> end,
"<<16#12345/utf32>> = <<16#0,16#01,16#23,16#45>>.",
<<16#0,16#01,16#23,16#45>>),
check(
fun() -> <<16#0,16#01,16#23,16#45>> = <<16#12345/utf32>> end,
"<<16#0,16#01,16#23,16#45>> = <<16#12345/utf32>>.",
<<16#0,16#01,16#23,16#45>>),
check(
fun() -> <<16#12345/little-utf32>> = <<16#45,16#23,16#01,16#00>> end,
"<<16#12345/little-utf32>> = <<16#45,16#23,16#01,16#00>>.",
<<16#45,16#23,16#01,16#00>>),
check(
fun() -> <<16#12345/little-utf32>> end,
"<<16#12345/little-utf32>>.",
<<16#45,16#23,16#01,16#00>>),
%% Mixed.
check(
fun() -> <<16#41,16#12345/utf32,16#0391:16,16#2E:8>> end,
"<<16#41,16#12345/utf32,16#0391:16,16#2E:8>>.",
<<16#41,16#00,16#01,16#23,16#45,16#03,16#91,16#2E>>),
ok.
OTP-8133 . Bit comprehension bug .
otp_8133(Config) when is_list(Config) ->
check(
fun() ->
E = fun(N) ->
if
is_integer(N) -> <<N/integer>>;
true -> throw(foo)
end
end,
try << << (E(V))/binary >> || V <- [1,2,3,a] >>
catch foo -> ok
end
end,
"begin
E = fun(N) ->
if is_integer(N) -> <<N/integer>>;
true -> throw(foo)
end
end,
try << << (E(V))/binary >> || V <- [1,2,3,a] >>
catch foo -> ok
end
end.",
ok),
check(
fun() ->
E = fun(N) ->
if
is_integer(N) -> <<N/integer>>;
true -> erlang:error(foo)
end
end,
try << << (E(V))/binary >> || V <- [1,2,3,a] >>
catch error:foo -> ok
end
end,
"begin
E = fun(N) ->
if is_integer(N) -> <<N/integer>>;
true -> erlang:error(foo)
end
end,
try << << (E(V))/binary >> || V <- [1,2,3,a] >>
catch error:foo -> ok
end
end.",
ok),
ok.
%% OTP-10622. Bugs.
otp_10622(Config) when is_list(Config) ->
check(fun() -> <<0>> = <<"\x{400}">> end,
"<<0>> = <<\"\\x{400}\">>. ",
<<0>>),
check(fun() -> <<"\x{aa}ff"/utf8>> = <<"\x{aa}ff"/utf8>> end,
"<<\"\\x{aa}ff\"/utf8>> = <<\"\\x{aa}ff\"/utf8>>. ",
<<"Â\xaaff">>),
%% The same bug as last example:
check(fun() -> case <<"foo"/utf8>> of
<<"foo"/utf8>> -> true
end
end,
"case <<\"foo\"/utf8>> of <<\"foo\"/utf8>> -> true end.",
true),
check(fun() -> <<"\x{400}"/utf8>> = <<"\x{400}"/utf8>> end,
"<<\"\\x{400}\"/utf8>> = <<\"\\x{400}\"/utf8>>. ",
<<208,128>>),
error_check("<<\"\\x{aaa}\">> = <<\"\\x{aaa}\">>.",
{badmatch,<<"\xaa">>}),
check(fun() -> [a || <<"\x{aaa}">> <= <<2703:16>>] end,
"[a || <<\"\\x{aaa}\">> <= <<2703:16>>]. ",
[]),
check(fun() -> [a || <<"\x{aa}"/utf8>> <= <<"\x{aa}"/utf8>>] end,
"[a || <<\"\\x{aa}\"/utf8>> <= <<\"\\x{aa}\"/utf8>>]. ",
[a]),
check(fun() -> [a || <<"\x{aa}x"/utf8>> <= <<"\x{aa}y"/utf8>>] end,
"[a || <<\"\\x{aa}x\"/utf8>> <= <<\"\\x{aa}y\"/utf8>>]. ",
[]),
check(fun() -> [a || <<"\x{aaa}">> <= <<"\x{aaa}">>] end,
"[a || <<\"\\x{aaa}\">> <= <<\"\\x{aaa}\">>]. ",
[]),
check(fun() -> [a || <<"\x{aaa}"/utf8>> <= <<"\x{aaa}"/utf8>>] end,
"[a || <<\"\\x{aaa}\"/utf8>> <= <<\"\\x{aaa}\"/utf8>>]. ",
[a]),
ok.
OTP-13228 . ERL-32 : non - local function handler bug .
otp_13228(_Config) ->
LFH = {value, fun(foo, [io_fwrite]) -> worked end},
EFH = {value, fun({io, fwrite}, [atom]) -> io_fwrite end},
{value, worked, []} = parse_and_run("foo(io:fwrite(atom)).", LFH, EFH).
%% OTP-14826: more accurate stacktrace.
otp_14826(_Config) ->
    %% Each backtrace_check/3,5 evaluates the string, expects an exit
    %% with the given reason and verifies the topmost (error_info
    %% stripped) stack frames; the 5-argument variants pass explicit
    %% local/external function handlers ('none' here).
    backtrace_check("fun(P) when is_pid(P) -> true end(a).",
                    function_clause,
                    [{erl_eval,'-inside-an-interpreted-fun-',[a],[]},
                     {erl_eval,eval_fun,8},
                     ?MODULE]),
    backtrace_check("B.",
                    {unbound_var, 'B'},
                    [{erl_eval,expr,2}, ?MODULE]),
    backtrace_check("B.",
                    {unbound, 'B'},
                    [{erl_eval,expr,6}, ?MODULE],
                    none, none),
    backtrace_check("1/0.",
                    badarith,
                    [{erlang,'/',[1,0],[]},
                     {erl_eval,do_apply,7}]),
    backtrace_catch("catch 1/0.",
                    badarith,
                    [{erlang,'/',[1,0],[]},
                     {erl_eval,do_apply,7}]),
    check(fun() -> catch exit(foo) end,
          "catch exit(foo).",
          {'EXIT', foo}),
    check(fun() -> catch throw(foo) end,
          "catch throw(foo).",
          foo),
    backtrace_check("try 1/0 after foo end.",
                    badarith,
                    [{erlang,'/',[1,0],[]},
                     {erl_eval,do_apply,7}]),
    backtrace_catch("catch (try 1/0 after foo end).",
                    badarith,
                    [{erlang,'/',[1,0],[]},
                     {erl_eval,do_apply,7}]),
    backtrace_catch("try catch 1/0 after foo end.",
                    badarith,
                    [{erlang,'/',[1,0],[]},
                     {erl_eval,do_apply,7}]),
    backtrace_check("try a of b -> bar after foo end.",
                    {try_clause,a},
                    [{erl_eval,try_clauses,10}]),
    check(fun() -> X = try foo:bar() catch A:B:C -> {A,B} end, X end,
          "try foo:bar() catch A:B:C -> {A,B} end.",
          {error, undef}),
    backtrace_check("C = 4, try foo:bar() catch A:B:C -> {A,B,C} end.",
                    stacktrace_bound,
                    [{erl_eval,check_stacktrace_vars,5},
                     {erl_eval,try_clauses,10}],
                    none, none),
    backtrace_catch("catch (try a of b -> bar after foo end).",
                    {try_clause,a},
                    [{erl_eval,try_clauses,10}]),
    backtrace_check("try 1/0 catch exit:a -> foo end.",
                    badarith,
                    [{erlang,'/',[1,0],[]},
                     {erl_eval,do_apply,7}]),
    %% A non-variable stacktrace pattern (the atom 'C' below) must be
    %% rejected by erl_eval:exprs/4 with illegal_stacktrace_variable.
    Es = [{'try',1,[{call,1,{remote,1,{atom,1,foo},{atom,1,bar}},[]}],
           [],
           [{clause,1,[{tuple,1,[{var,1,'A'},{var,1,'B'},{atom,1,'C'}]}],
             [],[{tuple,1,[{var,1,'A'},{var,1,'B'},{atom,1,'C'}]}]}],[]}],
    try
        erl_eval:exprs(Es, [], none, none),
        ct:fail(stacktrace_variable)
    catch
        error:{illegal_stacktrace_variable,{atom,1,'C'}}:S ->
            [{erl_eval,check_stacktrace_vars,5,_},
             {erl_eval,try_clauses,10,_}|_] = S
    end,
    backtrace_check("{1,1} = {A = 1, A = 2}.",
                    {badmatch, 1},
                    [erl_eval, {lists,foldl,3}]),
    backtrace_check("case a of a when foo:bar() -> x end.",
                    guard_expr,
                    [{erl_eval,guard0,4}], none, none),
    backtrace_check("case a of foo() -> ok end.",
                    {illegal_pattern,{call,1,{atom,1,foo},[]}},
                    [{erl_eval,match,6}], none, none),
    backtrace_check("case a of b -> ok end.",
                    {case_clause,a},
                    [{erl_eval,case_clauses,8}, ?MODULE]),
    backtrace_check("if a =:= b -> ok end.",
                    if_clause,
                    [{erl_eval,if_clauses,7}, ?MODULE]),
    backtrace_check("fun A(b) -> ok end(a).",
                    function_clause,
                    [{erl_eval,'-inside-an-interpreted-fun-',[a],[]},
                     {erl_eval,eval_named_fun,10},
                     ?MODULE]),
    backtrace_check("[A || A <- a].",
                    {bad_generator, a},
                    [{erl_eval,eval_generate,8}, {erl_eval, eval_lc, 7}]),
    backtrace_check("<< <<A>> || <<A>> <= a>>.",
                    {bad_generator, a},
                    [{erl_eval,eval_b_generate,8}, {erl_eval, eval_bc, 7}]),
    backtrace_check("[A || A <- [1], begin a end].",
                    {bad_filter, a},
                    [{erl_eval,eval_filter,7}, {erl_eval, eval_generate, 8}]),
    fun() ->
            {'EXIT', {{badarity, {_Fun, []}}, BT}} =
                (catch parse_and_run("fun(A) -> A end().")),
            check_backtrace([{erl_eval,do_apply,6}, ?MODULE], BT)
    end(),
    fun() ->
            {'EXIT', {{badarity, {_Fun, []}}, BT}} =
                (catch parse_and_run("fun F(A) -> A end().")),
            check_backtrace([{erl_eval,do_apply,6}, ?MODULE], BT)
    end(),
    backtrace_check("foo().",
                    undef,
                    [{erl_eval,foo,0},{erl_eval,local_func,8}],
                    none, none),
    backtrace_check("a orelse false.",
                    {badarg, a},
                    [{erl_eval,expr,6}, ?MODULE]),
    backtrace_check("a andalso false.",
                    {badarg, a},
                    [{erl_eval,expr,6}, ?MODULE]),
    backtrace_check("t = u.",
                    {badmatch, u},
                    [{erl_eval,expr,6}, ?MODULE]),
    backtrace_check("{math,sqrt}(2).",
                    {badfun, {math,sqrt}},
                    [{erl_eval,expr,6}, ?MODULE]),
    backtrace_check("erl_eval_SUITE:simple().",
                    simple,
                    [{?MODULE,simple1,0},{?MODULE,simple,0},erl_eval]),
    %% Interpreted funs with 30 parameters exceed erl_eval's argument
    %% limit, for both anonymous and named funs.
    Args = [{integer,1,I} || I <- lists:seq(1, 30)],
    backtrace_check("fun(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,"
                    "19,20,21,22,23,24,25,26,27,28,29,30) -> a end.",
                    {argument_limit,
                     {'fun',1,[{clause,1,Args,[],[{atom,1,a}]}]}},
                    [{erl_eval,expr,6}, ?MODULE]),
    backtrace_check("fun F(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,"
                    "19,20,21,22,23,24,25,26,27,28,29,30) -> a end.",
                    {argument_limit,
                     {named_fun,1,'F',[{clause,1,Args,[],[{atom,1,a}]}]}},
                    [{erl_eval,expr,6}, ?MODULE]),
    backtrace_check("#r{}.",
                    {undef_record,r},
                    [{erl_eval,expr,6}, ?MODULE],
                    none, none),
    %% eval_bits
    backtrace_check("<<100:8/bitstring>>.",
                    badarg,
                    [{eval_bits,eval_exp_field,6},
                     eval_bits,eval_bits,erl_eval]),
    backtrace_check("<<100:8/foo>>.",
                    {undefined_bittype,foo},
                    [{eval_bits,make_bit_type,4},eval_bits,
                     eval_bits,eval_bits],
                    none, none),
    backtrace_check("B = <<\"foo\">>, <<B/binary-unit:7>>.",
                    badarg,
                    [{eval_bits,eval_exp_field,6},
                     eval_bits,eval_bits,erl_eval],
                    none, none),
    %% eval_bits with error info
    {error_info, #{cause := _, override_segment_position := 1}} =
        error_info_catch("<<100:8/bitstring>>.", badarg),
    {error_info, #{cause := _, override_segment_position := 2}} =
        error_info_catch("<<0:8, 100:8/bitstring>>.", badarg),
    ok.
%% Entry point used by the otp_14826/1 stacktrace test: calls simple1/0
%% (which always raises) in a non-tail position so that both simple/0
%% and simple1/0 show up in the resulting stacktrace.
simple() ->
    Value = simple1(),
    {Value}.
%% Always raises error(simple); the shape below is deliberate — see the
%% comment about preventing compiler rewriting.
simple1() ->
    %% If the compiler could see that this function would always
    %% throw an error exception, it would rewrite simple() like this:
    %%
    %% simple() -> simple1().
    %%
    %% That would change the stacktrace. To prevent the compiler from
    %% doing that optimization, we must obfuscate the code.
    case get(a_key_that_is_not_defined) of
        undefined -> erlang:error(simple);
        WillNeverHappen -> WillNeverHappen
    end.
%% Test a custom (annotation-aware) external function handler that
%% rewrites stacktraces so frames appear to come from the pseudo-file
%% "evaluator" (see mystack/1 and custom_stacktrace_eval_handler/3).
custom_stacktrace(Config) when is_list(Config) ->
    EFH = {value, fun custom_stacktrace_eval_handler/3},
    backtrace_check("1 + atom.", badarith,
                    [{erlang,'+',[1,atom]}, mystack(1)], none, EFH),
    %% The leading newline moves the expression to line 2.
    backtrace_check("\n1 + atom.", badarith,
                    [{erlang,'+',[1,atom]}, mystack(2)], none, EFH),
    backtrace_check("lists:flatten(atom).", function_clause,
                    [{lists,flatten,[atom]}, mystack(1)], none, EFH),
    backtrace_check("invalid andalso true.", {badarg, invalid},
                    [mystack(1)], none, EFH),
    backtrace_check("invalid orelse true.", {badarg, invalid},
                    [mystack(1)], none, EFH),
    backtrace_check("invalid = valid.", {badmatch, valid},
                    [erl_eval, mystack(1)], none, EFH),
    backtrace_check("1:2.", {badexpr, ':'},
                    [erl_eval, mystack(1)], none, EFH),
    backtrace_check("Unknown.", {unbound, 'Unknown'},
                    [erl_eval, mystack(1)], none, EFH),
    backtrace_check("#unknown{}.", {undef_record,unknown},
                    [erl_eval, mystack(1)], none, EFH),
    backtrace_check("#unknown{foo=bar}.", {undef_record,unknown},
                    [erl_eval, mystack(1)], none, EFH),
    backtrace_check("#unknown.index.", {undef_record,unknown},
                    [erl_eval, mystack(1)], none, EFH),
    backtrace_check("fun foo/2.", undef,
                    [{erl_eval, foo, 2}, erl_eval, mystack(1)], none, EFH),
    backtrace_check("foo(1, 2).", undef,
                    [{erl_eval, foo, 2}, erl_eval, mystack(1)], none, EFH),
    fun() ->
            {'EXIT', {{badarity, {_Fun, []}}, BT}} =
                (catch parse_and_run("fun(A) -> A end().", none, EFH)),
            check_backtrace([erl_eval, mystack(1)], BT)
    end(),
    fun() ->
            {'EXIT', {{badarity, {_Fun, []}}, BT}} =
                (catch parse_and_run("fun F(A) -> A end().", none, EFH)),
            check_backtrace([erl_eval, mystack(1)], BT)
    end(),
    backtrace_check("[X || X <- 1].", {bad_generator, 1},
                    [erl_eval, mystack(1)], none, EFH),
    backtrace_check("[X || <<X>> <= 1].", {bad_generator, 1},
                    [erl_eval, mystack(1)], none, EFH),
    backtrace_check("<<X || X <- 1>>.", {bad_generator, 1},
                    [erl_eval, mystack(1)], none, EFH),
    backtrace_check("<<X || <<X>> <= 1>>.", {bad_generator, 1},
                    [erl_eval, mystack(1)], none, EFH),
    backtrace_check("if false -> true end.", if_clause,
                    [erl_eval, mystack(1)], none, EFH),
    backtrace_check("case 0 of 1 -> true end.", {case_clause, 0},
                    [erl_eval, mystack(1)], none, EFH),
    backtrace_check("try 0 of 1 -> true after ok end.", {try_clause, 0},
                    [mystack(1)], none, EFH),
    backtrace_check("fun(0) -> 1 end(1).", function_clause,
                    [{erl_eval,'-inside-an-interpreted-fun-', [1]}, erl_eval, mystack(1)],
                    none, EFH),
    backtrace_check("fun F(0) -> 1 end(1).", function_clause,
                    [{erl_eval,'-inside-an-interpreted-fun-', [1]}, erl_eval, mystack(1)],
                    none, EFH),
    fun() ->
            {'EXIT', {{illegal_pattern,_}, BT}} =
                (catch parse_and_run("make_ref() = 1.", none, EFH)),
            check_backtrace([erl_eval, mystack(1)], BT)
    end(),
    %% eval_bits
    backtrace_check("<<100:8/bitstring>>.",
                    badarg,
                    [{eval_bits,eval_exp_field,6}, mystack(1)],
                    none, EFH),
    backtrace_check("<<100:8/foo>>.",
                    {undefined_bittype,foo},
                    [{eval_bits,make_bit_type,4}, mystack(1)],
                    none, EFH),
    backtrace_check("B = <<\"foo\">>, <<B/binary-unit:7>>.",
                    badarg,
                    [{eval_bits,eval_exp_field,6}, mystack(1)],
                    none, EFH),
    ok.
%% Build the synthetic stack frame that custom_stacktrace_eval_handler/3
%% appends: a fake my_module:my_function/0 frame located in the
%% pseudo-file "evaluator" at the given line.
mystack(LineNo) ->
    Location = [{file, "evaluator"}, {line, LineNo}],
    {my_module, my_function, 0, Location}.
%% Annotation-aware external function handler: performs the call, and if
%% it raises, re-raises with a stacktrace that is pruned of frames shared
%% with this test suite/erl_eval and terminated by a synthetic
%% "evaluator" frame carrying the annotation's line (cf. mystack/1).
custom_stacktrace_eval_handler(Ann, FunOrModFun, Args) ->
    try
        case FunOrModFun of
            {Mod, Fun} -> apply(Mod, Fun, Args);
            Fun -> apply(Fun, Args)
        end
    catch
        Kind:Reason:Stacktrace ->
            %% Take everything up to the evaluation function
            Pruned =
                lists:takewhile(fun
                                    ({erl_eval_SUITE,backtrace_check,5,_}) -> false;
                                    (_) -> true
                                end, Stacktrace),
            %% Now we prune any shared code path from erl_eval
            {current_stacktrace, Current} =
                erlang:process_info(self(), current_stacktrace),
            Reversed = drop_common(lists:reverse(Current), lists:reverse(Pruned)),
            Location = [{file, "evaluator"}, {line, erl_anno:line(Ann)}],
            %% Add our file+line information at the bottom
            Custom = lists:reverse([{my_module, my_function, 0, Location} | Reversed]),
            erlang:raise(Kind, Reason, Custom)
    end.
%% Drop from the second list (a reversed, pruned stacktrace) the frames
%% it shares with the first list (the reversed current call stack);
%% finally strip a leftover handler frame, if present.
%%
%% Fix: the second clause bound H without using it, which triggers an
%% "unused variable" compiler warning; use _ instead.
drop_common([H | T1], [H | T2]) -> drop_common(T1, T2);
drop_common([_ | T1], T2) -> drop_common(T1, T2);
drop_common([], [{?MODULE, custom_stacktrace_eval_handler, _, _} | T2]) -> T2;
drop_common([], T2) -> T2.
%% Simple cases, just to cover some code.
funs(Config) when is_list(Config) ->
    %% Exercise every combination of local/external function handlers,
    %% including the annotation-aware external handler (ann_efh/0).
    do_funs(none, none),
    do_funs(lfh(), none),
    do_funs(none, efh()),
    do_funs(lfh(), efh()),
    do_funs(none, ann_efh()),
    do_funs(lfh(), ann_efh()),
    error_check("nix:foo().", {access_not_allowed,nix}, lfh(), efh()),
    error_check("nix:foo().", {access_not_allowed,nix}, lfh(), ann_efh()),
    error_check("bar().", undef, none, none),
    check(fun() -> F1 = fun(F,N) -> ?MODULE:count_down(F, N) end,
                   F1(F1, 1000) end,
          "begin F1 = fun(F,N) -> count_down(F, N) end,"
          "F1(F1,1000) end.",
          0, ['F1'], lfh(), none),
    check(fun() -> F1 = fun(F,N) -> ?MODULE:count_down(F, N) end,
                   F1(F1, 1000) end,
          "begin F1 = fun(F,N) -> count_down(F, N) end,"
          "F1(F1,1000) end.",
          0, ['F1'], lfh_value(), none),
    check(fun() -> F1 = fun(F,N) -> ?MODULE:count_down(F, N) end,
                   F1(F1, 1000) end,
          "begin F1 = fun(F,N) -> count_down(F, N) end,"
          "F1(F1,1000) end.",
          0, ['F1'], lfh_value_extra(), none),
    check(fun() -> F1 = fun(F,N) -> ?MODULE:count_down(F, N) end,
                   F1(F1, 1000) end,
          "begin F1 = fun(F,N) -> count_down(F, N) end,"
          "F1(F1,1000) end.",
          0, ['F1'], {?MODULE,local_func_value}, none),
    %% This is not documented, and only for backward compatibility (good!).
    B0 = erl_eval:new_bindings(),
    check(fun() -> is_function(?MODULE:count_down_fun()) end,
          "begin is_function(count_down_fun()) end.",
          true, [], {?MODULE,local_func,[B0]},none),
    %% A handler can veto specific calls by exiting.
    EF = fun({timer,sleep}, As) when length(As) == 1 -> exit({got_it,sleep});
            ({M,F}, As) -> apply(M, F, As)
         end,
    EFH = {value, EF},
    error_check("apply(timer, sleep, [1]).", got_it, none, EFH),
    error_check("begin F = fun(T) -> timer:sleep(T) end,F(1) end.",
                got_it, none, EFH),
    AnnEF = fun(1, {timer,sleep}, As) when length(As) == 1 -> exit({got_it,sleep});
               (1, {M,F}, As) -> apply(M, F, As)
            end,
    AnnEFH = {value, AnnEF},
    error_check("apply(timer, sleep, [1]).", got_it, none, AnnEFH),
    error_check("begin F = fun(T) -> timer:sleep(T) end,F(1) end.",
                got_it, none, AnnEFH),
    error_check("fun c/1.", undef),
    error_check("fun a:b/0().", undef),
    %% Arities up to MaxArgs work; one more hits argument_limit.
    MaxArgs = 20,
    [true] =
        lists:usort([run_many_args(SAs) || SAs <- many_args(MaxArgs)]),
    {'EXIT',{{argument_limit,_},_}} =
        (catch run_many_args(many_args1(MaxArgs+1))),
    check(fun() -> M = lists, F = fun M:reverse/1,
                   [1,2] = F([2,1]), ok end,
          "begin M = lists, F = fun M:reverse/1,"
          " [1,2] = F([2,1]), ok end.",
          ok),
    %% Test that {M,F} is not accepted as a fun.
    error_check("{" ?MODULE_STRING ",module_info}().",
                {badfun,{?MODULE,module_info}}),
    ok.
%% Evaluate the source of an identity fun and check that applying it to
%% the matching argument list returns that list unchanged.
run_many_args({Source, Args}) ->
    Fun = eval_string(Source),
    Args =:= apply(Fun, Args).
%% Generate {Source, Args} identity-fun test pairs of arity 1..N.
many_args(N) ->
    lists:map(fun many_args1/1, lists:seq(1, N)).
%% Build the source text of an identity fun of arity N together with a
%% matching argument list, e.g. {"fun(V1,V2) -> [V1,V2] end.", [1,2]}.
many_args1(N) ->
    Args = lists:seq(1, N),
    VarText = arg_text(Args, "V"),
    Source = lists:flatten(
               io_lib:format("fun(~s) -> [~s] end.", [VarText, VarText])),
    {Source, Args}.

%% Comma-separated variable names "V1,V2,..." for the given integers.
arg_text(Ints, Prefix) ->
    tl(lists:flatten(["," ++ Prefix ++ integer_to_list(I) || I <- Ints])).
%% Evaluate a collection of fun expressions (plain, recursive via
%% self-application, and closures) under the given handlers and check
%% values and resulting bindings.
do_funs(LFH, EFH) ->
    %% LFH is not really used by these examples...

    %% These tests do not prove that tail recursive functions really
    %% work (that the process does not grow); one should also run them
    %% manually with 1000 replaced by 1000000.
    M = atom_to_list(?MODULE),
    check(fun() -> F1 = fun(F,N) -> ?MODULE:count_down(F, N) end,
                   F1(F1, 1000) end,
          concat(["begin F1 = fun(F,N) -> ", M,
                  ":count_down(F, N) end, F1(F1,1000) end."]),
          0, ['F1'], LFH, EFH),
    check(fun() -> F1 = fun(F,N) -> apply(?MODULE,count_down,[F,N])
                        end, F1(F1, 1000) end,
          concat(["begin F1 = fun(F,N) -> apply(", M,
                  ",count_down,[F, N]) end, F1(F1,1000) end."]),
          0, ['F1'], LFH, EFH),
    check(fun() -> F = fun(F,N) when N > 0 -> apply(F,[F,N-1]);
                          (_F,0) -> ok end,
                   F(F, 1000)
          end,
          "begin F = fun(F,N) when N > 0 -> apply(F,[F,N-1]);"
          "(_F,0) -> ok end,"
          "F(F, 1000) end.",
          ok, ['F'], LFH, EFH),
    check(fun() -> F = fun(F,N) when N > 0 ->
                               apply(erlang,apply,[F,[F,N-1]]);
                          (_F,0) -> ok end,
                   F(F, 1000)
          end,
          "begin F = fun(F,N) when N > 0 ->"
          "apply(erlang,apply,[F,[F,N-1]]);"
          "(_F,0) -> ok end,"
          "F(F, 1000) end.",
          ok, ['F'], LFH, EFH),
    check(fun() -> F = count_down_fun(),
                   SF = fun(SF, F1, N) -> F(SF, F1, N) end,
                   SF(SF, F, 1000) end,
          concat(["begin F = ", M, ":count_down_fun(),"
                  "SF = fun(SF, F1, N) -> F(SF, F1, N) end,"
                  "SF(SF, F, 1000) end."]),
          ok, ['F','SF'], LFH, EFH),
    check(fun() -> F = fun(X) -> A = 1+X, {X,A} end,
                   true = {2,3} == F(2) end,
          "begin F = fun(X) -> A = 1+X, {X,A} end,
           true = {2,3} == F(2) end.", true, ['F'], LFH, EFH),
    check(fun() -> F = fun(X) -> erlang:'+'(X,2) end,
                   true = 3 == F(1) end,
          "begin F = fun(X) -> erlang:'+'(X,2) end,"
          " true = 3 == F(1) end.", true, ['F'],
          LFH, EFH),
    check(fun() -> F = fun(X) -> byte_size(X) end,
                   ?MODULE:do_apply(F,<<"hej">>) end,
          concat(["begin F = fun(X) -> size(X) end,",
                  M,":do_apply(F,<<\"hej\">>) end."]),
          3, ['F'], LFH, EFH),
    %% Closures capturing outer bindings, including shadowing.
    check(fun() -> F1 = fun(X, Z) -> {X,Z} end,
                   Z = 5,
                   F2 = fun(X, Y) -> F1(Z,{X,Y}) end,
                   F3 = fun(X, Y) -> {a,F1(Z,{X,Y})} end,
                   {5,{x,y}} = F2(x,y),
                   {a,{5,{y,x}}} = F3(y,x),
                   {5,{5,y}} = F2(Z,y),
                   true = {5,{x,5}} == F2(x,Z) end,
          "begin F1 = fun(X, Z) -> {X,Z} end,
           Z = 5,
           F2 = fun(X, Y) -> F1(Z,{X,Y}) end,
           F3 = fun(X, Y) -> {a,F1(Z,{X,Y})} end,
           {5,{x,y}} = F2(x,y),
           {a,{5,{y,x}}} = F3(y,x),
           {5,{5,y}} = F2(Z,y),
           true = {5,{x,5}} == F2(x,Z) end.",
          true, ['F1','Z','F2','F3'], LFH, EFH),
    check(fun() -> F = fun(X) -> byte_size(X) end,
                   F2 = fun(Y) -> F(Y) end,
                   ?MODULE:do_apply(F2,<<"hej">>) end,
          concat(["begin F = fun(X) -> size(X) end,",
                  "F2 = fun(Y) -> F(Y) end,",
                  M,":do_apply(F2,<<\"hej\">>) end."]),
          3, ['F','F2'], LFH, EFH),
    check(fun() -> Z = 5, F = fun(X) -> {Z,X} end,
                   F2 = fun(Z) -> F(Z) end, F2(3) end,
          "begin Z = 5, F = fun(X) -> {Z,X} end,
           F2 = fun(Z) -> F(Z) end, F2(3) end.",
          {5,3},['F','F2','Z'], LFH, EFH),
    check(fun() -> F = fun(Z) -> Z end,
                   F2 = fun(X) -> F(X), Z = {X,X}, Z end,
                   {1,1} = F2(1), Z = 7, Z end,
          "begin F = fun(Z) -> Z end,
           F2 = fun(X) -> F(X), Z = {X,X}, Z end,
           {1,1} = F2(1), Z = 7, Z end.", 7, ['F','F2','Z'],
          LFH, EFH),
    check(fun() -> F = fun(F, N) -> [?MODULE:count_down(F,N) || X <-[1]]
                        end, F(F,2) end,
          concat(["begin F = fun(F, N) -> [", M,
                  ":count_down(F,N) || X <-[1]] end, F(F,2) end."]),
          [[[0]]], ['F'], LFH, EFH),
    ok.
%% Count down via self-application: while Count is positive, call Fun
%% with itself and Count-1; once non-positive, return the final Count.
count_down(Fun, Count) when Count > 0 ->
    Fun(Fun, Count - 1);
count_down(_Fun, Count) ->
    Count.
%% Return a three-argument counting fun: applies SelfFun to itself while
%% N is positive, and yields 'ok' once it is not.
count_down_fun() ->
    fun(SelfFun, Fun, N) when N > 0 ->
            SelfFun(SelfFun, Fun, N - 1);
       (_SelfFun, _Fun, _N) ->
            ok
    end.
%% Apply a one-argument fun; exists so tests can invoke a fun through a
%% remote call to this module.
do_apply(Fun, Arg) ->
    Fun(Arg).
%% Local function handler in {eval, Fun} form; defers to local_func/3.
lfh() ->
    {eval, fun(Name, ArgExprs, Bindings) ->
                   local_func(Name, ArgExprs, Bindings)
           end}.
%% Evaluate the argument expressions (recursively under lfh/0), then
%% dispatch the call to this module when it exports Name/Arity and to
%% shell_default otherwise. Returns {value, Result, NewBindings}.
local_func(Name, ArgExprs, Bindings0) when is_atom(Name) ->
    {Args, Bindings} = erl_eval:expr_list(ArgExprs, Bindings0, lfh()),
    Target =
        case erlang:function_exported(?MODULE, Name, length(Args)) of
            true -> ?MODULE;
            false -> shell_default
        end,
    {value, apply(Target, Name, Args), Bindings}.
%% Undocumented {value, Fun, ExtraArgs} handler form: the listed extra
%% arguments are passed along to the handler fun.
lfh_value_extra() ->
    Handler = fun(Name, Args, a1, a2) -> local_func_value(Name, Args) end,
    {value, Handler, [a1, a2]}.
%% Local function handler in {value, Fun} form; defers to
%% local_func_value/2.
lfh_value() ->
    Handler = fun(Name, Args) -> local_func_value(Name, Args) end,
    {value, Handler}.
%% Call Name/length(Args) in this module when exported, otherwise fall
%% back to shell_default; returns the bare result (erl_eval wraps value
%% handlers itself).
local_func_value(Name, Args) when is_atom(Name) ->
    Target =
        case erlang:function_exported(?MODULE, Name, length(Args)) of
            true -> ?MODULE;
            false -> shell_default
        end,
    apply(Target, Name, Args).
%% External function handler in {value, Fun} form; defers to
%% external_func/2.
efh() ->
    {value, fun(FunOrMF, Args) -> external_func(FunOrMF, Args) end}.
%% Annotation-aware external function handler; ignores the annotation
%% and defers to external_func/2.
ann_efh() ->
    {value, fun(_Ann, FunOrMF, Args) -> external_func(FunOrMF, Args) end}.
%% Apply an external call on behalf of the handler. Calls into the
%% pseudo-module 'nix' are vetoed with an exit; funs are applied
%% directly; {Module, Function} tuples go through apply/3.
external_func({Module, _Fun}, _Args) when Module == nix ->
    exit({{access_not_allowed, Module}, [mfa]});
external_func(Fun, Args) when is_function(Fun) ->
    apply(Fun, Args);
external_func({Module, Fun}, Args) ->
    apply(Module, Fun, Args).
%% Test try-of-catch-after-end statement.
try_catch(Config) when is_list(Config) ->
    %% Each check/3 pairs a compiled reference fun with the same source
    %% evaluated by erl_eval; 'after' side effects are observed via the
    %% process dictionary key 'try_catch'.

    %% Match in of with catch
    check(fun() -> try 1 of 1 -> 2 catch _:_ -> 3 end end,
          "try 1 of 1 -> 2 catch _:_ -> 3 end.", 2),
    check(fun() -> try 1 of 1 -> 2; 3 -> 4 catch _:_ -> 5 end end,
          "try 1 of 1 -> 2; 3 -> 4 catch _:_ -> 5 end.", 2),
    check(fun() -> try 3 of 1 -> 2; 3 -> 4 catch _:_ -> 5 end end,
          "try 3 of 1 -> 2; 3 -> 4 catch _:_ -> 5 end.", 4),
    %% Just after
    check(fun () -> X = try 1 after put(try_catch, 2) end,
                    {X,get(try_catch)} end,
          "begin X = try 1 after put(try_catch, 2) end, "
          "{X,get(try_catch)} end.", {1,2}),
    %% Match in of with after
    check(fun() -> X = try 1 of 1 -> 2 after put(try_catch, 3) end,
                   {X,get(try_catch)} end,
          "begin X = try 1 of 1 -> 2 after put(try_catch, 3) end, "
          "{X,get(try_catch)} end.", {2,3}),
    check(fun() -> X = try 1 of 1 -> 2; 3 -> 4
                       after put(try_catch, 5) end,
                   {X,get(try_catch)} end,
          "begin X = try 1 of 1 -> 2; 3 -> 4 "
          " after put(try_catch, 5) end, "
          " {X,get(try_catch)} end.", {2,5}),
    check(fun() -> X = try 3 of 1 -> 2; 3 -> 4
                       after put(try_catch, 5) end,
                   {X,get(try_catch)} end,
          "begin X = try 3 of 1 -> 2; 3 -> 4 "
          " after put(try_catch, 5) end, "
          " {X,get(try_catch)} end.", {4,5}),
    %% Nomatch in of
    error_check("try 1 of 2 -> 3 catch _:_ -> 4 end.",
                {try_clause,1}),
    %% Nomatch in of with after
    check(fun () -> {'EXIT',{{try_clause,1},_}} =
                        begin catch try 1 of 2 -> 3
                                    after put(try_catch, 4) end end,
                    get(try_catch) end,
          "begin {'EXIT',{{try_clause,1},_}} = "
          " begin catch try 1 of 2 -> 3 "
          " after put(try_catch, 4) end end, "
          " get(try_catch) end. ", 4),
    %% Exception in try
    check(fun () -> try 1=2 catch error:{badmatch,2} -> 3 end end,
          "try 1=2 catch error:{badmatch,2} -> 3 end.", 3),
    check(fun () -> try 1=2 of 3 -> 4
                    catch error:{badmatch,2} -> 5 end end,
          "try 1=2 of 3 -> 4 "
          "catch error:{badmatch,2} -> 5 end.", 5),
    %% Exception in try with after
    check(fun () -> X = try 1=2
                        catch error:{badmatch,2} -> 3
                        after put(try_catch, 4) end,
                    {X,get(try_catch)} end,
          "begin X = try 1=2 "
          " catch error:{badmatch,2} -> 3 "
          " after put(try_catch, 4) end, "
          " {X,get(try_catch)} end. ", {3,4}),
    check(fun () -> X = try 1=2 of 3 -> 4
                        catch error:{badmatch,2} -> 5
                        after put(try_catch, 6) end,
                    {X,get(try_catch)} end,
          "begin X = try 1=2 of 3 -> 4"
          " catch error:{badmatch,2} -> 5 "
          " after put(try_catch, 6) end, "
          " {X,get(try_catch)} end. ", {5,6}),
    %% Uncaught exception
    error_check("try 1=2 catch error:undefined -> 3 end. ",
                {badmatch,2}),
    error_check("try 1=2 of 3 -> 4 catch error:undefined -> 5 end. ",
                {badmatch,2}),
    %% Uncaught exception with after
    check(fun () -> {'EXIT',{{badmatch,2},_}} =
                        begin catch try 1=2
                                    after put(try_catch, 3) end end,
                    get(try_catch) end,
          "begin {'EXIT',{{badmatch,2},_}} = "
          " begin catch try 1=2 "
          " after put(try_catch, 3) end end, "
          " get(try_catch) end. ", 3),
    check(fun () -> {'EXIT',{{badmatch,2},_}} =
                        begin catch try 1=2 of 3 -> 4
                                    after put(try_catch, 5) end end,
                    get(try_catch) end,
          "begin {'EXIT',{{badmatch,2},_}} = "
          " begin catch try 1=2 of 3 -> 4"
          " after put(try_catch, 5) end end, "
          " get(try_catch) end. ", 5),
    check(fun () -> {'EXIT',{{badmatch,2},_}} =
                        begin catch try 1=2 catch error:undefined -> 3
                                    after put(try_catch, 4) end end,
                    get(try_catch) end,
          "begin {'EXIT',{{badmatch,2},_}} = "
          " begin catch try 1=2 catch error:undefined -> 3 "
          " after put(try_catch, 4) end end, "
          " get(try_catch) end. ", 4),
    check(fun () -> {'EXIT',{{badmatch,2},_}} =
                        begin catch try 1=2 of 3 -> 4
                                    catch error:undefined -> 5
                                    after put(try_catch, 6) end end,
                    get(try_catch) end,
          "begin {'EXIT',{{badmatch,2},_}} = "
          " begin catch try 1=2 of 3 -> 4 "
          " catch error:undefined -> 5 "
          " after put(try_catch, 6) end end, "
          " get(try_catch) end. ", 6),
    ok.
%% OTP-7933.
eval_expr_5(Config) when is_list(Config) ->
    {ok,Tokens ,_} =
        erl_scan:string("if a+4 == 4 -> yes; true -> no end. "),
    {ok, [Expr]} = erl_parse:parse_exprs(Tokens),
    %% The fifth argument selects the return format: 'none' yields the
    %% full {value, V, Bindings} tuple, 'value' yields just the value,
    %% and any other term must fail with function_clause.
    {value, no, []} = erl_eval:expr(Expr, [], none, none, none),
    no = erl_eval:expr(Expr, [], none, none, value),
    try
        erl_eval:expr(Expr, [], none, none, 4711),
        function_clause = should_never_reach_here
    catch
        error:function_clause ->
            ok
    end.
%% Constructing a zero-width segment from a non-number must be badarg.
zero_width(Config) when is_list(Config) ->
    check(fun() ->
                  {'EXIT',{badarg,_}} = (catch <<not_a_number:0>>),
                  ok
          end, "begin {'EXIT',{badarg,_}} = (catch <<not_a_number:0>>), "
          "ok end.", ok),
    ok.
%% Named funs (including '_'-named and self-referencing ones).
eep37(Config) when is_list(Config) ->
    check(fun () -> (fun _(X) -> X end)(42) end,
          "(fun _(X) -> X end)(42).",
          42),
    check(fun () -> (fun _Id(X) -> X end)(42) end,
          "(fun _Id(X) -> X end)(42).", 42),
    check(fun () -> is_function((fun Self() -> Self end)(), 0) end,
          "is_function((fun Self() -> Self end)(), 0).",
          true),
    %% A recursive named fun: factorial of 6.
    check(fun () ->
                  F = fun Fact(N) when N > 0 ->
                              N * Fact(N - 1);
                          Fact(0) ->
                              1
                      end,
                  F(6)
          end,
          "(fun Fact(N) when N > 0 -> N * Fact(N - 1); Fact(0) -> 1 end)(6).",
          720),
    ok.
%% Maps: construction, update (:= and =>), matching, and error cases.
eep43(Config) when is_list(Config) ->
    check(fun () -> #{} end, " #{}.", #{}),
    check(fun () -> #{a => b} end, "#{a => b}.", #{a => b}),
    check(fun () ->
                  Map = #{a => b},
                  {Map#{a := b},Map#{a => c},Map#{d => e}}
          end,
          "begin "
          "    Map = #{a => B=b}, "
          "    {Map#{a := B},Map#{a => c},Map#{d => e}} "
          "end.",
          {#{a => b},#{a => c},#{a => b,d => e}}),
    check(fun () ->
                  lists:map(fun (X) -> X#{price := 0} end,
                            [#{hello => 0, price => nil}])
          end,
          "lists:map(fun (X) -> X#{price := 0} end,
           [#{hello => 0, price => nil}]).",
          [#{hello => 0, price => 0}]),
    %% Big (333-bit) binary keys work in construction and matching.
    check(fun () ->
                  Map = #{ <<33:333>> => "wat" },
                  #{ <<33:333>> := "wat" } = Map
          end,
          "begin "
          "    Map = #{ <<33:333>> => \"wat\" }, "
          "    #{ <<33:333>> := \"wat\" } = Map "
          "end.",
          #{ <<33:333>> => "wat" }),
    check(fun () ->
                  K1 = 1,
                  K2 = <<42:301>>,
                  K3 = {3,K2},
                  Map = #{ K1 => 1, K2 => 2, K3 => 3, {2,2} => 4},
                  #{ K1 := 1, K2 := 2, K3 := 3, {2,2} := 4} = Map
          end,
          "begin "
          "    K1 = 1, "
          "    K2 = <<42:301>>, "
          "    K3 = {3,K2}, "
          "    Map = #{ K1 => 1, K2 => 2, K3 => 3, {2,2} => 4}, "
          "    #{ K1 := 1, K2 := 2, K3 := 3, {2,2} := 4} = Map "
          "end.",
          #{ 1 => 1, <<42:301>> => 2, {3,<<42:301>>} => 3, {2,2} => 4}),
    %% Variable keys in map patterns.
    check(fun () ->
                  X = key,
                  (fun(#{X := value}) -> true end)(#{X => value})
          end,
          "begin "
          "    X = key, "
          "    (fun(#{X := value}) -> true end)(#{X => value}) "
          "end.",
          true),
    error_check("[camembert]#{}.", {badmap,[camembert]}),
    error_check("[camembert]#{nonexisting:=v}.", {badmap,[camembert]}),
    error_check("#{} = 1.", {badmatch,1}),
    error_check("[]#{a=>error(bad)}.", bad),
    error_check("(#{})#{nonexisting:=value}.", {badkey,nonexisting}),
    ok.
%% OTP-15035: map expressions in guards; a failing guard construct
%% (like a bare #{} test) just makes that clause fail.
otp_15035(Config) when is_list(Config) ->
    check(fun() ->
                  fun() when #{} ->
                          a;
                     () when #{a => b} ->
                          b;
                     () when #{a => b} =:= #{a => b} ->
                          c
                  end()
          end,
          "fun() when #{} ->
                   a;
              () when #{a => b} ->
                   b;
              () when #{a => b} =:= #{a => b} ->
                   c
           end().",
          c),
    check(fun() ->
                  F = fun(M) when M#{} ->
                              a;
                         (M) when M#{a => b} ->
                              b;
                         (M) when M#{a := b} ->
                              c;
                         (M) when M#{a := b} =:= M#{a := b} ->
                              d;
                         (M) when M#{a => b} =:= M#{a => b} ->
                              e
                      end,
                  {F(#{}), F(#{a => b})}
          end,
          "fun() ->
               F = fun(M) when M#{} ->
                           a;
                      (M) when M#{a => b} ->
                           b;
                      (M) when M#{a := b} ->
                           c;
                      (M) when M#{a := b} =:= M#{a := b} ->
                           d;
                      (M) when M#{a => b} =:= M#{a => b} ->
                           e
                   end,
               {F(#{}), F(#{a => b})}
           end().",
          {e, d}),
    ok.
%% OTP-16439: repeated unary signs in expressions and patterns.
otp_16439(Config) when is_list(Config) ->
    check(fun() -> + - 5 end, "+ - 5.", -5),
    check(fun() -> - + - 5 end, "- + - 5.", 5),
    check(fun() -> case 7 of - - 7 -> seven end end,
          "case 7 of - - 7 -> seven end.", seven),
    %% "- #{}" must parse as unary minus applied to an empty map.
    {ok,Ts,_} = erl_scan:string("- #{}. "),
    A = erl_anno:new(1),
    {ok,[{op,A,'-',{map,A,[]}}]} = erl_parse:parse_exprs(Ts),
    ok.
%% Test guard expressions in keys for maps and in sizes in binary matching.
otp_14708(Config) when is_list(Config) ->
    check(fun() -> X = 42, #{{tag,X} := V} = #{{tag,X} => a}, V end,
          "begin X = 42, #{{tag,X} := V} = #{{tag,X} => a}, V end.",
          a),
    %% Guard BIF calls (element/2) allowed as map-pattern keys.
    check(fun() ->
                  T = {x,y,z},
                  Map = #{x => 99, y => 100},
                  #{element(1, T) := V1, element(2, T) := V2} = Map,
                  {V1, V2}
          end,
          "begin
               T = {x,y,z},
               Map = #{x => 99, y => 100},
               #{element(1, T) := V1, element(2, T) := V2} = Map,
               {V1, V2}
           end.",
          {99, 100}),
    %% Non-guard expressions in pattern keys are rejected.
    error_check("#{term_to_binary(42) := _} = #{}.", illegal_guard_expr),
    %% Guard expressions as segment sizes in binary matching.
    check(fun() ->
                  <<Sz:16,Body:(Sz-1)/binary>> = <<4:16,1,2,3>>,
                  Body
          end,
          "begin
               <<Sz:16,Body:(Sz-1)/binary>> = <<4:16,1,2,3>>,
               Body
           end.",
          <<1,2,3>>),
    check(fun() ->
                  Sizes = #{0 => 3, 1 => 7},
                  <<SzTag:1,Body:(map_get(SzTag, Sizes))/binary>> =
                      <<1:1,1,2,3,4,5,6,7>>,
                  Body
          end,
          "begin
               Sizes = #{0 => 3, 1 => 7},
               <<SzTag:1,Body:(map_get(SzTag, Sizes))/binary>> =
                   <<1:1,1,2,3,4,5,6,7>>,
               Body
           end.",
          <<1,2,3,4,5,6,7>>),
    error_check("<<X:(process_info(self()))>> = <<>>.", illegal_bitsize),
    ok.
%% OTP-16545: utf16/utf32 native-endian segments and string literals
%% with an explicit size in binary construction.
otp_16545(Config) when is_list(Config) ->
    case eval_string("<<$W/utf16-native>> = <<$W/utf16-native>>.") of
        <<$W/utf16-native>> -> ok
    end,
    case eval_string("<<$W/utf32-native>> = <<$W/utf32-native>>.") of
        <<$W/utf32-native>> -> ok
    end,
    check(fun() -> <<10/unsigned,"fgbz":86>> end,
          "<<10/unsigned,\"fgbz\":86>>.",
          <<10,0,0,0,0,0,0,0,0,0,1,152,0,0,0,0,0,0,0,0,0,6,112,0,0,
            0,0,0,0,0,0,0,24,128,0,0,0,0,0,0,0,0,0,122>>),
    %% An empty string contributes nothing regardless of size/signedness.
    check(fun() -> <<"":16/signed>> end,
          "<<\"\":16/signed>>.",
          <<>>),
    error_check("<<\"\":problem/signed>>.", badarg),
    ok.
%% OTP-16865: failing size expressions in bit-string generators.
otp_16865(Config) when is_list(Config) ->
    %% A pattern whose size expression fails (1/0) never matches,
    %% yielding an empty result instead of an exception.
    check(fun() -> << <<>> || <<34:(1/0)>> <= <<"string">> >> end,
          "<< <<>> || <<34:(1/0)>> <= <<\"string\">> >>.",
          <<>>),
    %% The order of evaluation is important. Follow the example set by
    %% compiled code:
    error_check("<< <<>> || <<>> <= <<1:(-1), (fun() -> a = b end())>> >>.",
                {badmatch, b}),
    ok.
%% Maybe expressions: the ?= conditional match operator, with and
%% without an else section.
eep49(Config) when is_list(Config) ->
    check(fun() ->
                  maybe empty end
          end,
          "maybe empty end.",
          empty),
    check(fun() ->
                  maybe ok ?= ok end
          end,
          "maybe ok ?= ok end.",
          ok),
    check(fun() ->
                  maybe {ok,A} ?= {ok,good}, A end
          end,
          "maybe {ok,A} ?= {ok,good}, A end.",
          good),
    check(fun() ->
                  maybe {ok,A} ?= {ok,good}, {ok,B} ?= {ok,also_good}, {A,B} end
          end,
          "maybe {ok,A} ?= {ok,good}, {ok,B} ?= {ok,also_good}, {A,B} end.",
          {good,also_good}),
    %% Without 'else', a failed ?= yields the non-matching value itself.
    check(fun() ->
                  maybe {ok,A} ?= {ok,good}, {ok,B} ?= {error,wrong}, {A,B} end
          end,
          "maybe {ok,A} ?= {ok,good}, {ok,B} ?= {error,wrong}, {A,B} end.",
          {error,wrong}),
    %% Test maybe ... else ... end.
    check(fun() ->
                  maybe empty else _ -> error end
          end,
          "maybe empty else _ -> error end.",
          empty),
    check(fun() ->
                  maybe ok ?= ok else _ -> error end
          end,
          "maybe ok ?= ok else _ -> error end.",
          ok),
    check(fun() ->
                  maybe ok ?= other else _ -> error end
          end,
          "maybe ok ?= other else _ -> error end.",
          error),
    check(fun() ->
                  maybe {ok,A} ?= {ok,good}, {ok,B} ?= {ok,also_good}, {A,B}
                  else {error,_} -> error end
          end,
          "maybe {ok,A} ?= {ok,good}, {ok,B} ?= {ok,also_good}, {A,B} "
          "else {error,_} -> error end.",
          {good,also_good}),
    check(fun() ->
                  maybe {ok,A} ?= {ok,good}, {ok,B} ?= {error,other}, {A,B}
                  else {error,_} -> error end
          end,
          "maybe {ok,A} ?= {ok,good}, {ok,B} ?= {error,other}, {A,B} "
          "else {error,_} -> error end.",
          error),
    %% A value matching no else clause raises else_clause.
    error_check("maybe ok ?= simply_wrong else {error,_} -> error end.",
                {else_clause,simply_wrong}),
    ok.
%% GH-6348/OTP-18297: Lift restrictions for matching of binaries and maps.
binary_and_map_aliases(Config) when is_list(Config) ->
    %% Chained (aliased) binary patterns: all layouts match the same value.
    check(fun() ->
                  <<A:16>> = <<B:8,C:8>> = <<16#cafe:16>>,
                  {A,B,C}
          end,
          "begin <<A:16>> = <<B:8,C:8>> = <<16#cafe:16>>, {A,B,C} end.",
          {16#cafe,16#ca,16#fe}),
    check(fun() ->
                  <<A:8/bits,B:24/bits>> =
                      <<C:16,D:16>> =
                      <<E:8,F:8,G:8,H:8>> =
                      <<16#abcdef57:32>>,
                  {A,B,C,D,E,F,G,H}
          end,
          "begin <<A:8/bits,B:24/bits>> =
                 <<C:16,D:16>> =
                 <<E:8,F:8,G:8,H:8>> =
                 <<16#abcdef57:32>>,
             {A,B,C,D,E,F,G,H}
           end.",
          {<<16#ab>>,<<16#cdef57:24>>, 16#abcd,16#ef57, 16#ab,16#cd,16#ef,16#57}),
    %% Aliased map patterns: the key bound by the second pattern is used
    %% by the first.
    check(fun() ->
                  #{K := V} = #{k := K} = #{k => my_key, my_key => 42},
                  V
          end,
          "begin #{K := V} = #{k := K} = #{k => my_key, my_key => 42}, V end.",
          42),
    ok.
%% EEP 58: Map comprehensions.
eep58(Config) when is_list(Config) ->
    %% The comprehension variables shadow outer bindings.
    check(fun() -> X = 32, #{X => X*X || X <- [1,2,3]} end,
          "begin X = 32, #{X => X*X || X <- [1,2,3]} end.",
          #{1 => 1, 2 => 4, 3 => 9}),
    check(fun() ->
                  K = V = none,
                  #{K => V*V || K := V <- #{1 => 1, 2 => 2, 3 => 3}}
          end,
          "begin K = V = none, #{K => V*V || K := V <- #{1 => 1, 2 => 2, 3 => 3}} end.",
          #{1 => 1, 2 => 4, 3 => 9}),
    %% A map iterator is also accepted as the generator source.
    check(fun() ->
                  #{K => V*V || K := V <- maps:iterator(#{1 => 1, 2 => 2, 3 => 3})}
          end,
          "#{K => V*V || K := V <- maps:iterator(#{1 => 1, 2 => 2, 3 => 3})}.",
          #{1 => 1, 2 => 4, 3 => 9}),
    check(fun() -> << <<K:8,V:24>> || K := V <- #{42 => 7777} >> end,
          "<< <<K:8,V:24>> || K := V <- #{42 => 7777} >>.",
          <<42:8,7777:24>>),
    check(fun() -> [X || X := X <- #{a => 1, b => b}] end,
          "[X || X := X <- #{a => 1, b => b}].",
          [b]),
    error_check("[K+V || K := V <- a].", {bad_generator,a}),
    error_check("[K+V || K := V <- [-1|#{}]].", {bad_generator,[-1|#{}]}),
    ok.
%% Check the string in different contexts: as is; in fun; from compiled
%% code (the third variant routes through lists:map/2, which is
%% compiled).
check(F, String, Result) ->
    Body = no_final_dot(String),
    check1(F, String, Result),
    check1(F, concat(["fun() -> ", Body, " end(). "]), Result),
    check1(F,
           concat(["hd(lists:map(fun(_) -> ", Body, " end, [foo])). "]),
           Result).
%% Require that F() returns Result and that evaluating String with
%% erl_eval:expr/2 yields the same value, both with orddict ([]) and
%% with map (#{}) bindings.
check1(F, String, Result) ->
    Result = F(),
    Expr = parse_expr(String),
    case catch erl_eval:expr(Expr, []) of
        {value, Result, ListBs} when is_list(ListBs) ->
            ok;
        BadList ->
            ct:fail({eval, BadList, Result})
    end,
    case catch erl_eval:expr(Expr, #{}) of
        {value, Result, MapBs} when is_map(MapBs) ->
            ok;
        BadMap ->
            ct:fail({eval, BadMap, Result})
    end.
%% Like check/3, but evaluates with explicit local/external handlers and
%% additionally verifies that exactly the variables in BoundVars end up
%% bound after evaluation (for both binding representations).
check(F, String, Result, BoundVars, LFH, EFH) ->
    Result = F(),
    Exprs = parse_exprs(String),
    case catch erl_eval:exprs(Exprs, [], LFH, EFH) of
        {value, Result, Bs} ->
            %% We just assume that Bs is an orddict...
            Keys = orddict:fetch_keys(Bs),
            case sort(BoundVars) == sort(Keys) of
                true ->
                    ok;
                false ->
                    ct:fail({check, BoundVars, Keys})
            end,
            ok;
        Other1 ->
            ct:fail({check, Other1, Result})
    end,
    case catch erl_eval:exprs(Exprs, #{}, LFH, EFH) of
        {value, Result, MapBs} ->
            MapKeys = maps:keys(MapBs),
            case sort(BoundVars) == sort(MapKeys) of
                true ->
                    ok;
                false ->
                    ct:fail({check, BoundVars, MapKeys})
            end,
            ok;
        Other2 ->
            ct:fail({check, Other2, Result})
    end.
%% Expect evaluation of String to exit with reason Result, both with
%% orddict ([]) and with map (#{}) bindings.
error_check(String, Result) ->
    Expr = parse_expr(String),
    lists:foreach(
      fun(Bindings) ->
              case catch erl_eval:expr(Expr, Bindings) of
                  {'EXIT', {Result, _}} ->
                      ok;
                  Unexpected ->
                      ct:fail({eval, Unexpected, Result})
              end
      end,
      [[], #{}]).
%% Expect evaluation of the expression sequence in String, under the
%% given local/external handlers, to exit with reason Result for both
%% binding representations.
error_check(String, Result, LFH, EFH) ->
    Exprs = parse_exprs(String),
    lists:foreach(
      fun(Bindings) ->
              case catch erl_eval:exprs(Exprs, Bindings, LFH, EFH) of
                  {'EXIT', {Result, _}} ->
                      ok;
                  Unexpected ->
                      ct:fail({eval, Unexpected, Result})
              end
      end,
      [[], #{}]).
%% Evaluate String and require an exit with reason Result whose
%% stacktrace (after stripping error_info) matches Backtrace.
backtrace_check(String, Result, Backtrace) ->
    case catch parse_and_run(String) of
        {'EXIT', {Result, Stk}} ->
            check_backtrace(Backtrace, remove_error_info(Stk));
        Unexpected ->
            ct:fail({eval, Unexpected, Result})
    end.
%% Like backtrace_check/3, but evaluates with explicit local/external
%% function handlers.
backtrace_check(String, Result, Backtrace, LFH, EFH) ->
    case catch parse_and_run(String, LFH, EFH) of
        {'EXIT', {Result, Stk}} ->
            check_backtrace(Backtrace, remove_error_info(Stk));
        Unexpected ->
            ct:fail({eval, Unexpected, Result})
    end.
%% Strip the error_info entry from the extra-info list of the topmost
%% stack frame, leaving the remaining frames untouched.
%%
%% Fix: also accept an empty stacktrace instead of crashing with
%% function_clause (backward compatible — the old code only matched a
%% non-empty list).
remove_error_info([{M, F, As, Info} | T]) ->
    [{M, F, As, lists:keydelete(error_info, 1, Info)} | T];
remove_error_info([]) ->
    [].
%% Like backtrace_check/3, but for errors caught by a 'catch' inside the
%% evaluated expression itself: the {'EXIT', ...} tuple is then the
%% successfully returned value.
backtrace_catch(String, Result, Backtrace) ->
    case parse_and_run(String) of
        {value, {'EXIT', {Result, Stk}}, _Bindings} ->
            check_backtrace(Backtrace, remove_error_info(Stk));
        Unexpected ->
            ct:fail({eval, Unexpected, Result})
    end.
%% Evaluate String, expect an exit with reason Result, and return the
%% error_info tuple from the topmost stack frame (or 'false' if the
%% frame carries none).
error_info_catch(String, Result) ->
    case catch parse_and_run(String) of
        {'EXIT', {Result, [{_, _, _, Info} | _]}} ->
            lists:keyfind(error_info, 1, Info);
        Unexpected ->
            ct:fail({eval, Unexpected, Result})
    end.
%% Compare expected stack frames against actual ones, frame by frame.
%% An expected entry may be a bare module (matches any frame from that
%% module), an {M,F,A} triple (location ignored), or an exact term.
%% Extra actual frames beyond the expected list are ignored; a mismatch
%% crashes with case_clause.
check_backtrace([Expected | MoreExpected], [Frame | MoreFrames]) ->
    ok = case {Expected, Frame} of
             {Mod, {Mod, _, _, _}} ->
                 ok;
             {{Mod, Fun, Arity}, {Mod, Fun, Arity, _}} ->
                 ok;
             {Same, Same} ->
                 ok
         end,
    check_backtrace(MoreExpected, MoreFrames);
check_backtrace([], _) ->
    ok.
%% Evaluate a single-expression string and return just its value,
%% discarding the resulting bindings.
eval_string(String) ->
    {value, Value, _Bindings} = parse_and_run(String),
    Value.
%% Scan and parse String, which must contain exactly one expression;
%% return its abstract form.
parse_expr(String) ->
    {ok, [SingleExpr]} = erl_parse:parse_exprs(erl_scan_string(String)),
    SingleExpr.
%% Parse String into a list of abstract expressions.
parse_exprs(String) ->
    {ok, ExprList} = erl_parse:parse_exprs(erl_scan_string(String)),
    ExprList.
%% Tokenize String, treating 'maybe' and 'else' as reserved words in
%% addition to the scanner's default set.
%% FIXME: When the experimental features EEP has been implemented, we
%% should dig out all keywords defined in all features.
erl_scan_string(String) ->
    IsReserved =
        fun(Word) ->
                lists:member(Word, ['maybe', 'else'])
                    orelse erl_scan:reserved_word(Word)
        end,
    Options = [{reserved_word_fun, IsReserved}],
    {ok, Tokens, _EndLocation} = erl_scan:string(String, 1, Options),
    Tokens.
%% Parse a single expression and evaluate it with empty list bindings.
parse_and_run(String) ->
    Abstract = parse_expr(String),
    erl_eval:expr(Abstract, []).
%% Parse an expression sequence and evaluate it with empty list
%% bindings and the given local/external function handlers.
parse_and_run(String, LFH, EFH) ->
    Abstract = parse_exprs(String),
    erl_eval:exprs(Abstract, [], LFH, EFH).
%% Strip a trailing ". " or "." from S; any other string is returned
%% unchanged.
no_final_dot(S) ->
    Reversed = lists:reverse(S),
    case Reversed of
        [$\s, $. | Rest] -> lists:reverse(Rest);
        [$. | Rest] -> lists:reverse(Rest);
        _ -> S
    end.
| null | https://raw.githubusercontent.com/erlang/otp/2b397d7e5580480dc32fa9751db95f4b89ff029e/lib/stdlib/test/erl_eval_SUITE.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
Define to run outside of test server
-define(STANDALONE,1).
OTP-2405
Similar to guard_1, but type-correct.
OTP-3069: syntactic sugar string ++ ...
OTP-2983: match operator in pattern.
Binary match problems.
OTP-3144: compile-time expressions in pattern.
OTP-4518.
OTP-4885.
Guards with erlang:'=='/2.
OTP-4518.
"binsize variable" ^
Simple cases, just to cover some code.
OTP-4929. Unary plus rejects non-numbers.
OTP-5064. Can no longer apply atoms.
OTP-5269. Bugs in the bit syntax.
OTP-6539. try/catch bugs.
Test the Value part of a binary segment.
"Old" bugs have been fixed (partial_eval is called on Value).
OTP-6787. bitlevel binaries.
OTP-6977. ++ bug.
UTF-16.
UTF-32.
Mixed.
OTP-10622. Bugs.
The same bug as last example:
OTP-14826: more accurate stacktrace.
eval_bits
eval_bits with error info
If the compiler could see that this function would always
throw an error exception, it would rewrite simple() like this:
simple() -> simple1().
That would change the stacktrace. To prevent the compiler from
doing that optimization, we must obfuscate the code.
eval_bits
Take everything up to the evaluation function
Now we prune any shared code path from erl_eval
Simple cases, just to cover some code.
This is not documented, and only for backward compatibility (good!).
Test that {M,F} is not accepted as a fun.
LFH is not really used by these examples...
These tests do not prove that tail recursive functions really
work (that the process does not grow); one should also run them
Not documented.
Test try-of-catch-after-end statement.
Match in of with catch
Just after
Match in of with after
Nomatch in of
Nomatch in of with after
Exception in try
Exception in try with after
Uncaught exception
Uncaught exception with after
OTP-7933.
Test guard expressions in keys for maps and in sizes in binary matching.
The order of evaluation is important. Follow the example set by
compiled code:
Test maybe ... else ... end.
GH-6348/OTP-18297: Lift restrictions for matching of binaries and maps.
Check the string in different contexts: as is; in fun; from compiled code.
dig out all keywords defined in all features. | Copyright Ericsson AB 1998 - 2023 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(erl_eval_SUITE).
-feature(maybe_expr, enable).
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_testcase/2, end_per_testcase/2,
init_per_group/2,end_per_group/2]).
-export([guard_1/1, guard_2/1,
match_pattern/1,
match_bin/1,
string_plusplus/1,
pattern_expr/1,
guard_3/1, guard_4/1, guard_5/1,
lc/1,
simple_cases/1,
unary_plus/1,
apply_atom/1,
otp_5269/1,
otp_6539/1,
otp_6543/1,
otp_6787/1,
otp_6977/1,
otp_7550/1,
otp_8133/1,
otp_10622/1,
otp_13228/1,
otp_14826/1,
funs/1,
custom_stacktrace/1,
try_catch/1,
eval_expr_5/1,
zero_width/1,
eep37/1,
eep43/1,
otp_15035/1,
otp_16439/1,
otp_14708/1,
otp_16545/1,
otp_16865/1,
eep49/1,
binary_and_map_aliases/1,
eep58/1]).
-import(lists,[concat/1, sort/1]).
-export([count_down/2, count_down_fun/0, do_apply/2,
local_func/3, local_func_value/2]).
-export([simple/0]).
-ifdef(STANDALONE).
-define(config(A,B),config(A,B)).
-export([config/2]).
-define(line, noop, ).
config(priv_dir,_) ->
".".
-else.
-include_lib("common_test/include/ct.hrl").
-endif.
%% Per-testcase hooks: no per-case setup or teardown is needed.
init_per_testcase(_Case, Config) ->
    Config.
end_per_testcase(_Case, _Config) ->
    ok.
%% Common Test suite info: install the ts_install_cth hook and limit
%% each test case to one minute.
suite() ->
    [{ct_hooks,[ts_install_cth]},
     {timetrap,{minutes,1}}].
%% All test cases of this suite, in execution order.
all() ->
    [guard_1, guard_2, match_pattern, string_plusplus,
     pattern_expr, match_bin, guard_3, guard_4, guard_5, lc,
     simple_cases, unary_plus, apply_atom, otp_5269,
     otp_6539, otp_6543, otp_6787, otp_6977, otp_7550,
     otp_8133, otp_10622, otp_13228, otp_14826,
     funs, custom_stacktrace, try_catch, eval_expr_5, zero_width,
     eep37, eep43, otp_15035, otp_16439, otp_14708, otp_16545, otp_16865,
     eep49, binary_and_map_aliases, eep58].
%% No test case groups are defined.
groups() ->
    [].
%% Suite- and group-level hooks: nothing to set up or tear down.
init_per_suite(Config) ->
    Config.
end_per_suite(_Config) ->
    ok.
init_per_group(_GroupName, Config) ->
    Config.
end_per_group(_GroupName, Config) ->
    Config.
%% A type-incorrect arithmetic expression (a+4) in a guard must make
%% the guard fail (taking the 'true' branch) rather than crash, and
%% erl_eval must agree with compiled code.
guard_1(Config) when is_list(Config) ->
    {ok,Tokens ,_} =
        erl_scan:string("if a+4 == 4 -> yes; true -> no end. "),
    {ok, [Expr]} = erl_parse:parse_exprs(Tokens),
    no = guard_1_compiled(),
    {value, no, []} = erl_eval:expr(Expr, []),
    ok.
%% Compiled counterpart of the expression interpreted in guard_1/1.
guard_1_compiled() ->
    if a+4 == 4 -> yes; true -> no end.
%% Like guard_1 but with a type-correct guard expression (6+4 == 4)
%% that simply evaluates to false; interpreted and compiled evaluation
%% must agree.
guard_2(Config) when is_list(Config) ->
    {ok,Tokens ,_} =
        erl_scan:string("if 6+4 == 4 -> yes; true -> no end. "),
    {ok, [Expr]} = erl_parse:parse_exprs(Tokens),
    no = guard_2_compiled(),
    {value, no, []} = erl_eval:expr(Expr, []),
    ok.
%% Compiled counterpart of the expression interpreted in guard_2/1.
guard_2_compiled() ->
    if 6+4 == 4 -> yes; true -> no end.
%% The "String" ++ Pattern syntactic sugar in case-clause patterns,
%% including a chained ++ and an explicit integer-list prefix.
string_plusplus(Config) when is_list(Config) ->
    check(fun() -> case "abc" of "ab" ++ L -> L end end,
          "case \"abc\" of \"ab\" ++ L -> L end. ",
          "c"),
    check(fun() -> case "abcde" of "ab" ++ "cd" ++ L -> L end end,
          "case \"abcde\" of \"ab\" ++ \"cd\" ++ L -> L end. ",
          "e"),
    check(fun() -> case "abc" of [97, 98] ++ L -> L end end,
          "case \"abc\" of [97, 98] ++ L -> L end. ",
          "c"),
    ok.
%% The match operator '=' inside patterns (aliasing on either side and
%% doubly), and a match embedded in a binary size expression whose
%% binding survives the construction.
match_pattern(Config) when is_list(Config) ->
    check(fun() -> case {a, b} of {a, _X}=Y -> {x,Y} end end,
          "case {a, b} of {a, X}=Y -> {x,Y} end. ",
          {x, {a, b}}),
    check(fun() -> case {a, b} of Y={a, _X} -> {x,Y} end end,
          "case {a, b} of Y={a, X} -> {x,Y} end. ",
          {x, {a, b}}),
    check(fun() -> case {a, b} of Y={a, _X}=Z -> {Z,Y} end end,
          "case {a, b} of Y={a, X}=Z -> {Z,Y} end. ",
          {{a, b}, {a, b}}),
    check(fun() -> A = 4, B = 28, <<13:(A+(X=B))>>, X end,
          "begin A = 4, B = 28, <<13:(A+(X=B))>>, X end.",
          28),
    ok.
%% Binary matching: literal string binaries and a segment whose size is
%% taken from an earlier segment of the same binary.
match_bin(Config) when is_list(Config) ->
    check(fun() -> <<"abc">> = <<"abc">> end,
          "<<\"abc\">> = <<\"abc\">>. ",
          <<"abc">>),
    check(fun() ->
                  <<Size,B:Size/binary,Rest/binary>> = <<2,"AB","CD">>,
                  {Size,B,Rest}
          end,
          "begin <<Size,B:Size/binary,Rest/binary>> = <<2,\"AB\",\"CD\">>, "
          "{Size,B,Rest} end. ",
          {2,<<"AB">>,<<"CD">>}),
    ok.
%% Compile-time constant expressions (2+2, +2) are legal in patterns
%% and must be folded before matching.
pattern_expr(Config) when is_list(Config) ->
    check(fun() -> case 4 of 2+2 -> ok end end,
          "case 4 of 2+2 -> ok end. ",
          ok),
    check(fun() -> case 2 of +2 -> ok end end,
          "case 2 of +2 -> ok end. ",
          ok),
    ok.
%% Guards comparing binary literals with '==', plus the basic
%% false/true 'if' fall-through.
guard_3(Config) when is_list(Config) ->
    check(fun() -> if false -> false; true -> true end end,
          "if false -> false; true -> true end.",
          true),
    check(fun() -> if <<"hej">> == <<"hopp">> -> true;
                      true -> false end end,
          "begin if <<\"hej\">> == <<\"hopp\">> -> true;
                true -> false end end.",
          false),
    check(fun() -> if <<"hej">> == <<"hej">> -> true;
                      true -> false end end,
          "begin if <<\"hej\">> == <<\"hej\">> -> true;
                true -> false end end.",
          true),
    ok.
%% Explicitly module-qualified erlang:* BIF calls in guards: failing
%% arithmetic makes the guard false, type tests work both plain and
%% qualified, nested type tests, and in list-comprehension filters.
guard_4(Config) when is_list(Config) ->
    check(fun() -> if erlang:'+'(3,a) -> true ; true -> false end end,
          "if erlang:'+'(3,a) -> true ; true -> false end.",
          false),
    check(fun() -> if erlang:is_integer(3) -> true ; true -> false end
          end,
          "if erlang:is_integer(3) -> true ; true -> false end.",
          true),
    check(fun() -> [X || X <- [1,2,3], erlang:is_integer(X)] end,
          "[X || X <- [1,2,3], erlang:is_integer(X)].",
          [1,2,3]),
    check(fun() -> if is_atom(is_integer(a)) -> true ; true -> false end
          end,
          "if is_atom(is_integer(a)) -> true ; true -> false end.",
          true),
    check(fun() -> if erlang:is_atom(erlang:is_integer(a)) -> true;
                      true -> false end end,
          "if erlang:is_atom(erlang:is_integer(a)) -> true; "
          "true -> false end.",
          true),
    check(fun() -> if is_atom(3+a) -> true ; true -> false end end,
          "if is_atom(3+a) -> true ; true -> false end.",
          false),
    check(fun() -> if erlang:is_atom(3+a) -> true ; true -> false end
          end,
          "if erlang:is_atom(3+a) -> true ; true -> false end.",
          false),
    ok.
%% Guards using erlang:'=='/2 called through its quoted-atom name;
%% interpreted evaluation must agree with compiled code and bind A.
guard_5(Config) when is_list(Config) ->
    {ok,Tokens ,_} =
        erl_scan:string("case 1 of A when erlang:'=='(A, 1) -> true end."),
    {ok, [Expr]} = erl_parse:parse_exprs(Tokens),
    true = guard_5_compiled(),
    {value, true, [{'A',1}]} = erl_eval:expr(Expr, []),
    ok.
%% Compiled counterpart of the expression interpreted in guard_5/1.
guard_5_compiled() ->
    case 1 of A when erlang:'=='(A, 1) -> true end.
%% List comprehensions: generator variables shadow outer bindings,
%% bindings made inside a comprehension do not leak out (unbound_var
%% errors), and boolean filter expressions.
lc(Config) when is_list(Config) ->
    check(fun() -> X = 32, [X || X <- [1,2,3]] end,
          "begin X = 32, [X || X <- [1,2,3]] end.",
          [1,2,3]),
    check(fun() -> X = 32,
                   [X || <<X:X>> <- [<<1:32>>,<<2:32>>,<<3:8>>]] end,
          "begin X = 32,
            [X || <<X:X>> <- [<<1:32>>,<<2:32>>,<<3:8>>]] end.",
          [1,2]),
    check(fun() -> Y = 13,[X || {X,Y} <- [{1,2}]] end,
          "begin Y = 13,[X || {X,Y} <- [{1,2}]] end.",
          [1]),
    error_check("begin [A || X <- [{1,2}], 1 == A] end.",
                {unbound_var,'A'}),
    error_check("begin X = 32,
          [{Y,W} || X <- [1,2,32,Y=4], Z <- [1,2,W=3]] end.",
               {unbound_var,'Y'}),
    error_check("begin X = 32,<<A:B>> = <<100:X>> end.",
                {unbound_var,'B'}),
    check(fun() -> [X || X <- [1,2,3,4], not (X < 2)] end,
          "begin [X || X <- [1,2,3,4], not (X < 2)] end.",
          [2,3,4]),
    check(fun() -> [X || X <- [true,false], X] end,
          "[X || X <- [true,false], X].", [true]),
    ok.
simple_cases(Config) when is_list(Config) ->
check(fun() -> A = $C end, "A = $C.", $C),
check(fun ( ) - > A = 3.14 end , " A = 3.14 . " , 3.14 ) ,
check(fun() -> self() ! a, A = receive a -> true end end,
"begin self() ! a, A = receive a -> true end end.",
true),
check(fun() -> c:flush(), self() ! a, self() ! b, self() ! c,
receive b -> b end,
{messages, [a,c]} =
erlang:process_info(self(), messages),
c:flush() end,
"begin c:flush(), self() ! a, self() ! b, self() ! c,"
"receive b -> b end,"
"{messages, [a,c]} ="
" erlang:process_info(self(), messages), c:flush() end.",
ok),
check(fun() -> self() ! a, A = receive a -> true
after 0 -> false end end,
"begin self() ! a, A = receive a -> true"
" after 0 -> false end end.",
true),
check(fun() -> c:flush(), self() ! a, self() ! b, self() ! c,
receive b -> b after 0 -> true end,
{messages, [a,c]} =
erlang:process_info(self(), messages),
c:flush() end,
"begin c:flush(), self() ! a, self() ! b, self() ! c,"
"receive b -> b after 0 -> true end,"
"{messages, [a,c]} ="
" erlang:process_info(self(), messages), c:flush() end.",
ok),
check(fun() -> receive _ -> true after 10 -> false end end,
"receive _ -> true after 10 -> false end.",
false),
check(fun() -> F = fun(A) -> A end, true = 3 == F(3) end,
"begin F = fun(A) -> A end, true = 3 == F(3) end.",
true),
check(fun() -> F = fun(A) -> A end, true = 3 == apply(F, [3]) end,
"begin F = fun(A) -> A end, true = 3 == apply(F,[3]) end.",
true),
check(fun() -> catch throw(a) end, "catch throw(a).", a),
check(fun() -> catch a end, "catch a.", a),
check(fun() -> 4 == 3 end, "4 == 3.", false),
check(fun() -> not true end, "not true.", false),
check(fun() -> -3 end, "-3.", -3),
error_check("3.0 = 4.0.", {badmatch,4.0}),
check(fun() -> <<(3.0+2.0):32/float>> = <<5.0:32/float>> end,
"<<(3.0+2.0):32/float>> = <<5.0:32/float>>.",
<<5.0:32/float>>),
check(fun() -> false andalso kludd end, "false andalso kludd.",
false),
check(fun() -> true andalso true end, "true andalso true.",
true),
check(fun() -> true andalso false end, "true andalso false.",
false),
check(fun() -> true andalso kludd end, "true andalso kludd.",
kludd),
error_check("kladd andalso kludd.", {badarg,kladd}),
check(fun() -> if false andalso kludd -> a; true -> b end end,
"if false andalso kludd -> a; true -> b end.",
b),
check(fun() -> if true andalso true -> a; true -> b end end,
"if true andalso true -> a; true -> b end.",
a),
check(fun() -> if true andalso false -> a; true -> b end end,
"if true andalso false -> a; true -> b end.",
b),
check(fun() -> true orelse kludd end,
"true orelse kludd.", true),
check(fun() -> false orelse false end,
"false orelse false.", false),
check(fun() -> false orelse true end,
"false orelse true.", true),
check(fun() -> false orelse kludd end,
"false orelse kludd.", kludd),
error_check("kladd orelse kludd.", {badarg,kladd}),
error_check("[X || X <- [1,2,3], begin 1 end].",{bad_filter,1}),
error_check("[X || X <- a].",{bad_generator,a}),
check(fun() -> if true orelse kludd -> a; true -> b end end,
"if true orelse kludd -> a; true -> b end.", a),
check(fun() -> if false orelse false -> a; true -> b end end,
"if false orelse false -> a; true -> b end.", b),
check(fun() -> if false orelse true -> a; true -> b end end,
"if false orelse true -> a; true -> b end.", a),
check(fun() -> [X || X <- [1,2,3], X+2] end,
"[X || X <- [1,2,3], X+2].", []),
check(fun() -> [X || X <- [1,2,3], [X] == [X || X <- [2]]] end,
"[X || X <- [1,2,3], [X] == [X || X <- [2]]].",
[2]),
check(fun() -> F = fun(1) -> ett; (2) -> zwei end,
ett = F(1), zwei = F(2) end,
"begin F = fun(1) -> ett; (2) -> zwei end,
ett = F(1), zwei = F(2) end.",
zwei),
check(fun() -> F = fun(X) when X == 1 -> ett;
(X) when X == 2 -> zwei end,
ett = F(1), zwei = F(2) end,
"begin F = fun(X) when X == 1 -> ett;
(X) when X == 2 -> zwei end,
ett = F(1), zwei = F(2) end.",
zwei),
error_check("begin F = fun(1) -> ett end, zwei = F(2) end.",
function_clause),
check(fun() -> if length([1]) == 1 -> yes;
true -> no end end,
"if length([1]) == 1 -> yes;
true -> no end.",
yes),
check(fun() -> if is_integer(3) -> true; true -> false end end,
"if is_integer(3) -> true; true -> false end.", true),
check(fun() -> if integer(3) -> true; true -> false end end,
"if integer(3) -> true; true -> false end.", true),
check(fun() -> if is_float(3) -> true; true -> false end end,
"if is_float(3) -> true; true -> false end.", false),
check(fun() -> if float(3) -> true; true -> false end end,
"if float(3) -> true; true -> false end.", false),
check(fun() -> if is_number(3) -> true; true -> false end end,
"if is_number(3) -> true; true -> false end.", true),
check(fun() -> if number(3) -> true; true -> false end end,
"if number(3) -> true; true -> false end.", true),
check(fun() -> if is_atom(a) -> true; true -> false end end,
"if is_atom(a) -> true; true -> false end.", true),
check(fun() -> if atom(a) -> true; true -> false end end,
"if atom(a) -> true; true -> false end.", true),
check(fun() -> if is_list([]) -> true; true -> false end end,
"if is_list([]) -> true; true -> false end.", true),
check(fun() -> if list([]) -> true; true -> false end end,
"if list([]) -> true; true -> false end.", true),
check(fun() -> if is_tuple({}) -> true; true -> false end end,
"if is_tuple({}) -> true; true -> false end.", true),
check(fun() -> if tuple({}) -> true; true -> false end end,
"if tuple({}) -> true; true -> false end.", true),
check(fun() -> if is_pid(self()) -> true; true -> false end end,
"if is_pid(self()) -> true; true -> false end.", true),
check(fun() -> if pid(self()) -> true; true -> false end end,
"if pid(self()) -> true; true -> false end.", true),
check(fun() -> R = make_ref(), if is_reference(R) -> true;
true -> false end end,
"begin R = make_ref(), if is_reference(R) -> true;"
"true -> false end end.", true),
check(fun() -> R = make_ref(), if reference(R) -> true;
true -> false end end,
"begin R = make_ref(), if reference(R) -> true;"
"true -> false end end.", true),
check(fun() -> if is_port(a) -> true; true -> false end end,
"if is_port(a) -> true; true -> false end.", false),
check(fun() -> if port(a) -> true; true -> false end end,
"if port(a) -> true; true -> false end.", false),
check(fun() -> if is_function(a) -> true; true -> false end end,
"if is_function(a) -> true; true -> false end.", false),
check(fun() -> if function(a) -> true; true -> false end end,
"if function(a) -> true; true -> false end.", false),
check(fun() -> if is_binary(<<>>) -> true; true -> false end end,
"if is_binary(<<>>) -> true; true -> false end.", true),
check(fun() -> if binary(<<>>) -> true; true -> false end end,
"if binary(<<>>) -> true; true -> false end.", true),
check(fun() -> if is_integer(a) == true -> yes;
true -> no end end,
"if is_integer(a) == true -> yes;
true -> no end.",
no),
check(fun() -> if [] -> true; true -> false end end,
"if [] -> true; true -> false end.", false),
error_check("if lists:member(1,[1]) -> true; true -> false end.",
illegal_guard_expr),
error_check("if false -> true end.", if_clause),
check(fun() -> if a+b -> true; true -> false end end,
"if a + b -> true; true -> false end.", false),
check(fun() -> if + b -> true; true -> false end end,
"if + b -> true; true -> false end.", false),
error_check("case foo of bar -> true end.", {case_clause,foo}),
error_check("case 4 of 2+a -> true; _ -> false end.",
illegal_pattern),
error_check("case 4 of +a -> true; _ -> false end.",
illegal_pattern),
check(fun() -> case a of
X when X == b -> one;
X when X == a -> two
end end,
"begin case a of
X when X == b -> one;
X when X == a -> two
end end.", two),
error_check("3 = 4.", {badmatch,4}),
error_check("a = 3.", {badmatch,3}),
error_check("3.1 = 2.7.",{badmatch,2.7 } ) ,
error_check("$c = 4.", {badmatch,4}),
check(fun() -> $c = $c end, "$c = $c.", $c),
check(fun() -> _ = bar end, "_ = bar.", bar),
check(fun() -> A = 14, A = 14 end,
"begin A = 14, A = 14 end.", 14),
error_check("begin A = 14, A = 16 end.", {badmatch,16}),
error_check("\"hej\" = \"san\".", {badmatch,"san"}),
check(fun() -> "hej" = "hej" end,
"\"hej\" = \"hej\".", "hej"),
error_check("[] = [a].", {badmatch,[a]}),
check(fun() -> [] = [] end, "[] = [].", []),
error_check("[a] = [].", {badmatch,[]}),
error_check("{a,b} = 34.", {badmatch,34}),
check(fun() -> <<X:7>> = <<8:7>>, X end,
"begin <<X:7>> = <<8:7>>, X end.", 8),
error_check("<<34:32>> = \"hej\".", {badmatch,"hej"}),
check(fun() -> trunc((1 * 3 div 3 + 4 - 3) / 1) rem 2 end,
"begin trunc((1 * 3 div 3 + 4 - 3) / 1) rem 2 end.", 0),
check(fun() -> (2#101 band 2#10101) bor (2#110 bxor 2#010) end,
"(2#101 band 2#10101) bor (2#110 bxor 2#010).", 5),
check(fun() -> (2#1 bsl 4) + (2#10000 bsr 3) end,
"(2#1 bsl 4) + (2#10000 bsr 3).", 18),
check(fun() -> ((1<3) and ((1 =:= 2) or (1 =/= 2))) xor (1=<2) end,
"((1<3) and ((1 =:= 2) or (1 =/= 2))) xor (1=<2).", false),
check(fun() -> (a /= b) or (2 > 4) or (3 >= 3) end,
"(a /= b) or (2 > 4) or (3 >= 3).", true),
check(fun() -> "hej" ++ "san" =/= "hejsan" -- "san" end,
"\"hej\" ++ \"san\" =/= \"hejsan\" -- \"san\".", true),
check(fun() -> (bnot 1) < -0 end, "(bnot (+1)) < -0.", true),
ok.
%% OTP-4929: unary plus is an arithmetic operation, so it passes
%% numbers through unchanged but raises badarith on non-numbers.
unary_plus(Config) when is_list(Config) ->
    check(fun() -> F = fun(X) -> + X end,
                   true = -1 == F(-1) end,
          "begin F = fun(X) -> + X end,"
          " true = -1 == F(-1) end.", true, ['F'], none, none),
    error_check("+a.", badarith),
    ok.
%% OTP-5064: atoms can no longer be applied as funs — calling a
%% variable bound to the atom 'length' must raise {badfun,length}.
apply_atom(Config) when is_list(Config) ->
    error_check("[X || X <- [[1],[2]],
                 begin L = length, L(X) =:= 1 end].",
                {badfun,length}),
    ok.
otp_5269(Config) when is_list(Config) ->
check(fun() -> L = 8,
F = fun(<<A:L,B:A>>) -> B end,
F(<<16:8, 7:16>>)
end,
"begin
L = 8, F = fun(<<A:L,B:A>>) -> B end, F(<<16:8, 7:16>>)
end.",
7),
check(fun() -> L = 8,
F = fun(<<L:L,B:L>>) -> B end,
F(<<16:8, 7:16>>)
end,
"begin
L = 8, F = fun(<<L:L,B:L>>) -> B end, F(<<16:8, 7:16>>)
end.",
7),
check(fun() -> L = 8, <<A:L,B:A>> = <<16:8, 7:16>>, B end,
"begin L = 8, <<A:L,B:A>> = <<16:8, 7:16>>, B end.",
7),
error_check("begin L = 8, <<L:L,B:L>> = <<16:8, 7:16>> end.",
{badmatch,<<16:8,7:16>>}),
error_check("begin <<L:16,L:L>> = <<16:16,8:16>>, L end.",
{badmatch, <<16:16,8:16>>}),
check(fun() -> U = 8, (fun(<<U:U>>) -> U end)(<<32:8>>) end,
"begin U = 8, (fun(<<U:U>>) -> U end)(<<32:8>>) end.",
32),
check(fun() -> U = 8, [U || <<U:U>> <- [<<32:8>>]] end,
"begin U = 8, [U || <<U:U>> <- [<<32:8>>]] end.",
[32]),
error_check("(fun({3,<<A:32,A:32>>}) -> a end)
({3,<<17:32,19:32>>}).",
function_clause),
check(fun() -> [X || <<A:8,
B:A>> <- [<<16:8,19:16>>],
<<X:8>> <- [<<B:8>>]] end,
"[X || <<A:8,
B:A>> <- [<<16:8,19:16>>],
<<X:8>> <- [<<B:8>>]].",
[19]),
check(fun() ->
(fun (<<A:1/binary, B:8/integer, _C:B/binary>>) ->
case A of
B -> wrong;
_ -> ok
end
end)(<<1,2,3,4>>) end,
"(fun(<<A:1/binary, B:8/integer, _C:B/binary>>) ->"
" case A of B -> wrong; _ -> ok end"
" end)(<<1, 2, 3, 4>>).",
ok),
ok.
%% OTP-6539: try/catch inside a fun passed to lists:zipwith/3 must
%% evaluate correctly (no spurious catches; A+B succeeds here).
otp_6539(Config) when is_list(Config) ->
    check(fun() ->
                  F = fun(A,B) ->
                              try A+B
                              catch _:_ -> dontthinkso
                              end
                      end,
                  lists:zipwith(F, [1,2], [2,3])
          end,
          "begin
               F = fun(A,B) ->
                       try A+B
                       catch _:_ -> dontthinkso
                       end
               end,
               lists:zipwith(F, [1,2], [2,3])
           end.",
          [3, 5]),
    ok.
OTP-6543 . bitlevel binaries .
otp_6543(Config) when is_list(Config) ->
check(fun() ->
<< <<X>> || <<X>> <- [1,2,3] >>
end,
"<< <<X>> || <<X>> <- [1,2,3] >>.",
<<>>),
check(fun() ->
<< <<X>> || X <- [1,2,3] >>
end,
"<< <<X>> || X <- [1,2,3] >>.",
<<1,2,3>>),
check(fun() ->
<< <<X:8>> || <<X:2>> <= <<"hej">> >>
end,
"<< <<X:8>> || <<X:2>> <= <<\"hej\">> >>.",
<<1,2,2,0,1,2,1,1,1,2,2,2>>),
check(fun() ->
<< <<X:8>> ||
<<65,X:4>> <= <<65,7:4,65,3:4,66,8:4>> >>
end,
"<< <<X:8>> ||
<<65,X:4>> <= <<65,7:4,65,3:4,66,8:4>> >>.",
<<7,3>>),
check(fun() -> <<34:18/big>> end,
"<<34:18/big>>.",
<<0,8,2:2>>),
check(fun() -> <<34:18/big-unit:2>> end,
"<<34:18/big-unit:2>>.",
<<0,0,0,2,2:4>>),
check(fun() -> <<34:18/little>> end,
"<<34:18/little>>.",
<<34,0,0:2>>),
case eval_string("<<34:18/native>>.") of
<<0,8,2:2>> -> ok;
<<34,0,0:2>> -> ok
end,
check(fun() -> <<34:18/big-signed>> end,
"<<34:18/big-signed>>.",
<<0,8,2:2>>),
check(fun() -> <<34:18/little-signed>> end,
"<<34:18/little-signed>>.",
<<34,0,0:2>>),
case eval_string("<<34:18/native-signed>>.") of
<<0,8,2:2>> -> ok;
<<34,0,0:2>> -> ok
end,
check(fun() -> <<34:18/big-unsigned>> end,
"<<34:18/big-unsigned>>.",
<<0,8,2:2>>),
check(fun() -> <<34:18/little-unsigned>> end,
"<<34:18/little-unsigned>>.",
<<34,0,0:2>>),
case eval_string("<<34:18/native-unsigned>>.") of
<<0,8,2:2>> -> ok;
<<34,0,0:2>> -> ok
end,
check(fun() -> <<3.14:32/float-big>> end,
"<<3.14:32/float-big>>.",
<<64,72,245,195>>),
check(fun() -> <<3.14:32/float-little>> end,
"<<3.14:32/float-little>>.",
<<195,245,72,64>>),
case eval_string("<<3.14:32/float-native>>.") of
<<64,72,245,195>> -> ok;
<<195,245,72,64>> -> ok
end,
error_check("<<(<<17,3:2>>)/binary>>.", badarg),
check(fun() -> <<(<<17,3:2>>)/bitstring>> end,
"<<(<<17,3:2>>)/bitstring>>.",
<<17,3:2>>),
check(fun() -> <<(<<17,3:2>>):10/bitstring>> end,
"<<(<<17,3:2>>):10/bitstring>>.",
<<17,3:2>>),
check(fun() -> <<<<344:17>>/binary-unit:17>> end,
"<<<<344:17>>/binary-unit:17>>.",
<<344:17>>),
check(fun() -> <<X:18/big>> = <<34:18/big>>, X end,
"begin <<X:18/big>> = <<34:18/big>>, X end.",
34),
check(fun() -> <<X:18/big-unit:2>> = <<34:18/big-unit:2>>, X end,
"begin <<X:18/big-unit:2>> = <<34:18/big-unit:2>>, X end.",
34),
check(fun() -> <<X:18/little>> = <<34:18/little>>, X end,
"begin <<X:18/little>> = <<34:18/little>>, X end.",
34),
check(fun() -> <<X:18/native>> = <<34:18/native>>, X end,
"begin <<X:18/native>> = <<34:18/native>>, X end.",
34),
check(fun() -> <<X:18/big-signed>> = <<34:18/big-signed>>, X end,
"begin <<X:18/big-signed>> = <<34:18/big-signed>>, X end.",
34),
check(fun() -> <<X:18/little-signed>> = <<34:18/little-signed>>,
X end,
"begin <<X:18/little-signed>> = <<34:18/little-signed>>,
X end.",
34),
check(fun() -> <<X:18/native-signed>> = <<34:18/native-signed>>,
X end,
"begin <<X:18/native-signed>> = <<34:18/native-signed>>,
X end.",
34),
check(fun() -> <<X:18/big-unsigned>> = <<34:18/big-unsigned>>,
X end,
"begin <<X:18/big-unsigned>> = <<34:18/big-unsigned>>,
X end.",
34),
check(fun() ->
<<X:18/little-unsigned>> = <<34:18/little-unsigned>>,
X end,
"begin <<X:18/little-unsigned>> = <<34:18/little-unsigned>>,
X end.",
34),
check(fun() ->
<<X:18/native-unsigned>> = <<34:18/native-unsigned>>,
X end,
"begin <<X:18/native-unsigned>> = <<34:18/native-unsigned>>,
X end.",
34),
check(fun() -> <<X:32/float-big>> = <<2.0:32/float-big>>, X end,
"begin <<X:32/float-big>> = <<2.0:32/float-big>>,
X end.",
2.0),
check(fun() -> <<X:32/float-little>> = <<2.0:32/float-little>>,
X end,
"begin <<X:32/float-little>> = <<2.0:32/float-little>>,
X end.",
2.0),
check(fun() -> <<X:32/float-native>> = <<2.0:32/float-native>>,
X end,
"begin <<X:32/float-native>> = <<2.0:32/float-native>>,
X end.",
2.0),
check(
fun() ->
[X || <<"hej",X:8>> <= <<"hej",8,"san",9,"hej",17,"hej">>]
end,
"[X || <<\"hej\",X:8>> <=
<<\"hej\",8,\"san\",9,\"hej\",17,\"hej\">>].",
[8,17]),
check(
fun() ->
L = 8, << <<B:32>> || <<L:L,B:L>> <= <<16:8, 7:16>> >>
end,
"begin L = 8, << <<B:32>> || <<L:L,B:L>> <= <<16:8, 7:16>> >>
end.",
<<0,0,0,7>>),
check(fun() -> [ 3 || <<17/float>> <= <<17.0/float>>] end,
"[ 3 || <<17/float>> <= <<17.0/float>>].",
[3]),
check(fun() -> [ 3 || <<17/float>> <- [<<17.0/float>>]] end,
"[ 3 || <<17/float>> <- [<<17.0/float>>]].",
[3]),
check(fun() -> [ X || <<17/float,X:3>> <= <<17.0/float,2:3>>] end,
"[ X || <<17/float,X:3>> <= <<17.0/float,2:3>>].",
[2]),
check(fun() ->
[ foo || <<(1 bsl 1023)/float>> <= <<(1 bsl 1023)/float>>]
end,
"[ foo || <<(1 bsl 1023)/float>> <= <<(1 bsl 1023)/float>>].",
[foo]),
check(fun() ->
[ foo || <<(1 bsl 1023)/float>> <- [<<(1 bsl 1023)/float>>]]
end,
"[ foo || <<(1 bsl 1023)/float>> <- [<<(1 bsl 1023)/float>>]].",
[foo]),
error_check("[ foo || <<(1 bsl 1024)/float>> <-
[<<(1 bsl 1024)/float>>]].",
badarg),
check(fun() ->
[ foo || <<(1 bsl 1024)/float>> <- [<<(1 bsl 1023)/float>>]]
end,
"[ foo || <<(1 bsl 1024)/float>> <-
[<<(1 bsl 1023)/float>>]].",
[]),
check(fun() ->
[ foo || <<(1 bsl 1024)/float>> <= <<(1 bsl 1023)/float>>]
end,
"[ foo || <<(1 bsl 1024)/float>> <=
<<(1 bsl 1023)/float>>].",
[]),
check(fun() ->
L = 8,
[{L,B} || <<L:L,B:L/float>> <= <<32:8,7:32/float>>]
end,
"begin L = 8,
[{L,B} || <<L:L,B:L/float>> <= <<32:8,7:32/float>>]
end.",
[{32,7.0}]),
check(fun() ->
L = 8,
[{L,B} || <<L:L,B:L/float>> <- [<<32:8,7:32/float>>]]
end,
"begin L = 8,
[{L,B} || <<L:L,B:L/float>> <- [<<32:8,7:32/float>>]]
end.",
[{32,7.0}]),
check(fun() ->
[foo || <<"s">> <= <<"st">>]
end,
"[foo || <<\"s\">> <= <<\"st\">>].",
[foo]),
check(fun() -> <<_:32>> = <<17:32>> end,
"<<_:32>> = <<17:32>>.",
<<17:32>>),
check(fun() -> [foo || <<_:32>> <= <<17:32,20:32>>] end,
"[foo || <<_:32>> <= <<17:32,20:32>>].",
[foo,foo]),
check(fun() -> << <<X:32>> || X <- [1,2,3], X > 1 >> end,
"<< <<X:32>> || X <- [1,2,3], X > 1 >>.",
<<0,0,0,2,0,0,0,3>>),
error_check("[X || <<X>> <= [a,b]].",{bad_generator,[a,b]}),
ok.
%% OTP-6787: constructing and matching a very large (1 Mbit) binary
%% segment size expression.
otp_6787(Config) when is_list(Config) ->
    check(
      fun() -> <<16:(1024*1024)>> = <<16:(1024*1024)>> end,
      "<<16:(1024*1024)>> = <<16:(1024*1024)>>.",
      <<16:1048576>>),
    ok.
%% OTP-6977: '++' with a character-literal list prefix in a fun-head
%% pattern.
otp_6977(Config) when is_list(Config) ->
    check(
      fun() -> (fun([$X] ++ _) -> ok end)("X") end,
      "(fun([$X] ++ _) -> ok end)(\"X\").",
      ok),
    ok.
OTP-7550 . Support for UTF-8 , UTF-16 , UTF-32 .
otp_7550(Config) when is_list(Config) ->
UTF-8 .
check(
fun() -> <<65>> = <<65/utf8>> end,
"<<65>> = <<65/utf8>>.",
<<65>>),
check(
fun() -> <<350/utf8>> = <<197,158>> end,
"<<350/utf8>> = <<197,158>>.",
<<197,158>>),
check(
fun() -> <<$b,$j,$\303,$\266,$r,$n>> = <<"bj\366rn"/utf8>> end,
"<<$b,$j,$\303,$\266,$r,$n>> = <<\"bj\366rn\"/utf8>>.",
<<$b,$j,$\303,$\266,$r,$n>>),
check(
fun() -> <<0,65>> = <<65/utf16>> end,
"<<0,65>> = <<65/utf16>>.",
<<0,65>>),
check(
fun() -> <<16#D8,16#08,16#DF,16#45>> = <<16#12345/utf16>> end,
"<<16#D8,16#08,16#DF,16#45>> = <<16#12345/utf16>>.",
<<16#D8,16#08,16#DF,16#45>>),
check(
fun() -> <<16#08,16#D8,16#45,16#DF>> = <<16#12345/little-utf16>> end,
"<<16#08,16#D8,16#45,16#DF>> = <<16#12345/little-utf16>>.",
<<16#08,16#D8,16#45,16#DF>>),
check(
fun() -> <<350/utf16>> = <<1,94>> end,
"<<350/utf16>> = <<1,94>>.",
<<1,94>>),
check(
fun() -> <<350/little-utf16>> = <<94,1>> end,
"<<350/little-utf16>> = <<94,1>>.",
<<94,1>>),
check(
fun() -> <<16#12345/utf16>> = <<16#D8,16#08,16#DF,16#45>> end,
"<<16#12345/utf16>> = <<16#D8,16#08,16#DF,16#45>>.",
<<16#D8,16#08,16#DF,16#45>>),
check(
fun() -> <<16#12345/little-utf16>> = <<16#08,16#D8,16#45,16#DF>> end,
"<<16#12345/little-utf16>> = <<16#08,16#D8,16#45,16#DF>>.",
<<16#08,16#D8,16#45,16#DF>>),
check(
fun() -> <<16#12345/utf32>> = <<16#0,16#01,16#23,16#45>> end,
"<<16#12345/utf32>> = <<16#0,16#01,16#23,16#45>>.",
<<16#0,16#01,16#23,16#45>>),
check(
fun() -> <<16#0,16#01,16#23,16#45>> = <<16#12345/utf32>> end,
"<<16#0,16#01,16#23,16#45>> = <<16#12345/utf32>>.",
<<16#0,16#01,16#23,16#45>>),
check(
fun() -> <<16#12345/little-utf32>> = <<16#45,16#23,16#01,16#00>> end,
"<<16#12345/little-utf32>> = <<16#45,16#23,16#01,16#00>>.",
<<16#45,16#23,16#01,16#00>>),
check(
fun() -> <<16#12345/little-utf32>> end,
"<<16#12345/little-utf32>>.",
<<16#45,16#23,16#01,16#00>>),
check(
fun() -> <<16#41,16#12345/utf32,16#0391:16,16#2E:8>> end,
"<<16#41,16#12345/utf32,16#0391:16,16#2E:8>>.",
<<16#41,16#00,16#01,16#23,16#45,16#03,16#91,16#2E>>),
ok.
OTP-8133 . Bit comprehension bug .
otp_8133(Config) when is_list(Config) ->
check(
fun() ->
E = fun(N) ->
if
is_integer(N) -> <<N/integer>>;
true -> throw(foo)
end
end,
try << << (E(V))/binary >> || V <- [1,2,3,a] >>
catch foo -> ok
end
end,
"begin
E = fun(N) ->
if is_integer(N) -> <<N/integer>>;
true -> throw(foo)
end
end,
try << << (E(V))/binary >> || V <- [1,2,3,a] >>
catch foo -> ok
end
end.",
ok),
check(
fun() ->
E = fun(N) ->
if
is_integer(N) -> <<N/integer>>;
true -> erlang:error(foo)
end
end,
try << << (E(V))/binary >> || V <- [1,2,3,a] >>
catch error:foo -> ok
end
end,
"begin
E = fun(N) ->
if is_integer(N) -> <<N/integer>>;
true -> erlang:error(foo)
end
end,
try << << (E(V))/binary >> || V <- [1,2,3,a] >>
catch error:foo -> ok
end
end.",
ok),
ok.
otp_10622(Config) when is_list(Config) ->
check(fun() -> <<0>> = <<"\x{400}">> end,
"<<0>> = <<\"\\x{400}\">>. ",
<<0>>),
check(fun() -> <<"\x{aa}ff"/utf8>> = <<"\x{aa}ff"/utf8>> end,
"<<\"\\x{aa}ff\"/utf8>> = <<\"\\x{aa}ff\"/utf8>>. ",
<<"Â\xaaff">>),
check(fun() -> case <<"foo"/utf8>> of
<<"foo"/utf8>> -> true
end
end,
"case <<\"foo\"/utf8>> of <<\"foo\"/utf8>> -> true end.",
true),
check(fun() -> <<"\x{400}"/utf8>> = <<"\x{400}"/utf8>> end,
"<<\"\\x{400}\"/utf8>> = <<\"\\x{400}\"/utf8>>. ",
<<208,128>>),
error_check("<<\"\\x{aaa}\">> = <<\"\\x{aaa}\">>.",
{badmatch,<<"\xaa">>}),
check(fun() -> [a || <<"\x{aaa}">> <= <<2703:16>>] end,
"[a || <<\"\\x{aaa}\">> <= <<2703:16>>]. ",
[]),
check(fun() -> [a || <<"\x{aa}"/utf8>> <= <<"\x{aa}"/utf8>>] end,
"[a || <<\"\\x{aa}\"/utf8>> <= <<\"\\x{aa}\"/utf8>>]. ",
[a]),
check(fun() -> [a || <<"\x{aa}x"/utf8>> <= <<"\x{aa}y"/utf8>>] end,
"[a || <<\"\\x{aa}x\"/utf8>> <= <<\"\\x{aa}y\"/utf8>>]. ",
[]),
check(fun() -> [a || <<"\x{aaa}">> <= <<"\x{aaa}">>] end,
"[a || <<\"\\x{aaa}\">> <= <<\"\\x{aaa}\">>]. ",
[]),
check(fun() -> [a || <<"\x{aaa}"/utf8>> <= <<"\x{aaa}"/utf8>>] end,
"[a || <<\"\\x{aaa}\"/utf8>> <= <<\"\\x{aaa}\"/utf8>>]. ",
[a]),
ok.
OTP-13228 . ERL-32 : non - local function handler bug .
%% OTP-13228 (ERL-32): the non-local function handler must be applied
%% to the result of the local function handler — the inner io:fwrite/1
%% call goes through EFH, whose result feeds the local call foo/1.
otp_13228(_Config) ->
    LFH = {value, fun(foo, [io_fwrite]) -> worked end},
    EFH = {value, fun({io, fwrite}, [atom]) -> io_fwrite end},
    {value, worked, []} = parse_and_run("foo(io:fwrite(atom)).", LFH, EFH).
otp_14826(_Config) ->
backtrace_check("fun(P) when is_pid(P) -> true end(a).",
function_clause,
[{erl_eval,'-inside-an-interpreted-fun-',[a],[]},
{erl_eval,eval_fun,8},
?MODULE]),
backtrace_check("B.",
{unbound_var, 'B'},
[{erl_eval,expr,2}, ?MODULE]),
backtrace_check("B.",
{unbound, 'B'},
[{erl_eval,expr,6}, ?MODULE],
none, none),
backtrace_check("1/0.",
badarith,
[{erlang,'/',[1,0],[]},
{erl_eval,do_apply,7}]),
backtrace_catch("catch 1/0.",
badarith,
[{erlang,'/',[1,0],[]},
{erl_eval,do_apply,7}]),
check(fun() -> catch exit(foo) end,
"catch exit(foo).",
{'EXIT', foo}),
check(fun() -> catch throw(foo) end,
"catch throw(foo).",
foo),
backtrace_check("try 1/0 after foo end.",
badarith,
[{erlang,'/',[1,0],[]},
{erl_eval,do_apply,7}]),
backtrace_catch("catch (try 1/0 after foo end).",
badarith,
[{erlang,'/',[1,0],[]},
{erl_eval,do_apply,7}]),
backtrace_catch("try catch 1/0 after foo end.",
badarith,
[{erlang,'/',[1,0],[]},
{erl_eval,do_apply,7}]),
backtrace_check("try a of b -> bar after foo end.",
{try_clause,a},
[{erl_eval,try_clauses,10}]),
check(fun() -> X = try foo:bar() catch A:B:C -> {A,B} end, X end,
"try foo:bar() catch A:B:C -> {A,B} end.",
{error, undef}),
backtrace_check("C = 4, try foo:bar() catch A:B:C -> {A,B,C} end.",
stacktrace_bound,
[{erl_eval,check_stacktrace_vars,5},
{erl_eval,try_clauses,10}],
none, none),
backtrace_catch("catch (try a of b -> bar after foo end).",
{try_clause,a},
[{erl_eval,try_clauses,10}]),
backtrace_check("try 1/0 catch exit:a -> foo end.",
badarith,
[{erlang,'/',[1,0],[]},
{erl_eval,do_apply,7}]),
Es = [{'try',1,[{call,1,{remote,1,{atom,1,foo},{atom,1,bar}},[]}],
[],
[{clause,1,[{tuple,1,[{var,1,'A'},{var,1,'B'},{atom,1,'C'}]}],
[],[{tuple,1,[{var,1,'A'},{var,1,'B'},{atom,1,'C'}]}]}],[]}],
try
erl_eval:exprs(Es, [], none, none),
ct:fail(stacktrace_variable)
catch
error:{illegal_stacktrace_variable,{atom,1,'C'}}:S ->
[{erl_eval,check_stacktrace_vars,5,_},
{erl_eval,try_clauses,10,_}|_] = S
end,
backtrace_check("{1,1} = {A = 1, A = 2}.",
{badmatch, 1},
[erl_eval, {lists,foldl,3}]),
backtrace_check("case a of a when foo:bar() -> x end.",
guard_expr,
[{erl_eval,guard0,4}], none, none),
backtrace_check("case a of foo() -> ok end.",
{illegal_pattern,{call,1,{atom,1,foo},[]}},
[{erl_eval,match,6}], none, none),
backtrace_check("case a of b -> ok end.",
{case_clause,a},
[{erl_eval,case_clauses,8}, ?MODULE]),
backtrace_check("if a =:= b -> ok end.",
if_clause,
[{erl_eval,if_clauses,7}, ?MODULE]),
backtrace_check("fun A(b) -> ok end(a).",
function_clause,
[{erl_eval,'-inside-an-interpreted-fun-',[a],[]},
{erl_eval,eval_named_fun,10},
?MODULE]),
backtrace_check("[A || A <- a].",
{bad_generator, a},
[{erl_eval,eval_generate,8}, {erl_eval, eval_lc, 7}]),
backtrace_check("<< <<A>> || <<A>> <= a>>.",
{bad_generator, a},
[{erl_eval,eval_b_generate,8}, {erl_eval, eval_bc, 7}]),
backtrace_check("[A || A <- [1], begin a end].",
{bad_filter, a},
[{erl_eval,eval_filter,7}, {erl_eval, eval_generate, 8}]),
fun() ->
{'EXIT', {{badarity, {_Fun, []}}, BT}} =
(catch parse_and_run("fun(A) -> A end().")),
check_backtrace([{erl_eval,do_apply,6}, ?MODULE], BT)
end(),
fun() ->
{'EXIT', {{badarity, {_Fun, []}}, BT}} =
(catch parse_and_run("fun F(A) -> A end().")),
check_backtrace([{erl_eval,do_apply,6}, ?MODULE], BT)
end(),
backtrace_check("foo().",
undef,
[{erl_eval,foo,0},{erl_eval,local_func,8}],
none, none),
backtrace_check("a orelse false.",
{badarg, a},
[{erl_eval,expr,6}, ?MODULE]),
backtrace_check("a andalso false.",
{badarg, a},
[{erl_eval,expr,6}, ?MODULE]),
backtrace_check("t = u.",
{badmatch, u},
[{erl_eval,expr,6}, ?MODULE]),
backtrace_check("{math,sqrt}(2).",
{badfun, {math,sqrt}},
[{erl_eval,expr,6}, ?MODULE]),
backtrace_check("erl_eval_SUITE:simple().",
simple,
[{?MODULE,simple1,0},{?MODULE,simple,0},erl_eval]),
Args = [{integer,1,I} || I <- lists:seq(1, 30)],
backtrace_check("fun(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,"
"19,20,21,22,23,24,25,26,27,28,29,30) -> a end.",
{argument_limit,
{'fun',1,[{clause,1,Args,[],[{atom,1,a}]}]}},
[{erl_eval,expr,6}, ?MODULE]),
backtrace_check("fun F(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,"
"19,20,21,22,23,24,25,26,27,28,29,30) -> a end.",
{argument_limit,
{named_fun,1,'F',[{clause,1,Args,[],[{atom,1,a}]}]}},
[{erl_eval,expr,6}, ?MODULE]),
backtrace_check("#r{}.",
{undef_record,r},
[{erl_eval,expr,6}, ?MODULE],
none, none),
backtrace_check("<<100:8/bitstring>>.",
badarg,
[{eval_bits,eval_exp_field,6},
eval_bits,eval_bits,erl_eval]),
backtrace_check("<<100:8/foo>>.",
{undefined_bittype,foo},
[{eval_bits,make_bit_type,4},eval_bits,
eval_bits,eval_bits],
none, none),
backtrace_check("B = <<\"foo\">>, <<B/binary-unit:7>>.",
badarg,
[{eval_bits,eval_exp_field,6},
eval_bits,eval_bits,erl_eval],
none, none),
{error_info, #{cause := _, override_segment_position := 1}} =
error_info_catch("<<100:8/bitstring>>.", badarg),
{error_info, #{cause := _, override_segment_position := 2}} =
error_info_catch("<<0:8, 100:8/bitstring>>.", badarg),
ok.
%% Entry point called by otp_14826/1 via "erl_eval_SUITE:simple().".
%% simple1/0 always raises error(simple); wrapping its result in a tuple
%% keeps this frame on the stacktrace (the call is not a tail call).
simple() ->
    Value = simple1(),
    {Value}.
%% Always raises error(simple): the process-dictionary key is never set,
%% so get/1 returns 'undefined'. The second clause is not expected to
%% match (see the variable name); it only makes the error conditional.
simple1() ->
case get(a_key_that_is_not_defined) of
undefined -> erlang:error(simple);
WillNeverHappen -> WillNeverHappen
end.
custom_stacktrace(Config) when is_list(Config) ->
EFH = {value, fun custom_stacktrace_eval_handler/3},
backtrace_check("1 + atom.", badarith,
[{erlang,'+',[1,atom]}, mystack(1)], none, EFH),
backtrace_check("\n1 + atom.", badarith,
[{erlang,'+',[1,atom]}, mystack(2)], none, EFH),
backtrace_check("lists:flatten(atom).", function_clause,
[{lists,flatten,[atom]}, mystack(1)], none, EFH),
backtrace_check("invalid andalso true.", {badarg, invalid},
[mystack(1)], none, EFH),
backtrace_check("invalid orelse true.", {badarg, invalid},
[mystack(1)], none, EFH),
backtrace_check("invalid = valid.", {badmatch, valid},
[erl_eval, mystack(1)], none, EFH),
backtrace_check("1:2.", {badexpr, ':'},
[erl_eval, mystack(1)], none, EFH),
backtrace_check("Unknown.", {unbound, 'Unknown'},
[erl_eval, mystack(1)], none, EFH),
backtrace_check("#unknown{}.", {undef_record,unknown},
[erl_eval, mystack(1)], none, EFH),
backtrace_check("#unknown{foo=bar}.", {undef_record,unknown},
[erl_eval, mystack(1)], none, EFH),
backtrace_check("#unknown.index.", {undef_record,unknown},
[erl_eval, mystack(1)], none, EFH),
backtrace_check("fun foo/2.", undef,
[{erl_eval, foo, 2}, erl_eval, mystack(1)], none, EFH),
backtrace_check("foo(1, 2).", undef,
[{erl_eval, foo, 2}, erl_eval, mystack(1)], none, EFH),
fun() ->
{'EXIT', {{badarity, {_Fun, []}}, BT}} =
(catch parse_and_run("fun(A) -> A end().", none, EFH)),
check_backtrace([erl_eval, mystack(1)], BT)
end(),
fun() ->
{'EXIT', {{badarity, {_Fun, []}}, BT}} =
(catch parse_and_run("fun F(A) -> A end().", none, EFH)),
check_backtrace([erl_eval, mystack(1)], BT)
end(),
backtrace_check("[X || X <- 1].", {bad_generator, 1},
[erl_eval, mystack(1)], none, EFH),
backtrace_check("[X || <<X>> <= 1].", {bad_generator, 1},
[erl_eval, mystack(1)], none, EFH),
backtrace_check("<<X || X <- 1>>.", {bad_generator, 1},
[erl_eval, mystack(1)], none, EFH),
backtrace_check("<<X || <<X>> <= 1>>.", {bad_generator, 1},
[erl_eval, mystack(1)], none, EFH),
backtrace_check("if false -> true end.", if_clause,
[erl_eval, mystack(1)], none, EFH),
backtrace_check("case 0 of 1 -> true end.", {case_clause, 0},
[erl_eval, mystack(1)], none, EFH),
backtrace_check("try 0 of 1 -> true after ok end.", {try_clause, 0},
[mystack(1)], none, EFH),
backtrace_check("fun(0) -> 1 end(1).", function_clause,
[{erl_eval,'-inside-an-interpreted-fun-', [1]}, erl_eval, mystack(1)],
none, EFH),
backtrace_check("fun F(0) -> 1 end(1).", function_clause,
[{erl_eval,'-inside-an-interpreted-fun-', [1]}, erl_eval, mystack(1)],
none, EFH),
fun() ->
{'EXIT', {{illegal_pattern,_}, BT}} =
(catch parse_and_run("make_ref() = 1.", none, EFH)),
check_backtrace([erl_eval, mystack(1)], BT)
end(),
backtrace_check("<<100:8/bitstring>>.",
badarg,
[{eval_bits,eval_exp_field,6}, mystack(1)],
none, EFH),
backtrace_check("<<100:8/foo>>.",
{undefined_bittype,foo},
[{eval_bits,make_bit_type,4}, mystack(1)],
none, EFH),
backtrace_check("B = <<\"foo\">>, <<B/binary-unit:7>>.",
badarg,
[{eval_bits,eval_exp_field,6}, mystack(1)],
none, EFH),
ok.
%% Builds the synthetic stack frame that custom_stacktrace_eval_handler/3
%% injects: a fake my_module:my_function/0 frame located at the given
%% line of the pseudo-file "evaluator".
mystack(Line) ->
    Location = [{file, "evaluator"}, {line, Line}],
    {my_module, my_function, 0, Location}.
%% Function handler used by custom_stacktrace/1. Applies the requested
%% fun or {Mod,Fun}, and on any exception rewrites the stacktrace:
%% frames shared with the current process stack are removed and a
%% synthetic my_module:my_function/0 frame carrying the evaluator
%% file/line (from the annotation Ann) is appended, then the exception
%% is re-raised with the rewritten trace.
%% Fix: the inline comment below had lost its %% marker in extraction,
%% which made this function a syntax error.
custom_stacktrace_eval_handler(Ann, FunOrModFun, Args) ->
    try
        case FunOrModFun of
            {Mod, Fun} -> apply(Mod, Fun, Args);
            Fun -> apply(Fun, Args)
        end
    catch
        Kind:Reason:Stacktrace ->
            %% Cut the trace at the test driver so only frames produced
            %% below it remain.
            Pruned =
                lists:takewhile(fun
                                    ({erl_eval_SUITE,backtrace_check,5,_}) -> false;
                                    (_) -> true
                                end, Stacktrace),
            {current_stacktrace, Current} =
                erlang:process_info(self(), current_stacktrace),
            Reversed = drop_common(lists:reverse(Current), lists:reverse(Pruned)),
            Location = [{file, "evaluator"}, {line, erl_anno:line(Ann)}],
            %% Add our file+line information at the bottom
            Custom = lists:reverse([{my_module, my_function, 0, Location} | Reversed]),
            erlang:raise(Kind, Reason, Custom)
    end.
%% Walks the reversed current stacktrace (first argument) against the
%% reversed exception trace (second), consuming the first list and
%% dropping matching heads from the second; once the first list is
%% exhausted, also strips this handler's own frame if it is next.
%% Fix: the second clause bound H without using it (compiler warning);
%% renamed to _H.
drop_common([H | T1], [H | T2]) -> drop_common(T1, T2);
drop_common([_H | T1], T2) -> drop_common(T1, T2);
drop_common([], [{?MODULE, custom_stacktrace_eval_handler, _, _} | T2]) -> T2;
drop_common([], T2) -> T2.
funs(Config) when is_list(Config) ->
do_funs(none, none),
do_funs(lfh(), none),
do_funs(none, efh()),
do_funs(lfh(), efh()),
do_funs(none, ann_efh()),
do_funs(lfh(), ann_efh()),
error_check("nix:foo().", {access_not_allowed,nix}, lfh(), efh()),
error_check("nix:foo().", {access_not_allowed,nix}, lfh(), ann_efh()),
error_check("bar().", undef, none, none),
check(fun() -> F1 = fun(F,N) -> ?MODULE:count_down(F, N) end,
F1(F1, 1000) end,
"begin F1 = fun(F,N) -> count_down(F, N) end,"
"F1(F1,1000) end.",
0, ['F1'], lfh(), none),
check(fun() -> F1 = fun(F,N) -> ?MODULE:count_down(F, N) end,
F1(F1, 1000) end,
"begin F1 = fun(F,N) -> count_down(F, N) end,"
"F1(F1,1000) end.",
0, ['F1'], lfh_value(), none),
check(fun() -> F1 = fun(F,N) -> ?MODULE:count_down(F, N) end,
F1(F1, 1000) end,
"begin F1 = fun(F,N) -> count_down(F, N) end,"
"F1(F1,1000) end.",
0, ['F1'], lfh_value_extra(), none),
check(fun() -> F1 = fun(F,N) -> ?MODULE:count_down(F, N) end,
F1(F1, 1000) end,
"begin F1 = fun(F,N) -> count_down(F, N) end,"
"F1(F1,1000) end.",
0, ['F1'], {?MODULE,local_func_value}, none),
B0 = erl_eval:new_bindings(),
check(fun() -> is_function(?MODULE:count_down_fun()) end,
"begin is_function(count_down_fun()) end.",
true, [], {?MODULE,local_func,[B0]},none),
EF = fun({timer,sleep}, As) when length(As) == 1 -> exit({got_it,sleep});
({M,F}, As) -> apply(M, F, As)
end,
EFH = {value, EF},
error_check("apply(timer, sleep, [1]).", got_it, none, EFH),
error_check("begin F = fun(T) -> timer:sleep(T) end,F(1) end.",
got_it, none, EFH),
AnnEF = fun(1, {timer,sleep}, As) when length(As) == 1 -> exit({got_it,sleep});
(1, {M,F}, As) -> apply(M, F, As)
end,
AnnEFH = {value, AnnEF},
error_check("apply(timer, sleep, [1]).", got_it, none, AnnEFH),
error_check("begin F = fun(T) -> timer:sleep(T) end,F(1) end.",
got_it, none, AnnEFH),
error_check("fun c/1.", undef),
error_check("fun a:b/0().", undef),
MaxArgs = 20,
[true] =
lists:usort([run_many_args(SAs) || SAs <- many_args(MaxArgs)]),
{'EXIT',{{argument_limit,_},_}} =
(catch run_many_args(many_args1(MaxArgs+1))),
check(fun() -> M = lists, F = fun M:reverse/1,
[1,2] = F([2,1]), ok end,
"begin M = lists, F = fun M:reverse/1,"
" [1,2] = F([2,1]), ok end.",
ok),
error_check("{" ?MODULE_STRING ",module_info}().",
{badfun,{?MODULE,module_info}}),
ok.
%% Builds the fun described by source string S (see many_args1/1),
%% applies it to As, and returns true when the fun echoes its argument
%% list unchanged.
run_many_args({S, As}) ->
apply(eval_string(S), As) =:= As.
%% Returns one {SourceString, Args} pair (see many_args1/1) for every
%% arity 1..N.
many_args(N) ->
    lists:map(fun many_args1/1, lists:seq(1, N)).
%% Builds the source of a fun of arity N that returns its N arguments
%% as a list, e.g. N = 2 -> "fun(V1,V2) -> [V1,V2] end.", paired with
%% the argument list [1..N] to apply it to.
many_args1(N) ->
    Args = lists:seq(1, N),
    Params = lists:flatten(
                 lists:join($,, ["V" ++ integer_to_list(I) || I <- Args])),
    Source = lists:flatten(io_lib:format("fun(~s) -> [~s] end.",
                                         [Params, Params])),
    {Source, Args}.
do_funs(LFH, EFH) ->
%% Also verified manually with 1000 replaced by 1000000.
M = atom_to_list(?MODULE),
check(fun() -> F1 = fun(F,N) -> ?MODULE:count_down(F, N) end,
F1(F1, 1000) end,
concat(["begin F1 = fun(F,N) -> ", M,
":count_down(F, N) end, F1(F1,1000) end."]),
0, ['F1'], LFH, EFH),
check(fun() -> F1 = fun(F,N) -> apply(?MODULE,count_down,[F,N])
end, F1(F1, 1000) end,
concat(["begin F1 = fun(F,N) -> apply(", M,
",count_down,[F, N]) end, F1(F1,1000) end."]),
0, ['F1'], LFH, EFH),
check(fun() -> F = fun(F,N) when N > 0 -> apply(F,[F,N-1]);
(_F,0) -> ok end,
F(F, 1000)
end,
"begin F = fun(F,N) when N > 0 -> apply(F,[F,N-1]);"
"(_F,0) -> ok end,"
"F(F, 1000) end.",
ok, ['F'], LFH, EFH),
check(fun() -> F = fun(F,N) when N > 0 ->
apply(erlang,apply,[F,[F,N-1]]);
(_F,0) -> ok end,
F(F, 1000)
end,
"begin F = fun(F,N) when N > 0 ->"
"apply(erlang,apply,[F,[F,N-1]]);"
"(_F,0) -> ok end,"
"F(F, 1000) end.",
ok, ['F'], LFH, EFH),
check(fun() -> F = count_down_fun(),
SF = fun(SF, F1, N) -> F(SF, F1, N) end,
SF(SF, F, 1000) end,
concat(["begin F = ", M, ":count_down_fun(),"
"SF = fun(SF, F1, N) -> F(SF, F1, N) end,"
"SF(SF, F, 1000) end."]),
ok, ['F','SF'], LFH, EFH),
check(fun() -> F = fun(X) -> A = 1+X, {X,A} end,
true = {2,3} == F(2) end,
"begin F = fun(X) -> A = 1+X, {X,A} end,
true = {2,3} == F(2) end.", true, ['F'], LFH, EFH),
check(fun() -> F = fun(X) -> erlang:'+'(X,2) end,
true = 3 == F(1) end,
"begin F = fun(X) -> erlang:'+'(X,2) end,"
" true = 3 == F(1) end.", true, ['F'],
LFH, EFH),
check(fun() -> F = fun(X) -> byte_size(X) end,
?MODULE:do_apply(F,<<"hej">>) end,
concat(["begin F = fun(X) -> size(X) end,",
M,":do_apply(F,<<\"hej\">>) end."]),
3, ['F'], LFH, EFH),
check(fun() -> F1 = fun(X, Z) -> {X,Z} end,
Z = 5,
F2 = fun(X, Y) -> F1(Z,{X,Y}) end,
F3 = fun(X, Y) -> {a,F1(Z,{X,Y})} end,
{5,{x,y}} = F2(x,y),
{a,{5,{y,x}}} = F3(y,x),
{5,{5,y}} = F2(Z,y),
true = {5,{x,5}} == F2(x,Z) end,
"begin F1 = fun(X, Z) -> {X,Z} end,
Z = 5,
F2 = fun(X, Y) -> F1(Z,{X,Y}) end,
F3 = fun(X, Y) -> {a,F1(Z,{X,Y})} end,
{5,{x,y}} = F2(x,y),
{a,{5,{y,x}}} = F3(y,x),
{5,{5,y}} = F2(Z,y),
true = {5,{x,5}} == F2(x,Z) end.",
true, ['F1','Z','F2','F3'], LFH, EFH),
check(fun() -> F = fun(X) -> byte_size(X) end,
F2 = fun(Y) -> F(Y) end,
?MODULE:do_apply(F2,<<"hej">>) end,
concat(["begin F = fun(X) -> size(X) end,",
"F2 = fun(Y) -> F(Y) end,",
M,":do_apply(F2,<<\"hej\">>) end."]),
3, ['F','F2'], LFH, EFH),
check(fun() -> Z = 5, F = fun(X) -> {Z,X} end,
F2 = fun(Z) -> F(Z) end, F2(3) end,
"begin Z = 5, F = fun(X) -> {Z,X} end,
F2 = fun(Z) -> F(Z) end, F2(3) end.",
{5,3},['F','F2','Z'], LFH, EFH),
check(fun() -> F = fun(Z) -> Z end,
F2 = fun(X) -> F(X), Z = {X,X}, Z end,
{1,1} = F2(1), Z = 7, Z end,
"begin F = fun(Z) -> Z end,
F2 = fun(X) -> F(X), Z = {X,X}, Z end,
{1,1} = F2(1), Z = 7, Z end.", 7, ['F','F2','Z'],
LFH, EFH),
check(fun() -> F = fun(F, N) -> [?MODULE:count_down(F,N) || X <-[1]]
end, F(F,2) end,
concat(["begin F = fun(F, N) -> [", M,
":count_down(F,N) || X <-[1]] end, F(F,2) end."]),
[[[0]]], ['F'], LFH, EFH),
ok.
%% Calls back through Fun while Count is positive, decrementing each
%% time, and returns the final (non-positive) Count. Exercised by
%% funs/1 and do_funs/2 both directly and through function handlers.
count_down(Fun, Count) when Count > 0 ->
    Fun(Fun, Count - 1);
count_down(_Fun, Count) ->
    Count.
%% Returns a self-applying counter: the fun calls itself through its
%% first argument (SF) until N reaches 0, passing F along untouched,
%% and finally returns ok.
count_down_fun() ->
fun(SF,F,N) when N > 0 -> SF(SF,F,N-1);
(_SF,_F,_N) -> ok
end.
%% Applies a one-argument fun. Called remotely (?MODULE:do_apply/2)
%% from do_funs/2 so that interpreted funs are invoked from compiled
%% code.
do_apply(Fun, Arg) ->
    Fun(Arg).
%% Local function handler in {eval, Fun} form: the fun receives the
%% unevaluated argument expressions plus the bindings and must return
%% {value, Value, NewBindings}.
lfh() ->
{eval, fun(F, As, Bs) -> local_func(F, As, Bs) end}.
%% Body of the {eval, _} local function handler: evaluates the argument
%% expressions (using the same handler), then applies F in this module
%% if exported with a matching arity, otherwise in shell_default.
%% Returns {value, Result, NewBindings} as erl_eval requires.
local_func(F, As0, Bs0) when is_atom(F) ->
{As,Bs} = erl_eval:expr_list(As0, Bs0, lfh()),
case erlang:function_exported(?MODULE, F, length(As)) of
true ->
{value,apply(?MODULE, F, As),Bs};
false ->
{value,apply(shell_default, F, As),Bs}
end.
%% Local function handler in {value, Fun, ExtraArgs} form: the extra
%% arguments [a1, a2] are passed to every call, asserted here by
%% matching them literally in the fun head.
lfh_value_extra() ->
{value, fun(F, As, a1, a2) -> local_func_value(F, As) end, [a1, a2]}.
%% Local function handler in {value, Fun} form: the fun receives
%% already-evaluated argument values and returns the result directly.
lfh_value() ->
{value, fun(F, As) -> local_func_value(F, As) end}.
%% Dispatches a local call with already-evaluated arguments: prefer an
%% exported function of matching arity in this module, otherwise fall
%% back to shell_default. Returns the bare result, as {value, _}
%% handlers must.
local_func_value(F, As) when is_atom(F) ->
    Target =
        case erlang:function_exported(?MODULE, F, length(As)) of
            true -> ?MODULE;
            false -> shell_default
        end,
    apply(Target, F, As).
%% External (remote-call) function handler in {value, Fun} form.
efh() ->
{value, fun(F, As) -> external_func(F, As) end}.
%% Like efh/0 but using the annotation-taking three-argument handler
%% shape; the annotation is ignored here.
ann_efh() ->
{value, fun(_Ann, F, As) -> external_func(F, As) end}.
%% External function handler body. Calls into the pseudo-module 'nix'
%% are rejected with an access_not_allowed exit (exercised by funs/1);
%% plain funs are applied directly; any other {M,F} is applied as
%% M:F(As...).
%% Fix: the first clause used a value-comparing guard ("when M == nix");
%% an exact pattern match is clearer and avoids ==.
external_func({nix, _}, _As) ->
    exit({{access_not_allowed, nix}, [mfa]});
external_func(F, As) when is_function(F) ->
    apply(F, As);
external_func({M, F}, As) ->
    apply(M, F, As).
try_catch(Config) when is_list(Config) ->
check(fun() -> try 1 of 1 -> 2 catch _:_ -> 3 end end,
"try 1 of 1 -> 2 catch _:_ -> 3 end.", 2),
check(fun() -> try 1 of 1 -> 2; 3 -> 4 catch _:_ -> 5 end end,
"try 1 of 1 -> 2; 3 -> 4 catch _:_ -> 5 end.", 2),
check(fun() -> try 3 of 1 -> 2; 3 -> 4 catch _:_ -> 5 end end,
"try 3 of 1 -> 2; 3 -> 4 catch _:_ -> 5 end.", 4),
check(fun () -> X = try 1 after put(try_catch, 2) end,
{X,get(try_catch)} end,
"begin X = try 1 after put(try_catch, 2) end, "
"{X,get(try_catch)} end.", {1,2}),
check(fun() -> X = try 1 of 1 -> 2 after put(try_catch, 3) end,
{X,get(try_catch)} end,
"begin X = try 1 of 1 -> 2 after put(try_catch, 3) end, "
"{X,get(try_catch)} end.", {2,3}),
check(fun() -> X = try 1 of 1 -> 2; 3 -> 4
after put(try_catch, 5) end,
{X,get(try_catch)} end,
"begin X = try 1 of 1 -> 2; 3 -> 4 "
" after put(try_catch, 5) end, "
" {X,get(try_catch)} end.", {2,5}),
check(fun() -> X = try 3 of 1 -> 2; 3 -> 4
after put(try_catch, 5) end,
{X,get(try_catch)} end,
"begin X = try 3 of 1 -> 2; 3 -> 4 "
" after put(try_catch, 5) end, "
" {X,get(try_catch)} end.", {4,5}),
error_check("try 1 of 2 -> 3 catch _:_ -> 4 end.",
{try_clause,1}),
check(fun () -> {'EXIT',{{try_clause,1},_}} =
begin catch try 1 of 2 -> 3
after put(try_catch, 4) end end,
get(try_catch) end,
"begin {'EXIT',{{try_clause,1},_}} = "
" begin catch try 1 of 2 -> 3 "
" after put(try_catch, 4) end end, "
" get(try_catch) end. ", 4),
check(fun () -> try 1=2 catch error:{badmatch,2} -> 3 end end,
"try 1=2 catch error:{badmatch,2} -> 3 end.", 3),
check(fun () -> try 1=2 of 3 -> 4
catch error:{badmatch,2} -> 5 end end,
"try 1=2 of 3 -> 4 "
"catch error:{badmatch,2} -> 5 end.", 5),
check(fun () -> X = try 1=2
catch error:{badmatch,2} -> 3
after put(try_catch, 4) end,
{X,get(try_catch)} end,
"begin X = try 1=2 "
" catch error:{badmatch,2} -> 3 "
" after put(try_catch, 4) end, "
" {X,get(try_catch)} end. ", {3,4}),
check(fun () -> X = try 1=2 of 3 -> 4
catch error:{badmatch,2} -> 5
after put(try_catch, 6) end,
{X,get(try_catch)} end,
"begin X = try 1=2 of 3 -> 4"
" catch error:{badmatch,2} -> 5 "
" after put(try_catch, 6) end, "
" {X,get(try_catch)} end. ", {5,6}),
error_check("try 1=2 catch error:undefined -> 3 end. ",
{badmatch,2}),
error_check("try 1=2 of 3 -> 4 catch error:undefined -> 5 end. ",
{badmatch,2}),
check(fun () -> {'EXIT',{{badmatch,2},_}} =
begin catch try 1=2
after put(try_catch, 3) end end,
get(try_catch) end,
"begin {'EXIT',{{badmatch,2},_}} = "
" begin catch try 1=2 "
" after put(try_catch, 3) end end, "
" get(try_catch) end. ", 3),
check(fun () -> {'EXIT',{{badmatch,2},_}} =
begin catch try 1=2 of 3 -> 4
after put(try_catch, 5) end end,
get(try_catch) end,
"begin {'EXIT',{{badmatch,2},_}} = "
" begin catch try 1=2 of 3 -> 4"
" after put(try_catch, 5) end end, "
" get(try_catch) end. ", 5),
check(fun () -> {'EXIT',{{badmatch,2},_}} =
begin catch try 1=2 catch error:undefined -> 3
after put(try_catch, 4) end end,
get(try_catch) end,
"begin {'EXIT',{{badmatch,2},_}} = "
" begin catch try 1=2 catch error:undefined -> 3 "
" after put(try_catch, 4) end end, "
" get(try_catch) end. ", 4),
check(fun () -> {'EXIT',{{badmatch,2},_}} =
begin catch try 1=2 of 3 -> 4
catch error:undefined -> 5
after put(try_catch, 6) end end,
get(try_catch) end,
"begin {'EXIT',{{badmatch,2},_}} = "
" begin catch try 1=2 of 3 -> 4 "
" catch error:undefined -> 5 "
" after put(try_catch, 6) end end, "
" get(try_catch) end. ", 6),
ok.
%% erl_eval:expr/5: the fifth argument selects the return format.
%% 'none' -> {value, V, Bindings}, 'value' -> bare V, and anything else
%% must raise function_clause.
eval_expr_5(Config) when is_list(Config) ->
{ok,Tokens ,_} =
erl_scan:string("if a+4 == 4 -> yes; true -> no end. "),
{ok, [Expr]} = erl_parse:parse_exprs(Tokens),
{value, no, []} = erl_eval:expr(Expr, [], none, none, none),
no = erl_eval:expr(Expr, [], none, none, value),
try
erl_eval:expr(Expr, [], none, none, 4711),
function_clause = should_never_reach_here
catch
error:function_clause ->
ok
end.
%% A zero-width binary segment still type-checks its value: putting a
%% non-number into <<_:0>> must raise badarg, both compiled and
%% interpreted.
zero_width(Config) when is_list(Config) ->
check(fun() ->
{'EXIT',{badarg,_}} = (catch <<not_a_number:0>>),
ok
end, "begin {'EXIT',{badarg,_}} = (catch <<not_a_number:0>>), "
"ok end.", ok),
ok.
%% EEP 37: named funs, as evaluated by erl_eval.
eep37(Config) when is_list(Config) ->
check(fun () -> (fun _(X) -> X end)(42) end,
"(fun _(X) -> X end)(42).",
42),
check(fun () -> (fun _Id(X) -> X end)(42) end,
"(fun _Id(X) -> X end)(42).", 42),
%% A named fun can return itself through its own name.
check(fun () -> is_function((fun Self() -> Self end)(), 0) end,
"is_function((fun Self() -> Self end)(), 0).",
true),
%% Self-recursion through the fun's name.
check(fun () ->
F = fun Fact(N) when N > 0 ->
N * Fact(N - 1);
Fact(0) ->
1
end,
F(6)
end,
"(fun Fact(N) when N > 0 -> N * Fact(N - 1); Fact(0) -> 1 end)(6).",
720),
ok.
eep43(Config) when is_list(Config) ->
check(fun () -> #{} end, " #{}.", #{}),
check(fun () -> #{a => b} end, "#{a => b}.", #{a => b}),
check(fun () ->
Map = #{a => b},
{Map#{a := b},Map#{a => c},Map#{d => e}}
end,
"begin "
" Map = #{a => B=b}, "
" {Map#{a := B},Map#{a => c},Map#{d => e}} "
"end.",
{#{a => b},#{a => c},#{a => b,d => e}}),
check(fun () ->
lists:map(fun (X) -> X#{price := 0} end,
[#{hello => 0, price => nil}])
end,
"lists:map(fun (X) -> X#{price := 0} end,
[#{hello => 0, price => nil}]).",
[#{hello => 0, price => 0}]),
check(fun () ->
Map = #{ <<33:333>> => "wat" },
#{ <<33:333>> := "wat" } = Map
end,
"begin "
" Map = #{ <<33:333>> => \"wat\" }, "
" #{ <<33:333>> := \"wat\" } = Map "
"end.",
#{ <<33:333>> => "wat" }),
check(fun () ->
K1 = 1,
K2 = <<42:301>>,
K3 = {3,K2},
Map = #{ K1 => 1, K2 => 2, K3 => 3, {2,2} => 4},
#{ K1 := 1, K2 := 2, K3 := 3, {2,2} := 4} = Map
end,
"begin "
" K1 = 1, "
" K2 = <<42:301>>, "
" K3 = {3,K2}, "
" Map = #{ K1 => 1, K2 => 2, K3 => 3, {2,2} => 4}, "
" #{ K1 := 1, K2 := 2, K3 := 3, {2,2} := 4} = Map "
"end.",
#{ 1 => 1, <<42:301>> => 2, {3,<<42:301>>} => 3, {2,2} => 4}),
check(fun () ->
X = key,
(fun(#{X := value}) -> true end)(#{X => value})
end,
"begin "
" X = key, "
" (fun(#{X := value}) -> true end)(#{X => value}) "
"end.",
true),
error_check("[camembert]#{}.", {badmap,[camembert]}),
error_check("[camembert]#{nonexisting:=v}.", {badmap,[camembert]}),
error_check("#{} = 1.", {badmatch,1}),
error_check("[]#{a=>error(bad)}.", bad),
error_check("(#{})#{nonexisting:=value}.", {badkey,nonexisting}),
ok.
otp_15035(Config) when is_list(Config) ->
check(fun() ->
fun() when #{} ->
a;
() when #{a => b} ->
b;
() when #{a => b} =:= #{a => b} ->
c
end()
end,
"fun() when #{} ->
a;
() when #{a => b} ->
b;
() when #{a => b} =:= #{a => b} ->
c
end().",
c),
check(fun() ->
F = fun(M) when M#{} ->
a;
(M) when M#{a => b} ->
b;
(M) when M#{a := b} ->
c;
(M) when M#{a := b} =:= M#{a := b} ->
d;
(M) when M#{a => b} =:= M#{a => b} ->
e
end,
{F(#{}), F(#{a => b})}
end,
"fun() ->
F = fun(M) when M#{} ->
a;
(M) when M#{a => b} ->
b;
(M) when M#{a := b} ->
c;
(M) when M#{a := b} =:= M#{a := b} ->
d;
(M) when M#{a => b} =:= M#{a => b} ->
e
end,
{F(#{}), F(#{a => b})}
end().",
{e, d}),
ok.
%% OTP-16439: sequences of unary +/- are accepted in expressions and
%% patterns, and unary minus parses correctly in front of a map literal.
otp_16439(Config) when is_list(Config) ->
check(fun() -> + - 5 end, "+ - 5.", -5),
check(fun() -> - + - 5 end, "- + - 5.", 5),
check(fun() -> case 7 of - - 7 -> seven end end,
"case 7 of - - 7 -> seven end.", seven),
%% "- #{}" must parse as the unary '-' operator applied to a map.
{ok,Ts,_} = erl_scan:string("- #{}. "),
A = erl_anno:new(1),
{ok,[{op,A,'-',{map,A,[]}}]} = erl_parse:parse_exprs(Ts),
ok.
%% OTP-14708: guard expressions are allowed as map keys in patterns and
%% as bit-syntax size expressions in patterns.
otp_14708(Config) when is_list(Config) ->
check(fun() -> X = 42, #{{tag,X} := V} = #{{tag,X} => a}, V end,
"begin X = 42, #{{tag,X} := V} = #{{tag,X} => a}, V end.",
a),
check(fun() ->
T = {x,y,z},
Map = #{x => 99, y => 100},
#{element(1, T) := V1, element(2, T) := V2} = Map,
{V1, V2}
end,
"begin
T = {x,y,z},
Map = #{x => 99, y => 100},
#{element(1, T) := V1, element(2, T) := V2} = Map,
{V1, V2}
end.",
{99, 100}),
%% Only guard-permitted expressions may appear as keys in a pattern.
error_check("#{term_to_binary(42) := _} = #{}.", illegal_guard_expr),
check(fun() ->
<<Sz:16,Body:(Sz-1)/binary>> = <<4:16,1,2,3>>,
Body
end,
"begin
<<Sz:16,Body:(Sz-1)/binary>> = <<4:16,1,2,3>>,
Body
end.",
<<1,2,3>>),
check(fun() ->
Sizes = #{0 => 3, 1 => 7},
<<SzTag:1,Body:(map_get(SzTag, Sizes))/binary>> =
<<1:1,1,2,3,4,5,6,7>>,
Body
end,
"begin
Sizes = #{0 => 3, 1 => 7},
<<SzTag:1,Body:(map_get(SzTag, Sizes))/binary>> =
<<1:1,1,2,3,4,5,6,7>>,
Body
end.",
<<1,2,3,4,5,6,7>>),
%% Non-guard expressions are rejected as bit sizes in patterns.
error_check("<<X:(process_info(self()))>> = <<>>.", illegal_bitsize),
ok.
%% OTP-16545: utf16/utf32 native-endian construction and matching, and
%% string segments with explicit (odd) sizes.
otp_16545(Config) when is_list(Config) ->
case eval_string("<<$W/utf16-native>> = <<$W/utf16-native>>.") of
<<$W/utf16-native>> -> ok
end,
case eval_string("<<$W/utf32-native>> = <<$W/utf32-native>>.") of
<<$W/utf32-native>> -> ok
end,
%% An explicit size on a string segment applies to every character.
check(fun() -> <<10/unsigned,"fgbz":86>> end,
"<<10/unsigned,\"fgbz\":86>>.",
<<10,0,0,0,0,0,0,0,0,0,1,152,0,0,0,0,0,0,0,0,0,6,112,0,0,
0,0,0,0,0,0,0,24,128,0,0,0,0,0,0,0,0,0,122>>),
%% An empty string segment contributes nothing, whatever its spec.
check(fun() -> <<"":16/signed>> end,
"<<\"\":16/signed>>.",
<<>>),
error_check("<<\"\":problem/signed>>.", badarg),
ok.
%% OTP-16865: a bit-string generator pattern whose size expression fails
%% matches nothing (empty result), while an error raised while building
%% the generator's input must still propagate.
otp_16865(Config) when is_list(Config) ->
check(fun() -> << <<>> || <<34:(1/0)>> <= <<"string">> >> end,
"<< <<>> || <<34:(1/0)>> <= <<\"string\">> >>.",
<<>>),
error_check("<< <<>> || <<>> <= <<1:(-1), (fun() -> a = b end())>> >>.",
{badmatch, b}),
ok.
%% EEP 49: 'maybe' expressions (value-based error handling) as
%% evaluated by erl_eval, with and without 'else' clauses.
eep49(Config) when is_list(Config) ->
check(fun() ->
maybe empty end
end,
"maybe empty end.",
empty),
check(fun() ->
maybe ok ?= ok end
end,
"maybe ok ?= ok end.",
ok),
check(fun() ->
maybe {ok,A} ?= {ok,good}, A end
end,
"maybe {ok,A} ?= {ok,good}, A end.",
good),
check(fun() ->
maybe {ok,A} ?= {ok,good}, {ok,B} ?= {ok,also_good}, {A,B} end
end,
"maybe {ok,A} ?= {ok,good}, {ok,B} ?= {ok,also_good}, {A,B} end.",
{good,also_good}),
%% Without 'else', a failed ?= makes the maybe return that value.
check(fun() ->
maybe {ok,A} ?= {ok,good}, {ok,B} ?= {error,wrong}, {A,B} end
end,
"maybe {ok,A} ?= {ok,good}, {ok,B} ?= {error,wrong}, {A,B} end.",
{error,wrong}),
check(fun() ->
maybe empty else _ -> error end
end,
"maybe empty else _ -> error end.",
empty),
check(fun() ->
maybe ok ?= ok else _ -> error end
end,
"maybe ok ?= ok else _ -> error end.",
ok),
%% With 'else', a failed ?= is dispatched to the else clauses.
check(fun() ->
maybe ok ?= other else _ -> error end
end,
"maybe ok ?= other else _ -> error end.",
error),
check(fun() ->
maybe {ok,A} ?= {ok,good}, {ok,B} ?= {ok,also_good}, {A,B}
else {error,_} -> error end
end,
"maybe {ok,A} ?= {ok,good}, {ok,B} ?= {ok,also_good}, {A,B} "
"else {error,_} -> error end.",
{good,also_good}),
check(fun() ->
maybe {ok,A} ?= {ok,good}, {ok,B} ?= {error,other}, {A,B}
else {error,_} -> error end
end,
"maybe {ok,A} ?= {ok,good}, {ok,B} ?= {error,other}, {A,B} "
"else {error,_} -> error end.",
error),
%% A failed value matching no else clause raises else_clause.
error_check("maybe ok ?= simply_wrong else {error,_} -> error end.",
{else_clause,simply_wrong}),
ok.
%% Alias chains (Pat1 = Pat2 = Expr) between multiple binary patterns
%% and between map patterns, including a map pattern whose key variable
%% is bound by a sibling pattern in the same chain.
binary_and_map_aliases(Config) when is_list(Config) ->
check(fun() ->
<<A:16>> = <<B:8,C:8>> = <<16#cafe:16>>,
{A,B,C}
end,
"begin <<A:16>> = <<B:8,C:8>> = <<16#cafe:16>>, {A,B,C} end.",
{16#cafe,16#ca,16#fe}),
check(fun() ->
<<A:8/bits,B:24/bits>> =
<<C:16,D:16>> =
<<E:8,F:8,G:8,H:8>> =
<<16#abcdef57:32>>,
{A,B,C,D,E,F,G,H}
end,
"begin <<A:8/bits,B:24/bits>> =
<<C:16,D:16>> =
<<E:8,F:8,G:8,H:8>> =
<<16#abcdef57:32>>,
{A,B,C,D,E,F,G,H}
end.",
{<<16#ab>>,<<16#cdef57:24>>, 16#abcd,16#ef57, 16#ab,16#cd,16#ef,16#57}),
%% K is bound by the second pattern and used as a key in the first.
check(fun() ->
#{K := V} = #{k := K} = #{k => my_key, my_key => 42},
V
end,
"begin #{K := V} = #{k := K} = #{k => my_key, my_key => 42}, V end.",
42),
ok.
%% EEP 58: Map comprehensions.
eep58(Config) when is_list(Config) ->
%% The generator's X shadows the already-bound X = 32.
check(fun() -> X = 32, #{X => X*X || X <- [1,2,3]} end,
"begin X = 32, #{X => X*X || X <- [1,2,3]} end.",
#{1 => 1, 2 => 4, 3 => 9}),
%% K and V in a map generator shadow outer bindings as well.
check(fun() ->
K = V = none,
#{K => V*V || K := V <- #{1 => 1, 2 => 2, 3 => 3}}
end,
"begin K = V = none, #{K => V*V || K := V <- #{1 => 1, 2 => 2, 3 => 3}} end.",
#{1 => 1, 2 => 4, 3 => 9}),
%% Map generators also accept an explicit maps iterator.
check(fun() ->
#{K => V*V || K := V <- maps:iterator(#{1 => 1, 2 => 2, 3 => 3})}
end,
"#{K => V*V || K := V <- maps:iterator(#{1 => 1, 2 => 2, 3 => 3})}.",
#{1 => 1, 2 => 4, 3 => 9}),
check(fun() -> << <<K:8,V:24>> || K := V <- #{42 => 7777} >> end,
"<< <<K:8,V:24>> || K := V <- #{42 => 7777} >>.",
<<42:8,7777:24>>),
%% "X := X" only selects associations whose key equals its value.
check(fun() -> [X || X := X <- #{a => 1, b => b}] end,
"[X || X := X <- #{a => 1, b => b}].",
[b]),
error_check("[K+V || K := V <- a].", {bad_generator,a}),
error_check("[K+V || K := V <- [-1|#{}]].", {bad_generator,[-1|#{}]}),
ok.
%% Evaluates String three ways, each time comparing against Result and
%% the compiled reference F: as-is, wrapped in an immediately-applied
%% fun, and wrapped in a fun passed through lists:map/2.
check(F, String, Result) ->
check1(F, String, Result),
FunString = concat(["fun() -> ", no_final_dot(String), " end(). "]),
check1(F, FunString, Result),
CompileString = concat(["hd(lists:map(fun(_) -> ", no_final_dot(String),
" end, [foo])). "]),
check1(F, CompileString, Result).
%% Asserts that the compiled fun F yields Result, and that evaluating
%% String with erl_eval yields the same Result under both list-based
%% and map-based bindings; fails the test case otherwise.
check1(F, String, Result) ->
Result = F(),
Expr = parse_expr(String),
case catch erl_eval:expr(Expr, []) of
{value, Result, Bs} when is_list(Bs) ->
ok;
Other1 ->
ct:fail({eval, Other1, Result})
end,
case catch erl_eval:expr(Expr, #{}) of
{value, Result, MapBs} when is_map(MapBs) ->
ok;
Other2 ->
ct:fail({eval, Other2, Result})
end.
%% Like check1/3 but for multi-expression strings evaluated with the
%% given local/external function handlers; additionally checks that
%% exactly the variables listed in BoundVars are bound afterwards,
%% under both list-based and map-based bindings.
%% Fix: the inline comment below had lost its %% marker in extraction,
%% which made this function a syntax error.
check(F, String, Result, BoundVars, LFH, EFH) ->
    Result = F(),
    Exprs = parse_exprs(String),
    case catch erl_eval:exprs(Exprs, [], LFH, EFH) of
        {value, Result, Bs} ->
            %% We just assume that Bs is an orddict ...
            Keys = orddict:fetch_keys(Bs),
            case sort(BoundVars) == sort(Keys) of
                true ->
                    ok;
                false ->
                    ct:fail({check, BoundVars, Keys})
            end,
            ok;
        Other1 ->
            ct:fail({check, Other1, Result})
    end,
    case catch erl_eval:exprs(Exprs, #{}, LFH, EFH) of
        {value, Result, MapBs} ->
            MapKeys = maps:keys(MapBs),
            case sort(BoundVars) == sort(MapKeys) of
                true ->
                    ok;
                false ->
                    ct:fail({check, BoundVars, MapKeys})
            end,
            ok;
        Other2 ->
            ct:fail({check, Other2, Result})
    end.
%% Evaluating String must exit with reason Result, under both
%% list-based and map-based bindings; any other outcome fails the
%% test case.
error_check(String, Result) ->
    Expr = parse_expr(String),
    Expect = fun(Bindings) ->
                     case catch erl_eval:expr(Expr, Bindings) of
                         {'EXIT', {Result, _}} -> ok;
                         Unexpected -> ct:fail({eval, Unexpected, Result})
                     end
             end,
    Expect([]),
    Expect(#{}).
%% Like error_check/2 but for multi-expression strings evaluated with
%% the given local/external function handlers.
error_check(String, Result, LFH, EFH) ->
    Exprs = parse_exprs(String),
    Expect = fun(Bindings) ->
                     case catch erl_eval:exprs(Exprs, Bindings, LFH, EFH) of
                         {'EXIT', {Result, _}} -> ok;
                         Unexpected -> ct:fail({eval, Unexpected, Result})
                     end
             end,
    Expect([]),
    Expect(#{}).
%% Evaluating String must exit with reason Result; the stacktrace
%% (minus error_info metadata) must match the Backtrace spec as defined
%% by check_backtrace/2.
backtrace_check(String, Result, Backtrace) ->
case catch parse_and_run(String) of
{'EXIT', {Result, BT}} ->
check_backtrace(Backtrace, remove_error_info(BT));
Other ->
ct:fail({eval, Other, Result})
end.
%% Like backtrace_check/3 but with explicit local/external function
%% handlers.
backtrace_check(String, Result, Backtrace, LFH, EFH) ->
case catch parse_and_run(String, LFH, EFH) of
{'EXIT', {Result, BT}} ->
check_backtrace(Backtrace, remove_error_info(BT));
Other ->
ct:fail({eval, Other, Result})
end.
%% Strips the error_info metadata from the top stack frame only (the
%% raising frame is the one that carries it), so frames can be compared
%% structurally by check_backtrace/2.
remove_error_info([{M, F, As, Info} | T]) ->
[{M, F, As, lists:keydelete(error_info, 1, Info)} | T].
%% Like backtrace_check/3 for strings that 'catch' the error
%% themselves: evaluation must succeed, returning a
%% {'EXIT', {Result, Stacktrace}} value.
backtrace_catch(String, Result, Backtrace) ->
case parse_and_run(String) of
{value, {'EXIT', {Result, BT}}, _Bindings} ->
check_backtrace(Backtrace, remove_error_info(BT));
Other ->
ct:fail({eval, Other, Result})
end.
%% Evaluates String, expecting an exit with reason Result, and returns
%% the {error_info, Map} tuple found in the top stack frame's metadata
%% (false if absent).
error_info_catch(String, Result) ->
case catch parse_and_run(String) of
{'EXIT', {Result, [{_, _, _, Info}|_]}} ->
lists:keyfind(error_info, 1, Info);
Other ->
ct:fail({eval, Other, Result})
end.
%% Compares a stacktrace against a spec list, frame by frame. Each spec
%% element is either a module name (matches any frame of that module),
%% an {M,F,ArityOrArgs} triple (location ignored), or a complete frame
%% tuple (exact match). The spec may be shorter than the trace; any
%% mismatch raises a case_clause error, failing the test.
check_backtrace([B1|Backtrace], [B2|BT]) ->
case {B1, B2} of
{M, {M,_,_,_}} ->
ok;
{{M,F,A}, {M,F,A,_}} ->
ok;
{B, B} ->
ok
end,
check_backtrace(Backtrace, BT);
check_backtrace([], _) ->
ok.
%% Parses and evaluates a single-expression string, returning only the
%% value and discarding the resulting bindings.
eval_string(String) ->
    {value, Value, _Bindings} = parse_and_run(String),
    Value.
%% Scans and parses String, which must contain exactly one expression;
%% returns its abstract form.
parse_expr(String) ->
Tokens = erl_scan_string(String),
{ok, [Expr]} = erl_parse:parse_exprs(Tokens),
Expr.
%% Scans and parses String into its (possibly multi-element) list of
%% abstract expression forms.
parse_exprs(String) ->
    Scanned = erl_scan_string(String),
    {ok, Forms} = erl_parse:parse_exprs(Scanned),
    Forms.
%% Tokenizes String with 'maybe' and 'else' forced to be reserved
%% words, so the eep49 test strings scan the same way as with the
%% maybe_expr feature enabled.
%% Fix: the FIXME comment below had lost its %% marker in extraction,
%% which made this function a syntax error.
erl_scan_string(String) ->
    %% FIXME: When the experimental features EEP has been implemented, we should
    %% (rest of original comment lost in extraction; presumably: rely on
    %% the feature mechanism instead of a custom reserved_word_fun).
    ResWordFun =
        fun('maybe') -> true;
           ('else') -> true;
           (Other) -> erl_scan:reserved_word(Other)
        end,
    {ok,Tokens,_} = erl_scan:string(String, 1, [{reserved_word_fun,ResWordFun}]),
    Tokens.
%% Parses a single expression and evaluates it with empty bindings and
%% no function handlers; returns {value, V, Bindings} or raises.
parse_and_run(String) ->
erl_eval:expr(parse_expr(String), []).
%% Parses one or more expressions and evaluates them with empty
%% bindings and the given local/external function handlers.
parse_and_run(String, LFH, EFH) ->
erl_eval:exprs(parse_exprs(String), [], LFH, EFH).
%% Strips a terminating "." (optionally followed by one trailing space)
%% from an expression string, so check/3 can re-embed the expression
%% inside a larger one.
no_final_dot(S) ->
    case lists:reverse(S) of
        [$\s, $. | Rest] -> lists:reverse(Rest);
        [$. | Rest] -> lists:reverse(Rest);
        _ -> S
    end.
|
3a8287876275533754576b852fe1965dc06f77b490f8173d101f63dd5c9d4103 | korya/efuns | testmain.ml | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
(*            Xavier Leroy, projet Cristal, INRIA Rocquencourt         *)
(*                                                                     *)
(*  Copyright 1996 Institut National de Recherche en Informatique et   *)
(*  Automatique.  Distributed only by permission.                      *)
(*                                                                     *)
(***********************************************************************)
(* $Id: testmain.ml,v 1.1 1999/11/22 10:36:01 lefessan Exp $ *)
(* The lexer generator. Command-line parsing. *)
#open "syntax";;
#open "testscanner";;
#open "grammar";;
#open "lexgen";;
#open "output";;
let main () =
ic := stdin;
oc := stdout;
let lexbuf = lexing.from_channel ic in
let (Lexdef(header,_) as def) =
try
grammar.lexer_definition testscanner.main lexbuf
with
parsing.Parse_error x ->
prerr_string "Syntax error around char ";
prerr_int (lexing.lexeme_start lexbuf);
prerr_endline ".";
sys.exit 2
| scan_aux.Lexical_error s ->
prerr_string "Lexical error around char ";
prerr_int (lexing.lexeme_start lexbuf);
prerr_string ": ";
prerr_string s;
prerr_endline ".";
sys.exit 2 in
let ((init, states, acts) as dfa) = make_dfa def in
output_lexdef header dfa
;;
main(); sys.exit 0
;;
| null | https://raw.githubusercontent.com/korya/efuns/78b21d9dff45b7eec764c63132c7a564f5367c30/inliner/perf/Lex/testmain.ml | ocaml | *********************************************************************
Objective Caml
*********************************************************************
The lexer generator. Command-line parsing. | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
Automatique . Distributed only by permission .
$ I d : testmain.ml , v 1.1 1999/11/22 10:36:01 lefessan Exp $
#open "syntax";;
#open "testscanner";;
#open "grammar";;
#open "lexgen";;
#open "output";;
let main () =
ic := stdin;
oc := stdout;
let lexbuf = lexing.from_channel ic in
let (Lexdef(header,_) as def) =
try
grammar.lexer_definition testscanner.main lexbuf
with
parsing.Parse_error x ->
prerr_string "Syntax error around char ";
prerr_int (lexing.lexeme_start lexbuf);
prerr_endline ".";
sys.exit 2
| scan_aux.Lexical_error s ->
prerr_string "Lexical error around char ";
prerr_int (lexing.lexeme_start lexbuf);
prerr_string ": ";
prerr_string s;
prerr_endline ".";
sys.exit 2 in
let ((init, states, acts) as dfa) = make_dfa def in
output_lexdef header dfa
;;
main(); sys.exit 0
;;
|
54e438b313b31a7f19283fde36d7108830f2bba88f5d986ccb3e3a345b24b311 | agrafix/users | Definitions.hs | {-# LANGUAGE EmptyDataDecls #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE QuasiQuotes #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
{-# LANGUAGE TypeSynonymInstances #-}
module Web.Users.Persistent.Definitions where
import Database.Persist.TH
import Data.Time.Clock
import Data.Typeable
import qualified Data.Text as T
share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
Login
createdAt UTCTime
username T.Text
email T.Text
password T.Text
active Bool
UniqueUsername username
UniqueEmail email
deriving Show
deriving Eq
deriving Typeable
LoginToken
token T.Text
tokenType T.Text
createdAt UTCTime
validUntil UTCTime
owner LoginId
UniqueToken token
UniqueTypedToken token tokenType
deriving Show
deriving Eq
deriving Typeable
|]
| null | https://raw.githubusercontent.com/agrafix/users/55fb8571aa6a6a0a20eb696635734defb09f0da1/users-persistent/src/Web/Users/Persistent/Definitions.hs | haskell | # LANGUAGE EmptyDataDecls #
# LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
# LANGUAGE TypeSynonymInstances # | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE QuasiQuotes #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
module Web.Users.Persistent.Definitions where
import Database.Persist.TH
import Data.Time.Clock
import Data.Typeable
import qualified Data.Text as T
share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
Login
createdAt UTCTime
username T.Text
email T.Text
password T.Text
active Bool
UniqueUsername username
UniqueEmail email
deriving Show
deriving Eq
deriving Typeable
LoginToken
token T.Text
tokenType T.Text
createdAt UTCTime
validUntil UTCTime
owner LoginId
UniqueToken token
UniqueTypedToken token tokenType
deriving Show
deriving Eq
deriving Typeable
|]
|
ec5a88f1ca04557201c1f91e924f872895aad4bb79243ce7918a450b554b9db6 | SamB/coq | cic2acic.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
// * The HELM Project / The EU MoWGLI Project
* University of Bologna
(************************************************************************)
(* This file is distributed under the terms of the *)
GNU Lesser General Public License Version 2.1
(* *)
Copyright ( C ) 2000 - 2004 , HELM Team .
(* *)
(************************************************************************)
(* Utility Functions *)
exception TwoModulesWhoseDirPathIsOneAPrefixOfTheOther;;
let get_module_path_of_section_path path =
let dirpath = fst (Libnames.repr_path path) in
let modules = Lib.library_dp () :: (Library.loaded_libraries ()) in
match
List.filter
(function modul -> Libnames.is_dirpath_prefix_of modul dirpath) modules
with
[] ->
Pp.warning ("Modules not supported: reference to "^
Libnames.string_of_path path^" will be wrong");
dirpath
| [modul] -> modul
| _ ->
raise TwoModulesWhoseDirPathIsOneAPrefixOfTheOther
;;
CSC : Problem : here we are using the wrong ( ? ? ? ) hypothesis that there do
CSC : not exist two modules whose dir_paths are one a prefix of the other
let remove_module_dirpath_from_dirpath ~basedir dir =
let module Ln = Libnames in
if Ln.is_dirpath_prefix_of basedir dir then
let ids = Names.repr_dirpath dir in
let rec remove_firsts n l =
match n,l with
(0,l) -> l
| (n,he::tl) -> remove_firsts (n-1) tl
| _ -> assert false
in
let ids' =
List.rev
(remove_firsts
(List.length (Names.repr_dirpath basedir))
(List.rev ids))
in
ids'
else Names.repr_dirpath dir
;;
let get_uri_of_var v pvars =
let module D = Decls in
let module N = Names in
let rec search_in_open_sections =
function
[] -> Util.error ("Variable "^v^" not found")
| he::tl as modules ->
let dirpath = N.make_dirpath modules in
if List.mem (N.id_of_string v) (D.last_section_hyps dirpath) then
modules
else
search_in_open_sections tl
in
let path =
if List.mem v pvars then
[]
else
search_in_open_sections (N.repr_dirpath (Lib.cwd ()))
in
"cic:" ^
List.fold_left
(fun i x -> "/" ^ N.string_of_id x ^ i) "" path
;;
type tag =
Constant of Names.constant
| Inductive of Names.kernel_name
| Variable of Names.kernel_name
;;
type etag =
TConstant
| TInductive
| TVariable
;;
let etag_of_tag =
function
Constant _ -> TConstant
| Inductive _ -> TInductive
| Variable _ -> TVariable
let ext_of_tag =
function
TConstant -> "con"
| TInductive -> "ind"
| TVariable -> "var"
;;
exception FunctorsXMLExportationNotImplementedYet;;
let subtract l1 l2 =
let l1' = List.rev (Names.repr_dirpath l1) in
let l2' = List.rev (Names.repr_dirpath l2) in
let rec aux =
function
he::tl when tl = l2' -> [he]
| he::tl -> he::(aux tl)
| [] -> assert (l2' = []) ; []
in
Names.make_dirpath (List.rev (aux l1'))
;;
CSC : Dead code to be removed
let token_list_of_kernel_name ~keep_sections kn tag =
let module N = Names in
let ( modpath , dirpath , label ) = Names.repr_kn kn in
let
List.rev_map N.string_of_id ( ) in
let rec token_list_of_modpath =
function
N.MPdot ( path , label ) - >
token_list_of_modpath path @ [ N.string_of_label label ]
| N.MPfile dirpath - > token_list_of_dirpath dirpath
| N.MPself self - >
if self = then
[ " Top " ]
else
let module_path =
let f = N.string_of_id ( N.id_of_msid self ) in
let _ , longf =
System.find_file_in_path ( Library.get_load_path ( ) ) ( f^".v " ) in
let ldir0 = Library.find_logical_path ( Filename.dirname longf ) in
let i d = Names.id_of_string ( Filename.basename f ) in
Libnames.extend_dirpath ldir0 i d
in
token_list_of_dirpath module_path
| N.MPbound _ - > raise FunctorsXMLExportationNotImplementedYet
in
token_list_of_modpath modpath @
( if keep_sections then token_list_of_dirpath dirpath else [ ] ) @
[ N.string_of_label label ^ " . " ^ ( ext_of_tag tag ) ]
; ;
let token_list_of_kernel_name ~keep_sections kn tag =
let module N = Names in
let (modpath,dirpath,label) = Names.repr_kn kn in
let token_list_of_dirpath dirpath =
List.rev_map N.string_of_id (N.repr_dirpath dirpath) in
let rec token_list_of_modpath =
function
N.MPdot (path,label) ->
token_list_of_modpath path @ [N.string_of_label label]
| N.MPfile dirpath -> token_list_of_dirpath dirpath
| N.MPself self ->
if self = Names.initial_msid then
[ "Top" ]
else
let module_path =
let f = N.string_of_id (N.id_of_msid self) in
let _,longf =
System.find_file_in_path (Library.get_load_path ()) (f^".v") in
let ldir0 = Library.find_logical_path (Filename.dirname longf) in
let id = Names.id_of_string (Filename.basename f) in
Libnames.extend_dirpath ldir0 id
in
token_list_of_dirpath module_path
| N.MPbound _ -> raise FunctorsXMLExportationNotImplementedYet
in
token_list_of_modpath modpath @
(if keep_sections then token_list_of_dirpath dirpath else []) @
[N.string_of_label label ^ "." ^ (ext_of_tag tag)]
;;
*)
let token_list_of_path dir id tag =
let module N = Names in
let token_list_of_dirpath dirpath =
List.rev_map N.string_of_id (N.repr_dirpath dirpath) in
token_list_of_dirpath dir @ [N.string_of_id id ^ "." ^ (ext_of_tag tag)]
let token_list_of_kernel_name tag =
let module N = Names in
let module LN = Libnames in
let id,dir = match tag with
| Variable kn ->
N.id_of_label (N.label kn), Lib.cwd ()
| Constant con ->
N.id_of_label (N.con_label con),
Lib.remove_section_part (LN.ConstRef con)
| Inductive kn ->
N.id_of_label (N.label kn),
Lib.remove_section_part (LN.IndRef (kn,0))
in
token_list_of_path dir id (etag_of_tag tag)
;;
let uri_of_kernel_name tag =
let tokens = token_list_of_kernel_name tag in
"cic:/" ^ String.concat "/" tokens
let uri_of_declaration id tag =
let module LN = Libnames in
let dir = LN.extract_dirpath_prefix (Lib.sections_depth ()) (Lib.cwd ()) in
let tokens = token_list_of_path dir id tag in
"cic:/" ^ String.concat "/" tokens
Special functions for handling of CCorn 's CProp " sort "
type sort =
Coq_sort of Term.sorts_family
| CProp
;;
let prerr_endline _ = ();;
let family_of_term ty =
match Term.kind_of_term ty with
Term.Sort s -> Coq_sort (Term.family_of_sort s)
I could check that the constant is CProp
| _ -> Util.anomaly "family_of_term"
;;
module CPropRetyping =
struct
module T = Term
let outsort env sigma t =
family_of_term (DoubleTypeInference.whd_betadeltaiotacprop env sigma t)
let rec subst_type env sigma typ = function
| [] -> typ
| h::rest ->
match T.kind_of_term (DoubleTypeInference.whd_betadeltaiotacprop env sigma typ) with
| T.Prod (na,c1,c2) -> subst_type env sigma (T.subst1 h c2) rest
| _ -> Util.anomaly "Non-functional construction"
let sort_of_atomic_type env sigma ft args =
let rec concl_of_arity env ar =
match T.kind_of_term (DoubleTypeInference.whd_betadeltaiotacprop env sigma ar) with
| T.Prod (na, t, b) -> concl_of_arity (Environ.push_rel (na,None,t) env) b
| T.Sort s -> Coq_sort (T.family_of_sort s)
| _ -> outsort env sigma (subst_type env sigma ft (Array.to_list args))
in concl_of_arity env ft
let typeur sigma metamap =
let rec type_of env cstr=
match Term.kind_of_term cstr with
| T.Meta n ->
(try T.strip_outer_cast (List.assoc n metamap)
with Not_found -> Util.anomaly "type_of: this is not a well-typed term")
| T.Rel n ->
let (_,_,ty) = Environ.lookup_rel n env in
T.lift n ty
| T.Var id ->
(try
let (_,_,ty) = Environ.lookup_named id env in
ty
with Not_found ->
Util.anomaly ("type_of: variable "^(Names.string_of_id id)^" unbound"))
| T.Const c ->
let cb = Environ.lookup_constant c env in
Typeops.type_of_constant_type env (cb.Declarations.const_type)
| T.Evar ev -> Evd.existential_type sigma ev
| T.Ind ind -> Inductiveops.type_of_inductive env ind
| T.Construct cstr -> Inductiveops.type_of_constructor env cstr
| T.Case (_,p,c,lf) ->
let Inductiveops.IndType(_,realargs) =
try Inductiveops.find_rectype env sigma (type_of env c)
with Not_found -> Util.anomaly "type_of: Bad recursive type" in
let t = Reductionops.whd_beta (T.applist (p, realargs)) in
(match Term.kind_of_term (DoubleTypeInference.whd_betadeltaiotacprop env sigma (type_of env t)) with
| T.Prod _ -> Reductionops.whd_beta (T.applist (t, [c]))
| _ -> t)
| T.Lambda (name,c1,c2) ->
T.mkProd (name, c1, type_of (Environ.push_rel (name,None,c1) env) c2)
| T.LetIn (name,b,c1,c2) ->
T.subst1 b (type_of (Environ.push_rel (name,Some b,c1) env) c2)
| T.Fix ((_,i),(_,tys,_)) -> tys.(i)
| T.CoFix (i,(_,tys,_)) -> tys.(i)
| T.App(f,args)->
T.strip_outer_cast
(subst_type env sigma (type_of env f) (Array.to_list args))
| T.Cast (c,_, t) -> t
| T.Sort _ | T.Prod _ ->
match sort_of env cstr with
Coq_sort T.InProp -> T.mkProp
| Coq_sort T.InSet -> T.mkSet
| Coq_sort T.InType -> T.mkType Univ.type1_univ (* ERROR HERE *)
| CProp -> T.mkConst DoubleTypeInference.cprop
and sort_of env t =
match Term.kind_of_term t with
| T.Cast (c,_, s) when T.isSort s -> family_of_term s
| T.Sort (T.Prop c) -> Coq_sort T.InType
| T.Sort (T.Type u) -> Coq_sort T.InType
| T.Prod (name,t,c2) ->
(match sort_of env t,sort_of (Environ.push_rel (name,None,t) env) c2 with
| _, (Coq_sort T.InProp as s) -> s
| Coq_sort T.InProp, (Coq_sort T.InSet as s)
| Coq_sort T.InSet, (Coq_sort T.InSet as s) -> s
| Coq_sort T.InType, (Coq_sort T.InSet as s)
| CProp, (Coq_sort T.InSet as s) when
Environ.engagement env = Some Declarations.ImpredicativeSet -> s
| Coq_sort T.InType, Coq_sort T.InSet
| CProp, Coq_sort T.InSet -> Coq_sort T.InType
| _, (Coq_sort T.InType as s) -> s (*Type Univ.dummy_univ*)
| _, (CProp as s) -> s)
| T.App(f,args) -> sort_of_atomic_type env sigma (type_of env f) args
| T.Lambda _ | T.Fix _ | T.Construct _ ->
Util.anomaly "sort_of: Not a type (1)"
| _ -> outsort env sigma (type_of env t)
and sort_family_of env t =
match T.kind_of_term t with
| T.Cast (c,_, s) when T.isSort s -> family_of_term s
| T.Sort (T.Prop c) -> Coq_sort T.InType
| T.Sort (T.Type u) -> Coq_sort T.InType
| T.Prod (name,t,c2) -> sort_family_of (Environ.push_rel (name,None,t) env) c2
| T.App(f,args) ->
sort_of_atomic_type env sigma (type_of env f) args
| T.Lambda _ | T.Fix _ | T.Construct _ ->
Util.anomaly "sort_of: Not a type (1)"
| _ -> outsort env sigma (type_of env t)
in type_of, sort_of, sort_family_of
let get_type_of env sigma c = let f,_,_ = typeur sigma [] in f env c
let get_sort_family_of env sigma c = let _,_,f = typeur sigma [] in f env c
end
;;
let get_sort_family_of env evar_map ty =
CPropRetyping.get_sort_family_of env evar_map ty
;;
let type_as_sort env evar_map ty =
(* CCorn code *)
family_of_term (DoubleTypeInference.whd_betadeltaiotacprop env evar_map ty)
;;
let is_a_Prop =
function
"Prop"
| "CProp" -> true
| _ -> false
;;
(* Main Functions *)
type anntypes =
{annsynthesized : Acic.aconstr ; annexpected : Acic.aconstr option}
;;
let gen_id seed =
let res = "i" ^ string_of_int !seed in
incr seed ;
res
;;
let fresh_id seed ids_to_terms constr_to_ids ids_to_father_ids =
fun father t ->
let res = gen_id seed in
Hashtbl.add ids_to_father_ids res father ;
Hashtbl.add ids_to_terms res t ;
Acic.CicHash.add constr_to_ids t res ;
res
;;
let source_id_of_id id = "#source#" ^ id;;
let acic_of_cic_context' computeinnertypes seed ids_to_terms constr_to_ids
ids_to_father_ids ids_to_inner_sorts ids_to_inner_types
pvars ?(fake_dependent_products=false) env idrefs evar_map t expectedty
=
let module D = DoubleTypeInference in
let module E = Environ in
let module N = Names in
let module A = Acic in
let module T = Term in
let fresh_id' = fresh_id seed ids_to_terms constr_to_ids ids_to_father_ids in
CSC : do you have any reasonable substitute for 503 ?
let terms_to_types = Acic.CicHash.create 503 in
D.double_type_of env evar_map t expectedty terms_to_types ;
let rec aux computeinnertypes father passed_lambdas_or_prods_or_letins env
idrefs ?(subst=None,[]) tt
=
let fresh_id'' = fresh_id' father tt in
let aux' = aux computeinnertypes (Some fresh_id'') [] in
let string_of_sort_family =
function
Coq_sort T.InProp -> "Prop"
| Coq_sort T.InSet -> "Set"
| Coq_sort T.InType -> "Type"
| CProp -> "CProp"
in
let string_of_sort t =
string_of_sort_family
(type_as_sort env evar_map t)
in
let ainnertypes,innertype,innersort,expected_available =
let {D.synthesized = synthesized; D.expected = expected} =
if computeinnertypes then
try
Acic.CicHash.find terms_to_types tt
with _ ->
CSC : Warning : it really happens , for example in Ring_theory ! ! !
Pp.ppnl (Pp.(++) (Pp.str "BUG: this subterm was not visited during the double-type-inference: ") (Printer.pr_lconstr tt)) ; assert false
else
We are already in an inner - type and Coscoy 's double
(* type inference algorithm has not been applied. *)
(* We need to refresh the universes because we are doing *)
(* type inference on an already inferred type. *)
{D.synthesized =
Reductionops.nf_beta
(CPropRetyping.get_type_of env evar_map
(Termops.refresh_universes tt)) ;
D.expected = None}
in
Debugging only :
print_endline " : " ; flush stdout ;
Pp.ppnl ( Printer.pr_lconstr tt ) ; flush stdout ;
print_endline " : " ; flush stdout ;
Pp.ppnl ( Printer.pr_lconstr synthesized ) ; flush stdout ;
print_endline " ENVIRONMENT : " ; flush stdout ;
Pp.ppnl ( Printer.pr_context_of env ) ; flush stdout ;
print_endline " FINE_ENVIRONMENT " ; flush stdout ;
print_endline "TERMINE:" ; flush stdout ;
Pp.ppnl (Printer.pr_lconstr tt) ; flush stdout ;
print_endline "TIPO:" ; flush stdout ;
Pp.ppnl (Printer.pr_lconstr synthesized) ; flush stdout ;
print_endline "ENVIRONMENT:" ; flush stdout ;
Pp.ppnl (Printer.pr_context_of env) ; flush stdout ;
print_endline "FINE_ENVIRONMENT" ; flush stdout ;
*)
let innersort =
let synthesized_innersort =
get_sort_family_of env evar_map synthesized
in
match expected with
None -> synthesized_innersort
| Some ty ->
let expected_innersort =
get_sort_family_of env evar_map ty
in
match expected_innersort, synthesized_innersort with
CProp, _
| _, CProp -> CProp
| _, _ -> expected_innersort
in
Debugging only :
print_endline " PASSATO " ; flush stdout ;
print_endline "PASSATO" ; flush stdout ;
*)
let ainnertypes,expected_available =
if computeinnertypes then
let annexpected,expected_available =
match expected with
None -> None,false
| Some expectedty' ->
Some (aux false (Some fresh_id'') [] env idrefs expectedty'),
true
in
Some
{annsynthesized =
aux false (Some fresh_id'') [] env idrefs synthesized ;
annexpected = annexpected
}, expected_available
else
None,false
in
ainnertypes,synthesized, string_of_sort_family innersort,
expected_available
in
let add_inner_type id =
match ainnertypes with
None -> ()
| Some ainnertypes -> Hashtbl.add ids_to_inner_types id ainnertypes
in
(* explicit_substitute_and_eta_expand_if_required h t t' *)
(* where [t] = [] and [tt] = [h]{[t']} ("{.}" denotes explicit *)
(* named substitution) or [tt] = (App [h]::[t]) (and [t'] = []) *)
(* check if [h] is a term that requires an explicit named *)
substitution and , in that case , uses the first arguments of
(* [t] as the actual arguments of the substitution. If there *)
(* are not enough parameters in the list [t], then eta-expansion *)
(* is performed. *)
let
explicit_substitute_and_eta_expand_if_required h t t'
compute_result_if_eta_expansion_not_required
=
let subst,residual_args,uninst_vars =
let variables,basedir =
try
let g = Libnames.global_of_constr h in
let sp =
match g with
Libnames.ConstructRef ((induri,_),_)
| Libnames.IndRef (induri,_) ->
Nametab.sp_of_global (Libnames.IndRef (induri,0))
| Libnames.VarRef id ->
Invariant : variables are never cooked in Coq
raise Not_found
| _ -> Nametab.sp_of_global g
in
Dischargedhypsmap.get_discharged_hyps sp,
get_module_path_of_section_path sp
with Not_found ->
(* no explicit substitution *)
[], Libnames.dirpath_of_string "dummy"
in
returns a triple whose first element is
(* an explicit named substitution of "type" *)
(* (variable * argument) list, whose *)
second element is the list of residual
arguments and whose third argument is
(* the list of uninstantiated variables *)
let rec get_explicit_subst variables arguments =
match variables,arguments with
[],_ -> [],arguments,[]
| _,[] -> [],[],variables
| he1::tl1,he2::tl2 ->
let subst,extra_args,uninst = get_explicit_subst tl1 tl2 in
let (he1_sp, he1_id) = Libnames.repr_path he1 in
let he1' = remove_module_dirpath_from_dirpath ~basedir he1_sp in
let he1'' =
String.concat "/"
(List.map Names.string_of_id (List.rev he1')) ^ "/"
^ (Names.string_of_id he1_id) ^ ".var"
in
(he1'',he2)::subst, extra_args, uninst
in
get_explicit_subst variables t'
in
let uninst_vars_length = List.length uninst_vars in
if uninst_vars_length > 0 then
(* Not enough arguments provided. We must eta-expand! *)
let un_args,_ =
T.decompose_prod_n uninst_vars_length
(CPropRetyping.get_type_of env evar_map tt)
in
let eta_expanded =
let arguments =
List.map (T.lift uninst_vars_length) t @
Termops.rel_list 0 uninst_vars_length
in
Unshare.unshare
(T.lamn uninst_vars_length un_args
(T.applistc h arguments))
in
D.double_type_of env evar_map eta_expanded
None terms_to_types ;
Hashtbl.remove ids_to_inner_types fresh_id'' ;
aux' env idrefs eta_expanded
else
compute_result_if_eta_expansion_not_required subst residual_args
in
(* Now that we have all the auxiliary functions we *)
(* can finally proceed with the main case analysis. *)
match T.kind_of_term tt with
T.Rel n ->
let id =
match List.nth (E.rel_context env) (n - 1) with
(N.Name id,_,_) -> id
| (N.Anonymous,_,_) -> Nameops.make_ident "_" None
in
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort && expected_available then
add_inner_type fresh_id'' ;
A.ARel (fresh_id'', n, List.nth idrefs (n-1), id)
| T.Var id ->
let path = get_uri_of_var (N.string_of_id id) pvars in
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort && expected_available then
add_inner_type fresh_id'' ;
A.AVar
(fresh_id'', path ^ "/" ^ (N.string_of_id id) ^ ".var")
| T.Evar (n,l) ->
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort && expected_available then
add_inner_type fresh_id'' ;
A.AEvar
(fresh_id'', n, Array.to_list (Array.map (aux' env idrefs) l))
| T.Meta _ -> Util.anomaly "Meta met during exporting to XML"
| T.Sort s -> A.ASort (fresh_id'', s)
| T.Cast (v,_, t) ->
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort then
add_inner_type fresh_id'' ;
A.ACast (fresh_id'', aux' env idrefs v, aux' env idrefs t)
| T.Prod (n,s,t) ->
let n' =
match n with
N.Anonymous -> N.Anonymous
| _ ->
if not fake_dependent_products && T.noccurn 1 t then
N.Anonymous
else
N.Name
(Nameops.next_name_away n (Termops.ids_of_context env))
in
Hashtbl.add ids_to_inner_sorts fresh_id''
(string_of_sort innertype) ;
let sourcetype = CPropRetyping.get_type_of env evar_map s in
Hashtbl.add ids_to_inner_sorts (source_id_of_id fresh_id'')
(string_of_sort sourcetype) ;
let new_passed_prods =
let father_is_prod =
match father with
None -> false
| Some father' ->
match
Term.kind_of_term (Hashtbl.find ids_to_terms father')
with
T.Prod _ -> true
| _ -> false
in
(fresh_id'', n', aux' env idrefs s)::
(if father_is_prod then
passed_lambdas_or_prods_or_letins
else [])
in
let new_env = E.push_rel (n', None, s) env in
let new_idrefs = fresh_id''::idrefs in
(match Term.kind_of_term t with
T.Prod _ ->
aux computeinnertypes (Some fresh_id'') new_passed_prods
new_env new_idrefs t
| _ ->
A.AProds (new_passed_prods, aux' new_env new_idrefs t))
| T.Lambda (n,s,t) ->
let n' =
match n with
N.Anonymous -> N.Anonymous
| _ ->
N.Name (Nameops.next_name_away n (Termops.ids_of_context env))
in
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
let sourcetype = CPropRetyping.get_type_of env evar_map s in
Hashtbl.add ids_to_inner_sorts (source_id_of_id fresh_id'')
(string_of_sort sourcetype) ;
let father_is_lambda =
match father with
None -> false
| Some father' ->
match
Term.kind_of_term (Hashtbl.find ids_to_terms father')
with
T.Lambda _ -> true
| _ -> false
in
if is_a_Prop innersort &&
((not father_is_lambda) || expected_available)
then add_inner_type fresh_id'' ;
let new_passed_lambdas =
(fresh_id'',n', aux' env idrefs s)::
(if father_is_lambda then
passed_lambdas_or_prods_or_letins
else []) in
let new_env = E.push_rel (n', None, s) env in
let new_idrefs = fresh_id''::idrefs in
(match Term.kind_of_term t with
T.Lambda _ ->
aux computeinnertypes (Some fresh_id'') new_passed_lambdas
new_env new_idrefs t
| _ ->
let t' = aux' new_env new_idrefs t in
(* eta-expansion for explicit named substitutions *)
can create nested Lambdas . Here we perform the
(* flattening. *)
match t' with
A.ALambdas (lambdas, t'') ->
A.ALambdas (lambdas@new_passed_lambdas, t'')
| _ ->
A.ALambdas (new_passed_lambdas, t')
)
| T.LetIn (n,s,t,d) ->
let id =
match n with
N.Anonymous -> N.id_of_string "_X"
| N.Name id -> id
in
let n' =
N.Name (Nameops.next_ident_away id (Termops.ids_of_context env))
in
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
let sourcesort =
get_sort_family_of env evar_map
(CPropRetyping.get_type_of env evar_map s)
in
Hashtbl.add ids_to_inner_sorts (source_id_of_id fresh_id'')
(string_of_sort_family sourcesort) ;
let father_is_letin =
match father with
None -> false
| Some father' ->
match
Term.kind_of_term (Hashtbl.find ids_to_terms father')
with
T.LetIn _ -> true
| _ -> false
in
if is_a_Prop innersort then
add_inner_type fresh_id'' ;
let new_passed_letins =
(fresh_id'',n', aux' env idrefs s)::
(if father_is_letin then
passed_lambdas_or_prods_or_letins
else []) in
let new_env = E.push_rel (n', Some s, t) env in
let new_idrefs = fresh_id''::idrefs in
(match Term.kind_of_term d with
T.LetIn _ ->
aux computeinnertypes (Some fresh_id'') new_passed_letins
new_env new_idrefs d
| _ -> A.ALetIns
(new_passed_letins, aux' new_env new_idrefs d))
| T.App (h,t) ->
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort then
add_inner_type fresh_id'' ;
let
compute_result_if_eta_expansion_not_required subst residual_args
=
let residual_args_not_empty = residual_args <> [] in
let h' =
if residual_args_not_empty then
aux' env idrefs ~subst:(None,subst) h
else
aux' env idrefs ~subst:(Some fresh_id'',subst) h
in
(* maybe all the arguments were used for the explicit *)
(* named substitution *)
if residual_args_not_empty then
A.AApp (fresh_id'', h'::residual_args)
else
h'
in
let t' =
Array.fold_right (fun x i -> (aux' env idrefs x)::i) t []
in
explicit_substitute_and_eta_expand_if_required h
(Array.to_list t) t'
compute_result_if_eta_expansion_not_required
| T.Const kn ->
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort && expected_available then
add_inner_type fresh_id'' ;
let compute_result_if_eta_expansion_not_required _ _ =
A.AConst (fresh_id'', subst, (uri_of_kernel_name (Constant kn)))
in
let (_,subst') = subst in
explicit_substitute_and_eta_expand_if_required tt []
(List.map snd subst')
compute_result_if_eta_expansion_not_required
| T.Ind (kn,i) ->
let compute_result_if_eta_expansion_not_required _ _ =
A.AInd (fresh_id'', subst, (uri_of_kernel_name (Inductive kn)), i)
in
let (_,subst') = subst in
explicit_substitute_and_eta_expand_if_required tt []
(List.map snd subst')
compute_result_if_eta_expansion_not_required
| T.Construct ((kn,i),j) ->
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort && expected_available then
add_inner_type fresh_id'' ;
let compute_result_if_eta_expansion_not_required _ _ =
A.AConstruct
(fresh_id'', subst, (uri_of_kernel_name (Inductive kn)), i, j)
in
let (_,subst') = subst in
explicit_substitute_and_eta_expand_if_required tt []
(List.map snd subst')
compute_result_if_eta_expansion_not_required
| T.Case ({T.ci_ind=(kn,i)},ty,term,a) ->
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort then
add_inner_type fresh_id'' ;
let a' =
Array.fold_right (fun x i -> (aux' env idrefs x)::i) a []
in
A.ACase
(fresh_id'', (uri_of_kernel_name (Inductive kn)), i,
aux' env idrefs ty, aux' env idrefs term, a')
| T.Fix ((ai,i),(f,t,b)) ->
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort then add_inner_type fresh_id'' ;
let fresh_idrefs =
Array.init (Array.length t) (function _ -> gen_id seed) in
let new_idrefs =
(List.rev (Array.to_list fresh_idrefs)) @ idrefs
in
let f' =
let ids = ref (Termops.ids_of_context env) in
Array.map
(function
N.Anonymous -> Util.error "Anonymous fix function met"
| N.Name id as n ->
let res = N.Name (Nameops.next_name_away n !ids) in
ids := id::!ids ;
res
) f
in
A.AFix (fresh_id'', i,
Array.fold_right
(fun (id,fi,ti,bi,ai) i ->
let fi' =
match fi with
N.Name fi -> fi
| N.Anonymous -> Util.error "Anonymous fix function met"
in
(id, fi', ai,
aux' env idrefs ti,
aux' (E.push_rec_types (f',t,b) env) new_idrefs bi)::i)
(Array.mapi
(fun j x -> (fresh_idrefs.(j),x,t.(j),b.(j),ai.(j))) f'
) []
)
| T.CoFix (i,(f,t,b)) ->
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort then add_inner_type fresh_id'' ;
let fresh_idrefs =
Array.init (Array.length t) (function _ -> gen_id seed) in
let new_idrefs =
(List.rev (Array.to_list fresh_idrefs)) @ idrefs
in
let f' =
let ids = ref (Termops.ids_of_context env) in
Array.map
(function
N.Anonymous -> Util.error "Anonymous fix function met"
| N.Name id as n ->
let res = N.Name (Nameops.next_name_away n !ids) in
ids := id::!ids ;
res
) f
in
A.ACoFix (fresh_id'', i,
Array.fold_right
(fun (id,fi,ti,bi) i ->
let fi' =
match fi with
N.Name fi -> fi
| N.Anonymous -> Util.error "Anonymous fix function met"
in
(id, fi',
aux' env idrefs ti,
aux' (E.push_rec_types (f',t,b) env) new_idrefs bi)::i)
(Array.mapi
(fun j x -> (fresh_idrefs.(j),x,t.(j),b.(j)) ) f'
) []
)
in
aux computeinnertypes None [] env idrefs t
;;
let acic_of_cic_context metasenv context t =
let ids_to_terms = Hashtbl.create 503 in
let constr_to_ids = Acic.CicHash.create 503 in
let ids_to_father_ids = Hashtbl.create 503 in
let ids_to_inner_sorts = Hashtbl.create 503 in
let ids_to_inner_types = Hashtbl.create 503 in
let seed = ref 0 in
acic_of_cic_context' true seed ids_to_terms constr_to_ids ids_to_father_ids
ids_to_inner_sorts ids_to_inner_types metasenv context t,
ids_to_terms, ids_to_father_ids, ids_to_inner_sorts, ids_to_inner_types
;;
let acic_object_of_cic_object pvars sigma obj =
let module A = Acic in
let ids_to_terms = Hashtbl.create 503 in
let constr_to_ids = Acic.CicHash.create 503 in
let ids_to_father_ids = Hashtbl.create 503 in
let ids_to_inner_sorts = Hashtbl.create 503 in
let ids_to_inner_types = Hashtbl.create 503 in
let ids_to_conjectures = Hashtbl.create 11 in
let ids_to_hypotheses = Hashtbl.create 127 in
let hypotheses_seed = ref 0 in
let conjectures_seed = ref 0 in
let seed = ref 0 in
let acic_term_of_cic_term_context' =
acic_of_cic_context' true seed ids_to_terms constr_to_ids ids_to_father_ids
ids_to_inner_sorts ids_to_inner_types pvars in
CSC : is this the right env to use ? Hhmmm . There is a problem : in
CSC : Global.env ( ) the object we are exporting is already defined ,
CSC : either in the environment or in the named context ( in the case
CSC : of variables . Is this a problem ?
let env = Global.env () in
let acic_term_of_cic_term' ?fake_dependent_products =
acic_term_of_cic_term_context' ?fake_dependent_products env [] sigma in
CSC : the fresh_id is not stored anywhere . This _ MUST _ be fixed using
CSC : a modified version of the already existent fresh_id function
let fresh_id () =
let res = "i" ^ string_of_int !seed in
incr seed ;
res
in
let aobj =
match obj with
A.Constant (id,bo,ty,params) ->
let abo =
match bo with
None -> None
| Some bo' -> Some (acic_term_of_cic_term' bo' (Some ty))
in
let aty = acic_term_of_cic_term' ty None in
A.AConstant (fresh_id (),id,abo,aty,params)
| A.Variable (id,bo,ty,params) ->
let abo =
match bo with
Some bo -> Some (acic_term_of_cic_term' bo (Some ty))
| None -> None
in
let aty = acic_term_of_cic_term' ty None in
A.AVariable (fresh_id (),id,abo,aty,params)
| A.CurrentProof (id,conjectures,bo,ty) ->
let aconjectures =
List.map
(function (i,canonical_context,term) as conjecture ->
let cid = "c" ^ string_of_int !conjectures_seed in
Hashtbl.add ids_to_conjectures cid conjecture ;
incr conjectures_seed ;
let canonical_env,idrefs',acanonical_context =
let rec aux env idrefs =
function
[] -> env,idrefs,[]
| ((n,decl_or_def) as hyp)::tl ->
let hid = "h" ^ string_of_int !hypotheses_seed in
let new_idrefs = hid::idrefs in
Hashtbl.add ids_to_hypotheses hid hyp ;
incr hypotheses_seed ;
match decl_or_def with
A.Decl t ->
let final_env,final_idrefs,atl =
aux (Environ.push_rel (Names.Name n,None,t) env)
new_idrefs tl
in
let at =
acic_term_of_cic_term_context' env idrefs sigma t None
in
final_env,final_idrefs,(hid,(n,A.Decl at))::atl
| A.Def (t,ty) ->
let final_env,final_idrefs,atl =
aux
(Environ.push_rel (Names.Name n,Some t,ty) env)
new_idrefs tl
in
let at =
acic_term_of_cic_term_context' env idrefs sigma t None
in
let dummy_never_used =
let s = "dummy_never_used" in
A.ARel (s,99,s,Names.id_of_string s)
in
final_env,final_idrefs,
(hid,(n,A.Def (at,dummy_never_used)))::atl
in
aux env [] canonical_context
in
let aterm =
acic_term_of_cic_term_context' canonical_env idrefs' sigma term
None
in
(cid,i,List.rev acanonical_context,aterm)
) conjectures in
let abo = acic_term_of_cic_term_context' env [] sigma bo (Some ty) in
let aty = acic_term_of_cic_term_context' env [] sigma ty None in
A.ACurrentProof (fresh_id (),id,aconjectures,abo,aty)
| A.InductiveDefinition (tys,params,paramsno) ->
let env' =
List.fold_right
(fun (name,_,arity,_) env ->
Environ.push_rel (Names.Name name, None, arity) env
) (List.rev tys) env in
let idrefs = List.map (function _ -> gen_id seed) tys in
let atys =
List.map2
(fun id (name,inductive,ty,cons) ->
let acons =
List.map
(function (name,ty) ->
(name,
acic_term_of_cic_term_context' ~fake_dependent_products:true
env' idrefs Evd.empty ty None)
) cons
in
let aty =
acic_term_of_cic_term' ~fake_dependent_products:true ty None
in
(id,name,inductive,aty,acons)
) (List.rev idrefs) tys
in
A.AInductiveDefinition (fresh_id (),atys,params,paramsno)
in
aobj,ids_to_terms,constr_to_ids,ids_to_father_ids,ids_to_inner_sorts,
ids_to_inner_types,ids_to_conjectures,ids_to_hypotheses
;;
| null | https://raw.githubusercontent.com/SamB/coq/8f84aba9ae83a4dc43ea6e804227ae8cae8086b1/contrib/xml/cic2acic.ml | ocaml | **********************************************************************
**********************************************************************
This file is distributed under the terms of the
**********************************************************************
Utility Functions
ERROR HERE
Type Univ.dummy_univ
CCorn code
Main Functions
type inference algorithm has not been applied.
We need to refresh the universes because we are doing
type inference on an already inferred type.
explicit_substitute_and_eta_expand_if_required h t t'
where [t] = [] and [tt] = [h]{[t']} ("{.}" denotes explicit
named substitution) or [tt] = (App [h]::[t]) (and [t'] = [])
check if [h] is a term that requires an explicit named
[t] as the actual arguments of the substitution. If there
are not enough parameters in the list [t], then eta-expansion
is performed.
no explicit substitution
an explicit named substitution of "type"
(variable * argument) list, whose
the list of uninstantiated variables
Not enough arguments provided. We must eta-expand!
Now that we have all the auxiliary functions we
can finally proceed with the main case analysis.
eta-expansion for explicit named substitutions
flattening.
maybe all the arguments were used for the explicit
named substitution | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
// * The HELM Project / The EU MoWGLI Project
* University of Bologna
GNU Lesser General Public License Version 2.1
Copyright ( C ) 2000 - 2004 , HELM Team .
exception TwoModulesWhoseDirPathIsOneAPrefixOfTheOther;;
let get_module_path_of_section_path path =
let dirpath = fst (Libnames.repr_path path) in
let modules = Lib.library_dp () :: (Library.loaded_libraries ()) in
match
List.filter
(function modul -> Libnames.is_dirpath_prefix_of modul dirpath) modules
with
[] ->
Pp.warning ("Modules not supported: reference to "^
Libnames.string_of_path path^" will be wrong");
dirpath
| [modul] -> modul
| _ ->
raise TwoModulesWhoseDirPathIsOneAPrefixOfTheOther
;;
CSC : Problem : here we are using the wrong ( ? ? ? ) hypothesis that there do
CSC : not exist two modules whose dir_paths are one a prefix of the other
let remove_module_dirpath_from_dirpath ~basedir dir =
let module Ln = Libnames in
if Ln.is_dirpath_prefix_of basedir dir then
let ids = Names.repr_dirpath dir in
let rec remove_firsts n l =
match n,l with
(0,l) -> l
| (n,he::tl) -> remove_firsts (n-1) tl
| _ -> assert false
in
let ids' =
List.rev
(remove_firsts
(List.length (Names.repr_dirpath basedir))
(List.rev ids))
in
ids'
else Names.repr_dirpath dir
;;
let get_uri_of_var v pvars =
let module D = Decls in
let module N = Names in
let rec search_in_open_sections =
function
[] -> Util.error ("Variable "^v^" not found")
| he::tl as modules ->
let dirpath = N.make_dirpath modules in
if List.mem (N.id_of_string v) (D.last_section_hyps dirpath) then
modules
else
search_in_open_sections tl
in
let path =
if List.mem v pvars then
[]
else
search_in_open_sections (N.repr_dirpath (Lib.cwd ()))
in
"cic:" ^
List.fold_left
(fun i x -> "/" ^ N.string_of_id x ^ i) "" path
;;
type tag =
Constant of Names.constant
| Inductive of Names.kernel_name
| Variable of Names.kernel_name
;;
type etag =
TConstant
| TInductive
| TVariable
;;
let etag_of_tag =
function
Constant _ -> TConstant
| Inductive _ -> TInductive
| Variable _ -> TVariable
let ext_of_tag =
function
TConstant -> "con"
| TInductive -> "ind"
| TVariable -> "var"
;;
exception FunctorsXMLExportationNotImplementedYet;;
let subtract l1 l2 =
let l1' = List.rev (Names.repr_dirpath l1) in
let l2' = List.rev (Names.repr_dirpath l2) in
let rec aux =
function
he::tl when tl = l2' -> [he]
| he::tl -> he::(aux tl)
| [] -> assert (l2' = []) ; []
in
Names.make_dirpath (List.rev (aux l1'))
;;
CSC : Dead code to be removed
let token_list_of_kernel_name ~keep_sections kn tag =
let module N = Names in
let ( modpath , dirpath , label ) = Names.repr_kn kn in
let
List.rev_map N.string_of_id ( ) in
let rec token_list_of_modpath =
function
N.MPdot ( path , label ) - >
token_list_of_modpath path @ [ N.string_of_label label ]
| N.MPfile dirpath - > token_list_of_dirpath dirpath
| N.MPself self - >
if self = then
[ " Top " ]
else
let module_path =
let f = N.string_of_id ( N.id_of_msid self ) in
let _ , longf =
System.find_file_in_path ( Library.get_load_path ( ) ) ( f^".v " ) in
let ldir0 = Library.find_logical_path ( Filename.dirname longf ) in
let i d = Names.id_of_string ( Filename.basename f ) in
Libnames.extend_dirpath ldir0 i d
in
token_list_of_dirpath module_path
| N.MPbound _ - > raise FunctorsXMLExportationNotImplementedYet
in
token_list_of_modpath modpath @
( if keep_sections then token_list_of_dirpath dirpath else [ ] ) @
[ N.string_of_label label ^ " . " ^ ( ext_of_tag tag ) ]
; ;
let token_list_of_kernel_name ~keep_sections kn tag =
let module N = Names in
let (modpath,dirpath,label) = Names.repr_kn kn in
let token_list_of_dirpath dirpath =
List.rev_map N.string_of_id (N.repr_dirpath dirpath) in
let rec token_list_of_modpath =
function
N.MPdot (path,label) ->
token_list_of_modpath path @ [N.string_of_label label]
| N.MPfile dirpath -> token_list_of_dirpath dirpath
| N.MPself self ->
if self = Names.initial_msid then
[ "Top" ]
else
let module_path =
let f = N.string_of_id (N.id_of_msid self) in
let _,longf =
System.find_file_in_path (Library.get_load_path ()) (f^".v") in
let ldir0 = Library.find_logical_path (Filename.dirname longf) in
let id = Names.id_of_string (Filename.basename f) in
Libnames.extend_dirpath ldir0 id
in
token_list_of_dirpath module_path
| N.MPbound _ -> raise FunctorsXMLExportationNotImplementedYet
in
token_list_of_modpath modpath @
(if keep_sections then token_list_of_dirpath dirpath else []) @
[N.string_of_label label ^ "." ^ (ext_of_tag tag)]
;;
*)
let token_list_of_path dir id tag =
let module N = Names in
let token_list_of_dirpath dirpath =
List.rev_map N.string_of_id (N.repr_dirpath dirpath) in
token_list_of_dirpath dir @ [N.string_of_id id ^ "." ^ (ext_of_tag tag)]
let token_list_of_kernel_name tag =
let module N = Names in
let module LN = Libnames in
let id,dir = match tag with
| Variable kn ->
N.id_of_label (N.label kn), Lib.cwd ()
| Constant con ->
N.id_of_label (N.con_label con),
Lib.remove_section_part (LN.ConstRef con)
| Inductive kn ->
N.id_of_label (N.label kn),
Lib.remove_section_part (LN.IndRef (kn,0))
in
token_list_of_path dir id (etag_of_tag tag)
;;
let uri_of_kernel_name tag =
let tokens = token_list_of_kernel_name tag in
"cic:/" ^ String.concat "/" tokens
let uri_of_declaration id tag =
let module LN = Libnames in
let dir = LN.extract_dirpath_prefix (Lib.sections_depth ()) (Lib.cwd ()) in
let tokens = token_list_of_path dir id tag in
"cic:/" ^ String.concat "/" tokens
Special functions for handling of CCorn 's CProp " sort "
type sort =
Coq_sort of Term.sorts_family
| CProp
;;
let prerr_endline _ = ();;
let family_of_term ty =
match Term.kind_of_term ty with
Term.Sort s -> Coq_sort (Term.family_of_sort s)
I could check that the constant is CProp
| _ -> Util.anomaly "family_of_term"
;;
module CPropRetyping =
struct
module T = Term
let outsort env sigma t =
family_of_term (DoubleTypeInference.whd_betadeltaiotacprop env sigma t)
let rec subst_type env sigma typ = function
| [] -> typ
| h::rest ->
match T.kind_of_term (DoubleTypeInference.whd_betadeltaiotacprop env sigma typ) with
| T.Prod (na,c1,c2) -> subst_type env sigma (T.subst1 h c2) rest
| _ -> Util.anomaly "Non-functional construction"
let sort_of_atomic_type env sigma ft args =
let rec concl_of_arity env ar =
match T.kind_of_term (DoubleTypeInference.whd_betadeltaiotacprop env sigma ar) with
| T.Prod (na, t, b) -> concl_of_arity (Environ.push_rel (na,None,t) env) b
| T.Sort s -> Coq_sort (T.family_of_sort s)
| _ -> outsort env sigma (subst_type env sigma ft (Array.to_list args))
in concl_of_arity env ft
let typeur sigma metamap =
let rec type_of env cstr=
match Term.kind_of_term cstr with
| T.Meta n ->
(try T.strip_outer_cast (List.assoc n metamap)
with Not_found -> Util.anomaly "type_of: this is not a well-typed term")
| T.Rel n ->
let (_,_,ty) = Environ.lookup_rel n env in
T.lift n ty
| T.Var id ->
(try
let (_,_,ty) = Environ.lookup_named id env in
ty
with Not_found ->
Util.anomaly ("type_of: variable "^(Names.string_of_id id)^" unbound"))
| T.Const c ->
let cb = Environ.lookup_constant c env in
Typeops.type_of_constant_type env (cb.Declarations.const_type)
| T.Evar ev -> Evd.existential_type sigma ev
| T.Ind ind -> Inductiveops.type_of_inductive env ind
| T.Construct cstr -> Inductiveops.type_of_constructor env cstr
| T.Case (_,p,c,lf) ->
let Inductiveops.IndType(_,realargs) =
try Inductiveops.find_rectype env sigma (type_of env c)
with Not_found -> Util.anomaly "type_of: Bad recursive type" in
let t = Reductionops.whd_beta (T.applist (p, realargs)) in
(match Term.kind_of_term (DoubleTypeInference.whd_betadeltaiotacprop env sigma (type_of env t)) with
| T.Prod _ -> Reductionops.whd_beta (T.applist (t, [c]))
| _ -> t)
| T.Lambda (name,c1,c2) ->
T.mkProd (name, c1, type_of (Environ.push_rel (name,None,c1) env) c2)
| T.LetIn (name,b,c1,c2) ->
T.subst1 b (type_of (Environ.push_rel (name,Some b,c1) env) c2)
| T.Fix ((_,i),(_,tys,_)) -> tys.(i)
| T.CoFix (i,(_,tys,_)) -> tys.(i)
| T.App(f,args)->
T.strip_outer_cast
(subst_type env sigma (type_of env f) (Array.to_list args))
| T.Cast (c,_, t) -> t
| T.Sort _ | T.Prod _ ->
match sort_of env cstr with
Coq_sort T.InProp -> T.mkProp
| Coq_sort T.InSet -> T.mkSet
| CProp -> T.mkConst DoubleTypeInference.cprop
and sort_of env t =
match Term.kind_of_term t with
| T.Cast (c,_, s) when T.isSort s -> family_of_term s
| T.Sort (T.Prop c) -> Coq_sort T.InType
| T.Sort (T.Type u) -> Coq_sort T.InType
| T.Prod (name,t,c2) ->
(match sort_of env t,sort_of (Environ.push_rel (name,None,t) env) c2 with
| _, (Coq_sort T.InProp as s) -> s
| Coq_sort T.InProp, (Coq_sort T.InSet as s)
| Coq_sort T.InSet, (Coq_sort T.InSet as s) -> s
| Coq_sort T.InType, (Coq_sort T.InSet as s)
| CProp, (Coq_sort T.InSet as s) when
Environ.engagement env = Some Declarations.ImpredicativeSet -> s
| Coq_sort T.InType, Coq_sort T.InSet
| CProp, Coq_sort T.InSet -> Coq_sort T.InType
| _, (CProp as s) -> s)
| T.App(f,args) -> sort_of_atomic_type env sigma (type_of env f) args
| T.Lambda _ | T.Fix _ | T.Construct _ ->
Util.anomaly "sort_of: Not a type (1)"
| _ -> outsort env sigma (type_of env t)
and sort_family_of env t =
match T.kind_of_term t with
| T.Cast (c,_, s) when T.isSort s -> family_of_term s
| T.Sort (T.Prop c) -> Coq_sort T.InType
| T.Sort (T.Type u) -> Coq_sort T.InType
| T.Prod (name,t,c2) -> sort_family_of (Environ.push_rel (name,None,t) env) c2
| T.App(f,args) ->
sort_of_atomic_type env sigma (type_of env f) args
| T.Lambda _ | T.Fix _ | T.Construct _ ->
Util.anomaly "sort_of: Not a type (1)"
| _ -> outsort env sigma (type_of env t)
in type_of, sort_of, sort_family_of
let get_type_of env sigma c = let f,_,_ = typeur sigma [] in f env c
let get_sort_family_of env sigma c = let _,_,f = typeur sigma [] in f env c
end
;;
let get_sort_family_of env evar_map ty =
CPropRetyping.get_sort_family_of env evar_map ty
;;
let type_as_sort env evar_map ty =
family_of_term (DoubleTypeInference.whd_betadeltaiotacprop env evar_map ty)
;;
let is_a_Prop =
function
"Prop"
| "CProp" -> true
| _ -> false
;;
type anntypes =
{annsynthesized : Acic.aconstr ; annexpected : Acic.aconstr option}
;;
let gen_id seed =
let res = "i" ^ string_of_int !seed in
incr seed ;
res
;;
let fresh_id seed ids_to_terms constr_to_ids ids_to_father_ids =
fun father t ->
let res = gen_id seed in
Hashtbl.add ids_to_father_ids res father ;
Hashtbl.add ids_to_terms res t ;
Acic.CicHash.add constr_to_ids t res ;
res
;;
let source_id_of_id id = "#source#" ^ id;;
let acic_of_cic_context' computeinnertypes seed ids_to_terms constr_to_ids
ids_to_father_ids ids_to_inner_sorts ids_to_inner_types
pvars ?(fake_dependent_products=false) env idrefs evar_map t expectedty
=
let module D = DoubleTypeInference in
let module E = Environ in
let module N = Names in
let module A = Acic in
let module T = Term in
let fresh_id' = fresh_id seed ids_to_terms constr_to_ids ids_to_father_ids in
CSC : do you have any reasonable substitute for 503 ?
let terms_to_types = Acic.CicHash.create 503 in
D.double_type_of env evar_map t expectedty terms_to_types ;
let rec aux computeinnertypes father passed_lambdas_or_prods_or_letins env
idrefs ?(subst=None,[]) tt
=
let fresh_id'' = fresh_id' father tt in
let aux' = aux computeinnertypes (Some fresh_id'') [] in
let string_of_sort_family =
function
Coq_sort T.InProp -> "Prop"
| Coq_sort T.InSet -> "Set"
| Coq_sort T.InType -> "Type"
| CProp -> "CProp"
in
let string_of_sort t =
string_of_sort_family
(type_as_sort env evar_map t)
in
let ainnertypes,innertype,innersort,expected_available =
let {D.synthesized = synthesized; D.expected = expected} =
if computeinnertypes then
try
Acic.CicHash.find terms_to_types tt
with _ ->
CSC : Warning : it really happens , for example in Ring_theory ! ! !
Pp.ppnl (Pp.(++) (Pp.str "BUG: this subterm was not visited during the double-type-inference: ") (Printer.pr_lconstr tt)) ; assert false
else
We are already in an inner - type and Coscoy 's double
{D.synthesized =
Reductionops.nf_beta
(CPropRetyping.get_type_of env evar_map
(Termops.refresh_universes tt)) ;
D.expected = None}
in
Debugging only :
print_endline " : " ; flush stdout ;
Pp.ppnl ( Printer.pr_lconstr tt ) ; flush stdout ;
print_endline " : " ; flush stdout ;
Pp.ppnl ( Printer.pr_lconstr synthesized ) ; flush stdout ;
print_endline " ENVIRONMENT : " ; flush stdout ;
Pp.ppnl ( Printer.pr_context_of env ) ; flush stdout ;
print_endline " FINE_ENVIRONMENT " ; flush stdout ;
print_endline "TERMINE:" ; flush stdout ;
Pp.ppnl (Printer.pr_lconstr tt) ; flush stdout ;
print_endline "TIPO:" ; flush stdout ;
Pp.ppnl (Printer.pr_lconstr synthesized) ; flush stdout ;
print_endline "ENVIRONMENT:" ; flush stdout ;
Pp.ppnl (Printer.pr_context_of env) ; flush stdout ;
print_endline "FINE_ENVIRONMENT" ; flush stdout ;
*)
let innersort =
let synthesized_innersort =
get_sort_family_of env evar_map synthesized
in
match expected with
None -> synthesized_innersort
| Some ty ->
let expected_innersort =
get_sort_family_of env evar_map ty
in
match expected_innersort, synthesized_innersort with
CProp, _
| _, CProp -> CProp
| _, _ -> expected_innersort
in
Debugging only :
print_endline " PASSATO " ; flush stdout ;
print_endline "PASSATO" ; flush stdout ;
*)
let ainnertypes,expected_available =
if computeinnertypes then
let annexpected,expected_available =
match expected with
None -> None,false
| Some expectedty' ->
Some (aux false (Some fresh_id'') [] env idrefs expectedty'),
true
in
Some
{annsynthesized =
aux false (Some fresh_id'') [] env idrefs synthesized ;
annexpected = annexpected
}, expected_available
else
None,false
in
ainnertypes,synthesized, string_of_sort_family innersort,
expected_available
in
let add_inner_type id =
match ainnertypes with
None -> ()
| Some ainnertypes -> Hashtbl.add ids_to_inner_types id ainnertypes
in
substitution and , in that case , uses the first arguments of
let
explicit_substitute_and_eta_expand_if_required h t t'
compute_result_if_eta_expansion_not_required
=
let subst,residual_args,uninst_vars =
let variables,basedir =
try
let g = Libnames.global_of_constr h in
let sp =
match g with
Libnames.ConstructRef ((induri,_),_)
| Libnames.IndRef (induri,_) ->
Nametab.sp_of_global (Libnames.IndRef (induri,0))
| Libnames.VarRef id ->
Invariant : variables are never cooked in Coq
raise Not_found
| _ -> Nametab.sp_of_global g
in
Dischargedhypsmap.get_discharged_hyps sp,
get_module_path_of_section_path sp
with Not_found ->
[], Libnames.dirpath_of_string "dummy"
in
returns a triple whose first element is
second element is the list of residual
arguments and whose third argument is
let rec get_explicit_subst variables arguments =
match variables,arguments with
[],_ -> [],arguments,[]
| _,[] -> [],[],variables
| he1::tl1,he2::tl2 ->
let subst,extra_args,uninst = get_explicit_subst tl1 tl2 in
let (he1_sp, he1_id) = Libnames.repr_path he1 in
let he1' = remove_module_dirpath_from_dirpath ~basedir he1_sp in
let he1'' =
String.concat "/"
(List.map Names.string_of_id (List.rev he1')) ^ "/"
^ (Names.string_of_id he1_id) ^ ".var"
in
(he1'',he2)::subst, extra_args, uninst
in
get_explicit_subst variables t'
in
let uninst_vars_length = List.length uninst_vars in
if uninst_vars_length > 0 then
let un_args,_ =
T.decompose_prod_n uninst_vars_length
(CPropRetyping.get_type_of env evar_map tt)
in
let eta_expanded =
let arguments =
List.map (T.lift uninst_vars_length) t @
Termops.rel_list 0 uninst_vars_length
in
Unshare.unshare
(T.lamn uninst_vars_length un_args
(T.applistc h arguments))
in
D.double_type_of env evar_map eta_expanded
None terms_to_types ;
Hashtbl.remove ids_to_inner_types fresh_id'' ;
aux' env idrefs eta_expanded
else
compute_result_if_eta_expansion_not_required subst residual_args
in
match T.kind_of_term tt with
T.Rel n ->
let id =
match List.nth (E.rel_context env) (n - 1) with
(N.Name id,_,_) -> id
| (N.Anonymous,_,_) -> Nameops.make_ident "_" None
in
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort && expected_available then
add_inner_type fresh_id'' ;
A.ARel (fresh_id'', n, List.nth idrefs (n-1), id)
| T.Var id ->
let path = get_uri_of_var (N.string_of_id id) pvars in
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort && expected_available then
add_inner_type fresh_id'' ;
A.AVar
(fresh_id'', path ^ "/" ^ (N.string_of_id id) ^ ".var")
| T.Evar (n,l) ->
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort && expected_available then
add_inner_type fresh_id'' ;
A.AEvar
(fresh_id'', n, Array.to_list (Array.map (aux' env idrefs) l))
| T.Meta _ -> Util.anomaly "Meta met during exporting to XML"
| T.Sort s -> A.ASort (fresh_id'', s)
| T.Cast (v,_, t) ->
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort then
add_inner_type fresh_id'' ;
A.ACast (fresh_id'', aux' env idrefs v, aux' env idrefs t)
| T.Prod (n,s,t) ->
let n' =
match n with
N.Anonymous -> N.Anonymous
| _ ->
if not fake_dependent_products && T.noccurn 1 t then
N.Anonymous
else
N.Name
(Nameops.next_name_away n (Termops.ids_of_context env))
in
Hashtbl.add ids_to_inner_sorts fresh_id''
(string_of_sort innertype) ;
let sourcetype = CPropRetyping.get_type_of env evar_map s in
Hashtbl.add ids_to_inner_sorts (source_id_of_id fresh_id'')
(string_of_sort sourcetype) ;
let new_passed_prods =
let father_is_prod =
match father with
None -> false
| Some father' ->
match
Term.kind_of_term (Hashtbl.find ids_to_terms father')
with
T.Prod _ -> true
| _ -> false
in
(fresh_id'', n', aux' env idrefs s)::
(if father_is_prod then
passed_lambdas_or_prods_or_letins
else [])
in
let new_env = E.push_rel (n', None, s) env in
let new_idrefs = fresh_id''::idrefs in
(match Term.kind_of_term t with
T.Prod _ ->
aux computeinnertypes (Some fresh_id'') new_passed_prods
new_env new_idrefs t
| _ ->
A.AProds (new_passed_prods, aux' new_env new_idrefs t))
| T.Lambda (n,s,t) ->
let n' =
match n with
N.Anonymous -> N.Anonymous
| _ ->
N.Name (Nameops.next_name_away n (Termops.ids_of_context env))
in
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
let sourcetype = CPropRetyping.get_type_of env evar_map s in
Hashtbl.add ids_to_inner_sorts (source_id_of_id fresh_id'')
(string_of_sort sourcetype) ;
let father_is_lambda =
match father with
None -> false
| Some father' ->
match
Term.kind_of_term (Hashtbl.find ids_to_terms father')
with
T.Lambda _ -> true
| _ -> false
in
if is_a_Prop innersort &&
((not father_is_lambda) || expected_available)
then add_inner_type fresh_id'' ;
let new_passed_lambdas =
(fresh_id'',n', aux' env idrefs s)::
(if father_is_lambda then
passed_lambdas_or_prods_or_letins
else []) in
let new_env = E.push_rel (n', None, s) env in
let new_idrefs = fresh_id''::idrefs in
(match Term.kind_of_term t with
T.Lambda _ ->
aux computeinnertypes (Some fresh_id'') new_passed_lambdas
new_env new_idrefs t
| _ ->
let t' = aux' new_env new_idrefs t in
can create nested Lambdas . Here we perform the
match t' with
A.ALambdas (lambdas, t'') ->
A.ALambdas (lambdas@new_passed_lambdas, t'')
| _ ->
A.ALambdas (new_passed_lambdas, t')
)
| T.LetIn (n,s,t,d) ->
let id =
match n with
N.Anonymous -> N.id_of_string "_X"
| N.Name id -> id
in
let n' =
N.Name (Nameops.next_ident_away id (Termops.ids_of_context env))
in
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
let sourcesort =
get_sort_family_of env evar_map
(CPropRetyping.get_type_of env evar_map s)
in
Hashtbl.add ids_to_inner_sorts (source_id_of_id fresh_id'')
(string_of_sort_family sourcesort) ;
let father_is_letin =
match father with
None -> false
| Some father' ->
match
Term.kind_of_term (Hashtbl.find ids_to_terms father')
with
T.LetIn _ -> true
| _ -> false
in
if is_a_Prop innersort then
add_inner_type fresh_id'' ;
let new_passed_letins =
(fresh_id'',n', aux' env idrefs s)::
(if father_is_letin then
passed_lambdas_or_prods_or_letins
else []) in
let new_env = E.push_rel (n', Some s, t) env in
let new_idrefs = fresh_id''::idrefs in
(match Term.kind_of_term d with
T.LetIn _ ->
aux computeinnertypes (Some fresh_id'') new_passed_letins
new_env new_idrefs d
| _ -> A.ALetIns
(new_passed_letins, aux' new_env new_idrefs d))
| T.App (h,t) ->
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort then
add_inner_type fresh_id'' ;
let
compute_result_if_eta_expansion_not_required subst residual_args
=
let residual_args_not_empty = residual_args <> [] in
let h' =
if residual_args_not_empty then
aux' env idrefs ~subst:(None,subst) h
else
aux' env idrefs ~subst:(Some fresh_id'',subst) h
in
if residual_args_not_empty then
A.AApp (fresh_id'', h'::residual_args)
else
h'
in
let t' =
Array.fold_right (fun x i -> (aux' env idrefs x)::i) t []
in
explicit_substitute_and_eta_expand_if_required h
(Array.to_list t) t'
compute_result_if_eta_expansion_not_required
| T.Const kn ->
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort && expected_available then
add_inner_type fresh_id'' ;
let compute_result_if_eta_expansion_not_required _ _ =
A.AConst (fresh_id'', subst, (uri_of_kernel_name (Constant kn)))
in
let (_,subst') = subst in
explicit_substitute_and_eta_expand_if_required tt []
(List.map snd subst')
compute_result_if_eta_expansion_not_required
| T.Ind (kn,i) ->
let compute_result_if_eta_expansion_not_required _ _ =
A.AInd (fresh_id'', subst, (uri_of_kernel_name (Inductive kn)), i)
in
let (_,subst') = subst in
explicit_substitute_and_eta_expand_if_required tt []
(List.map snd subst')
compute_result_if_eta_expansion_not_required
| T.Construct ((kn,i),j) ->
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort && expected_available then
add_inner_type fresh_id'' ;
let compute_result_if_eta_expansion_not_required _ _ =
A.AConstruct
(fresh_id'', subst, (uri_of_kernel_name (Inductive kn)), i, j)
in
let (_,subst') = subst in
explicit_substitute_and_eta_expand_if_required tt []
(List.map snd subst')
compute_result_if_eta_expansion_not_required
| T.Case ({T.ci_ind=(kn,i)},ty,term,a) ->
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort then
add_inner_type fresh_id'' ;
let a' =
Array.fold_right (fun x i -> (aux' env idrefs x)::i) a []
in
A.ACase
(fresh_id'', (uri_of_kernel_name (Inductive kn)), i,
aux' env idrefs ty, aux' env idrefs term, a')
| T.Fix ((ai,i),(f,t,b)) ->
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort then add_inner_type fresh_id'' ;
let fresh_idrefs =
Array.init (Array.length t) (function _ -> gen_id seed) in
let new_idrefs =
(List.rev (Array.to_list fresh_idrefs)) @ idrefs
in
let f' =
let ids = ref (Termops.ids_of_context env) in
Array.map
(function
N.Anonymous -> Util.error "Anonymous fix function met"
| N.Name id as n ->
let res = N.Name (Nameops.next_name_away n !ids) in
ids := id::!ids ;
res
) f
in
A.AFix (fresh_id'', i,
Array.fold_right
(fun (id,fi,ti,bi,ai) i ->
let fi' =
match fi with
N.Name fi -> fi
| N.Anonymous -> Util.error "Anonymous fix function met"
in
(id, fi', ai,
aux' env idrefs ti,
aux' (E.push_rec_types (f',t,b) env) new_idrefs bi)::i)
(Array.mapi
(fun j x -> (fresh_idrefs.(j),x,t.(j),b.(j),ai.(j))) f'
) []
)
| T.CoFix (i,(f,t,b)) ->
Hashtbl.add ids_to_inner_sorts fresh_id'' innersort ;
if is_a_Prop innersort then add_inner_type fresh_id'' ;
let fresh_idrefs =
Array.init (Array.length t) (function _ -> gen_id seed) in
let new_idrefs =
(List.rev (Array.to_list fresh_idrefs)) @ idrefs
in
let f' =
let ids = ref (Termops.ids_of_context env) in
Array.map
(function
N.Anonymous -> Util.error "Anonymous fix function met"
| N.Name id as n ->
let res = N.Name (Nameops.next_name_away n !ids) in
ids := id::!ids ;
res
) f
in
A.ACoFix (fresh_id'', i,
Array.fold_right
(fun (id,fi,ti,bi) i ->
let fi' =
match fi with
N.Name fi -> fi
| N.Anonymous -> Util.error "Anonymous fix function met"
in
(id, fi',
aux' env idrefs ti,
aux' (E.push_rec_types (f',t,b) env) new_idrefs bi)::i)
(Array.mapi
(fun j x -> (fresh_idrefs.(j),x,t.(j),b.(j)) ) f'
) []
)
in
aux computeinnertypes None [] env idrefs t
;;
let acic_of_cic_context metasenv context t =
let ids_to_terms = Hashtbl.create 503 in
let constr_to_ids = Acic.CicHash.create 503 in
let ids_to_father_ids = Hashtbl.create 503 in
let ids_to_inner_sorts = Hashtbl.create 503 in
let ids_to_inner_types = Hashtbl.create 503 in
let seed = ref 0 in
acic_of_cic_context' true seed ids_to_terms constr_to_ids ids_to_father_ids
ids_to_inner_sorts ids_to_inner_types metasenv context t,
ids_to_terms, ids_to_father_ids, ids_to_inner_sorts, ids_to_inner_types
;;
let acic_object_of_cic_object pvars sigma obj =
let module A = Acic in
let ids_to_terms = Hashtbl.create 503 in
let constr_to_ids = Acic.CicHash.create 503 in
let ids_to_father_ids = Hashtbl.create 503 in
let ids_to_inner_sorts = Hashtbl.create 503 in
let ids_to_inner_types = Hashtbl.create 503 in
let ids_to_conjectures = Hashtbl.create 11 in
let ids_to_hypotheses = Hashtbl.create 127 in
let hypotheses_seed = ref 0 in
let conjectures_seed = ref 0 in
let seed = ref 0 in
let acic_term_of_cic_term_context' =
acic_of_cic_context' true seed ids_to_terms constr_to_ids ids_to_father_ids
ids_to_inner_sorts ids_to_inner_types pvars in
CSC : is this the right env to use ? Hhmmm . There is a problem : in
CSC : Global.env ( ) the object we are exporting is already defined ,
CSC : either in the environment or in the named context ( in the case
CSC : of variables . Is this a problem ?
let env = Global.env () in
let acic_term_of_cic_term' ?fake_dependent_products =
acic_term_of_cic_term_context' ?fake_dependent_products env [] sigma in
CSC : the fresh_id is not stored anywhere . This _ MUST _ be fixed using
CSC : a modified version of the already existent fresh_id function
let fresh_id () =
let res = "i" ^ string_of_int !seed in
incr seed ;
res
in
let aobj =
match obj with
A.Constant (id,bo,ty,params) ->
let abo =
match bo with
None -> None
| Some bo' -> Some (acic_term_of_cic_term' bo' (Some ty))
in
let aty = acic_term_of_cic_term' ty None in
A.AConstant (fresh_id (),id,abo,aty,params)
| A.Variable (id,bo,ty,params) ->
let abo =
match bo with
Some bo -> Some (acic_term_of_cic_term' bo (Some ty))
| None -> None
in
let aty = acic_term_of_cic_term' ty None in
A.AVariable (fresh_id (),id,abo,aty,params)
| A.CurrentProof (id,conjectures,bo,ty) ->
let aconjectures =
List.map
(function (i,canonical_context,term) as conjecture ->
let cid = "c" ^ string_of_int !conjectures_seed in
Hashtbl.add ids_to_conjectures cid conjecture ;
incr conjectures_seed ;
let canonical_env,idrefs',acanonical_context =
let rec aux env idrefs =
function
[] -> env,idrefs,[]
| ((n,decl_or_def) as hyp)::tl ->
let hid = "h" ^ string_of_int !hypotheses_seed in
let new_idrefs = hid::idrefs in
Hashtbl.add ids_to_hypotheses hid hyp ;
incr hypotheses_seed ;
match decl_or_def with
A.Decl t ->
let final_env,final_idrefs,atl =
aux (Environ.push_rel (Names.Name n,None,t) env)
new_idrefs tl
in
let at =
acic_term_of_cic_term_context' env idrefs sigma t None
in
final_env,final_idrefs,(hid,(n,A.Decl at))::atl
| A.Def (t,ty) ->
let final_env,final_idrefs,atl =
aux
(Environ.push_rel (Names.Name n,Some t,ty) env)
new_idrefs tl
in
let at =
acic_term_of_cic_term_context' env idrefs sigma t None
in
let dummy_never_used =
let s = "dummy_never_used" in
A.ARel (s,99,s,Names.id_of_string s)
in
final_env,final_idrefs,
(hid,(n,A.Def (at,dummy_never_used)))::atl
in
aux env [] canonical_context
in
let aterm =
acic_term_of_cic_term_context' canonical_env idrefs' sigma term
None
in
(cid,i,List.rev acanonical_context,aterm)
) conjectures in
let abo = acic_term_of_cic_term_context' env [] sigma bo (Some ty) in
let aty = acic_term_of_cic_term_context' env [] sigma ty None in
A.ACurrentProof (fresh_id (),id,aconjectures,abo,aty)
| A.InductiveDefinition (tys,params,paramsno) ->
let env' =
List.fold_right
(fun (name,_,arity,_) env ->
Environ.push_rel (Names.Name name, None, arity) env
) (List.rev tys) env in
let idrefs = List.map (function _ -> gen_id seed) tys in
let atys =
List.map2
(fun id (name,inductive,ty,cons) ->
let acons =
List.map
(function (name,ty) ->
(name,
acic_term_of_cic_term_context' ~fake_dependent_products:true
env' idrefs Evd.empty ty None)
) cons
in
let aty =
acic_term_of_cic_term' ~fake_dependent_products:true ty None
in
(id,name,inductive,aty,acons)
) (List.rev idrefs) tys
in
A.AInductiveDefinition (fresh_id (),atys,params,paramsno)
in
aobj,ids_to_terms,constr_to_ids,ids_to_father_ids,ids_to_inner_sorts,
ids_to_inner_types,ids_to_conjectures,ids_to_hypotheses
;;
|
edf70f2ec2474c464e4f1be861750a58b001bae21590654628325be856af9ee9 | kupl/LearnML | patch.ml | type nat = ZERO | SUCC of nat
let rec natadd (n1 : nat) (n2 : nat) : nat =
match n1 with ZERO -> n2 | SUCC n1 -> SUCC (natadd n1 n2)
let rec natmul (n1 : nat) (n2 : nat) : nat =
match n1 with ZERO -> ZERO | SUCC n1 -> natadd n2 (natmul n1 n2)
| null | https://raw.githubusercontent.com/kupl/LearnML/c98ef2b95ef67e657b8158a2c504330e9cfb7700/result/cafe2/nat/sub24/patch.ml | ocaml | type nat = ZERO | SUCC of nat
let rec natadd (n1 : nat) (n2 : nat) : nat =
match n1 with ZERO -> n2 | SUCC n1 -> SUCC (natadd n1 n2)
let rec natmul (n1 : nat) (n2 : nat) : nat =
match n1 with ZERO -> ZERO | SUCC n1 -> natadd n2 (natmul n1 n2)
| |
7984fc22cfc07867a66bbe63ecf3d4d41d244797e8eb3c29eeb255a1ac13e5e6 | russell/cl-git | diff.lisp | -*- Mode : Lisp ; Syntax : COMMON - LISP ; Base : 10 -*-
;;
;; cl-git is a Common Lisp interface to git repositories.
Copyright ( C ) 2011 - 2022 < >
;;
;; This program is free software: you can redistribute it and/or
;; modify it under the terms of the GNU Lesser General Public License
as published by the Free Software Foundation , either version 3 of
the License , or ( at your option ) any later version .
;;
;; This program is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;; Lesser General Public License for more details.
;;
You should have received a copy of the GNU Lesser General Public
;; License along with this program. If not, see
;; </>.
(in-package #:cl-git-tests)
(in-suite :cl-git)
(defun sort-flags (object &rest key-path)
(loop :for key :in (butlast key-path)
:for obj = (getf (or obj object) key)
:finally (setf (getf obj (car (last key-path)))
(stable-sort (getf obj (car (last key-path))) #'string<)))
object)
(defun sort-diff-flags (diff)
(loop :for patch :in diff
:collect (sort-flags (sort-flags patch :file-a :flags) :file-b :flags)))
(def-test diff-revisions (:fixture repository-with-changes)
(let ((diffs (diff commit1 commit2)))
(is (eq (diff-deltas-count diffs) 1))
(is (equal '((:status :modified
:similarity 0
:flags (:not-binary)
:file-a (:id-abbrev 40
:mode :blob
:flags (:not-binary :valid-id :exists)
:size 902
:path "test-file"
:oid 97787706012661474925191056142692387097255677107)
:file-b (:id-abbrev 40
:mode :blob
:flags (:not-binary :valid-id :exists)
:size 919
:path "test-file"
:oid 243568240973109882797341286687005129339258402139)))
(diff-deltas-summary diffs)))
(is (equal `((:patch ,repository-with-changes-diff
:status :modified
:similarity 0
:flags (:not-binary)
:file-a (:id-abbrev 40
:mode :blob
:flags (:exists :not-binary :valid-id)
:size 902
:path "test-file"
:oid
97787706012661474925191056142692387097255677107)
:file-b (:id-abbrev 40
:mode :blob
:flags (:exists :not-binary :valid-id)
:size 919
:path "test-file"
:oid
243568240973109882797341286687005129339258402139)))
(sort-diff-flags (make-patch diffs))))))
(def-test diff-working (:fixture repository-with-unstaged)
(let ((diffs (diff *test-repository* (open-index *test-repository*))))
(is (eq (diff-deltas-count diffs) 1))
(is (equal '((:status :modified
:similarity 0
:flags (:not-binary)
:file-a (:id-abbrev 40
:mode :blob
:flags (:not-binary :valid-id :exists)
:size 902
:path "test-file"
:oid 97787706012661474925191056142692387097255677107)
:file-b (:id-abbrev 40
:mode :blob
:flags (:not-binary :valid-id :exists)
:size 919
:path "test-file"
:oid 243568240973109882797341286687005129339258402139)))
(diff-deltas-summary diffs)))
(is (equal `((:patch ,repository-with-changes-diff
:status :modified
:similarity 0
:flags (:not-binary)
:file-a (:id-abbrev 40
:mode :blob
:flags (:exists :not-binary :valid-id)
:size 902
:path "test-file"
:oid
97787706012661474925191056142692387097255677107)
:file-b (:id-abbrev 40
:mode :blob
:flags (:exists :not-binary :valid-id)
:size 919
:path "test-file"
:oid
243568240973109882797341286687005129339258402139)))
(sort-diff-flags (make-patch diffs))))))
(def-test diff-staged (:fixture repository-with-staged)
(let ((diffs (diff commit1 (open-index *test-repository*))))
(is (eq (diff-deltas-count diffs) 1))
(is (equal '((:status :modified
:similarity 0
:flags (:not-binary)
:file-a (:id-abbrev 40
:mode :blob
:flags (:not-binary :valid-id :exists)
:size 902
:path "test-file"
:oid 97787706012661474925191056142692387097255677107)
:file-b (:id-abbrev 40
:mode :blob
:flags (:not-binary :valid-id :exists)
:size 919
:path "test-file"
:oid 243568240973109882797341286687005129339258402139)))
(diff-deltas-summary diffs)))
(is (equal `((:patch ,repository-with-changes-diff
:status :modified
:similarity 0
:flags (:not-binary)
:file-a (:id-abbrev 40
:mode :blob
:flags (:exists :not-binary :valid-id)
:size 902
:path "test-file"
:oid
97787706012661474925191056142692387097255677107)
:file-b (:id-abbrev 40
:mode :blob
:flags (:exists :not-binary :valid-id)
:size 919
:path "test-file"
:oid
243568240973109882797341286687005129339258402139)))
(sort-diff-flags (make-patch diffs))))))
| null | https://raw.githubusercontent.com/russell/cl-git/db84343e6b756b26fc64877583b41e887bd74602/tests/diff.lisp | lisp | Syntax : COMMON - LISP ; Base : 10 -*-
cl-git is a Common Lisp interface to git repositories.
This program is free software: you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public License
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this program. If not, see
</>. | Copyright ( C ) 2011 - 2022 < >
as published by the Free Software Foundation , either version 3 of
the License , or ( at your option ) any later version .
You should have received a copy of the GNU Lesser General Public
(in-package #:cl-git-tests)
(in-suite :cl-git)
(defun sort-flags (object &rest key-path)
(loop :for key :in (butlast key-path)
:for obj = (getf (or obj object) key)
:finally (setf (getf obj (car (last key-path)))
(stable-sort (getf obj (car (last key-path))) #'string<)))
object)
(defun sort-diff-flags (diff)
(loop :for patch :in diff
:collect (sort-flags (sort-flags patch :file-a :flags) :file-b :flags)))
(def-test diff-revisions (:fixture repository-with-changes)
(let ((diffs (diff commit1 commit2)))
(is (eq (diff-deltas-count diffs) 1))
(is (equal '((:status :modified
:similarity 0
:flags (:not-binary)
:file-a (:id-abbrev 40
:mode :blob
:flags (:not-binary :valid-id :exists)
:size 902
:path "test-file"
:oid 97787706012661474925191056142692387097255677107)
:file-b (:id-abbrev 40
:mode :blob
:flags (:not-binary :valid-id :exists)
:size 919
:path "test-file"
:oid 243568240973109882797341286687005129339258402139)))
(diff-deltas-summary diffs)))
(is (equal `((:patch ,repository-with-changes-diff
:status :modified
:similarity 0
:flags (:not-binary)
:file-a (:id-abbrev 40
:mode :blob
:flags (:exists :not-binary :valid-id)
:size 902
:path "test-file"
:oid
97787706012661474925191056142692387097255677107)
:file-b (:id-abbrev 40
:mode :blob
:flags (:exists :not-binary :valid-id)
:size 919
:path "test-file"
:oid
243568240973109882797341286687005129339258402139)))
(sort-diff-flags (make-patch diffs))))))
(def-test diff-working (:fixture repository-with-unstaged)
(let ((diffs (diff *test-repository* (open-index *test-repository*))))
(is (eq (diff-deltas-count diffs) 1))
(is (equal '((:status :modified
:similarity 0
:flags (:not-binary)
:file-a (:id-abbrev 40
:mode :blob
:flags (:not-binary :valid-id :exists)
:size 902
:path "test-file"
:oid 97787706012661474925191056142692387097255677107)
:file-b (:id-abbrev 40
:mode :blob
:flags (:not-binary :valid-id :exists)
:size 919
:path "test-file"
:oid 243568240973109882797341286687005129339258402139)))
(diff-deltas-summary diffs)))
(is (equal `((:patch ,repository-with-changes-diff
:status :modified
:similarity 0
:flags (:not-binary)
:file-a (:id-abbrev 40
:mode :blob
:flags (:exists :not-binary :valid-id)
:size 902
:path "test-file"
:oid
97787706012661474925191056142692387097255677107)
:file-b (:id-abbrev 40
:mode :blob
:flags (:exists :not-binary :valid-id)
:size 919
:path "test-file"
:oid
243568240973109882797341286687005129339258402139)))
(sort-diff-flags (make-patch diffs))))))
(def-test diff-staged (:fixture repository-with-staged)
(let ((diffs (diff commit1 (open-index *test-repository*))))
(is (eq (diff-deltas-count diffs) 1))
(is (equal '((:status :modified
:similarity 0
:flags (:not-binary)
:file-a (:id-abbrev 40
:mode :blob
:flags (:not-binary :valid-id :exists)
:size 902
:path "test-file"
:oid 97787706012661474925191056142692387097255677107)
:file-b (:id-abbrev 40
:mode :blob
:flags (:not-binary :valid-id :exists)
:size 919
:path "test-file"
:oid 243568240973109882797341286687005129339258402139)))
(diff-deltas-summary diffs)))
(is (equal `((:patch ,repository-with-changes-diff
:status :modified
:similarity 0
:flags (:not-binary)
:file-a (:id-abbrev 40
:mode :blob
:flags (:exists :not-binary :valid-id)
:size 902
:path "test-file"
:oid
97787706012661474925191056142692387097255677107)
:file-b (:id-abbrev 40
:mode :blob
:flags (:exists :not-binary :valid-id)
:size 919
:path "test-file"
:oid
243568240973109882797341286687005129339258402139)))
(sort-diff-flags (make-patch diffs))))))
|
d8952a2829696ccb63ea5361c2984297f843501a815dcea9ac251bdeb03e1c28 | bytekid/mkbtt | inferences.ml | Copyright 2010
* GNU Lesser General Public License
*
* This file is part of MKBtt .
*
* is free software : you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version .
*
* is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with MKBtt . If not , see < / > .
* GNU Lesser General Public License
*
* This file is part of MKBtt.
*
* MKBtt is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* MKBtt is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with MKBtt. If not, see </>.
*)
* Inferences for ( ordered ) multicompletion with termination tools .
@author
@since 2010/11/01
@author Sarah Winkler
@since 2010/11/01 *)
* * OPENS ( 1 ) * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Util;;
(*** SUBMODULES **********************************************************)
module C = Completion;;
module W = World;;
module Monad = W.Monad;;
(*** OPENS ***************************************************************)
open Monad;;
(*** FUNCTIONS ***********************************************************)
" % s\n " s ;
let union = List.union
let deduce n =
W.get_options >>= fun o ->
if (C.is_ordered o) then OMKBtt.deduce n
else MKBtt.deduce n
;;
let deduce_rewrite n =
deduce n >>= fun ns ->
MKBtt.rewrite_open ns >>= fun ns' ->
MKBtt.gc ns >>= fun ns ->
return (union ns ns')
;;
let unfinished_processes =
W.get_options >>= fun o ->
if (C.is_ordered o) then OMKBtt.unfinished_processes
else MKBtt.unfinished_processes
;;
| null | https://raw.githubusercontent.com/bytekid/mkbtt/c2f8e0615389b52eabd12655fe48237aa0fe83fd/src/mkbtt/inferences.ml | ocaml | ** SUBMODULES *********************************************************
** OPENS **************************************************************
** FUNCTIONS ********************************************************** | Copyright 2010
* GNU Lesser General Public License
*
* This file is part of MKBtt .
*
* is free software : you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version .
*
* is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with MKBtt . If not , see < / > .
* GNU Lesser General Public License
*
* This file is part of MKBtt.
*
* MKBtt is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* MKBtt is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with MKBtt. If not, see </>.
*)
* Inferences for ( ordered ) multicompletion with termination tools .
@author
@since 2010/11/01
@author Sarah Winkler
@since 2010/11/01 *)
* * OPENS ( 1 ) * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Util;;
module C = Completion;;
module W = World;;
module Monad = W.Monad;;
open Monad;;
" % s\n " s ;
let union = List.union
let deduce n =
W.get_options >>= fun o ->
if (C.is_ordered o) then OMKBtt.deduce n
else MKBtt.deduce n
;;
let deduce_rewrite n =
deduce n >>= fun ns ->
MKBtt.rewrite_open ns >>= fun ns' ->
MKBtt.gc ns >>= fun ns ->
return (union ns ns')
;;
let unfinished_processes =
W.get_options >>= fun o ->
if (C.is_ordered o) then OMKBtt.unfinished_processes
else MKBtt.unfinished_processes
;;
|
569bd9813eb8c92133610a805501e4b16b1e24913f69809ac25dd8e7bf8f0828 | tari3x/csec-modex | pitsyntax.ml | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* and *
* *
* Copyright ( C ) INRIA , LIENS , 2000 - 2009 *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* Bruno Blanchet and Xavier Allamigeon *
* *
* Copyright (C) INRIA, LIENS, MPII 2000-2009 *
* *
*************************************************************)
This program is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details ( in file LICENSE ) .
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details (in file LICENSE).
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
open Parsing_helper
open Ptree
open Pitptree
open Types
open Pitypes
open Stringmap
let occ_count = ref 0
let new_occurrence () =
incr occ_count;
!occ_count
Parse a file
let parse filename =
try
let ic = open_in filename in
let lexbuf = Lexing.from_channel ic in
lexbuf.Lexing.lex_curr_p <- { lexbuf.Lexing.lex_curr_p with
Lexing.pos_fname = filename };
let ptree =
try
Pitparser.all Pitlexer.token lexbuf
with Parsing.Parse_error ->
input_error "Syntax error" (extent lexbuf)
in
close_in ic;
ptree
with Sys_error s ->
user_error ("File error: " ^ s ^ "\n")
let parse_lib filename =
let filename = filename ^ ".pvl" in
try
let ic = open_in filename in
let lexbuf = Lexing.from_channel ic in
lexbuf.Lexing.lex_curr_p <- { lexbuf.Lexing.lex_curr_p with
Lexing.pos_fname = filename };
let ptree =
try
Pitparser.lib Pitlexer.token lexbuf
with Parsing.Parse_error ->
input_error "Syntax error" (extent lexbuf)
in
close_in ic;
ptree
with Sys_error s ->
user_error ("File error: " ^ s ^ "\n")
let parse_with_lib filename =
let l1 =
if (!Param.lib_name) <> "" then
parse_lib (!Param.lib_name)
else
[]
in
let (l,p) = parse filename in
(l1 @ l, p)
(* Global table of identifiers, including names, functions, variables,
predicates, and types.
Is a map from strings to the description of the ident *)
let global_env = ref (StringMap.empty : envElement StringMap.t)
(** Types **)
let get_type_polym polym (s, ext) =
if s = "any_type" then
if polym then
Param.any_type
else
input_error "polymorphic type not allowed here" ext
else
try
List.find (fun t -> t.tname = s) (!Param.all_types)
with Not_found ->
input_error ("type " ^ s ^ " not declared") ext
let get_type (s, ext) = get_type_polym false (s,ext)
let check_type_decl (s, ext) =
if s = "any_type" then
input_error "type any_type reserved for polymorphism" ext;
if StringMap.mem s (!global_env) then
input_error ("identifier " ^ s ^ " already defined (as a free name, a function, a predicate, or a type)") ext;
let r = { tname = s } in
Param.all_types := r :: (!Param.all_types);
global_env := StringMap.add s (EType r) (!global_env)
(* Table of bound names of the process *)
let glob_table = Hashtbl.create 7
let check_single ext s =
let vals = Hashtbl.find_all glob_table s in
match vals with
_::_::_ -> input_error (s ^ " cannot be used in queries. Its definition is ambiguous. (For example, several restrictions might define " ^ s ^ ".)") ext
| _ -> ()
(* Functions *)
let fun_decls = Param.fun_decls
let true_cst = Terms.true_cst
let false_cst = Terms.false_cst
let init_fun_decl () =
Hashtbl.add fun_decls "true" true_cst;
global_env := StringMap.add "true" (EFun true_cst) (!global_env);
Hashtbl.add fun_decls "false" false_cst;
global_env := StringMap.add "false" (EFun false_cst) (!global_env);
Hashtbl.add fun_decls "not" Terms.not_fun;
global_env := StringMap.add "not" (EFun Terms.not_fun) (!global_env);
List.iter (fun t -> global_env := StringMap.add t.tname (EType t) (!global_env)) (!Param.all_types)
let special_functions = ["choice"; "||"; "&&"; "="; "<>"]
let get_fun env (s,ext) tl =
if List.mem s special_functions then
input_error (s ^ " not allowed here") ext;
try
match StringMap.find s env with
EFun r ->
if not (Terms.eq_lists (fst r.f_type) tl) then
input_error ("function " ^ s ^ " expects arguments of type " ^
(Terms.tl_to_string ", " (fst r.f_type)) ^
" but is given arguments of type " ^
(Terms.tl_to_string ", " tl)) ext;
r
| _ ->
input_error (s ^ " should be a function") ext
with Not_found ->
input_error ("function " ^ s ^ " not defined") ext
let check_fun_decl (name, ext) argtypes restype options =
let tyarg = List.map get_type argtypes in
let tyres = get_type restype in
if StringMap.mem name (!global_env) then
input_error ("identifier " ^ name ^ " already defined (as a free name, a function, a predicate, or a type)") ext;
let is_tuple = ref false in
let is_private = ref false in
let opt = ref 0 in
List.iter (function
("data",_) -> is_tuple := true
| ("private",_) -> is_private := true
| ("typeConverter",_) ->
if List.length tyarg != 1 then
input_error "only unary functions can be declared \"typeConverter\"" ext;
opt := (!opt) lor Param.fun_TYPECONVERTER
| (_,ext) ->
input_error "for functions, the only allowed options are data, private, and typeConverter" ext) options;
let cat = if !is_tuple (* || ((arity == 0) && (not is_private)) *) then Tuple else Eq [] in
let r = { f_name = name;
f_type = tyarg, tyres;
f_cat = cat;
f_initial_cat = cat;
f_private = !is_private;
f_options = !opt }
in
Hashtbl.add fun_decls name r;
global_env := StringMap.add name (EFun r) (!global_env)
let get_var env (s,ext) =
try
match StringMap.find s env with
EVar v -> v
| _ -> input_error (s ^ " should be a variable") ext
with Not_found ->
input_error ("variable " ^ s ^ " not declared") ext
let add_env env l =
let env_ref = ref env in
List.iter (fun ((s,ext),ty) ->
let t = get_type ty in
begin
try
match StringMap.find s (!env_ref) with
EVar _ -> input_error ("variable " ^ s ^ " already defined") ext
| _ -> input_warning ("identifier " ^ s ^ " rebound") ext
with Not_found -> ()
end;
let v = Terms.new_var s t in
env_ref := StringMap.add s (EVar v) (!env_ref)
) l;
!env_ref
let create_env l =
add_env (!global_env) l
let f_eq_tuple f ext =
match f.f_cat with
Eq _ | Tuple -> ()
| _ -> input_error ("function " ^ f.f_name ^ " has been defined by reduction. It should not appear in equations or clauses") ext
let f_any f ext = ()
let rec check_eq_term f_allowed env (term,ext) =
match term with
(PIdent (s,ext)) ->
let t =
try
match StringMap.find s env with
EVar v -> Var v
| EFun f ->
if fst f.f_type <> [] then
input_error ("function " ^ s ^ " expects " ^
(string_of_int (List.length (fst f.f_type))) ^
" arguments but is used without arguments") ext;
f_allowed f ext;
FunApp(f, [])
| _ -> input_error ("identifier " ^ s ^ " should be a function or a variable") ext
with Not_found ->
input_error ("identifier " ^ s ^ " not defined as a function or as a variable") ext
in
(t, Terms.get_term_type t)
| (PFunApp ((f,ext), tlist)) ->
let (tl', tyl) = List.split (List.map (check_eq_term f_allowed env) tlist) in
let f' = get_fun env (f,ext) tyl in
f_allowed f' ext;
if (f'.f_options land Param.fun_TYPECONVERTER != 0) && (!Param.ignore_types) then
match tl' with
[t] -> (t, snd f'.f_type)
| _ -> internal_error "type converter functions should always be unary"
else
(FunApp(f', tl'), snd f'.f_type)
| (PTuple tlist) ->
let (tl', tyl) = List.split (List.map (check_eq_term f_allowed env) tlist) in
(FunApp (Terms.get_tuple_fun tyl, tl'), Param.bitstring_type)
(* Equations *)
let check_equation env t1 t2 =
let var_env = create_env env in
let (t1', ty1) = check_eq_term f_eq_tuple var_env t1 in
let (t2', ty2) = check_eq_term f_eq_tuple var_env t2 in
if ty1 != ty2 then
begin
let ext = merge_ext (snd t1) (snd t2) in
input_error "the two members of an equation should have the same type" ext
end;
TermsEq.register_equation (t1',t2')
(* Definitions of destructors by rewrite rules *)
let check_red tlist options =
match tlist with
(_,(PFunApp((f,ext),l),_),_)::_ ->
begin
if List.mem f special_functions then
input_error (f ^ " not allowed here") ext;
if StringMap.mem f (!global_env) then
input_error ("identifier " ^ f ^ " already defined (as a free name, a function, a predicate, or a type)") ext;
let red_list, ty_red_list = List.split (List.map
(function (env, (PFunApp((f',ext'),l1),_), t2) ->
if f <> f' then
input_error ("In \"reduc\", all rewrite rules should begin with the same function " ^ f) ext';
let var_env = create_env env in
let ((lhs, tylhs), (rhs, tyrhs)) = (List.split (List.map (check_eq_term f_eq_tuple var_env) l1),
check_eq_term f_eq_tuple var_env t2)
in
let var_list_rhs = ref [] in
Terms.get_vars var_list_rhs rhs;
if not (List.for_all (fun v -> List.exists (Terms.occurs_var v) lhs) (!var_list_rhs)) then
Parsing_helper.input_error "All variables of the right-hand side of a \"reduc\" definition\nshould also occur in the left-hand side." ext';
(lhs, rhs), (tylhs, tyrhs)
| _, (_, ext1), _ -> input_error ("In \"reduc\", all rewrite rules should begin with function application") ext1) tlist)
in
match ty_red_list with
[] -> internal_error "reduction with empty list"
| (tylhs,tyrhs)::r ->
List.iter (fun (tylhs',tyrhs') ->
if not (Terms.eq_lists tylhs tylhs') then
input_error ("the arguments of function " ^ f ^ " do not have the same type in all rewrite rules") ext;
if not (tyrhs == tyrhs') then
input_error ("the result of function " ^ f ^ " does not have the same type in all rewrite rules") ext
) r;
let cat = Red red_list in
let is_private = ref false in
List.iter (function
| ("private",_) -> is_private := true
| (_,ext) ->
input_error "for functions defined by rewrite rules, the only allowed option is private" ext) options;
let fsymb = { f_name = f;
f_type = tylhs, tyrhs;
f_private = !is_private;
f_options = 0;
f_cat = cat;
f_initial_cat = cat
}
in
Hashtbl.add fun_decls f fsymb;
global_env := StringMap.add f (EFun fsymb) (!global_env)
end
| (_,(_, ext1), _) :: l ->
input_error ("In \"reduc\", all rewrite rules should begin with function application") ext1
| [] -> internal_error "reduction with empty list"
(* Check clauses *)
let pred_env = Param.pred_env
let rec interpret_info ty r = function
("memberOptim", ext) ->
if List.length ty != 2 then
input_error "memberOptim makes sense only for predicates of arity 2" ext;
r.p_prop <- r.p_prop lor Param.pred_ELEM
| ("refTransAtt", ext) ->
begin
match ty with
[t1;t2] when t1 == t2 -> r.p_prop <- r.p_prop lor Param.pred_REFTRANS
| _ -> input_error "refTransAtt makes sense only for predicates with 2 arguments of the same type" ext
end
| ("decompData",ext) ->
if List.exists (fun t -> t != Param.any_type) ty then
input_error "decompData makes sense only for predicates that are polymorphic in all their arguments" ext;
r.p_prop <- r.p_prop lor Param.pred_TUPLE
| ("decompDataSelect",ext) ->
if List.exists (fun t -> t != Param.any_type) ty then
input_error "decompDataSelect makes sense only for predicates that are polymorphic in all their arguments" ext;
r.p_prop <- r.p_prop lor Param.pred_TUPLE lor Param.pred_TUPLE_SELECT
| ("block",_) -> r.p_prop <- r.p_prop lor Param.pred_BLOCKING
(* add other qualifiers here *)
| (s,ext) -> input_error ("unknown predicate qualifier " ^ s) ext
let check_pred (c,ext) tl info =
if c = "attacker" || c = "mess" || c = "event" || c = "inj-event" then
input_error ("predicate name " ^ c ^ " is reserved. You cannot declare it") ext;
if StringMap.mem c (!global_env) then
input_error ("identifier " ^ c ^ " already defined (as a free name, a function, a predicate, or a type)") ext;
let tyl = List.map (get_type_polym true) tl in
let r = { p_name = c; p_type = tyl; p_prop = 0; p_info = [] } in
List.iter (interpret_info tyl r) info;
if List.exists (fun t -> t == Param.any_type) tyl then
r.p_info <- [PolymPred(c, r.p_prop, tyl)];
Hashtbl.add pred_env c r;
global_env := StringMap.add c (EPred r) (!global_env)
let get_pred env (c, ext) tl =
try
match StringMap.find c env with
EPred r ->
if not ((List.length r.p_type == List.length tl) && (List.for_all2 (fun t1 t2 -> t1 == t2 || t1 == Param.any_type) r.p_type tl)) then
input_error ("predicate " ^ c ^ " expects arguments of type " ^
(Terms.tl_to_string ", " r.p_type) ^
" but is given arguments of type " ^
(Terms.tl_to_string ", " tl)) ext;
if List.exists (fun t -> t == Param.any_type) r.p_type then
Param.get_pred (PolymPred(r.p_name, r.p_prop, tl))
else
r
| _ -> input_error (c ^ " should be a predicate") ext
with Not_found ->
input_error ("undeclared predicate " ^ c ) ext
type pred_or_fun =
IsPred of predicate
| IsFun of funsymb
let get_pred_or_fun env (c,ext) tl' =
try
match StringMap.find c env with
EPred r ->
if not ((List.length r.p_type == List.length tl') && (List.for_all2 (fun t1 t2 -> t1 == t2 || t1 == Param.any_type) r.p_type tl')) then
input_error ("predicate " ^ c ^ " expects arguments of type " ^
(Terms.tl_to_string ", " r.p_type) ^
" but is given arguments of type " ^
(Terms.tl_to_string ", " tl')) ext;
let p' =
if List.exists (fun t -> t == Param.any_type) r.p_type then
Param.get_pred (PolymPred(r.p_name, r.p_prop, tl'))
else
r
in
IsPred p'
| EFun r ->
if not (Terms.eq_lists (fst r.f_type) tl') then
input_error ("function " ^ c ^ " expects arguments of type " ^
(Terms.tl_to_string ", " (fst r.f_type)) ^
" but is given arguments of type " ^
(Terms.tl_to_string ", " tl')) ext;
if (snd r.f_type) != Param.bool_type then
input_error ("function " ^ c ^ " returns a result of type " ^
(snd r.f_type).tname ^ " but a boolean is expected") ext;
IsFun r
| _ -> input_error (c ^ " should be a predicate or a boolean function") ext
with Not_found ->
input_error ("undeclared predicate or function " ^ c ) ext
let add_rule hyp concl constra tag =
Param.red_rules := (hyp, concl, constra, tag) :: (!Param.red_rules)
let equal_fact t1 t2 =
Pred(Param.get_pred (Equal(Terms.get_term_type t1)), [t1;t2])
let check_cterm env (p,t) =
let (tl, tyl) = List.split (List.map (check_eq_term f_any env) t) in
(get_pred env p tyl, tl)
let rec check_hyp (hyp_accu,constra_accu) env (fact, ext) =
match fact with
PIdent p ->
let (p',l') = check_cterm env (p,[]) in
(Pred(p',l')::hyp_accu, constra_accu)
| PTuple _ -> input_error "tuples not allowed here" ext
| PFunApp((f,fext) as p, l) ->
match f,l with
"<>", [t1;t2] ->
let (t1', ty1) = check_eq_term f_any env t1 in
let (t2', ty2) = check_eq_term f_any env t2 in
if ty1 != ty2 then
input_error "the two arguments of an inequality test should have the same type" ext;
(hyp_accu, [Neq(t1', t2')] :: constra_accu)
| "=", [t1;t2] ->
let (t1', ty1) = check_eq_term f_any env t1 in
let (t2', ty2) = check_eq_term f_any env t2 in
if ty1 != ty2 then
input_error "the two arguments of an equality test should have the same type" ext;
((equal_fact t1' t2')::hyp_accu, constra_accu)
| "&&", [h1;h2] ->
check_hyp (check_hyp (hyp_accu,constra_accu) env h1) env h2
| ("<>" | "=" | "&&"), _ -> internal_error ("Bad arity for special function " ^ f)
| ("||" | "not" | "choice"), _ -> input_error (f ^ " not allowed here") fext
| _ ->
let (p',l') = check_cterm env (p,l) in
(Pred(p',l')::hyp_accu, constra_accu)
let check_simple_fact env (fact, ext) =
match fact with
PIdent p ->
let (p',l') = check_cterm env (p,[]) in
Pred(p',l')
| PTuple _ -> input_error "tuples not allowed here" ext
| PFunApp((f,fext) as p,l) ->
match f with
"=" | "<>" | "&&" | "||" | "not" | "choice" -> input_error (f ^ " not allowed here") fext
| _ ->
let (p',l') = check_cterm env (p,l) in
Pred(p',l')
(* Translate a parsed clause declaration into internal Horn rules.
   - PFact c: a fact with no hypothesis.
   - PClause(i,c): hypotheses [i] imply conclusion [c]; inequality
     constraints are simplified, and the clause is silently dropped when
     they are unsatisfiable (Rules.FalseConstraint).
   - PEquiv(i,c,select): an equivalence, translated into rules in both
     directions; inequality constraints are not allowed here. *)
let check_clause = function
    (env, PFact(c)) ->
      begin
        let env = create_env env in
        let concl = check_simple_fact env c in
        add_rule [] concl [] LblClause
      end
  | (env, PClause(i,c)) ->
      begin
        try
          let env = create_env env in
          let (hyp, constra) = check_hyp ([],[]) env i in
          let concl = check_simple_fact env c in
          add_rule hyp concl
            (Rules.simplify_constra_list (concl :: hyp) constra) LblClause
        with Rules.FalseConstraint -> ()
        (* Constraints can never be satisfied: the clause is useless *)
      end
  | (env, PEquiv(i,c,select)) ->
      let env = create_env env in
      let (hyp, constra) = check_hyp ([],[]) env i in
      if constra != [] then
        Parsing_helper.user_error "Inequality constraints not allowed in equivalences";
      let concl = check_simple_fact env c in
      add_rule hyp concl [] LblEquiv;
      (* The converse direction: the conclusion implies each hypothesis *)
      List.iter (fun h -> add_rule [concl] h [] LblEquiv) hyp;
      Rules.add_equiv (hyp, concl, -1); (* TO DO should give a real rule number, but that's not easy... *)
      if not select then Terms.add_unsel concl
(* List of the free names of the process *)
let freenames = Param.freenames
(* Build a name function symbol called [s], of type [ty].
   The name is private (unknown to the attacker) iff [is_free] is false. *)
let create_name s ty is_free =
  let cat = Name { prev_inputs = None; prev_inputs_meaning = [] } in
  { f_name = s;
    f_type = ty;
    f_cat = cat;
    f_initial_cat = cat;
    f_private = not is_free;
    f_options = 0 }
(* Like [create_name], but makes the symbol's name globally unique by
   appending a fresh numeric suffix to [s].  Delegates to [create_name]
   instead of duplicating the record construction. *)
let create_name_uniq s ty is_free =
  create_name (s ^ "_" ^ (string_of_int (Terms.new_var_name()))) ty is_free
(* Declare a free name [s] of type [t].  The only accepted option is
   "private"; any other option is an error.  The name is registered in
   the global environment and added to [freenames]. *)
let add_free_name (s,ext) t options =
  let is_private = ref false in
  List.iter (function
    | ("private",_) -> is_private := true
    | (_,ext) ->
        input_error "for free names, the only allowed option is private" ext) options;
  let ty = get_type t in
  if StringMap.mem s (!global_env) then
    input_error ("identifier " ^ s ^ " already declared (as a free name, a function, a predicate, or a type)") ext;
  (* A free name takes no arguments, hence the empty argument-type list *)
  let r = create_name s ([],ty) (not (!is_private)) in
  global_env := StringMap.add s (EName r) (!global_env);
  freenames := r :: !freenames
(* Check non-interference terms *)
(* Resolve identifier [s] as a name usable in a non-interference query.
   Only private free names are accepted: non-interference trivially fails
   on public values. *)
let get_non_interf_name env (s,ext) =
  try
    match StringMap.find s env with
      EName r ->
        check_single ext s;
        if not r.f_private then
          input_error ("Non-interference is certainly false on public values, such as " ^ s) ext
        else
          r
    | _ ->
        input_error ("Non-interference can only be tested on private free names") ext
  with Not_found ->
    input_error ("Name " ^ s ^ " is not declared") ext
(* Check a term appearing in a non-interference query.
   Returns the internal term together with its type.  Only constructors
   (Eq/Tuple categories), names, and variables are allowed; functions
   defined by reduction are rejected. *)
let rec check_ni_term env (term,ext) =
  match term with
    (PIdent (s,ext)) ->
      let t =
        try
          match StringMap.find s env with
            EVar v -> Var v
          | EFun f ->
              if fst f.f_type <> [] then
                input_error ("function " ^ s ^ " expects " ^
                             (string_of_int (List.length (fst f.f_type))) ^
                             " arguments but is used without arguments") ext;
              (match f.f_cat with
                 Eq _ | Tuple -> ()
               | _ -> input_error ("function " ^ s ^ " has been defined by reduction. It should not appear in non-interference queries") ext);
              FunApp(f, [])
          | EName r ->
              FunApp (r, [])
          | _ -> input_error ("identifier " ^ s ^ " should be a variable, a function, or a name") ext
        with Not_found ->
          input_error ("identifier " ^ s ^ " not defined as a variable, a function, or a name") ext
      in
      (t, Terms.get_term_type t)
  | (PFunApp ((s,ext), tlist)) ->
      let (tl, tyl) = List.split (List.map (check_ni_term env) tlist) in
      let f = get_fun env (s,ext) tyl in
      (match f.f_cat with
         Eq _ | Tuple -> ()
       | _ -> input_error ("function " ^ s ^ " has been defined by reduction. It should not appear in non-interference queries") ext);
      (* Type-converter functions disappear when types are ignored *)
      if (f.f_options land Param.fun_TYPECONVERTER != 0) && (!Param.ignore_types) then
        match tl with
          [t] -> (t, snd f.f_type)
        | _ -> internal_error "type converter functions should always be unary"
      else
        (FunApp(f, tl), snd f.f_type)
  | (PTuple tlist) ->
      let (l, tl) = List.split (List.map (check_ni_term env) tlist) in
      (FunApp (Terms.get_tuple_fun tl, l), Param.bitstring_type)
(* Check a non-interference declaration: a private name [id] optionally
   restricted to a set of values [lopt].  Each value must have the same
   type as the name. *)
let get_non_interf env (id, lopt) =
  let n = get_non_interf_name (create_env env) id in
  (n,
   match lopt with
     None -> None
   | Some l ->
       Some (List.map (fun t ->
         let (t', ty) = check_ni_term (create_env env) t in
         if ty != snd n.f_type then
           input_error ("this term has type " ^ ty.tname ^ " but should have type " ^ (snd n.f_type).tname) (snd t);
         t'
       ) l))
(* Copy a process *)
(* Create a fresh copy of binder [b] and record the renaming by linking
   [b] to the new variable.  [b] must be unlinked; the link is expected
   to be undone later by Terms.auto_cleanup at the call sites. *)
let copy_binder b =
  let b' = Terms.new_var b.sname b.btype in
  match b.link with
    NoLink ->
      Terms.link b (TLink (Var b'));
      b'
  | _ -> Parsing_helper.internal_error ("unexpected link in copy_binder " ^ b.sname)
(* Copy a pattern, renaming its binders (via copy_binder, which links the
   old variables to the new ones) and copying embedded terms. *)
let rec copy_pat = function
    PatVar b -> PatVar (copy_binder b)
  | PatTuple(f,l) -> PatTuple(f, List.map copy_pat l)
  | PatEqual(t) -> PatEqual (Terms.copy_term3 t)
(* Copy a process, renaming bound variables.  Occurrence labels are
   regenerated with new_occurrence().  When [add_in_glob_table] is true
   (final copy), each restriction gets a fresh, globally unique name that
   is recorded in glob_table and substituted in the copied subprocess.
   Binder renamings are scoped with Terms.auto_cleanup so links do not
   leak across independent subterms. *)
let rec copy_process add_in_glob_table = function
    Nil -> Nil
  | Par(p1,p2) -> Par(copy_process add_in_glob_table p1, copy_process add_in_glob_table p2)
  | Restr(n,p) ->
      if add_in_glob_table then
        (* If it is the final copy, create a distinct name for each restriction and add it in the glob_table *)
        let n' = create_name_uniq n.f_name n.f_type false in
        Hashtbl.add glob_table n.f_name n';
        Restr(n', Reduction_helper.process_subst (copy_process add_in_glob_table p) n (FunApp(n',[])))
      else
        Restr(n, copy_process add_in_glob_table p)
  | Repl(p,occ) -> Repl(copy_process add_in_glob_table p, new_occurrence())
  | Let(pat, t, p, q, occ) ->
      Terms.auto_cleanup (fun () ->
        let pat' = copy_pat pat in
        Let(pat', Terms.copy_term3 t, copy_process add_in_glob_table p, copy_process add_in_glob_table q, new_occurrence()))
  | Input(t, pat, p, occ) ->
      Terms.auto_cleanup (fun () ->
        let pat' = copy_pat pat in
        Input(Terms.copy_term3 t, pat', copy_process add_in_glob_table p, new_occurrence()))
  | Output(tc,t,p, occ) -> Output(Terms.copy_term3 tc, Terms.copy_term3 t, copy_process add_in_glob_table p, new_occurrence())
  | Test(t,t',p,q,occ) -> Test(Terms.copy_term3 t, Terms.copy_term3 t', copy_process add_in_glob_table p, copy_process add_in_glob_table q,new_occurrence())
  | Event(t, p, occ) -> Event(Terms.copy_term3 t, copy_process add_in_glob_table p, new_occurrence())
  | Insert(t, p, occ) -> Insert(Terms.copy_term3 t, copy_process add_in_glob_table p, new_occurrence())
  | Get(pat, t, p, occ) ->
      Terms.auto_cleanup (fun () ->
        let pat' = copy_pat pat in
        Get(pat', Terms.copy_term3 t, copy_process add_in_glob_table p, new_occurrence()))
  | Phase(n,p) -> Phase(n, copy_process add_in_glob_table p)
  | LetFilter(bl, f, p, q, occ) ->
      Terms.auto_cleanup (fun () ->
        let bl' = List.map copy_binder bl in
        LetFilter(bl', Terms.copy_fact3 f, copy_process add_in_glob_table p, copy_process add_in_glob_table q, new_occurrence()))
(*** Translate a process from parse tree to internal representation ***)
(* Table of processes defined by "let" *)
let pdeftbl = (Hashtbl.create 7 : (string, binder list * process) Hashtbl.t)
(* Get an ident when anything is allowed *)
(* Resolve identifier [s] as a term: a variable, a name, or a
   zero-argument function.  Anything else is an error. *)
let get_ident_any env (s, ext) =
  try
    match StringMap.find s env with
      EVar b -> Var b
    | EName r -> FunApp (r,[])
    | EFun f ->
        if fst f.f_type = [] then
          FunApp(f,[])
        else
          input_error ("function " ^ s ^ " expects " ^
                       (string_of_int (List.length (fst f.f_type))) ^
                       " arguments but is used without arguments") ext
    | _ -> input_error ("identifier " ^ s ^ " should be a variable, a function, or a name") ext
  with Not_found ->
    input_error ("Variable, function, or name " ^ s ^ " not declared") ext
(* [cross_product l1 l2] returns every pair (x, y) with x drawn from [l1]
   and y from [l2], grouped by the elements of [l2] in order. *)
let cross_product l1 l2 =
  List.concat (List.map (fun y -> List.map (fun x -> (x, y)) l1) l2)
(* [split n l] separates [l] into its first [n] elements and the
   remainder.  Fails with an internal error when [l] is shorter than [n]. *)
let rec split n l =
  match n, l with
  | 0, _ -> ([], l)
  | _, [] -> Parsing_helper.internal_error "split"
  | _, first :: rest ->
      let (before, after) = split (n - 1) rest in
      (first :: before, after)
(* Cut a list into consecutive chunks of [n] elements.
   Fails (via split) when the length is not a multiple of [n]. *)
let rec split_every n = function
    [] -> []
  | l ->
      let (l1,l2) = split n l in
      l1 :: (split_every n l2)
(* Identity expansion context: used when a term expands to exactly one
   alternative.  NOTE: pairing_expand compares against this very closure
   with physical equality (==), so it must remain a single top-level
   binding and must not be re-created. *)
let no_expand_fun = function
    [p] -> p
  | _ -> Parsing_helper.internal_error "no_expand_fun expecting a list with a single element"
(* Combine two (context, alternatives) expansion pairs into one over the
   cross product of their alternatives.  The physical comparison with
   no_expand_fun avoids building useless wrapper closures when a side has
   a trivial context.  In the general case, the combined context splits
   its argument into chunks of [List.length la] processes, applies [fa]
   to each chunk, then [fl] to the results. *)
let pairing_expand (fa,la) (fl,ll) =
  if fa == no_expand_fun then
    if fl == no_expand_fun then
      (no_expand_fun, cross_product la ll)
    else
      (fl, cross_product la ll)
  else
    if fl == no_expand_fun then
      (fa, cross_product la ll)
    else
      let len = List.length la in
      ((fun l -> let l' = split_every len l in fl (List.map fa l')),
       cross_product la ll)
(* Ensure the expansion context [fex] does not reference any variable of
   [vlist]: instantiate it with Nil placeholders and scan the resulting
   process for occurrences.  Such a reference would use a variable before
   it is defined. *)
let check_no_ref ext vlist (fex, tex) =
  let fNil = fex (List.map (fun _ -> Nil) tex) in
  if List.exists (fun v -> Reduction_helper.occurs_var_proc v fNil) vlist then
    input_error "Cannot expand term because a variable in the expanded part would be referenced before being defined" ext
(* Check and translate a parsed term into internal representation.
   Returns a triple (fex, tex, ty) where:
   - tex is the list of expanded alternative terms (several when the term
     contains if/let/letfilter constructs that must be expanded into
     processes);
   - fex is an expansion context: given one process per element of tex,
     it builds the process that evaluates the term;
   - ty is the type of the term. *)
let rec check_term env (term, ext) =
  match term with
    PPIdent i ->
      let t = get_ident_any env i in
      (no_expand_fun, [t], Terms.get_term_type t)
  | PPFunApp((s,ext),l) ->
      let (fex',lex',tl') = check_term_list env l in
      if s = "choice" then
        begin
          match tl' with
            [t1;t2] when t1 == t2 ->
              let f = Param.choice_fun t1 in
              (fex', List.map (fun l' -> FunApp(f, l')) lex', t1)
          | _ ->
              input_error ("function choice expects two arguments of same type but is given arguments of type " ^
                           (Terms.tl_to_string ", " tl')) ext
        end
      else
        begin
          if List.mem s special_functions then
            input_error (s ^ " not allowed here") ext;
          try
            match StringMap.find s env with
              EFun f ->
                if not (Terms.eq_lists (fst f.f_type) tl') then
                  input_error ("function " ^ s ^ " expects arguments of type " ^
                               (Terms.tl_to_string ", " (fst f.f_type)) ^
                               " but is given arguments of type " ^
                               (Terms.tl_to_string ", " tl')) ext;
                (* Type-converter functions disappear when types are ignored *)
                if (f.f_options land Param.fun_TYPECONVERTER != 0) && (!Param.ignore_types) then
                  (fex', List.map (function
                     [t] -> t
                   | _ -> internal_error "type converter functions should always be unary"
                   ) lex', snd f.f_type)
                else
                  (fex', List.map (fun l' -> FunApp(f, l')) lex', snd f.f_type)
            | ELetFun(args, fex, tex, ty) ->
                (* Inline a letfun: rename its formal arguments to fresh
                   variables, bind each to the corresponding actual
                   argument with Let, and copy its body *)
                let tyargs = List.map (fun v -> v.btype) args in
                if not (Terms.eq_lists tyargs tl') then
                  input_error ("letfun function " ^ s ^ " expects arguments of type " ^
                               (Terms.tl_to_string ", " tyargs) ^
                               " but is given arguments of type " ^
                               (Terms.tl_to_string ", " tl')) ext;
                (* Fix the variables that we are going to use to rename the arguments of the function *)
                let var_map = List.map (fun v -> (v, Terms.new_var v.sname v.btype)) args in
                ((fun l ->
                    fex' (List.map (fun tl' ->
                      let p = ref (Terms.auto_cleanup (fun () ->
                        List.iter (fun (v,v') -> Terms.link v (TLink (Var v'))) var_map;
                        copy_process false (fex l))) in
                      List.iter2 (fun (_,v') t' ->
                        p := Let(PatVar v', t', (!p), Nil, new_occurrence())) var_map tl';
                      !p
                    ) lex')),
                 Terms.auto_cleanup (fun () ->
                   List.iter (fun (v,v') -> Terms.link v (TLink (Var v'))) var_map;
                   List.map Terms.copy_term3 tex), ty)
            | _ ->
                input_error (s ^ " should be a function") ext
          with Not_found ->
            input_error ("function " ^ s ^ " not defined") ext
        end
  | PPTuple l ->
      let (fex',lex',tl') = check_term_list env l in
      let f = Terms.get_tuple_fun tl' in
      (fex', List.map (fun l' -> FunApp(f, l')) lex', Param.bitstring_type)
  | PPRestr((s,ext),tyid,t) ->
      let ty = get_type tyid in
      if (StringMap.mem s env) then
        input_warning ("identifier " ^ s ^ " rebound") ext;
      let r = create_name s (Param.tmp_type, ty) false in
      let env' = StringMap.add s (EName r) env in
      let (fex, tex, ty) = check_term env' t in
      ((fun l -> Restr(r, fex l)), tex, ty)
  | PPTest(c,p1,p2) ->
      (* Rewrite boolean structure of the condition into nested tests *)
      let rec interpret_cond p1 p2 = function
          (PPIdent pred), ext -> interpret_cond p1 p2 (PPFunApp(pred,[]), ext)
        | (PPTuple _), ext ->
            input_error "tuples allowed in terms, but not at this level of conditions" ext
        | (PPRestr _ | PPTest _ | PPLetIn _ | PPLet _ | PPLetFilter _), ext -> input_error "new, if, let allowed in terms, but not at this position in conditions" ext
        | (PPFunApp((f,fext), l)), ext0 ->
            match f, l with
              "||", [c1;c2] ->
                (* if c1 || c2 then p1 else p2
                   is equivalent to
                   if c1 then p1 else (if c2 then p1 else p2) *)
                interpret_cond p1 (PPTest(c2,p1,p2), ext) c1
            | "&&", [c1;c2] ->
                (* if c1 && c2 then p1 else p2
                   is equivalent to
                   if c1 then (if c2 then p1 else p2) else p2 *)
                interpret_cond (PPTest(c2,p1,p2), ext) p2 c1
            | "not", [c] ->
                interpret_cond p2 p1 c
            | "=", [t1;t2] ->
                let (fex1',tex1', ty1) = check_term env t1 in
                let (fex2',tex2', ty2) = check_term env t2 in
                if ty1 != ty2 then
                  input_error "the two arguments of an equality test should have the same type" ext0;
                let (fex,tex) = pairing_expand (fex1',tex1') (fex2',tex2') in
                let (fexthen,texthen, tythen) = check_term env p1 in
                let (fexelse,texelse, tyelse) = check_term env p2 in
                if tythen != tyelse then
                  input_error "the then and else branches should have the same type" ext;
                let lenthen = List.length texthen in
                ((fun l ->
                    let (thenpart, elsepart) = split lenthen l in
                    fex (List.map (fun (t1, t2) ->
                      Test(t1, t2, fexthen thenpart, fexelse elsepart,
                           new_occurrence())) tex)), texthen @ texelse, tythen)
            | "<>", [t1;t2] ->
                (* Inequality: same as equality with the branches swapped *)
                let (fex1',tex1', ty1) = check_term env t1 in
                let (fex2',tex2', ty2) = check_term env t2 in
                if ty1 != ty2 then
                  input_error "the two arguments of an inequality test should have the same type" ext0;
                let (fex,tex) = pairing_expand (fex1',tex1') (fex2',tex2') in
                let (fexthen,texthen, tythen) = check_term env p2 in
                let (fexelse,texelse, tyelse) = check_term env p1 in
                if tythen != tyelse then
                  input_error "the then and else branches should have the same type" ext;
                let lenthen = List.length texthen in
                ((fun l ->
                    let (thenpart, elsepart) = split lenthen l in
                    fex (List.map (fun (t1, t2) ->
                      Test(t1, t2, fexthen thenpart, fexelse elsepart,
                           new_occurrence())) tex)), texthen @ texelse, tythen)
            | ("||" | "&&" | "=" | "<>" | "not"), _ ->
                internal_error ("Bad arity for special function " ^ f)
            | "choice", _ ->
                input_error "choice allowed in terms, but not at this level of conditions" ext0
            | _ ->
                (* Condition is a predicate or a boolean function call *)
                let (fex, lex', tl') = check_term_list env l in
                let (fexthen,texthen, tythen) = check_term env p1 in
                let (fexelse,texelse, tyelse) = check_term env p2 in
                if tythen != tyelse then
                  input_error "the then and else branches should have the same type" ext;
                let lenthen = List.length texthen in
                match get_pred_or_fun env (f, fext) tl' with
                  IsPred p' ->
                    ((fun l ->
                        let (thenpart, elsepart) = split lenthen l in
                        fex (List.map (fun testi ->
                          LetFilter([], Pred(p', testi), fexthen thenpart, fexelse elsepart,
                                    new_occurrence())) lex')), texthen @ texelse, tythen)
                | IsFun f ->
                    ((fun l ->
                        let (thenpart, elsepart) = split lenthen l in
                        fex (List.map (fun testi ->
                          Test(FunApp(f, testi), FunApp(true_cst, []),
                               fexthen thenpart, fexelse elsepart,
                               new_occurrence())) lex')), texthen @ texelse, tythen)
      in
      interpret_cond p1 p2 c
  | PPLet(pat,t,p,p') ->
      let (ftex, tex', ty) = check_term env t in
      let (fpex, patex', env',_) = check_pat env [] (Some ty) pat in
      let (fex, lex) = pairing_expand (ftex,tex') (fpex, patex') in
      let (fexthen,texthen, tythen) = check_term env' p in
      let (fexelse,texelse, tyelse) = check_term env p' in
      if tythen != tyelse then
        input_error "the in and else branches should have the same type" ext;
      let lenthen = List.length texthen in
      ((fun l ->
          let (thenpart, elsepart) = split lenthen l in
          fex (List.map (fun (t', pat') ->
            Let(pat', t', fexthen thenpart, fexelse elsepart,
                new_occurrence())) lex)), texthen @ texelse, tythen)
  | PPLetIn(pat,t,p) ->
      (* let without else branch: failure leads to Nil *)
      let (ftex, tex', ty) = check_term env t in
      let (fpex, patex', env',_) = check_pat env [] (Some ty) pat in
      let (fex, lex) = pairing_expand (ftex,tex') (fpex, patex') in
      let (fexin, texin, tyin) = check_term env' p in
      ((fun l ->
          fex (List.map (fun (t', pat') ->
            Let(pat', t', fexin l, Nil, new_occurrence())) lex)),
       texin, tyin)
  | PPLetFilter(identlist,(fact,ext),p,q) ->
      let (env', vlist) = List.fold_left (fun (env, vlist) ((s,e),t) ->
        if (StringMap.mem s env) then
          input_warning ("identifier " ^ s ^ " rebound") e;
        let ty = get_type t in
        let v = Terms.new_var s ty in
        (StringMap.add s (EVar v) env, v:: vlist)) (env,[]) identlist in
      let vlist = List.rev vlist in
      let (ffex, fex') = check_fact env' (fact,ext) in
      (* Verify that ffex does not reference the variables of vlist *)
      check_no_ref ext vlist (ffex, fex');
      let (fexthen,texthen, tythen) = check_term env' p in
      let (fexelse,texelse, tyelse) = check_term env q in
      if tythen != tyelse then
        input_error "the in and else branches should have the same type" ext;
      let lenthen = List.length texthen in
      ((fun l ->
          let (thenpart, elsepart) = split lenthen l in
          ffex (List.map (fun f' ->
            LetFilter(vlist, f', fexthen thenpart, fexelse elsepart,
                      new_occurrence())) fex')), texthen @ texelse, tythen)
(* Check a list of terms.  Returns the combined expansion context, the
   cross product of the expanded argument lists, and the list of types. *)
and check_term_list env = function
    [] -> (no_expand_fun, [[]], [])
  | (a::l) ->
      let (afex, alex, ta) = check_term env a in
      let (lfex, llex, tl) = check_term_list env l in
      let (f, l') = pairing_expand (afex, alex) (lfex, llex) in
      (f, List.map (fun (a,l'') -> a::l'') l', ta::tl)
(* Check an application [p] of a predicate or boolean function to
   arguments [l], producing a fact for each expanded argument list.
   A boolean function call f(args) becomes the fact f(args) = true. *)
and check_fl_term env (p,l) =
  let (fex', lex', tl') = check_term_list env l in
  match get_pred_or_fun env p tl' with
    IsPred p' ->
      (fex', List.map (fun l' -> Pred(p', l')) lex')
  | IsFun r ->
      (fex', List.map (fun l' -> equal_fact (FunApp(r, l')) (FunApp(true_cst, []))) lex')
(* Check a fact used in a "let ... suchthat" filter: a predicate
   application or an equality; other connectives are rejected. *)
and check_fact env' (fact, ext) =
  match fact with
    PPIdent p ->
      check_fl_term env' (p,[])
  | PPTuple _ -> input_error "tuples not allowed here" ext
  | PPRestr _ | PPTest _ | PPLetIn _ | PPLet _ | PPLetFilter _ ->
      input_error "new, if, let allowed in terms, but not at this position in conditions" ext
  | PPFunApp((f,fext) as p,l) ->
      match f, l with
        "=", [t1;t2] ->
          let (fex1', tex1', ty1) = check_term env' t1 in
          let (fex2', tex2', ty2) = check_term env' t2 in
          if ty1 != ty2 then
            input_error "the two arguments of an equality test should have the same type" ext;
          let (fex, lex) = pairing_expand (fex1', tex1') (fex2', tex2') in
          (fex, List.map (fun (t1', t2') -> equal_fact t1' t2') lex)
      | "=", _ -> internal_error ("Bad arity for special function " ^ f)
      | ("<>" | "&&" | "||" | "not" | "choice"), _ -> input_error (f ^ " not allowed here") fext
      | _ ->
          check_fl_term env' (p,l)
(* Check a pattern.  [tyopt] is the expected type when known;
   [def_in_this_pat] accumulates the variables bound so far in the
   enclosing pattern.  Returns (expansion context, expanded patterns,
   extended environment, updated bound-variable accumulator). *)
and check_pat env def_in_this_pat tyopt = function
    PPatVar ((s,e), topt) ->
      (* Determine the variable's type: declared, inferred, or both
         (which must then agree) *)
      let ty =
        match topt, tyopt with
          None, None ->
            input_error ("variable " ^ s ^ " should be declared with a type") e
        | Some (t,e), None ->
            get_type (t,e)
        | None, Some ty ->
            ty
        | Some (t,e), Some ty ->
            let ty' = get_type (t,e) in
            if ty != ty' then
              input_error ("variable " ^ s ^ " is declared of type " ^ t ^ " but should be of type " ^ ty.tname) e;
            ty
      in
      if (StringMap.mem s env) then
        input_warning ("identifier " ^ s ^ " rebound") e;
      let v = Terms.new_var s ty in
      (no_expand_fun, [PatVar v], StringMap.add s (EVar v) env, v::def_in_this_pat)
  | PPatTuple l ->
      let (fex',lex',env',def_in_this_pat') = check_pat_list dummy_ext env def_in_this_pat (List.map (fun _ -> None) l) l in
      let f = Terms.get_tuple_fun (List.map Reduction_helper.get_pat_type (List.hd lex')) in
      (fex',List.map (fun l' -> PatTuple(f, l')) lex', env', def_in_this_pat')
  | PPatFunApp((s,ext),l) ->
      begin
        try
          match StringMap.find s env with
            EFun f ->
              begin
                match tyopt with
                  None -> ()
                | Some ty ->
                    if ty != snd f.f_type then
                      input_error ("pattern is of type " ^ (snd f.f_type).tname ^ " but should be of type " ^ ty.tname) ext;
              end;
              let (fex',lex',env',def_in_this_pat') = check_pat_list ext env def_in_this_pat (List.map (fun t -> Some t) (fst f.f_type)) l in
              (* Only invertible (data) constructors may appear in patterns *)
              if f.f_cat <> Tuple then
                input_error ("only data functions are allowed in patterns, not " ^ s) ext;
              if (f.f_options land Param.fun_TYPECONVERTER != 0) && (!Param.ignore_types) then
                (fex', List.map (function
                   [t] -> t
                 | _ -> internal_error "type converter functions should always be unary") lex', env', def_in_this_pat')
              else
                (fex', List.map (fun l' -> PatTuple(f, l')) lex', env', def_in_this_pat')
          | _ ->
              input_error ("only functions can be applied, not " ^ s) ext
        with Not_found ->
          input_error ("function " ^ s ^ " not defined") ext
      end
  | PPatEqual t ->
      let (fex', tex', ty') = check_term env t in
      (* Verify that fex' does not reference the variables of def_in_this_pat,
         which are defined in this pattern, so will not be defined before fex' *)
      check_no_ref (snd t) def_in_this_pat (fex', tex');
      begin
        match tyopt with
          None -> ()
        | Some ty ->
            if ty != ty' then
              input_error ("pattern is of type " ^ ty'.tname ^ " but should be of type " ^ ty.tname) (snd t);
      end;
      (fex', List.map (fun t' -> PatEqual t') tex', env, def_in_this_pat)
(* Check a list of patterns against a list of expected (optional) types,
   threading the environment and the bound-variable accumulator. *)
and check_pat_list ext env def_in_this_pat tyl tl =
  match (tl, tyl) with
    [],[] -> (no_expand_fun, [[]], env, def_in_this_pat)
  | (a::l),(ty::tyl) ->
      let (afex, alex, env', def_in_this_pat') = check_pat env def_in_this_pat ty a in
      let (lfex, llex, env'', def_in_this_pat'') = check_pat_list ext env' def_in_this_pat' tyl l in
      let (f, l') = pairing_expand (afex, alex) (lfex, llex) in
      (f, List.map (fun (a',l'') -> a'::l'') l', env'', def_in_this_pat'')
  | _ -> input_error "wrong arity for pattern" ext
(* Events *)
(* Maps an event name to its function symbol *)
let event_fun_table = Hashtbl.create 7

(* Declare an event [name] with argument types [argtypes].  In
   key-compromise mode, a session identifier is prepended to the
   arguments.  The event is registered in the global environment. *)
let check_event (name, ext) argtypes =
  let tyarg = List.map get_type argtypes in
  let tyarg = if !Param.key_compromise = 0 then tyarg else Param.sid_type :: tyarg in
  if StringMap.mem name (!global_env) then
    input_error ("identifier " ^ name ^ " already defined (as a free name, a function, a predicate, or a type)") ext;
  let r = { f_name = name;
            f_type = tyarg, Param.event_type;
            f_cat = Eq[];
            f_initial_cat = Eq[];
            f_private = true;
            f_options = 0 }
  in
  Hashtbl.add event_fun_table name r;
  global_env := StringMap.add name (EEvent r) (!global_env)
(* Look up event [s] in [env] and check it is applied to arguments of the
   expected types; returns its function symbol. *)
let get_event_fun env (s,ext) tl =
  try
    let r = StringMap.find s env in
    match r with
      EEvent p ->
        if not (Terms.eq_lists (fst p.f_type) tl) then
          input_error ("function " ^ s ^ " expects arguments of type " ^
                       (Terms.tl_to_string ", " (fst p.f_type)) ^
                       " but is given arguments of type " ^
                       (Terms.tl_to_string ", " tl)) ext;
        p
    | _ -> input_error (s ^ " should be an event") ext
  with Not_found ->
    input_error ("event " ^ s ^ " not defined") ext
(* Tables *)
(* Declare a table [name] with column types [argtypes] and register it in
   the global environment. *)
let check_table (name, ext) argtypes =
  let tyarg = List.map get_type argtypes in
  if StringMap.mem name (!global_env) then
    input_error ("identifier " ^ name ^ " already defined (as a free name, a function, a predicate, or a type)") ext;
  let r = { f_name = name;
            f_type = tyarg, Param.table_type;
            f_cat = Eq[];
            f_initial_cat = Eq[];
            f_private = true;
            f_options = 0 }
  in
  global_env := StringMap.add name (ETable r) (!global_env)
(* Look up table [s] in [env] and check it is applied to arguments of the
   expected types; returns its function symbol. *)
let get_table_fun env (s,ext) tl =
  try
    let r = StringMap.find s env in
    match r with
      ETable p ->
        if not (Terms.eq_lists (fst p.f_type) tl) then
          input_error ("table " ^ s ^ " expects arguments of type " ^
                       (Terms.tl_to_string ", " (fst p.f_type)) ^
                       " but is given arguments of type " ^
                       (Terms.tl_to_string ", " tl)) ext;
        p
    | _ -> input_error (s ^ " should be a table") ext
  with Not_found ->
    (* Bug fix: this message previously said "event", copied from
       get_event_fun, although it reports an undefined table *)
    input_error ("table " ^ s ^ " not defined") ext
(* Does this term contain a destructor application?  Constructors
   (Eq/Tuple/Choice categories) and names are not destructors; anything
   else is. *)
let rec has_destr = function
    Var _ -> false
  | FunApp(f,l) ->
      (match f.f_cat with
         Eq _ | Tuple | Choice | Name _ -> false
       | _ -> true) || (List.exists has_destr l)
(* Check and translate a parsed process into internal representation.
   Term expansion contexts produced by check_term are applied here, so
   each syntactic construct may expand into several nested processes. *)
let rec check_process env = function
    PNil -> Nil
  | PPar (p1,p2) ->
      Par(check_process env p1, check_process env p2)
  | PRepl p ->
      Repl(check_process env p, new_occurrence())
  | PTest(c,p1,p2) ->
      (* Rewrite boolean structure of the condition into nested tests *)
      let rec interpret_cond p1 p2 = function
          (PPIdent pred), ext -> interpret_cond p1 p2 (PPFunApp(pred,[]), ext)
        | (PPTuple _), ext ->
            input_error "tuples allowed in terms, but not at this level of conditions" ext
        | (PPRestr _ | PPTest _ | PPLetIn _ | PPLet _ | PPLetFilter _), ext -> input_error "new, if, let allowed in terms, but not at this position in conditions" ext
        | (PPFunApp((f,fext), l)), ext ->
            match f, l with
              "||", [c1;c2] ->
                (* if c1 || c2 then p1 else p2
                   is equivalent to
                   if c1 then p1 else (if c2 then p1 else p2) *)
                interpret_cond p1 (PTest(c2,p1,p2)) c1
            | "&&", [c1;c2] ->
                (* if c1 && c2 then p1 else p2
                   is equivalent to
                   if c1 then (if c2 then p1 else p2) else p2 *)
                interpret_cond (PTest(c2,p1,p2)) p2 c1
            | "not", [c] ->
                interpret_cond p2 p1 c
            | "=", [t1;t2] ->
                let (fex1',tex1', ty1) = check_term env t1 in
                let (fex2',tex2', ty2) = check_term env t2 in
                if ty1 != ty2 then
                  input_error "the two arguments of an equality test should have the same type" ext;
                let (fex,tex) = pairing_expand (fex1',tex1') (fex2',tex2') in
                fex (List.map (fun (t1',t2') ->
                  Test(t1', t2',
                       check_process env p1,
                       check_process env p2,
                       new_occurrence())) tex)
            | "<>", [t1;t2] ->
                (* Inequality: same as equality with branches swapped *)
                let (fex1',tex1', ty1) = check_term env t1 in
                let (fex2',tex2', ty2) = check_term env t2 in
                if ty1 != ty2 then
                  input_error "the two arguments of an inequality test should have the same type" ext;
                let (fex,tex) = pairing_expand (fex1',tex1') (fex2',tex2') in
                fex (List.map (fun (t1',t2') ->
                  Test(t1', t2',
                       check_process env p2,
                       check_process env p1,
                       new_occurrence())) tex)
            | ("||" | "&&" | "=" | "<>" | "not"), _ ->
                internal_error ("Bad arity for special function " ^ f)
            | "choice", _ ->
                input_error "choice allowed in terms, but not at this level of conditions" ext
            | _ ->
                (* Condition is a predicate or a boolean function call *)
                let (fex, lex', tl') = check_term_list env l in
                match get_pred_or_fun env (f,fext) tl' with
                  IsPred p' ->
                    fex (List.map (fun f ->
                      LetFilter([], Pred(p', f),
                                check_process env p1,
                                check_process env p2,
                                new_occurrence())) lex')
                | IsFun f' ->
                    fex (List.map (fun f ->
                      Test(FunApp(f', f), FunApp(true_cst, []),
                           check_process env p1,
                           check_process env p2,
                           new_occurrence())) lex')
      in
      interpret_cond p1 p2 c
  | PLetDef ((s,ext), args) ->
      (* Expand a named process definition, binding its parameters.
         Arguments containing destructors are bound with Let so that
         destructor failure is handled; others are substituted directly. *)
      let (fex, tlex, tyl) = check_term_list env args in
      begin
        try
          let (param, p') = Hashtbl.find pdeftbl s in
          let ptype = List.map (fun b -> b.btype) param in
          if not (Terms.eq_lists ptype tyl) then
            input_error ("process " ^ s ^ " expects arguments of type " ^
                         (Terms.tl_to_string ", " ptype) ^
                         " but is given arguments of type " ^
                         (Terms.tl_to_string ", " tyl)) ext;
          fex (List.map (fun tl ->
            if !Terms.current_bound_vars != [] then
              Parsing_helper.internal_error "bound vars should be cleaned up (pitsyntax)";
            let p = ref p' in
            List.iter2 (fun t v ->
              if has_destr t then
                p := Let(PatVar v, t, (!p), Nil, new_occurrence())
              else
                Terms.link v (TLink t)) tl param;
            let p'' = copy_process false (!p) in
            Terms.cleanup();
            p'') tlex)
        with Not_found ->
          input_error ("process " ^ s ^ " not defined") ext
      end
  | PRestr((s,ext),t,p) ->
      let ty = get_type t in
      if (StringMap.mem s env) then
        input_warning ("identifier " ^ s ^ " rebound") ext;
      let r = create_name s (Param.tmp_type, ty) false in
      Restr(r, check_process (StringMap.add s (EName r) env) p)
  | PInput(tc,pat,p) ->
      let (ftexc, texc', tyc) = check_term env tc in
      if tyc != Param.channel_type then
        input_error ("this term has type " ^ tyc.tname ^ " but should have type channel") (snd tc);
      let (fpex, patex',env',_) = check_pat env [] None pat in
      let (fex, lex) = pairing_expand (ftexc,texc') (fpex, patex') in
      fex (List.map (fun (tc', pat') ->
        Input(tc', pat', check_process env' p,
              new_occurrence())) lex)
  | POutput(tc,t,p) ->
      let (ftexc, texc', tyc) = check_term env tc in
      if tyc != Param.channel_type then
        input_error ("this term has type " ^ tyc.tname ^ " but should have type channel") (snd tc);
      let (ftex, tex, ty) = check_term env t in
      let (fex, lex) = pairing_expand (ftexc,texc') (ftex, tex) in
      fex (List.map (fun (tc', t) ->
        Output(tc', t, check_process env p,
               new_occurrence())) lex)
  | PLet(pat,t,p,p') ->
      let (ftex, tex', ty) = check_term env t in
      let (fpex, patex', env',_) = check_pat env [] (Some ty) pat in
      let (fex, lex) = pairing_expand (ftex,tex') (fpex, patex') in
      fex (List.map (fun (t', pat') ->
        Let(pat', t', check_process env' p,
            check_process env p',
            new_occurrence())) lex)
  | PLetFilter(identlist,(fact,ext),p,q) ->
      let (env', vlist) = List.fold_left (fun (env, vlist) ((s,e),t) ->
        if (StringMap.mem s env) then
          input_warning ("identifier " ^ s ^ " rebound") e;
        let ty = get_type t in
        let v = Terms.new_var s ty in
        (StringMap.add s (EVar v) env, v:: vlist)) (env,[]) identlist in
      let vlist = List.rev vlist in
      let (ffex, fex') = check_fact env' (fact,ext) in
      (* Verify that ffex does not reference the variables of vlist *)
      check_no_ref ext vlist (ffex, fex');
      ffex (List.map (fun f' ->
        LetFilter(vlist, f', check_process env' p,
                  check_process env q,
                  new_occurrence())) fex')
  | PEvent((i,ext),l,p) ->
      let (fex, lex', tl) = check_term_list env l in
      if !Param.key_compromise == 0 then
        let f = get_event_fun env (i,ext) tl in
        fex (List.map (fun l' ->
          Event(FunApp(f, l'), check_process env p,
                new_occurrence())) lex')
      else
        (* Key-compromise mode: events carry a session identifier *)
        let f = get_event_fun env (i,ext) (Param.sid_type :: tl) in
        fex (List.map (fun l' ->
          Event(FunApp(f, (Terms.new_var_def Param.sid_type) :: l'),
                check_process env p,
                new_occurrence())) lex')
  | PInsert((i,ext),l,p) ->
      let (fex, lex', tl) = check_term_list env l in
      let f = get_table_fun env (i,ext) tl in
      fex (List.map (fun l' ->
        Insert(FunApp(f, l'), check_process env p,
               new_occurrence())) lex')
  | PGet((i,ext),patl,t,p) ->
      begin
        try
          match StringMap.find i env with
            ETable f ->
              (* TO DO when check_term will allow &&, ||, <>, = (thanks to destructors
                 with inequality conditions), check_term will be enough instead of interpret_cond *)
              let rec interpret_cond env = function
                  (PPFunApp((f,fext), l)), ext0 when (f = "&&" || f = "=") ->
                    begin
                      match f, l with
                        "&&", [c1;c2] ->
                          let (fex1',tex1', ty1) = interpret_cond env c1 in
                          let (fex2',tex2', ty2) = interpret_cond env c2 in
                          if ty1 != Param.bool_type then
                            input_error "this argument of && should be a boolean" (snd c1);
                          if ty2 != Param.bool_type then
                            input_error "this argument of && should be a boolean" (snd c2);
                          let (fex',lex') = pairing_expand (fex1',tex1') (fex2',tex2') in
                          (fex', List.map (function (t1,t2) -> FunApp(Terms.and_fun, [t1;t2])) lex', Param.bool_type)
                      | "=", [t1;t2] ->
                          let (fex1',tex1', ty1) = check_term env t1 in
                          let (fex2',tex2', ty2) = check_term env t2 in
                          if ty1 != ty2 then
                            input_error "the two arguments of an equality test should have the same type" ext0;
                          let (fex',lex') = pairing_expand (fex1',tex1') (fex2',tex2') in
                          let feq = Terms.equal_fun ty1 in
                          (fex', List.map (function (t1,t2) -> FunApp(feq, [t1;t2])) lex', Param.bool_type)
                      | _ ->
                          internal_error ("Bad arity for special function " ^ f)
                    end
                | t -> check_term env t
              in
              let (fex',lex',env',_) = check_pat_list ext env [] (List.map (fun t -> Some t) (fst f.f_type)) patl in
              let (ftex, tex', ty) = interpret_cond env' t in
              if ty != Param.bool_type then
                input_error ("this term has type " ^ ty.tname ^ " but should have type bool") (snd t);
              let (fex, lex) = pairing_expand (fex', lex') (ftex,tex') in
              fex (List.map (fun (l', t') ->
                Get(PatTuple(f, l'), t', check_process env' p,
                    new_occurrence())) lex)
          | _ ->
              input_error ("only functions can be applied, not " ^ i) ext
        with Not_found ->
          input_error ("function " ^ i ^ " not defined") ext
      end
  | PPhase(n, p) ->
      Phase(n, check_process env p)
(* Accumulators for declarations collected while parsing the input file *)
let query_list = ref ([] : (envdecl * tquery list) list)
let need_vars_in_names = Reduction_helper.need_vars_in_names
let noninterf_list = ref ([] : (funsymb * term list option) list list)
let not_list = ref ([] : (envdecl * gterm_e) list)
let nounif_list = ref ([] : (envdecl * nounif_t) list)
let weaksecret_list = ref ([] : funsymb list)
(* Compute need_vars_in_names: the list of pairs (restriction, variable name)
such that the variable "variable name" must occur as argument in the
pattern that models names created by "restriction", because the
structure "restriction[... variable name = ... ]" is used in the input
file. *)
(* Scan a query term for occurrences of "restriction[... v = ...]" and
   record the (restriction, variable) pairs in need_vars_in_names. *)
let rec nvn_t (term, ext0) =
  match term with
    PGIdent _ -> ()
  | PGFunApp(_,l) -> List.iter nvn_t l
  | PGPhase(_,l, _) -> List.iter nvn_t l
  | PGTuple l -> List.iter nvn_t l
  | PGName ((s,ext),bl) ->
      List.iter (fun ((s',ext'),t) ->
        (* The replication indices do not need to be added in
           need_vars_in_names, because they are always included as
           arguments of names, whether or not they occur in
           the input file.
           They must not be added to need_vars_in_names, because
           they are not correctly computed by trace reconstruction,
           so adding them would cause bugs in trace reconstruction. *)
        if (s' <> "") && (s'.[0] != '!') then
          begin
            try
              let r = Hashtbl.find glob_table s in
              (* print_string ("Need " ^ s' ^ " in " ^ r.f_name ^ "\n"); *)
              need_vars_in_names := (r.f_name, s',ext') :: (!need_vars_in_names)
            with Not_found ->
              ()
          end;
        nvn_t t
      ) bl
  | PGLet(_,t,t') -> nvn_t t; nvn_t t'
(* Apply nvn_t to the term of a real query; putbegin declarations contain
   no name patterns, so there is nothing to do. *)
let nvn_q = function
    PRealQuery q -> nvn_t q
  | PPutBegin(i, l) -> ()
(* Same as nvn_t, but for formats (used in nounif declarations) *)
let rec nvn_f (f,ext0) =
  match f with
    PFGIdent (s,ext) -> ()
  | PFGFunApp((s,ext),l) -> List.iter nvn_f l
  | PFGTuple l -> List.iter nvn_f l
  | PFGName ((s,ext),bl) ->
      List.iter (fun ((s',ext'),t) ->
        (* The replication indices do not need to be added in
           need_vars_in_names, because they are always included as
           arguments of names, whether or not they occur in
           the input file.
           They must not be added to need_vars_in_names, because
           they are not correctly computed by trace reconstruction,
           so adding them would cause bugs in trace reconstruction. *)
        if (s' <> "") && (s'.[0] != '!') then
          begin
            try
              let r = Hashtbl.find glob_table s in
              (* print_string ("Need " ^ s' ^ " in " ^ r.f_name ^ "\n"); *)
              need_vars_in_names := (r.f_name, s',ext') :: (!need_vars_in_names)
            with Not_found ->
              ()
          end;
        nvn_f t
      ) bl
  | PFGAny (s,ext) -> ()
  | PFGLet(_,t,t') -> nvn_f t; nvn_f t'
(* Walk a nounif declaration, applying [nvn_f] to every format it contains. *)
let rec nvn_nounif nounif =
  match nounif with
    BFLet(_, fmt, rest) -> nvn_f fmt; nvn_nounif rest
  | BFNoUnif((_, fl, _), _) -> List.iter nvn_f fl
(* Macro expansion *)
(* Macro-expansion state:
   - [macrotable] maps a macro name to (parameters, body,
     identifiers already defined at the macro definition point);
   - [rename_table] maps source identifiers to their fresh names
     during one expansion;
   - [expansion_number] makes the fresh names of each expansion unique. *)
let macrotable = ref StringMap.empty
let rename_table = ref StringMap.empty
let expansion_number = ref 0
(* Rename one identifier during macro expansion.
   - Built-in operators and keywords are never renamed.
   - Session identifiers (starting with '!') are global and kept as is.
   - Macro parameters and already-defined idents are found in
     [rename_table]; any other ident gets a fresh "@n_" name, which is
     memoized so every occurrence maps to the same fresh name.
   Robustness fix: guard against the empty string before reading
   [i.[0]], which would raise Invalid_argument. *)
let rename_ident i =
  match i with
    "=" | "<>" | "not" | "&&" | "||" | "event" | "inj-event" | "==>" | "choice" -> i
  | _ ->
      if i <> "" && i.[0] = '!' then i else
      try
        StringMap.find i (!rename_table)
      with Not_found ->
        let r = "@" ^ (string_of_int (!expansion_number)) ^ "_" ^ i in
        rename_table := StringMap.add i r (!rename_table);
        r
(* Rename an identifier while preserving its source extent. *)
let rename_ie (i,ext) = (rename_ident i, ext)
(* Rename every identifier occurring in a term, keeping extents. *)
let rec rename_term (t, ext) =
  match t with
    PIdent i -> (PIdent (rename_ie i), ext)
  | PFunApp(f, args) -> (PFunApp(rename_ie f, List.map rename_term args), ext)
  | PTuple comps -> (PTuple (List.map rename_term comps), ext)
(* Rename every identifier occurring in a clause format. *)
let rec rename_format fmt =
  match fmt with
    PFIdent i -> PFIdent (rename_ie i)
  | PFFunApp(f, args) -> PFFunApp(rename_ie f, List.map rename_format args)
  | PFTuple comps -> PFTuple (List.map rename_format comps)
  | PFName _ -> internal_error "Names not allowed in formats with -in pi"
  | PFAny i -> PFAny (rename_ie i)
(* Rename a format fact: predicate identifier plus its argument formats. *)
let rename_format_fact (i,l) = (rename_ie i, List.map rename_format l)
(* Rename every identifier occurring in a query format (nounif side),
   including the identifiers bound inside name patterns. *)
let rec rename_gformat (t,ext) =
let t' = match t with
PFGIdent i -> PFGIdent (rename_ie i)
| PFGFunApp(f,l) -> PFGFunApp(rename_ie f, List.map rename_gformat l)
| PFGTuple l -> PFGTuple(List.map rename_gformat l)
| PFGName(i,l) -> PFGName(rename_ie i, List.map (fun (i,t) -> (rename_ie i, rename_gformat t)) l)
| PFGAny i -> PFGAny (rename_ie i)
| PFGLet(i,t,t') -> PFGLet(rename_ie i, rename_gformat t, rename_gformat t')
in
(t',ext)
(* Rename identifiers in a nounif declaration; the weight annotations
   [n'] and [n] are left untouched. *)
let rec rename_nounif nounif =
  match nounif with
    BFLet(i, fmt, rest) ->
      BFLet(rename_ie i, rename_gformat fmt, rename_nounif rest)
  | BFNoUnif((i, args, n'), n) ->
      BFNoUnif((rename_ie i, List.map rename_gformat args, n'), n)
(* Rename every identifier occurring in a query term (query side),
   including the identifiers bound inside name patterns. *)
let rec rename_gterm (t,ext) =
let t' = match t with
PGIdent i -> PGIdent (rename_ie i)
| PGFunApp(f,l) -> PGFunApp(rename_ie f, List.map rename_gterm l)
| PGPhase(i,l,n) -> PGPhase(rename_ie i, List.map rename_gterm l, n)
| PGTuple l -> PGTuple(List.map rename_gterm l)
| PGName(i,l) -> PGName(rename_ie i, List.map (fun (i,t) -> (rename_ie i, rename_gterm t)) l)
| PGLet(i,t,t') -> PGLet(rename_ie i, rename_gterm t, rename_gterm t')
in
(t',ext)
(* Rename identifiers in one query declaration. *)
let rename_query q =
  match q with
    PPutBegin(b, ids) -> PPutBegin(b, List.map rename_ie ids)
  | PRealQuery t -> PRealQuery (rename_gterm t)
(* Rename identifiers in one Horn clause declaration. *)
let rename_clause cl =
  match cl with
    PClause(hyp, concl) -> PClause(rename_term hyp, rename_term concl)
  | PFact f -> PFact (rename_term f)
  | PEquiv(t1, t2, b) -> PEquiv(rename_term t1, rename_term t2, b)
(* Rename identifiers in process terms and patterns (mutually recursive,
   since patterns may embed terms via "=t" and terms embed patterns via
   let-bindings). *)
let rec rename_pterm (t,ext) =
let t' = match t with
PPIdent i -> PPIdent (rename_ie i)
| PPFunApp(f,l) -> PPFunApp(rename_ie f, List.map rename_pterm l)
| PPTuple(l) -> PPTuple(List.map rename_pterm l)
| PPRestr(i,ty,t) -> PPRestr(rename_ie i, rename_ie ty, rename_pterm t)
| PPTest(t1,t2,t3) -> PPTest(rename_pterm t1, rename_pterm t2, rename_pterm t3)
| PPLetIn(pat, t1, t2) -> PPLetIn(rename_pat pat, rename_pterm t1, rename_pterm t2)
| PPLet(pat, t1, t2, t3) -> PPLet(rename_pat pat, rename_pterm t1, rename_pterm t2, rename_pterm t3)
| PPLetFilter(l, t1, t2, t3) -> PPLetFilter(List.map(fun (i,ty) -> (rename_ie i, rename_ie ty)) l, rename_pterm t1, rename_pterm t2, rename_pterm t3)
in
(t',ext)
(* Rename identifiers in a pattern; types of typed variables are renamed too. *)
and rename_pat = function
PPatVar(i,tyopt) -> PPatVar(rename_ie i, match tyopt with
None -> None
| Some ty -> Some (rename_ie ty))
| PPatTuple l -> PPatTuple(List.map rename_pat l)
| PPatFunApp(f,l) -> PPatFunApp(rename_ie f, List.map rename_pat l)
| PPatEqual t -> PPatEqual (rename_pterm t)
(* Rename every identifier occurring in a process. Phase numbers are
   integers and are not renamed. *)
let rec rename_process = function
PNil -> PNil
| PPar(p1,p2) -> PPar(rename_process p1, rename_process p2)
| PRepl(p) -> PRepl(rename_process p)
| PRestr(i,ty,p) -> PRestr(rename_ie i, rename_ie ty, rename_process p)
| PLetDef(i,l) -> PLetDef(rename_ie i, List.map rename_pterm l)
| PTest(t,p1,p2) -> PTest(rename_pterm t, rename_process p1, rename_process p2)
| PInput(t,pat,p) -> PInput(rename_pterm t, rename_pat pat, rename_process p)
| POutput(t1,t2,p) -> POutput(rename_pterm t1, rename_pterm t2, rename_process p)
| PLet(pat, t, p1, p2) -> PLet(rename_pat pat, rename_pterm t, rename_process p1, rename_process p2)
| PLetFilter(l, t, p1, p2) -> PLetFilter(List.map (fun (i,ty) -> (rename_ie i, rename_ie ty)) l, rename_pterm t, rename_process p1, rename_process p2)
| PEvent(i,l,p) -> PEvent(rename_ie i ,List.map rename_pterm l, rename_process p)
| PInsert(i,l,p) -> PInsert(rename_ie i ,List.map rename_pterm l, rename_process p)
| PGet(i,patl,t,p) -> PGet(rename_ie i ,List.map rename_pat patl, rename_pterm t, rename_process p)
| PPhase(n,p) -> PPhase(n, rename_process p)
(* Rename both the variables and their types in an environment declaration. *)
let rename_env env = List.map (fun (i,ty) -> (rename_ie i, rename_ie ty)) env
(* Rename every identifier occurring in one top-level declaration.
   Settings, nested macro definitions and nested expansions are
   rejected: settings are global, and expansions inside a macro body
   are handled at macro definition time (see check_one/TDefine). *)
let rename_decl = function
TTypeDecl i -> TTypeDecl (rename_ie i)
| TFunDecl(i,l,ty,opt) -> TFunDecl(rename_ie i, List.map rename_ie l, rename_ie ty, opt)
| TEventDecl(i,l) -> TEventDecl(rename_ie i, List.map rename_ie l)
| TTableDecl(i,l) -> TTableDecl(rename_ie i, List.map rename_ie l)
| TConstDecl(i,ty,opt) -> TConstDecl(rename_ie i, rename_ie ty, opt)
| TReduc(l,opt) -> TReduc(List.map (fun (env,t1,t2) -> (rename_env env, rename_term t1, rename_term t2)) l, opt)
| TEquation(env, t1, t2) -> TEquation(rename_env env, rename_term t1, rename_term t2)
| TPredDecl(i,l,opt) -> TPredDecl(rename_ie i, List.map rename_ie l, opt)
| TSet ((_,ext),_) ->
input_error "set is not allowed inside macro definitions" ext
| TPDef(i,env,p) -> TPDef(rename_ie i, rename_env env, rename_process p)
| TQuery(env, l) -> TQuery(rename_env env, List.map rename_query l)
| TNoninterf(env, l) -> TNoninterf(rename_env env, List.map (fun (i,tlopt) ->
(rename_ie i, match tlopt with
None -> None
| Some tl -> Some (List.map rename_term tl))) l)
| TWeaksecret i -> TWeaksecret (rename_ie i)
| TNoUnif(env, nounif) -> TNoUnif(rename_env env, rename_nounif nounif)
| TNot(env, t) -> TNot(rename_env env, rename_gterm t)
| TElimtrue(env, f) -> TElimtrue(rename_env env, rename_term f)
| TFree(i,ty, opt) -> TFree(rename_ie i, rename_ie ty, opt)
| TClauses l -> TClauses (List.map (fun (env, cl) -> (rename_env env, rename_clause cl)) l)
| TDefine((s1,ext1),argl,def) ->
input_error "macro definitions are not allowed inside macro definitions" ext1
| TExpand((s1,ext1),argl) ->
internal_error "macro-expansion inside a macro should have been expanded at macro definition point"
| TLetFun(i,env,t) -> TLetFun(rename_ie i, rename_env env, rename_pterm t)
(* Expand one macro call: [argl] are the actual arguments, [paraml] the
   formal parameters, [already_def] the identifiers defined at the macro
   definition point (kept unrenamed), [def] the macro body. Every other
   identifier in [def] receives a fresh "@n_" name via rename_table.
   The table is reset before and after the expansion. *)
let apply argl paraml already_def def =
rename_table := StringMap.empty;
incr expansion_number;
(* Identifiers visible at definition point map to themselves. *)
List.iter (fun s ->
rename_table := StringMap.add s s (!rename_table)) already_def;
(* Each formal parameter maps to its actual argument. *)
List.iter2 (fun (a,_) (p,_) ->
rename_table := StringMap.add p a (!rename_table)) argl paraml;
let def' = List.map rename_decl def in
rename_table := StringMap.empty;
def'
(* Handle all declarations *)
(* Check one top-level declaration, updating the global environment,
   the declaration tables, and the query/nounif/not accumulators.
   Recursive because macro expansion (TExpand) re-checks the expanded
   declarations. *)
let rec check_one = function
TTypeDecl(i) -> check_type_decl i
| TFunDecl(f,argt,rest,i) -> check_fun_decl f argt rest i
(* A constant is a function with no arguments. *)
| TConstDecl(f,rest,i) -> check_fun_decl f [] rest i
| TEquation(env,t1,t2) -> check_equation env t1 t2
| TReduc (r,i) -> check_red r i
| TPredDecl (p, argt, info) -> check_pred p argt info
| TEventDecl(i, args) -> check_event i args
| TTableDecl(i, args) -> check_table i args
(* Process definition "let s(args) = p": type-check the body in an
   environment extended with the parameters, store it in pdeftbl. *)
| TPDef ((s,ext), args, p) ->
let env = ref (!global_env) in
let arglist = List.map (fun ((s',ext'),ty) ->
let t = get_type ty in
begin
try
match StringMap.find s' (!env) with
EVar _ -> input_error ("variable " ^ s' ^ " already defined") ext'
| _ -> ()
with Not_found ->
()
end;
let v = Terms.new_var s' t in
env := StringMap.add s' (EVar v) (!env);
v
) args
in
let p' = check_process (!env) p in
Hashtbl.add pdeftbl s (arglist, p')
(* Queries and similar declarations are only accumulated here; they are
   checked later, once the whole process is known. *)
| TQuery (env,q) ->
query_list := (env,q) :: (!query_list)
| TNoninterf (env, lnoninterf) ->
noninterf_list := (List.map (get_non_interf env) lnoninterf) :: (!noninterf_list);
| TWeaksecret i ->
weaksecret_list := (get_non_interf_name (!global_env) i) ::(!weaksecret_list)
| TNoUnif (env, nounif) ->
nounif_list := (env, nounif) :: (!nounif_list)
| TElimtrue(env, fact) ->
let env = create_env env in
Param.elim_true := (check_simple_fact env fact) :: (!Param.elim_true)
| TNot (env, no) ->
not_list := (env, no) :: (!not_list)
| TFree (name,ty,i) ->
add_free_name name ty i
| TClauses c ->
List.iter check_clause c
(* "letfun": like TPDef but the body is a term; stored in global_env. *)
| TLetFun ((s,ext), args, p) ->
let env = ref (!global_env) in
let arglist = List.map (fun ((s',ext'),ty) ->
let t = get_type ty in
begin
try
match StringMap.find s' (!env) with
EVar _ -> input_error ("variable " ^ s' ^ " already defined") ext'
| _ -> ()
with Not_found ->
()
end;
let v = Terms.new_var s' t in
env := StringMap.add s' (EVar v) (!env);
v
) args
in
let (fex, tex, ty) = check_term (!env) p in
global_env := StringMap.add s (ELetFun(arglist, fex, tex, ty)) (!global_env)
(* TO DO handle TExpand (_, _)|TDefine (_, _, _) *)
| TDefine((s1,ext1),argl,def) ->
if StringMap.mem s1 (!macrotable) then
input_error ("Macro " ^ s1 ^ " already defined.") ext1
else
(* Expand macro calls inside macro definitions
Because this is done at macro definition point, this requires that
the macros used inside the definition be defined before, which
is a safe requirement. (It prevents recursive macros, in particular.) *)
let rec expand_inside_macro = function
TDefine((s,ext),_,_)::l ->
input_error "macro definitions are not allowed inside macro definitions" ext
| TExpand((s2,ext2), argl2)::l ->
begin
try
let (paraml2, def2, already_def2) = StringMap.find s2 (!macrotable) in
if List.length argl2 != List.length paraml2 then
input_error ("Macro " ^ s2 ^ " expects " ^ (string_of_int (List.length paraml2)) ^
" arguments, but is here given " ^ (string_of_int (List.length argl2)) ^ " arguments.") ext2;
(apply argl2 paraml2 already_def2 def2) @ (expand_inside_macro l)
with Not_found ->
input_error ("Macro " ^ s2 ^ " not defined.") ext2
end
| a::l -> a::(expand_inside_macro l)
| [] -> []
in
let def = expand_inside_macro def in
(* Remember which idents exist now: they stay unrenamed at expansion. *)
let already_def = ref [] in
StringMap.iter (fun s _ -> already_def := s :: (!already_def)) (!global_env);
macrotable := StringMap.add s1 (argl, def, !already_def) (!macrotable)
(* Macro expansion: rename the stored body and check the result. *)
| TExpand((s1,ext1),argl) ->
begin
try
let (paraml, def, already_def ) = StringMap.find s1 (!macrotable) in
if List.length argl != List.length paraml then
input_error ("Macro " ^ s1 ^ " expects " ^ (string_of_int (List.length paraml)) ^
" arguments, but is here given " ^ (string_of_int (List.length argl)) ^ " arguments.") ext1;
List.iter check_one (apply argl paraml already_def def)
with Not_found ->
input_error ("Macro " ^ s1 ^ " not defined.") ext1
end
| TSet _ -> internal_error "set declaration should have been handled before"
(* Get the maximum phase number *)
(* Walk the process and raise [Param.max_used_phase] to the largest
   phase number occurring in it. *)
let rec set_max_used_phase proc =
  match proc with
  | Nil -> ()
  | Par(p1, p2) | Test(_, _, p1, p2, _) | Let(_, _, p1, p2, _) ->
      set_max_used_phase p1;
      set_max_used_phase p2
  | Repl(p, _) | Restr(_, p)
  | Input(_, _, p, _) | Output(_, _, p, _)
  | Event(_, p, _) | Insert(_, p, _) | Get(_, _, p, _) ->
      set_max_used_phase p
  | LetFilter(_, _, p, q, _) ->
      set_max_used_phase p;
      set_max_used_phase q
  | Phase(n, p) ->
      if n > !Param.max_used_phase then
        Param.max_used_phase := n;
      set_max_used_phase p
(* Parse and check an input file: read settings first (ignoreTypes in
   particular must be known before any declaration is checked), then
   check all other declarations, then check and copy the main process,
   collect need_vars_in_names, and finally compute the maximum phase. *)
let parse_file s =
init_fun_decl();
let (decl, proc) = parse_with_lib s in
(* ignoreTypes must be set before doing the rest of the work
Setting all parameters beforehand does not hurt. *)
List.iter (function
TSet((p,ext),v) ->
begin
match (p,v) with
"attacker", S ("passive",_) -> Param.active_attacker := false
| "attacker", S ("active",_) -> Param.active_attacker := true
| "keyCompromise", S ("strict",_) -> Param.key_compromise := 2
| "keyCompromise", S ("approx",_) -> Param.key_compromise := 1
| "keyCompromise", S ("none",_) -> Param.key_compromise := 0
| "movenew", _ -> Param.boolean_param Param.move_new p ext v
| "verboseClauses", S ("explained",_) -> Param.verbose_explain_clauses := Param.ExplainedClauses
| "verboseClauses", S ("short",_) -> Param.verbose_explain_clauses := Param.Clauses
| "verboseClauses", S ("none",_) -> Param.verbose_explain_clauses := Param.NoClauses
| "explainDerivation", _ -> Param.boolean_param Param.explain_derivation p ext v
| "predicatesImplementable", S("check",_) -> Param.check_pred_calls := true
| "predicatesImplementable", S("nocheck",_) -> Param.check_pred_calls := false
| "eqInNames", _ ->
(* eqInNames disables trace reconstruction. *)
Param.boolean_param Param.eq_in_names p ext v;
if !Param.eq_in_names then Param.reconstruct_trace := false
| "reconstructTrace", _ -> Param.boolean_param Param.reconstruct_trace p ext v
| "traceBacktracking", _ -> Param.boolean_param Param.trace_backtracking p ext v
| "unifyDerivation", _ -> Param.boolean_param Param.unify_derivation p ext v
| "traceDisplay", S ("none",_) -> Param.trace_display := Param.NoDisplay
| "traceDisplay", S ("short",_) -> Param.trace_display := Param.ShortDisplay
| "traceDisplay", S ("long",_) -> Param.trace_display := Param.LongDisplay
| "ignoreTypes", S (("all" | "true"), _) -> Param.ignore_types := true
| "ignoreTypes", S ("attacker", _) -> Param.ignore_types := false; Param.untyped_attacker := true
| "ignoreTypes", S (("none" | "false"), _) -> Param.ignore_types := false; Param.untyped_attacker := false
| _,_ -> Param.common_parameters p ext v
end
| _ -> ()) decl;
(* Second pass: check every non-setting declaration. *)
List.iter (function
TSet _ -> ()
| x -> check_one x) decl;
let p = Terms.auto_cleanup (fun () ->
(* I call copy_process to make sure that all variables are distinct.
copy_process renames variables in patterns *)
copy_process true (check_process (!global_env) proc))
in
(* Record which bound variables must be kept in name patterns. *)
List.iter (fun (_, q) -> List.iter nvn_q q) (!query_list);
List.iter (fun (_, no) -> nvn_t no) (!not_list);
List.iter (fun (_, nounif) -> nvn_nounif nounif) (!nounif_list);
(* Strict key compromise forces a fixed maximum phase of 1. *)
if !Param.key_compromise = 2 then
Param.max_used_phase := 1
else
set_max_used_phase p;
p
(* Print all declared functions with their argument and result types. *)
let display () =
  let show_fun _ fsymb =
    print_string
      (fsymb.f_name ^ "(" ^ (Terms.tl_to_string ", " (fst fsymb.f_type))
       ^ "):" ^ (snd fsymb.f_type).tname ^ ". ")
  in
  print_string "Functions ";
  Hashtbl.iter show_fun fun_decls;
  print_string "\n"
(* queries *)
(* Session constant used in key-compromise mode to mark events of
   non-compromised sessions. *)
let non_compromised_session = FunApp(Param.session1, [])
(* Note: when check_query, get_queries are applied before the
translation of the process into Horn clauses has been done,
the arity of names may not be correctly initialized. In this case,
update_arity_names should be called after the translation of the
process to update it. *)
(* Resolve an identifier occurring in a query: accepts variables
   (following TLink links installed by query "let" bindings), free
   names, and zero-arity functions (constants). Anything else, or an
   unknown identifier, is an input error.
   Fix: the arity error message said "has expects"; now matches the
   wording of fget_ident_any below. *)
let get_ident_any env (s, ext) =
  try
    match StringMap.find s env with
      EVar b ->
        begin
          match b.link with
            TLink t -> t          (* variable bound by a query "let" *)
          | NoLink -> Var b
          | _ -> internal_error "unexpected link in get_ident_any"
        end
    | EName r ->
        FunApp(r, [])
    | EFun f ->
        if fst f.f_type == [] then
          FunApp(f,[])
        else
          input_error ("function " ^ s ^ " expects " ^
                       (string_of_int (List.length (fst f.f_type))) ^
                       " arguments but is used without arguments") ext
    | _ -> input_error ("identifier " ^ s ^ " should be a variable, a free name, or a function") ext
  with Not_found ->
    input_error ("identifier " ^ s ^ " not defined") ext
(* Type-check a query term, returning (internal term, its type).
   Mutually recursive with [binding_find] (resolve a "v = t" binding in
   a name pattern) and [add_binding] (extend the environment for
   "let v = t in ..." inside queries). *)
let rec check_query_term env (term, ext0) =
match term with
PGIdent i ->
let t = get_ident_any env i in
(t, Terms.get_term_type t)
| PGPhase _ -> input_error ("phase unexpected in query terms") ext0
| PGFunApp((s,ext),l) ->
(* Logical connectives and event keywords cannot appear inside terms. *)
if List.mem s ["="; "<>"; "==>"; "&&"; "||"; "event"; "inj-event"] then
input_error (s ^ " unexpected in query terms") ext;
begin
try
match StringMap.find s env with
EFun f ->
(match f.f_cat with
Eq _ | Tuple -> ()
| _ -> input_error ("function " ^ s ^ " is defined by \"reduc\". Such a function should not be used in a query") ext);
let (l', tl') = List.split (List.map (check_query_term env) l) in
if Terms.eq_lists (fst f.f_type) tl' then
(* Type converters disappear when types are ignored. *)
if (f.f_options land Param.fun_TYPECONVERTER != 0) && (!Param.ignore_types) then
match l' with
[t] -> (t, snd f.f_type)
| _ -> internal_error "type converter functions should always be unary"
else
(FunApp(f, l'), snd f.f_type)
else
input_error ("function " ^ s ^ " expects arguments of type " ^
(Terms.tl_to_string ", " (fst f.f_type)) ^
" but is given arguments of type " ^
(Terms.tl_to_string ", " tl')) ext
| _ -> input_error("only functions can be applied, not " ^ s) ext
with Not_found ->
input_error ("function " ^ s ^ " not defined") ext
end
| PGTuple l ->
let (l', tl') = List.split (List.map (check_query_term env) l) in
(FunApp(Terms.get_tuple_fun tl', l'), Param.bitstring_type)
| PGName ((s,ext),bl) ->
begin
try
let r = Hashtbl.find glob_table s in
check_single ext s;
(* If the name's argument types are not known yet (before the
   translation to Horn clauses), delay the check via a PGTLink. *)
if fst r.f_type == Param.tmp_type then
begin
let v = Terms.new_var Param.def_var_name (snd r.f_type) in
v.link <- PGTLink (env, (term,ext0));
(Var v, snd r.f_type)
end
else
begin
match r.f_cat with
Name { prev_inputs_meaning = sl } ->
List.iter (fun ((s',ext'),_) ->
if not (List.mem s' sl) then
input_error ("variable " ^ s' ^ " not defined at restriction " ^ s) ext') bl;
let p = List.map2 (fun s'' ty ->
if s'' = "!comp" then non_compromised_session else
binding_find env s'' ty bl) sl (fst r.f_type)
in
(FunApp(r, p), snd r.f_type)
| _ -> internal_error "name expected here"
end
with Not_found ->
input_error (s ^ " should be a name") ext
end
| PGLet(id,t,t') -> check_query_term (add_binding env (id,t)) t'
(* Look up the value bound to [s] in a name pattern's binding list;
   unbound positions become fresh variables. *)
and binding_find env s ty = function
[] -> Terms.new_var_def ty
| ((s',ext),t)::l ->
if s' = s then
begin
let (t', ty') = check_query_term env t in
if ty' != ty then
input_error ("this variable is of type " ^ ty.tname ^ " but is given a value of type " ^ ty'.tname) ext;
t'
end
else
binding_find env s ty l
(* Add a query-level "let" binding, linking the new variable to its value. *)
and add_binding env ((i,ext),t) =
begin
try
match StringMap.find i env with
EVar _ -> input_error ("variable " ^ i ^ " already defined") ext
| _ -> ()
with Not_found -> ()
end;
let (t', ty') = check_query_term env t in
let v = Terms.new_var i ty' in
v.link <- TLink t';
StringMap.add i (EVar v) env
(* Build the QFact for a "mess" atom in a query at phase [n]
   (n = -1 selects the maximal phase used by the process).
   The first argument must be a channel. *)
let check_mess env e tl n =
match tl with
[t1;t2] ->
if n > !Param.max_used_phase then
begin
input_warning "phase greater than the maximum phase used in the process.\nIs that really what you want?" e;
Param.max_used_phase := n;
end;
let (t1', ty1) = check_query_term env t1 in
let (t2', ty2) = check_query_term env t2 in
if ty1 != Param.channel_type then
input_error ("First argument of mess is of type " ^ ty1.tname ^ " and should be of type channel") e;
let mess_n = Param.get_pred (Mess((if n = -1 then (!Param.max_used_phase) else n),
ty2))
in
QFact(mess_n, [t1';t2'])
| _ ->
input_error "arity of predicate mess should be 2" e
(* Build the QFact for an "attacker" atom in a query at phase [n]
   (n = -1 selects the maximal phase used by the process). *)
let check_attacker env e tl n =
  match tl with
    [t1] ->
      if n > !Param.max_used_phase then
        begin
          input_warning "phase greater than the maximum phase used in the process.\nIs that really what you want?" e;
          Param.max_used_phase := n;
        end;
      let (t1', ty1) = check_query_term env t1 in
      (* After the possible update above, -1 denotes the current maximum. *)
      let phase = if n = -1 then !Param.max_used_phase else n in
      QFact(Param.get_pred (Attacker(phase, ty1)), [t1'])
  | _ ->
      input_error "arity of predicate attacker should be 1" e
(* Check one query atom ("event"): (in)equality tests, event and
   inj-event applications, attacker/mess facts (possibly with a phase),
   and user-declared predicates. Note: this shadows the earlier
   [check_event] used for event declarations (see check_one). *)
let rec check_event env (f,e) =
match f with
PGFunApp(("<>", _), [t1; t2]) ->
let (t1', ty1) = check_query_term env t1 in
let (t2', ty2) = check_query_term env t2 in
if ty1 != ty2 then
input_error "the two arguments of an inequality test should have the same type" e;
QNeq(t1', t2')
| PGFunApp(("=", _), [t1; t2]) ->
let (t1', ty1) = check_query_term env t1 in
let (t2', ty2) = check_query_term env t2 in
if ty1 != ty2 then
input_error "the two arguments of an equality test should have the same type" e;
QEq(t1', t2')
| PGFunApp(("event",e'),tl0) ->
begin
match tl0 with
[PGFunApp((s,e''), tl),_] ->
let (tl', tyl') = List.split (List.map (check_query_term env) tl) in
(* In key-compromise mode events carry an extra session-id argument. *)
if !Param.key_compromise == 0 then
QSEvent(false, FunApp((get_event_fun env (s, e'') tyl'), tl'))
else
QSEvent(false, FunApp((get_event_fun env (s, e'') (Param.sid_type :: tyl')),
(Terms.new_var_def Param.sid_type)::tl'))
| _ -> input_error "predicate event should have one argument, which is a function application" e'
end
| PGFunApp(("inj-event",e'),tl0) ->
begin
match tl0 with
[PGFunApp((s,e''), tl),_] ->
let (tl', tyl') = List.split (List.map (check_query_term env) tl) in
if !Param.key_compromise == 0 then
QSEvent(true, FunApp((get_event_fun env (s, e'') tyl'), tl'))
else
QSEvent(true, FunApp((get_event_fun env (s, e'') (Param.sid_type :: tyl')),
(Terms.new_var_def Param.sid_type)::tl'))
| _ -> input_error "predicate inj-event should have one argument, which is a function application" e'
end
| PGFunApp(("attacker",_), tl) ->
check_attacker env e tl (-1)
| PGFunApp(("mess",_), tl) ->
check_mess env e tl (-1)
| PGFunApp((s, ext) as p, tl) ->
if List.mem s ["||"; "&&"; "not"; "==>"] then
input_error (s ^ " unexpected in events") ext;
let (tl', tyl) = List.split (List.map (check_query_term env) tl) in
QFact(get_pred env p tyl, tl')
| PGPhase((s, ext), tl, n) ->
begin
match s with
"mess" -> check_mess env e tl n
| "attacker" -> check_attacker env e tl n
| _ -> input_error "phases can be used only with attacker or mess" ext
end
| PGIdent p ->
QFact(get_pred env p [], [])
| PGLet(id,t,t') -> check_event (add_binding env (id,t)) t'
| _ -> input_error "an event should be a predicate application" e
(* Check the hypothesis part of a query (right of "==>").
   The result is a disjunction of conjunctions: "||" concatenates the
   outer lists, "&&" distributes over them, and a nested "==>" becomes
   a NestedQuery element. *)
let rec check_hyp env = function
PGFunApp(("==>", _), [ev; hypll]), _ ->
let ev' = check_event env ev in
(
match ev' with
QNeq _ | QEq _ -> input_error "Inequalities or equalities cannot occur before ==> in queries" (snd ev)
| _ -> ()
);
let hypll' = check_hyp env hypll in
[[NestedQuery(Before(ev', hypll'))]]
| PGFunApp(("||", _), [he1;he2]), _ ->
(check_hyp env he1) @ (check_hyp env he2)
| PGFunApp(("&&", _), [he1;he2]), _ ->
(* Cartesian product of the two disjunctions. *)
let he1' = check_hyp env he1 in
let he2' = check_hyp env he2 in
List.concat (List.map (fun e1 -> List.map (fun e2 -> e1 @ e2) he2') he1')
| PGLet(id,t,t'), _ -> check_hyp (add_binding env (id,t)) t'
| ev -> [[QEvent(check_event env ev)]]
(* Check a whole correspondence query "e ==> h" (or a single event).
   In key-compromise mode, the session id of the conclusion event is
   replaced by the non-compromised-session constant. *)
let rec check_real_query_top env = function
PGFunApp(("==>", _), [ev; hypll]), _ ->
let ev' = check_event env ev in
let ev'' =
match ev' with
QNeq _ | QEq _ -> user_error "Inequalities or equalities cannot occur before ==> in queries\n"
| QFact _ -> ev'
| QSEvent _ when !Param.key_compromise == 0 -> ev'
| QSEvent(inj, FunApp(f, sid::l)) ->
QSEvent(inj, FunApp(f, non_compromised_session::l))
| QSEvent(_,_) ->
internal_error "Bad format for events in queries"
in
let hypll' = check_hyp env hypll in
Before(ev'', hypll')
| PGLet(id,t,t'), _ -> check_real_query_top (add_binding env (id,t)) t'
| ev ->
(* A query without "==>": the hypothesis list is empty. *)
let ev' = check_event env ev in
let ev'' =
match ev' with
QNeq _ | QEq _ -> user_error "Inequalities or equalities cannot occur alone queries\n"
| QFact _ -> ev'
| QSEvent _ when !Param.key_compromise == 0 -> ev'
| QSEvent(inj, FunApp(f, sid::l)) ->
QSEvent(inj, FunApp(f, non_compromised_session::l))
| QSEvent(_,_) ->
internal_error "Bad format for events in queries"
in
Before(ev'', [])
(* Check a list of parsed queries: real queries go through
   check_real_query_top; put-begin declarations resolve their event
   identifiers against the environment. *)
let rec check_query_list env = function
[] -> []
| (PRealQuery q)::lq ->
(RealQuery(check_real_query_top env q))::(check_query_list env lq)
| (PPutBegin(i, l))::lq ->
let l' = List.map (fun (s,e) ->
try
match StringMap.find s env with
EEvent r -> r
| _ -> input_error (s ^ " should be an event") e
with Not_found ->
input_error ("unknown event " ^s) e) l
in
(PutBegin(i,l'))::(check_query_list env lq)
(* True when the hypothesis of a query contains an injective event,
   possibly inside a nested query. *)
let rec has_inj = function
    Before(_, hyps) ->
      let elem_has_inj = function
          NestedQuery q -> has_inj q
        | QEvent (QSEvent (inj, _)) -> inj
        | QEvent _ -> false
      in
      List.exists (fun conj -> List.exists elem_has_inj conj) hyps
(* If the hypothesis of [q] contains an injective event, the conclusion
   must itself be an event; it is then forced to be injective. Applied
   recursively to nested queries. *)
let rec check_inj_coherent_r q =
if has_inj q then
match q with
Before(e,ll) ->
let e' =
match e with
QFact _ | QNeq _ | QEq _ -> user_error "In a query e ==> h, if h contains an injective event, then e must be an event or better inj-event\n"
| QSEvent(_,t) -> QSEvent(true, t) (* set the event injective *)
in
Before(e', List.map (List.map (function
QEvent e -> QEvent e
| NestedQuery q' -> NestedQuery (check_inj_coherent_r q'))) ll)
else q
(* Apply the injectivity-coherence check to real queries;
   put-begin declarations are returned unchanged. *)
let check_inj_coherent q =
  match q with
    PutBegin(_, _) -> q
  | RealQuery r -> RealQuery (check_inj_coherent_r r)
(* Translate one query declaration: check it, enforce injectivity
   coherence, and record the event statuses needed by Pievent. *)
let transl_query (env,q) =
let q' = check_query_list (create_env env) q in
let q'' = List.map check_inj_coherent q' in
Pievent.init_event_status_table event_fun_table;
List.iter Pievent.set_event_status q'';
q''
(* Give the fact to query from the detailed query
This is used only to create a resembling specification for SPASS
*)
(* Extract the facts to query (the conclusions) from checked queries.
   Injective end events get an extra session-id argument. Note that the
   inner match absorbs the remaining cases of the outer [function];
   this parses correctly because RealQuery is the last outer case. *)
let query_to_facts q =
let facts = ref [] in
List.iter (function
PutBegin(_) -> ()
| RealQuery(Before(e,_)) -> match e with
QSEvent(_,(FunApp(f,l) as param)) ->
facts :=
(if (Pievent.get_event_status f).end_status = Inj then
Pred(Param.end_pred_inj, [Var(Terms.new_var "endsid" Param.sid_type);param])
else
Pred(Param.end_pred, [param])) :: (!facts)
| QSEvent(_, _) ->
user_error ("Events should be function applications\n")
| QFact(p,l) ->
facts := (Pred(p,l)) :: (!facts)
| QNeq _ | QEq _ -> internal_error "no Neq/Eq queries"
) q;
!facts
(* After its translation, the arguments of names in the query are
given type Param.tmp_type The exact types of the arguments of each
name function symbol is computed during the translation of the
process. The following functions scan the query to update the names
with their real type. *)
(* Re-check delayed name patterns (PGTLink installed by
   check_query_term) now that name arities are known, caching the
   result as a TLink on the variable. *)
let rec update_type_names_t = function
Var v ->
begin
match v.link with
PGTLink (env, t) ->
let (t', _) = check_query_term env t in
v.link <- TLink t';
t'
| TLink t -> t
| NoLink -> Var v
| _ -> internal_error "unexpected link in update_type_names_t"
end
| FunApp(f,l) -> FunApp(f, List.map update_type_names_t l)
(* Apply [update_type_names_t] to every term inside one query atom. *)
let update_type_names_e ev =
  match ev with
    QSEvent(inj, t) -> QSEvent(inj, update_type_names_t t)
  | QFact(p, args) -> QFact(p, List.map update_type_names_t args)
  | QNeq(t1, t2) -> QNeq(update_type_names_t t1, update_type_names_t t2)
  | QEq(t1, t2) -> QEq(update_type_names_t t1, update_type_names_t t2)
(* Apply the name-type update to a whole query and its (possibly
   nested) hypotheses. *)
let rec update_type_names_r = function
Before(ev,hypll) -> Before(update_type_names_e ev, List.map (List.map update_type_names_h) hypll)
and update_type_names_h = function
QEvent(ev) -> QEvent(update_type_names_e ev)
| NestedQuery(q) -> NestedQuery(update_type_names_r q)
(* Apply the name-type update to one checked query; put-begin
   declarations contain no name patterns and are rebuilt unchanged. *)
let update_type_names q =
  match q with
    PutBegin(b, l) -> PutBegin(b, l)
  | RealQuery r -> RealQuery (update_type_names_r r)
(* Noninterf queries *)
(* Accessors for the accumulated query declarations. *)
let get_noninterf_queries () =
!noninterf_list
(* Weaksecret queries *)
let get_weaksecret_queries () =
!weaksecret_list
(* Not declarations *)
(* "not" declarations are only checked here, once the process is known. *)
let get_not() =
List.map (fun (env, no) -> check_event (create_env env) no) (!not_list)
(* For nounif declarations. Very similar to queries, except that *v is allowed
and events are not allowed *)
(* Resolve an identifier in a nounif format, returning (format, type):
   variables (following FLink links from format "let" bindings), free
   names, and zero-arity functions are accepted. *)
let fget_ident_any env (s, ext) =
try
match StringMap.find s env with
EVar b ->
begin
match b.link with
FLink t -> (t, b.btype)
| NoLink -> (FVar b, b.btype)
| _ -> internal_error "unexpected link in fget_ident_any"
end
| EName r ->
(FFunApp(r, []), snd r.f_type)
| EFun f ->
if fst f.f_type == [] then
(FFunApp(f,[]), snd f.f_type)
else
input_error ("function " ^ s ^ " expects " ^
(string_of_int (List.length (fst f.f_type))) ^
" arguments but is used without arguments") ext
| _ ->
input_error ("identifier " ^ s ^ " should be a variable, a function, or a name") ext
with Not_found ->
input_error ("identifier " ^ s ^ " not defined") ext
(* Type-check a nounif format, returning (internal format, its type).
   Mutually recursive with [fbinding_find] (resolve a "v = t" binding
   in a name pattern; unbound positions become FAny wildcards) and
   [add_fbinding] (extend the environment for format "let" bindings). *)
let rec check_gformat env (term, ext0) =
match term with
PFGIdent i -> fget_ident_any env i
| PFGFunApp((s,ext),l) ->
begin
try
match StringMap.find s env with
EFun f ->
(match f.f_cat with
Eq _ | Tuple -> ()
| _ -> input_error ("function " ^ s ^ " is defined by \"reduc\". Such a function should not be used in a \"nounif\" declaration") ext);
let (l', tl') = List.split (List.map (check_gformat env) l) in
if Terms.eq_lists (fst f.f_type) tl' then
(* Type converters disappear when types are ignored. *)
if (f.f_options land Param.fun_TYPECONVERTER != 0) && (!Param.ignore_types) then
match l' with
[t] -> (t, snd f.f_type)
| _ -> internal_error "type converter functions should always be unary"
else
(FFunApp(f, l'), snd f.f_type)
else
input_error ("function " ^ s ^ " expects arguments of type " ^
(Terms.tl_to_string ", " (fst f.f_type)) ^
" but is given arguments of type " ^
(Terms.tl_to_string ", " tl')) ext
| _ -> input_error("only functions can be applied, not " ^ s) ext
with Not_found ->
input_error ("function " ^ s ^ " not defined") ext
end
| PFGTuple l ->
let (l', tl') = List.split (List.map (check_gformat env) l) in
(FFunApp(Terms.get_tuple_fun tl', l'), Param.bitstring_type)
(* "*v": matches any instance of the variable; forbidden for variables
   already bound by a format "let". *)
| PFGAny (s,ext) ->
begin
try
match StringMap.find s env with
EVar b ->
begin
match b.link with
NoLink -> (FAny b, b.btype)
| FLink _ -> input_error "variables preceded by * must not be defined by a binding" ext
| _ -> internal_error "unexpected link in check_gformat"
end
| _ -> input_error (s ^ " should be a variable") ext
with Not_found ->
input_error ("variable " ^ s ^ " is not defined") ext
end
| PFGName ((s,ext),bl) ->
begin
try
let r = Hashtbl.find glob_table s in
check_single ext s;
(* Unlike queries, nounif is handled after the translation, so
   name arities must already be known. *)
if fst r.f_type == Param.tmp_type then
Parsing_helper.internal_error "Names should have their arity at this point"
else
begin
match r.f_cat with
Name { prev_inputs_meaning = sl } ->
List.iter (fun ((s',ext'),_) ->
if not (List.mem s' sl) then
input_error ("variable " ^ s' ^ " not defined at restriction " ^ s) ext') bl;
let p = List.map2 (fun s'' ty ->
fbinding_find env s'' ty bl) sl (fst r.f_type)
in
(FFunApp(r, p), snd r.f_type)
| _ -> internal_error "name expected here"
end
with Not_found ->
input_error (s ^ " should be a name") ext
end
| PFGLet(id,t,t') -> check_gformat (add_fbinding env (id,t)) t'
and fbinding_find env s ty = function
[] -> FAny (Terms.new_var Param.def_var_name ty)
| ((s',ext),t)::l ->
if s' = s then
begin
let (t', ty') = check_gformat env t in
if ty' != ty then
input_error ("this variable is of type " ^ ty.tname ^ " but is given a value of type " ^ ty'.tname) ext;
t'
end
else
fbinding_find env s ty l
and add_fbinding env ((i,ext),t) =
begin
try
match StringMap.find i env with
EVar _ -> input_error ("variable " ^ i ^ " already defined") ext
| _ -> ()
with Not_found -> ()
end;
let (t', ty') = check_gformat env t in
let v = Terms.new_var i ty' in
v.link <- FLink t';
StringMap.add i (EVar v) env
(* Check one nounif fact format: attacker/mess (with an optional
   phase, -1 meaning the maximal phase) or a user-declared predicate,
   for which no phase may be given. Returns (predicate, formats). *)
let check_gfact_format env ((s, ext), tl, n) =
match s with
"attacker" ->
begin
match tl with
[t1] ->
if n > !Param.max_used_phase then
input_warning "nounif declaration for a phase greater than used" ext;
let (t1', ty1) = check_gformat env t1 in
let att_n = Param.get_pred (Attacker((if n = -1 then (!Param.max_used_phase) else n), ty1))
in
(att_n, [t1'])
| _ ->
input_error "arity of predicate attacker should be 1" ext
end
| "mess" ->
begin
match tl with
[t1;t2] ->
if n > !Param.max_used_phase then
input_warning "nounif declaration for a phase greater than used" ext;
let (t1', ty1) = check_gformat env t1 in
let (t2', ty2) = check_gformat env t2 in
if ty1 != Param.channel_type then
input_error ("First argument of mess is of type " ^ ty1.tname ^ " and should be of type channel") ext;
let mess_n = Param.get_pred (Mess((if n = -1 then (!Param.max_used_phase) else n), ty2))
in
(mess_n, [t1';t2'])
| _ ->
input_error "arity of predicate mess should be 2" ext
end
| s ->
if n != -1 then
input_error "declared predicates do not depend on phases, so no phase should be specified in such facts in queries" ext;
let (tl', tyl) = List.split (List.map (check_gformat env) tl) in
(get_pred env (s,ext) tyl, tl')
(* Process one nounif declaration: peel off the "let" bindings, then
   check the fact format; the weight is negated. *)
let rec handle_nounif env nounif =
  match nounif with
    BFLet(id, fmt, rest) -> handle_nounif (add_fbinding env (id, fmt)) rest
  | BFNoUnif(fact, n) -> (check_gfact_format env fact, -n)
(* Check every recorded nounif declaration, each in its own fresh
   environment built from its recorded bindings. *)
let get_nounif() =
  let check_one (env, nounif) = handle_nounif (create_env env) nounif in
  List.map check_one (!nounif_list)
| null | https://raw.githubusercontent.com/tari3x/csec-modex/5ab2aa18ef308b4d18ac479e5ab14476328a6a50/deps/proverif1.84/src/pitsyntax.ml | ocaml | Global table of identifiers, including names, functions, variables,
predicates, and types.
Is a map from strings to the description of the ident
* Types *
Table of bound names of the process
Functions
|| ((arity == 0) && (not is_private))
Equations
Definitions of destructors by rewrite rules
Check clauses
add other qualifiers here
TO DO should give a real rule number, but that's not easy...
List of the free names of the process
Check non-interference terms
Copy a process
If it is the final copy, create a distinct name for each restriction and add it in the glob_table
** Translate a process from parse tree to internal representation **
Table of processes defined by "let"
Get an ident when anything is allowed
Fix the variables that we are going to use to rename the arguments of the function
if c1 || c2 then p1 else p2
is equivalent to
if c1 then p1 else (if c2 then p1 else p2)
Events
Tables
if c1 || c2 then p1 else p2
is equivalent to
if c1 then p1 else (if c2 then p1 else p2)
Compute need_vars_in_names: the list of pairs (restriction, variable name)
such that the variable "variable name" must occur as argument in the
pattern that models names created by "restriction", because the
structure "restriction[... variable name = ... ]" is used in the input
file.
The replication indices do not need to be added in
need_vars_in_names, because they are always included as
arguments of names, whether or not they occur in
the input file.
They must not be added to need_vars_in_names, because
they are not correctly computed by trace reconstruction,
so adding them would cause bugs in trace reconstruction.
print_string ("Need " ^ s' ^ " in " ^ r.f_name ^ "\n");
The replication indices do not need to be added in
need_vars_in_names, because they are always included as
arguments of names, whether or not they occur in
the input file.
They must not be added to need_vars_in_names, because
they are not correctly computed by trace reconstruction,
so adding them would cause bugs in trace reconstruction.
print_string ("Need " ^ s' ^ " in " ^ r.f_name ^ "\n");
Handle all declarations
TO DO handle TExpand (_, _)|TDefine (_, _, _)
Expand macro calls inside macro definitions
Because this is done at macro definition point, this requires that
the macros used inside the definition be defined before, which
is a safe requirement. (It prevents recursive macros, in particular.)
Get the maximum phase number
ignoreTypes must be set before doing the rest of the work
Setting all parameters beforehand does not hurt.
I call copy_process to make sure that all variables are distinct.
copy_process renames variables in patterns
queries
set the event injective
Give the fact to query from the detailed query
This is used only to create a resembling specification for SPASS
After its translation, the arguments of names in the query are
given type Param.tmp_type The exact types of the arguments of each
name function symbol is computed during the translation of the
process. The following functions scan the query to update the names
with their real type.
Weaksecret queries
Not declarations | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* and *
* *
* Copyright ( C ) INRIA , LIENS , 2000 - 2009 *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* Bruno Blanchet and Xavier Allamigeon *
* *
* Copyright (C) INRIA, LIENS, MPII 2000-2009 *
* *
*************************************************************)
This program is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details ( in file LICENSE ) .
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details (in file LICENSE).
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
open Parsing_helper
open Ptree
open Pitptree
open Types
open Pitypes
open Stringmap
(* Global counter used to label process occurrences. *)
let occ_count = ref 0

(* Return a fresh, strictly increasing occurrence number. *)
let new_occurrence () =
  incr occ_count;
  !occ_count
(* Parse a file *)

(* Parse the main input file [filename] with the "all" entry point of the
   grammar; returns the parse tree.  Syntax errors are reported with
   their source location; file-system errors abort with a user error.
   NOTE(review): on a syntax error, input_error is raised before
   [close_in ic] runs, so the channel is not closed on that path —
   presumably acceptable because input_error terminates; confirm. *)
let parse filename =
  try
    let ic = open_in filename in
    let lexbuf = Lexing.from_channel ic in
    (* record the file name so error locations point into [filename] *)
    lexbuf.Lexing.lex_curr_p <- { lexbuf.Lexing.lex_curr_p with
                                  Lexing.pos_fname = filename };
    let ptree =
      try
        Pitparser.all Pitlexer.token lexbuf
      with Parsing.Parse_error ->
        input_error "Syntax error" (extent lexbuf)
    in
    close_in ic;
    ptree
  with Sys_error s ->
    user_error ("File error: " ^ s ^ "\n")
(* Parse a library file: [filename] with the ".pvl" suffix appended, using
   the "lib" entry point of the grammar (declarations only, no process). *)
let parse_lib filename =
  let filename = filename ^ ".pvl" in
  try
    let ic = open_in filename in
    let lexbuf = Lexing.from_channel ic in
    (* record the file name so error locations point into the library file *)
    lexbuf.Lexing.lex_curr_p <- { lexbuf.Lexing.lex_curr_p with
                                  Lexing.pos_fname = filename };
    let ptree =
      try
        Pitparser.lib Pitlexer.token lexbuf
      with Parsing.Parse_error ->
        input_error "Syntax error" (extent lexbuf)
    in
    close_in ic;
    ptree
  with Sys_error s ->
    user_error ("File error: " ^ s ^ "\n")
(* Parse [filename]; when a library name is configured in Param.lib_name,
   prepend the library's declarations to the file's own declarations. *)
let parse_with_lib filename =
  let lib_decls =
    match !Param.lib_name with
    | "" -> []
    | lib -> parse_lib lib
  in
  let (decls, proc) = parse filename in
  (lib_decls @ decls, proc)
(* Global table of identifiers (names, functions, variables, predicates,
   types), keyed by their source string. *)
let global_env = ref (StringMap.empty : envElement StringMap.t)
(* Resolve a type name to its declaration.  "any_type" is accepted only
   when [polym] is true (polymorphic positions). *)
let get_type_polym polym (s, ext) =
  match s with
  | "any_type" ->
      if polym then Param.any_type
      else input_error "polymorphic type not allowed here" ext
  | _ ->
      (try List.find (fun t -> t.tname = s) (!Param.all_types)
       with Not_found -> input_error ("type " ^ s ^ " not declared") ext)
(* Resolve a type name in a non-polymorphic position ("any_type" rejected). *)
let get_type (s, ext) = get_type_polym false (s,ext)
(* Declare user type [s]: the reserved name "any_type" and duplicates of
   any existing identifier are errors; the new type is recorded both in
   Param.all_types and in the global environment. *)
let check_type_decl (s, ext) =
  if s = "any_type" then
    input_error "type any_type reserved for polymorphism" ext;
  if StringMap.mem s (!global_env) then
    input_error ("identifier " ^ s ^ " already defined (as a free name, a function, a predicate, or a type)") ext;
  let r = { tname = s } in
  Param.all_types := r :: (!Param.all_types);
  global_env := StringMap.add s (EType r) (!global_env)
(* Table of bound names of the process: maps a source name to the (possibly
   several) name symbols created for it by restrictions. *)
let glob_table = Hashtbl.create 7
(* Error out when [s] has several entries in glob_table: its use in a
   query would be ambiguous (e.g. several restrictions define it). *)
let check_single ext s =
  match Hashtbl.find_all glob_table s with
  | _ :: _ :: _ ->
      input_error (s ^ " cannot be used in queries. Its definition is ambiguous. (For example, several restrictions might define " ^ s ^ ".)") ext
  | _ -> ()
(* Table of declared function symbols (shared with Param). *)
let fun_decls = Param.fun_decls
(* Built-in boolean constant symbols. *)
let true_cst = Terms.true_cst
let false_cst = Terms.false_cst
(* Seed the function table and global environment with the built-in
   booleans (true, false, not) and register all currently declared types. *)
let init_fun_decl () =
  let register name f =
    Hashtbl.add fun_decls name f;
    global_env := StringMap.add name (EFun f) (!global_env)
  in
  register "true" true_cst;
  register "false" false_cst;
  register "not" Terms.not_fun;
  List.iter
    (fun t -> global_env := StringMap.add t.tname (EType t) (!global_env))
    (!Param.all_types)
(* Operators handled specially by the checker; never ordinary functions. *)
let special_functions = ["choice"; "||"; "&&"; "="; "<>"]
(* Resolve [s] in [env] to a function symbol applicable to argument types
   [tl]; special operators are rejected and argument types must match
   the declaration exactly. *)
let get_fun env (s,ext) tl =
  if List.mem s special_functions then
    input_error (s ^ " not allowed here") ext;
  try
    match StringMap.find s env with
      EFun r ->
        if not (Terms.eq_lists (fst r.f_type) tl) then
          input_error ("function " ^ s ^ " expects arguments of type " ^
                       (Terms.tl_to_string ", " (fst r.f_type)) ^
                       " but is given arguments of type " ^
                       (Terms.tl_to_string ", " tl)) ext;
        r
    | _ ->
        input_error (s ^ " should be a function") ext
  with Not_found ->
    input_error ("function " ^ s ^ " not defined") ext
(* Declare constructor [name] : argtypes -> restype, with options
   "data" (tuple-like), "private", and "typeConverter" (unary only).
   NOTE(review): [cat] is referenced below but is not bound anywhere in
   this excerpt — a line computing it from [is_tuple] appears to be
   missing here; confirm against the complete source file. *)
let check_fun_decl (name, ext) argtypes restype options =
  let tyarg = List.map get_type argtypes in
  let tyres = get_type restype in
  if StringMap.mem name (!global_env) then
    input_error ("identifier " ^ name ^ " already defined (as a free name, a function, a predicate, or a type)") ext;
  let is_tuple = ref false in
  let is_private = ref false in
  let opt = ref 0 in
  List.iter (function
      ("data",_) -> is_tuple := true
    | ("private",_) -> is_private := true
    | ("typeConverter",_) ->
        if List.length tyarg != 1 then
          input_error "only unary functions can be declared \"typeConverter\"" ext;
        opt := (!opt) lor Param.fun_TYPECONVERTER
    | (_,ext) ->
        input_error "for functions, the only allowed options are data, private, and typeConverter" ext) options;
  let r = { f_name = name;
            f_type = tyarg, tyres;
            f_cat = cat;
            f_initial_cat = cat;
            f_private = !is_private;
            f_options = !opt }
  in
  Hashtbl.add fun_decls name r;
  global_env := StringMap.add name (EFun r) (!global_env)
(* Resolve [s] in [env]; it must be bound to a variable. *)
let get_var env (s, ext) =
  try
    (match StringMap.find s env with
     | EVar v -> v
     | _ -> input_error (s ^ " should be a variable") ext)
  with Not_found ->
    input_error ("variable " ^ s ^ " not declared") ext
(* Extend [env] with one fresh typed variable per binding in [l].
   Rebinding a variable is an error; shadowing any other identifier only
   warns.  Bindings are processed left to right. *)
let add_env env l =
  List.fold_left
    (fun accu ((s, ext), ty) ->
      let t = get_type ty in
      begin
        try
          match StringMap.find s accu with
            EVar _ -> input_error ("variable " ^ s ^ " already defined") ext
          | _ -> input_warning ("identifier " ^ s ^ " rebound") ext
        with Not_found -> ()
      end;
      StringMap.add s (EVar (Terms.new_var s t)) accu)
    env l
(* Build a fresh checking environment: the global environment extended
   with the typed bindings [l]. *)
let create_env l =
  add_env (!global_env) l
(* Symbol filter for positions where only constructors may occur: accept
   functions defined by equations or tuples, reject destructors. *)
let f_eq_tuple f ext =
  match f.f_cat with
    Eq _ | Tuple -> ()
  | _ -> input_error ("function " ^ f.f_name ^ " has been defined by reduction. It should not appear in equations or clauses") ext

(* Symbol filter that accepts any function. *)
let f_any f ext = ()
(* Check a term used in equations or clauses.  [f_allowed] restricts
   which function symbols may occur (e.g. constructors only).  Returns
   the internal term and its type.  Type-converter functions are erased
   when types are ignored. *)
let rec check_eq_term f_allowed env (term,ext) =
  match term with
    (PIdent (s,ext)) ->
      let t =
        try
          match StringMap.find s env with
            EVar v -> Var v
          | EFun f ->
              (* a zero-arity function used as a constant *)
              if fst f.f_type <> [] then
                input_error ("function " ^ s ^ " expects " ^
                             (string_of_int (List.length (fst f.f_type))) ^
                             " arguments but is used without arguments") ext;
              f_allowed f ext;
              FunApp(f, [])
          | _ -> input_error ("identifier " ^ s ^ " should be a function or a variable") ext
        with Not_found ->
          input_error ("identifier " ^ s ^ " not defined as a function or as a variable") ext
      in
      (t, Terms.get_term_type t)
  | (PFunApp ((f,ext), tlist)) ->
      let (tl', tyl) = List.split (List.map (check_eq_term f_allowed env) tlist) in
      let f' = get_fun env (f,ext) tyl in
      f_allowed f' ext;
      (* when types are ignored, a type converter is the identity *)
      if (f'.f_options land Param.fun_TYPECONVERTER != 0) && (!Param.ignore_types) then
        match tl' with
          [t] -> (t, snd f'.f_type)
        | _ -> internal_error "type converter functions should always be unary"
      else
        (FunApp(f', tl'), snd f'.f_type)
  | (PTuple tlist) ->
      let (tl', tyl) = List.split (List.map (check_eq_term f_allowed env) tlist) in
      (FunApp (Terms.get_tuple_fun tyl, tl'), Param.bitstring_type)
(* Type-check both sides of an equation in a fresh variable environment
   (constructors only) and register it; both sides must share one type. *)
let check_equation env t1 t2 =
  let var_env = create_env env in
  let (lhs, ty_lhs) = check_eq_term f_eq_tuple var_env t1 in
  let (rhs, ty_rhs) = check_eq_term f_eq_tuple var_env t2 in
  if ty_lhs != ty_rhs then
    input_error "the two members of an equation should have the same type"
      (merge_ext (snd t1) (snd t2));
  TermsEq.register_equation (lhs, rhs)
(* Check a destructor declaration "reduc": every rewrite rule must apply
   the same new function symbol, use consistent argument/result types
   across rules, and bind every right-hand-side variable on the
   left-hand side.  The symbol is registered with category Red. *)
let check_red tlist options =
  match tlist with
    (_,(PFunApp((f,ext),l),_),_)::_ ->
      begin
        if List.mem f special_functions then
          input_error (f ^ " not allowed here") ext;
        if StringMap.mem f (!global_env) then
          input_error ("identifier " ^ f ^ " already defined (as a free name, a function, a predicate, or a type)") ext;
        let red_list, ty_red_list = List.split (List.map
          (function (env, (PFunApp((f',ext'),l1),_), t2) ->
            if f <> f' then
              input_error ("In \"reduc\", all rewrite rules should begin with the same function " ^ f) ext';
            let var_env = create_env env in
            let ((lhs, tylhs), (rhs, tyrhs)) = (List.split (List.map (check_eq_term f_eq_tuple var_env) l1),
                                                check_eq_term f_eq_tuple var_env t2)
            in
            (* every rhs variable must be bound by the lhs *)
            let var_list_rhs = ref [] in
            Terms.get_vars var_list_rhs rhs;
            if not (List.for_all (fun v -> List.exists (Terms.occurs_var v) lhs) (!var_list_rhs)) then
              Parsing_helper.input_error "All variables of the right-hand side of a \"reduc\" definition\nshould also occur in the left-hand side." ext';
            (lhs, rhs), (tylhs, tyrhs)
          | _, (_, ext1), _ -> input_error ("In \"reduc\", all rewrite rules should begin with function application") ext1) tlist)
        in
        match ty_red_list with
          [] -> internal_error "reduction with empty list"
        | (tylhs,tyrhs)::r ->
            (* all rules must agree with the first rule's types *)
            List.iter (fun (tylhs',tyrhs') ->
              if not (Terms.eq_lists tylhs tylhs') then
                input_error ("the arguments of function " ^ f ^ " do not have the same type in all rewrite rules") ext;
              if not (tyrhs == tyrhs') then
                input_error ("the result of function " ^ f ^ " does not have the same type in all rewrite rules") ext
            ) r;
            let cat = Red red_list in
            let is_private = ref false in
            List.iter (function
              | ("private",_) -> is_private := true
              | (_,ext) ->
                  input_error "for functions defined by rewrite rules, the only allowed option is private" ext) options;
            let fsymb = { f_name = f;
                          f_type = tylhs, tyrhs;
                          f_private = !is_private;
                          f_options = 0;
                          f_cat = cat;
                          f_initial_cat = cat
                        }
            in
            Hashtbl.add fun_decls f fsymb;
            global_env := StringMap.add f (EFun fsymb) (!global_env)
      end
  | (_,(_, ext1), _) :: l ->
      input_error ("In \"reduc\", all rewrite rules should begin with function application") ext1
  | [] -> internal_error "reduction with empty list"
(* Table of user-declared predicates (shared with Param). *)
let pred_env = Param.pred_env

(* Interpret one predicate qualifier, updating [r.p_prop]; each qualifier
   enforces its own arity/type constraints on [ty]. *)
let rec interpret_info ty r = function
    ("memberOptim", ext) ->
      if List.length ty != 2 then
        input_error "memberOptim makes sense only for predicates of arity 2" ext;
      r.p_prop <- r.p_prop lor Param.pred_ELEM
  | ("refTransAtt", ext) ->
      begin
        match ty with
          [t1;t2] when t1 == t2 -> r.p_prop <- r.p_prop lor Param.pred_REFTRANS
        | _ -> input_error "refTransAtt makes sense only for predicates with 2 arguments of the same type" ext
      end
  | ("decompData",ext) ->
      if List.exists (fun t -> t != Param.any_type) ty then
        input_error "decompData makes sense only for predicates that are polymorphic in all their arguments" ext;
      r.p_prop <- r.p_prop lor Param.pred_TUPLE
  | ("decompDataSelect",ext) ->
      if List.exists (fun t -> t != Param.any_type) ty then
        input_error "decompDataSelect makes sense only for predicates that are polymorphic in all their arguments" ext;
      r.p_prop <- r.p_prop lor Param.pred_TUPLE lor Param.pred_TUPLE_SELECT
  | ("block",_) -> r.p_prop <- r.p_prop lor Param.pred_BLOCKING
  | (s,ext) -> input_error ("unknown predicate qualifier " ^ s) ext
(* Declare user predicate [c] with argument types [tl] and qualifiers
   [info].  Reserved and duplicate names are rejected; predicates that
   are polymorphic in some argument are tagged PolymPred so typed
   instances can be generated later. *)
let check_pred (c,ext) tl info =
  if c = "attacker" || c = "mess" || c = "event" || c = "inj-event" then
    input_error ("predicate name " ^ c ^ " is reserved. You cannot declare it") ext;
  if StringMap.mem c (!global_env) then
    input_error ("identifier " ^ c ^ " already defined (as a free name, a function, a predicate, or a type)") ext;
  let tyl = List.map (get_type_polym true) tl in
  let r = { p_name = c; p_type = tyl; p_prop = 0; p_info = [] } in
  List.iter (interpret_info tyl r) info;
  if List.exists (fun t -> t == Param.any_type) tyl then
    r.p_info <- [PolymPred(c, r.p_prop, tyl)];
  Hashtbl.add pred_env c r;
  global_env := StringMap.add c (EPred r) (!global_env)
(* Resolve [c] in [env] to a predicate applicable to argument types [tl];
   polymorphic argument positions accept any type, and a polymorphic
   predicate is instantiated at the actual types. *)
let get_pred env (c, ext) tl =
  try
    match StringMap.find c env with
      EPred r ->
        if not ((List.length r.p_type == List.length tl) && (List.for_all2 (fun t1 t2 -> t1 == t2 || t1 == Param.any_type) r.p_type tl)) then
          input_error ("predicate " ^ c ^ " expects arguments of type " ^
                       (Terms.tl_to_string ", " r.p_type) ^
                       " but is given arguments of type " ^
                       (Terms.tl_to_string ", " tl)) ext;
        if List.exists (fun t -> t == Param.any_type) r.p_type then
          Param.get_pred (PolymPred(r.p_name, r.p_prop, tl))
        else
          r
    | _ -> input_error (c ^ " should be a predicate") ext
  with Not_found ->
    input_error ("undeclared predicate " ^ c ) ext
(* A condition in a process may be either a declared predicate or a
   boolean-returning function. *)
type pred_or_fun =
    IsPred of predicate
  | IsFun of funsymb

(* Resolve [c] as a predicate or a boolean function applied to argument
   types [tl']; arity and types are checked in both cases, and a
   function's result type must be bool. *)
let get_pred_or_fun env (c,ext) tl' =
  try
    match StringMap.find c env with
      EPred r ->
        if not ((List.length r.p_type == List.length tl') && (List.for_all2 (fun t1 t2 -> t1 == t2 || t1 == Param.any_type) r.p_type tl')) then
          input_error ("predicate " ^ c ^ " expects arguments of type " ^
                       (Terms.tl_to_string ", " r.p_type) ^
                       " but is given arguments of type " ^
                       (Terms.tl_to_string ", " tl')) ext;
        let p' =
          if List.exists (fun t -> t == Param.any_type) r.p_type then
            Param.get_pred (PolymPred(r.p_name, r.p_prop, tl'))
          else
            r
        in
        IsPred p'
    | EFun r ->
        if not (Terms.eq_lists (fst r.f_type) tl') then
          input_error ("function " ^ c ^ " expects arguments of type " ^
                       (Terms.tl_to_string ", " (fst r.f_type)) ^
                       " but is given arguments of type " ^
                       (Terms.tl_to_string ", " tl')) ext;
        if (snd r.f_type) != Param.bool_type then
          input_error ("function " ^ c ^ " returns a result of type " ^
                       (snd r.f_type).tname ^ " but a boolean is expected") ext;
        IsFun r
    | _ -> input_error (c ^ " should be a predicate or a boolean function") ext
  with Not_found ->
    input_error ("undeclared predicate or function " ^ c ) ext
(* Record a clause (hypotheses, conclusion, constraints, label) in the
   global rule base. *)
let add_rule hyp concl constra tag =
  Param.red_rules := (hyp, concl, constra, tag) :: (!Param.red_rules)

(* Equality fact t1 = t2, at the common type of its arguments. *)
let equal_fact t1 t2 =
  Pred(Param.get_pred (Equal(Terms.get_term_type t1)), [t1;t2])

(* Check a predicate application (p, argument terms) in a clause; any
   function symbol is allowed in the arguments. *)
let check_cterm env (p,t) =
  let (tl, tyl) = List.split (List.map (check_eq_term f_any env) t) in
  (get_pred env p tyl, tl)
(* Check a clause hypothesis, accumulating facts and disequality
   constraints.  "&&" flattens into the accumulators; "=" becomes an
   Equal fact; "<>" a Neq constraint; "||", "not" and "choice" are
   rejected here. *)
let rec check_hyp (hyp_accu,constra_accu) env (fact, ext) =
  match fact with
    PIdent p ->
      let (p',l') = check_cterm env (p,[]) in
      (Pred(p',l')::hyp_accu, constra_accu)
  | PTuple _ -> input_error "tuples not allowed here" ext
  | PFunApp((f,fext) as p, l) ->
      match f,l with
        "<>", [t1;t2] ->
          let (t1', ty1) = check_eq_term f_any env t1 in
          let (t2', ty2) = check_eq_term f_any env t2 in
          if ty1 != ty2 then
            input_error "the two arguments of an inequality test should have the same type" ext;
          (hyp_accu, [Neq(t1', t2')] :: constra_accu)
      | "=", [t1;t2] ->
          let (t1', ty1) = check_eq_term f_any env t1 in
          let (t2', ty2) = check_eq_term f_any env t2 in
          if ty1 != ty2 then
            input_error "the two arguments of an equality test should have the same type" ext;
          ((equal_fact t1' t2')::hyp_accu, constra_accu)
      | "&&", [h1;h2] ->
          check_hyp (check_hyp (hyp_accu,constra_accu) env h1) env h2
      | ("<>" | "=" | "&&"), _ -> internal_error ("Bad arity for special function " ^ f)
      | ("||" | "not" | "choice"), _ -> input_error (f ^ " not allowed here") fext
      | _ ->
          let (p',l') = check_cterm env (p,l) in
          (Pred(p',l')::hyp_accu, constra_accu)
(* Check a fact standing alone (e.g. a clause conclusion): a plain
   predicate application; all special operators are rejected. *)
let check_simple_fact env (fact, ext) =
  match fact with
    PIdent p ->
      let (p',l') = check_cterm env (p,[]) in
      Pred(p',l')
  | PTuple _ -> input_error "tuples not allowed here" ext
  | PFunApp((f,fext) as p,l) ->
      match f with
        "=" | "<>" | "&&" | "||" | "not" | "choice" -> input_error (f ^ " not allowed here") fext
      | _ ->
          let (p',l') = check_cterm env (p,l) in
          Pred(p',l')
(* Check one clause declaration: a bare fact, an implication (clause), or
   an equivalence.  Equivalences add rules in both directions; clauses
   whose constraints simplify to false are dropped silently. *)
let check_clause = function
    (env, PFact(c)) ->
      begin
        let env = create_env env in
        let concl = check_simple_fact env c in
        add_rule [] concl [] LblClause
      end
  | (env, PClause(i,c)) ->
      begin
        try
          let env = create_env env in
          let (hyp, constra) = check_hyp ([],[]) env i in
          let concl = check_simple_fact env c in
          add_rule hyp concl
            (Rules.simplify_constra_list (concl :: hyp) constra) LblClause
        with Rules.FalseConstraint -> ()
      end
  | (env, PEquiv(i,c,select)) ->
      let env = create_env env in
      let (hyp, constra) = check_hyp ([],[]) env i in
      if constra != [] then
        Parsing_helper.user_error "Inequality constraints not allowed in equivalences";
      let concl = check_simple_fact env c in
      (* both directions of the equivalence become rules *)
      add_rule hyp concl [] LblEquiv;
      List.iter (fun h -> add_rule [concl] h [] LblEquiv) hyp;
      if not select then Terms.add_unsel concl
(* List of the free names of the process (shared with Param). *)
let freenames = Param.freenames

(* Build a name symbol [s] of type [ty]; names that are not free are
   private. *)
let create_name s ty is_free =
  let cat = Name { prev_inputs = None; prev_inputs_meaning = [] } in
  { f_name = s;
    f_type = ty;
    f_cat = cat;
    f_initial_cat = cat;
    f_private = not is_free;
    f_options = 0 }

(* Same, but with a globally fresh numeric suffix appended to [s]. *)
let create_name_uniq s ty is_free =
  let cat = Name { prev_inputs = None; prev_inputs_meaning = [] } in
  { f_name = s ^ "_" ^ (string_of_int (Terms.new_var_name()));
    f_type = ty;
    f_cat = cat;
    f_initial_cat = cat;
    f_private = not is_free;
    f_options = 0 }
(* Declare free name [s] of type [t]; only the "private" option is
   allowed.  The name is added to the global environment and to the list
   of free names. *)
let add_free_name (s,ext) t options =
  let is_private = ref false in
  List.iter (function
    | ("private",_) -> is_private := true
    | (_,ext) ->
        input_error "for free names, the only allowed option is private" ext) options;
  let ty = get_type t in
  if StringMap.mem s (!global_env) then
    input_error ("identifier " ^ s ^ " already declared (as a free name, a function, a predicate, or a type)") ext;
  let r = create_name s ([],ty) (not (!is_private)) in
  global_env := StringMap.add s (EName r) (!global_env);
  freenames := r :: !freenames
(* Resolve [s] for a non-interference query: it must be an unambiguous,
   private free name (non-interference on public values is trivially
   false). *)
let get_non_interf_name env (s,ext) =
  try
    match StringMap.find s env with
      EName r ->
        check_single ext s;
        if not r.f_private then
          input_error ("Non-interference is certainly false on public values, such as " ^ s) ext
        else
          r
    | _ ->
        input_error ("Non-interference can only be tested on private free names") ext
  with Not_found ->
    input_error ("Name " ^ s ^ " is not declared") ext
(* Check a ground term in a non-interference query: variables, free
   names, constructors (equations/tuples) and tuples only — destructors
   are rejected.  Returns the internal term and its type. *)
let rec check_ni_term env (term,ext) =
  match term with
    (PIdent (s,ext)) ->
      let t =
        try
          match StringMap.find s env with
            EVar v -> Var v
          | EFun f ->
              if fst f.f_type <> [] then
                input_error ("function " ^ s ^ " expects " ^
                             (string_of_int (List.length (fst f.f_type))) ^
                             " arguments but is used without arguments") ext;
              (match f.f_cat with
                 Eq _ | Tuple -> ()
               | _ -> input_error ("function " ^ s ^ " has been defined by reduction. It should not appear in non-interference queries") ext);
              FunApp(f, [])
          | EName r ->
              FunApp (r, [])
          | _ -> input_error ("identifier " ^ s ^ " should be a variable, a function, or a name") ext
        with Not_found ->
          input_error ("identifier " ^ s ^ " not defined as a variable, a function, or a name") ext
      in
      (t, Terms.get_term_type t)
  | (PFunApp ((s,ext), tlist)) ->
      let (tl, tyl) = List.split (List.map (check_ni_term env) tlist) in
      let f = get_fun env (s,ext) tyl in
      (match f.f_cat with
         Eq _ | Tuple -> ()
       | _ -> input_error ("function " ^ s ^ " has been defined by reduction. It should not appear in non-interference queries") ext);
      (* when types are ignored, a type converter is the identity *)
      if (f.f_options land Param.fun_TYPECONVERTER != 0) && (!Param.ignore_types) then
        match tl with
          [t] -> (t, snd f.f_type)
        | _ -> internal_error "type converter functions should always be unary"
      else
        (FunApp(f, tl), snd f.f_type)
  | (PTuple tlist) ->
      let (l, tl) = List.split (List.map (check_ni_term env) tlist) in
      (FunApp (Terms.get_tuple_fun tl, l), Param.bitstring_type)
(* Check one non-interference query item: a private free name [id] plus an
   optional list of candidate values, each of the name's type. *)
let get_non_interf env (id, lopt) =
  let n = get_non_interf_name (create_env env) id in
  (n,
   match lopt with
     None -> None
   | Some l ->
       Some (List.map (fun t ->
         let (t', ty) = check_ni_term (create_env env) t in
         if ty != snd n.f_type then
           input_error ("this term has type " ^ ty.tname ^ " but should have type " ^ (snd n.f_type).tname) (snd t);
         t'
       ) l))
(* Duplicate a binder and link the original to the copy (TLink), so that
   subsequent Terms.copy_term3 calls rename occurrences consistently.
   The binder must be unlinked; callers run inside Terms.auto_cleanup. *)
let copy_binder b =
  let b' = Terms.new_var b.sname b.btype in
  match b.link with
    NoLink ->
      Terms.link b (TLink (Var b'));
      b'
  | _ -> Parsing_helper.internal_error ("unexpected link in copy_binder " ^ b.sname)

(* Copy a pattern, renaming its bound variables via copy_binder. *)
let rec copy_pat = function
    PatVar b -> PatVar (copy_binder b)
  | PatTuple(f,l) -> PatTuple(f, List.map copy_pat l)
  | PatEqual(t) -> PatEqual (Terms.copy_term3 t)
(* Copy a process, renaming bound variables (binder links set under
   Terms.auto_cleanup) and refreshing occurrence labels.  When
   [add_in_glob_table] is true, each restriction gets a distinct fresh
   name symbol, recorded in glob_table, and the name is substituted
   throughout the copied subprocess. *)
let rec copy_process add_in_glob_table = function
    Nil -> Nil
  | Par(p1,p2) -> Par(copy_process add_in_glob_table p1, copy_process add_in_glob_table p2)
  | Restr(n,p) ->
      if add_in_glob_table then
        (* final copy: give this restriction its own unique name *)
        let n' = create_name_uniq n.f_name n.f_type false in
        Hashtbl.add glob_table n.f_name n';
        Restr(n', Reduction_helper.process_subst (copy_process add_in_glob_table p) n (FunApp(n',[])))
      else
        Restr(n, copy_process add_in_glob_table p)
  | Repl(p,occ) -> Repl(copy_process add_in_glob_table p, new_occurrence())
  | Let(pat, t, p, q, occ) ->
      Terms.auto_cleanup (fun () ->
        let pat' = copy_pat pat in
        Let(pat', Terms.copy_term3 t, copy_process add_in_glob_table p, copy_process add_in_glob_table q, new_occurrence()))
  | Input(t, pat, p, occ) ->
      Terms.auto_cleanup (fun () ->
        let pat' = copy_pat pat in
        Input(Terms.copy_term3 t, pat', copy_process add_in_glob_table p, new_occurrence()))
  | Output(tc,t,p, occ) -> Output(Terms.copy_term3 tc, Terms.copy_term3 t, copy_process add_in_glob_table p, new_occurrence())
  | Test(t,t',p,q,occ) -> Test(Terms.copy_term3 t, Terms.copy_term3 t', copy_process add_in_glob_table p, copy_process add_in_glob_table q,new_occurrence())
  | Event(t, p, occ) -> Event(Terms.copy_term3 t, copy_process add_in_glob_table p, new_occurrence())
  | Insert(t, p, occ) -> Insert(Terms.copy_term3 t, copy_process add_in_glob_table p, new_occurrence())
  | Get(pat, t, p, occ) ->
      Terms.auto_cleanup (fun () ->
        let pat' = copy_pat pat in
        Get(pat', Terms.copy_term3 t, copy_process add_in_glob_table p, new_occurrence()))
  | Phase(n,p) -> Phase(n, copy_process add_in_glob_table p)
  | LetFilter(bl, f, p, q, occ) ->
      Terms.auto_cleanup (fun () ->
        let bl' = List.map copy_binder bl in
        LetFilter(bl', Terms.copy_fact3 f, copy_process add_in_glob_table p, copy_process add_in_glob_table q, new_occurrence()))
(* Table of processes defined by "let": name -> (parameters, body). *)
let pdeftbl = (Hashtbl.create 7 : (string, binder list * process) Hashtbl.t)
(* Resolve an identifier in a position where anything term-like is
   allowed: a variable, a name, or a zero-arity function. *)
let get_ident_any env (s, ext) =
  try
    match StringMap.find s env with
      EVar b -> Var b
    | EName r -> FunApp (r,[])
    | EFun f ->
        if fst f.f_type = [] then
          FunApp(f,[])
        else
          input_error ("function " ^ s ^ " expects " ^
                       (string_of_int (List.length (fst f.f_type))) ^
                       " arguments but is used without arguments") ext
    | _ -> input_error ("identifier " ^ s ^ " should be a variable, a function, or a name") ext
  with Not_found ->
    input_error ("Variable, function, or name " ^ s ^ " not declared") ext
(* All pairs (x, y) with x drawn from [l1] and y from the second list,
   grouped by y in the second list's order, x in [l1]'s order within
   each group. *)
let rec cross_product l1 l2 =
  match l2 with
  | [] -> []
  | y :: rest -> List.map (fun x -> (x, y)) l1 @ cross_product l1 rest
(* Split [l] into its first [n] elements and the remainder; [l] must
   contain at least [n] elements, otherwise an internal error is raised. *)
let rec split n l =
  match n, l with
  | 0, _ -> ([], l)
  | _, [] -> Parsing_helper.internal_error "split"
  | _, x :: rest ->
      let (front, back) = split (n-1) rest in
      (x :: front, back)
(* Cut [l] into consecutive chunks of [n] elements each; the length of
   [l] must be a multiple of [n]. *)
let rec split_every n l =
  match l with
  | [] -> []
  | _ ->
      let (chunk, rest) = split n l in
      chunk :: split_every n rest
(* Expansion context that wraps nothing: it expects exactly the single
   process produced for a term needing no expansion. *)
let no_expand_fun = function
    [p] -> p
  | _ -> Parsing_helper.internal_error "no_expand_fun expecting a list with a single element"
(* Combine two expansions, each an (expansion context, term list) pair:
   the resulting term list is the cross product of both lists, and the
   resulting context nests [fa] inside [fl].  When either side is the
   trivial context (no_expand_fun) the other side's context is used
   directly, avoiding needless wrapping. *)
let pairing_expand (fa,la) (fl,ll) =
  if fa == no_expand_fun then
    if fl == no_expand_fun then
      (no_expand_fun, cross_product la ll)
    else
      (fl, cross_product la ll)
  else
    if fl == no_expand_fun then
      (fa, cross_product la ll)
    else
      (* regroup the flat process list into chunks of |la| for [fa],
         then hand the per-chunk results to [fl] *)
      let len = List.length la in
      ((fun l -> let l' = split_every len l in fl (List.map fa l')),
       cross_product la ll)
(* Ensure that no variable of [vlist] is referenced by the expansion
   context [fex] (checked by instantiating it with Nil processes);
   otherwise the expansion would use a variable before it is defined. *)
let check_no_ref ext vlist (fex, tex) =
  let fNil = fex (List.map (fun _ -> Nil) tex) in
  if List.exists (fun v -> Reduction_helper.occurs_var_proc v fNil) vlist then
    input_error "Cannot expand term because a variable in the expanded part would be referenced before being defined" ext
let rec check_term env (term, ext) =
match term with
PPIdent i ->
let t = get_ident_any env i in
(no_expand_fun, [t], Terms.get_term_type t)
| PPFunApp((s,ext),l) ->
let (fex',lex',tl') = check_term_list env l in
if s = "choice" then
begin
match tl' with
[t1;t2] when t1 == t2 ->
let f = Param.choice_fun t1 in
(fex', List.map (fun l' -> FunApp(f, l')) lex', t1)
| _ ->
input_error ("function choice expects two arguments of same type but is given arguments of type " ^
(Terms.tl_to_string ", " tl')) ext
end
else
begin
if List.mem s special_functions then
input_error (s ^ " not allowed here") ext;
try
match StringMap.find s env with
EFun f ->
if not (Terms.eq_lists (fst f.f_type) tl') then
input_error ("function " ^ s ^ " expects arguments of type " ^
(Terms.tl_to_string ", " (fst f.f_type)) ^
" but is given arguments of type " ^
(Terms.tl_to_string ", " tl')) ext;
if (f.f_options land Param.fun_TYPECONVERTER != 0) && (!Param.ignore_types) then
(fex', List.map (function
[t] -> t
| _ -> internal_error "type converter functions should always be unary"
) lex', snd f.f_type)
else
(fex', List.map (fun l' -> FunApp(f, l')) lex', snd f.f_type)
| ELetFun(args, fex, tex, ty) ->
let tyargs = List.map (fun v -> v.btype) args in
if not (Terms.eq_lists tyargs tl') then
input_error ("letfun function " ^ s ^ " expects arguments of type " ^
(Terms.tl_to_string ", " tyargs) ^
" but is given arguments of type " ^
(Terms.tl_to_string ", " tl')) ext;
let var_map = List.map (fun v -> (v, Terms.new_var v.sname v.btype)) args in
((fun l ->
fex' (List.map (fun tl' ->
let p = ref (Terms.auto_cleanup (fun () ->
List.iter (fun (v,v') -> Terms.link v (TLink (Var v'))) var_map;
copy_process false (fex l))) in
List.iter2 (fun (_,v') t' ->
p := Let(PatVar v', t', (!p), Nil, new_occurrence())) var_map tl';
!p
) lex')),
Terms.auto_cleanup (fun () ->
List.iter (fun (v,v') -> Terms.link v (TLink (Var v'))) var_map;
List.map Terms.copy_term3 tex), ty)
| _ ->
input_error (s ^ " should be a function") ext
with Not_found ->
input_error ("function " ^ s ^ " not defined") ext
end
| PPTuple l ->
let (fex',lex',tl') = check_term_list env l in
let f = Terms.get_tuple_fun tl' in
(fex', List.map (fun l' -> FunApp(f, l')) lex', Param.bitstring_type)
| PPRestr((s,ext),tyid,t) ->
let ty = get_type tyid in
if (StringMap.mem s env) then
input_warning ("identifier " ^ s ^ " rebound") ext;
let r = create_name s (Param.tmp_type, ty) false in
let env' = StringMap.add s (EName r) env in
let (fex, tex, ty) = check_term env' t in
((fun l -> Restr(r, fex l)), tex, ty)
| PPTest(c,p1,p2) ->
let rec interpret_cond p1 p2 = function
(PPIdent pred), ext -> interpret_cond p1 p2 (PPFunApp(pred,[]), ext)
| (PPTuple _), ext ->
input_error "tuples allowed in terms, but not at this level of conditions" ext
| (PPRestr _ | PPTest _ | PPLetIn _ | PPLet _ | PPLetFilter _), ext -> input_error "new, if, let allowed in terms, but not at this position in conditions" ext
| (PPFunApp((f,fext), l)), ext0 ->
match f, l with
"||", [c1;c2] ->
interpret_cond p1 (PPTest(c2,p1,p2), ext) c1
| "&&", [c1;c2] ->
if c1 & & c2 then p1 else p2
is equivalent to
if c1 then ( if c2 then p1 else p2 ) else p2
is equivalent to
if c1 then (if c2 then p1 else p2) else p2 *)
interpret_cond (PPTest(c2,p1,p2), ext) p2 c1
| "not", [c] ->
interpret_cond p2 p1 c
| "=", [t1;t2] ->
let (fex1',tex1', ty1) = check_term env t1 in
let (fex2',tex2', ty2) = check_term env t2 in
if ty1 != ty2 then
input_error "the two arguments of an equality test should have the same type" ext0;
let (fex,tex) = pairing_expand (fex1',tex1') (fex2',tex2') in
let (fexthen,texthen, tythen) = check_term env p1 in
let (fexelse,texelse, tyelse) = check_term env p2 in
if tythen != tyelse then
input_error "the then and else branches should have the same type" ext;
let lenthen = List.length texthen in
((fun l ->
let (thenpart, elsepart) = split lenthen l in
fex (List.map (fun (t1, t2) ->
Test(t1, t2, fexthen thenpart, fexelse elsepart,
new_occurrence())) tex)), texthen @ texelse, tythen)
| "<>", [t1;t2] ->
let (fex1',tex1', ty1) = check_term env t1 in
let (fex2',tex2', ty2) = check_term env t2 in
if ty1 != ty2 then
input_error "the two arguments of an inequality test should have the same type" ext0;
let (fex,tex) = pairing_expand (fex1',tex1') (fex2',tex2') in
let (fexthen,texthen, tythen) = check_term env p2 in
let (fexelse,texelse, tyelse) = check_term env p1 in
if tythen != tyelse then
input_error "the then and else branches should have the same type" ext;
let lenthen = List.length texthen in
((fun l ->
let (thenpart, elsepart) = split lenthen l in
fex (List.map (fun (t1, t2) ->
Test(t1, t2, fexthen thenpart, fexelse elsepart,
new_occurrence())) tex)), texthen @ texelse, tythen)
| ("||" | "&&" | "=" | "<>" | "not"), _ ->
internal_error ("Bad arity for special function " ^ f)
| "choice", _ ->
input_error "choice allowed in terms, but not at this level of conditions" ext0
| _ ->
let (fex, lex', tl') = check_term_list env l in
let (fexthen,texthen, tythen) = check_term env p1 in
let (fexelse,texelse, tyelse) = check_term env p2 in
if tythen != tyelse then
input_error "the then and else branches should have the same type" ext;
let lenthen = List.length texthen in
match get_pred_or_fun env (f, fext) tl' with
IsPred p' ->
((fun l ->
let (thenpart, elsepart) = split lenthen l in
fex (List.map (fun testi ->
LetFilter([], Pred(p', testi), fexthen thenpart, fexelse elsepart,
new_occurrence())) lex')), texthen @ texelse, tythen)
| IsFun f ->
((fun l ->
let (thenpart, elsepart) = split lenthen l in
fex (List.map (fun testi ->
Test(FunApp(f, testi), FunApp(true_cst, []),
fexthen thenpart, fexelse elsepart,
new_occurrence())) lex')), texthen @ texelse, tythen)
in
interpret_cond p1 p2 c
| PPLet(pat,t,p,p') ->
let (ftex, tex', ty) = check_term env t in
let (fpex, patex', env',_) = check_pat env [] (Some ty) pat in
let (fex, lex) = pairing_expand (ftex,tex') (fpex, patex') in
let (fexthen,texthen, tythen) = check_term env' p in
let (fexelse,texelse, tyelse) = check_term env p' in
if tythen != tyelse then
input_error "the in and else branches should have the same type" ext;
let lenthen = List.length texthen in
((fun l ->
let (thenpart, elsepart) = split lenthen l in
fex (List.map (fun (t', pat') ->
Let(pat', t', fexthen thenpart, fexelse elsepart,
new_occurrence())) lex)), texthen @ texelse, tythen)
| PPLetIn(pat,t,p) ->
let (ftex, tex', ty) = check_term env t in
let (fpex, patex', env',_) = check_pat env [] (Some ty) pat in
let (fex, lex) = pairing_expand (ftex,tex') (fpex, patex') in
let (fexin, texin, tyin) = check_term env' p in
((fun l ->
fex (List.map (fun (t', pat') ->
Let(pat', t', fexin l, Nil, new_occurrence())) lex)),
texin, tyin)
| PPLetFilter(identlist,(fact,ext),p,q) ->
let (env', vlist) = List.fold_left (fun (env, vlist) ((s,e),t) ->
if (StringMap.mem s env) then
input_warning ("identifier " ^ s ^ " rebound") e;
let ty = get_type t in
let v = Terms.new_var s ty in
(StringMap.add s (EVar v) env, v:: vlist)) (env,[]) identlist in
let vlist = List.rev vlist in
let (ffex, fex') = check_fact env' (fact,ext) in
(* Verify that ffex does not reference the variables of vlist *)
check_no_ref ext vlist (ffex, fex');
let (fexthen,texthen, tythen) = check_term env' p in
let (fexelse,texelse, tyelse) = check_term env q in
if tythen != tyelse then
input_error "the in and else branches should have the same type" ext;
let lenthen = List.length texthen in
((fun l ->
let (thenpart, elsepart) = split lenthen l in
ffex (List.map (fun f' ->
LetFilter(vlist, f', fexthen thenpart, fexelse elsepart,
new_occurrence())) fex')), texthen @ texelse, tythen)
(* Check a list of terms. Returns the combined expansion function, the
   list of possible expanded argument lists, and the list of the terms'
   types, combining the sub-results with pairing_expand. *)
and check_term_list env tlist =
  match tlist with
  | [] -> (no_expand_fun, [[]], [])
  | hd :: rest ->
      let (hd_fex, hd_lex, hd_ty) = check_term env hd in
      let (rest_fex, rest_lex, rest_tyl) = check_term_list env rest in
      let (fex, combined) = pairing_expand (hd_fex, hd_lex) (rest_fex, rest_lex) in
      (fex, List.map (fun (t, ts) -> t :: ts) combined, hd_ty :: rest_tyl)
(* Check an applied identifier used as a fact: either a predicate
   application, or a boolean function application converted into the
   fact f(...) = true. *)
and check_fl_term env (id, args) =
  let (fex, arg_lists, arg_tyl) = check_term_list env args in
  let mk_fact =
    match get_pred_or_fun env id arg_tyl with
    | IsPred pr -> (fun args' -> Pred(pr, args'))
    | IsFun fn ->
        (fun args' -> equal_fact (FunApp(fn, args')) (FunApp(true_cst, [])))
  in
  (fex, List.map mk_fact arg_lists)
(* Check a condition fact (used in "let ... suchthat" conditions).
   Returns (expansion_fun, list of possible facts). *)
and check_fact env' (fact, ext) =
  match fact with
    PPIdent p ->
      (* A bare identifier is treated as a nullary application *)
      check_fl_term env' (p,[])
  | PPTuple _ -> input_error "tuples not allowed here" ext
  | PPRestr _ | PPTest _ | PPLetIn _ | PPLet _ | PPLetFilter _ ->
      input_error "new, if, let allowed in terms, but not at this position in conditions" ext
  | PPFunApp((f,fext) as p,l) ->
      match f, l with
        "=", [t1;t2] ->
          (* Equality: check both sides, require equal types, build equality facts *)
          let (fex1', tex1', ty1) = check_term env' t1 in
          let (fex2', tex2', ty2) = check_term env' t2 in
          if ty1 != ty2 then
            input_error "the two arguments of an equality test should have the same type" ext;
          let (fex, lex) = pairing_expand (fex1', tex1') (fex2', tex2') in
          (fex, List.map (fun (t1', t2') -> equal_fact t1' t2') lex)
      | "=", _ -> internal_error ("Bad arity for special function " ^ f)
      | ("<>" | "&&" | "||" | "not" | "choice"), _ -> input_error (f ^ " not allowed here") fext
      | _ ->
          (* Ordinary predicate or boolean function application *)
          check_fl_term env' (p,l)
(* Check a pattern. def_in_this_pat accumulates the variables bound so
   far in the enclosing pattern; tyopt is the expected type, if known.
   Returns (expansion_fun, possible patterns, extended env, bound vars). *)
and check_pat env def_in_this_pat tyopt = function
    PPatVar ((s,e), topt) ->
      (* Determine the variable's type from its annotation and/or the
         expected type; they must agree when both are present *)
      let ty =
        match topt, tyopt with
          None, None ->
            input_error ("variable " ^ s ^ " should be declared with a type") e
        | Some (t,e), None ->
            get_type (t,e)
        | None, Some ty ->
            ty
        | Some (t,e), Some ty ->
            let ty' = get_type (t,e) in
            if ty != ty' then
              input_error ("variable " ^ s ^ " is declared of type " ^ t ^ " but should be of type " ^ ty.tname) e;
            ty
      in
      if (StringMap.mem s env) then
        input_warning ("identifier " ^ s ^ " rebound") e;
      let v = Terms.new_var s ty in
      (no_expand_fun, [PatVar v], StringMap.add s (EVar v) env, v::def_in_this_pat)
  | PPatTuple l ->
      let (fex',lex',env',def_in_this_pat') = check_pat_list dummy_ext env def_in_this_pat (List.map (fun _ -> None) l) l in
      let f = Terms.get_tuple_fun (List.map Reduction_helper.get_pat_type (List.hd lex')) in
      (fex',List.map (fun l' -> PatTuple(f, l')) lex', env', def_in_this_pat')
  | PPatFunApp((s,ext),l) ->
      begin
        try
          match StringMap.find s env with
            EFun f ->
              begin
                match tyopt with
                  None -> ()
                | Some ty ->
                    if ty != snd f.f_type then
                      input_error ("pattern is of type " ^ (snd f.f_type).tname ^ " but should be of type " ^ ty.tname) ext;
              end;
              let (fex',lex',env',def_in_this_pat') = check_pat_list ext env def_in_this_pat (List.map (fun t -> Some t) (fst f.f_type)) l in
              (* Only data constructors may appear in patterns *)
              if f.f_cat <> Tuple then
                input_error ("only data functions are allowed in patterns, not " ^ s) ext;
              (* Type converters disappear when types are ignored *)
              if (f.f_options land Param.fun_TYPECONVERTER != 0) && (!Param.ignore_types) then
                (fex', List.map (function
                  [t] -> t
                | _ -> internal_error "type converter functions should always be unary") lex', env', def_in_this_pat')
              else
                (fex', List.map (fun l' -> PatTuple(f, l')) lex', env', def_in_this_pat')
          | _ ->
              input_error ("only functions can be applied, not " ^ s) ext
        with Not_found ->
          input_error ("function " ^ s ^ " not defined") ext
      end
  | PPatEqual t ->
      let (fex', tex', ty') = check_term env t in
      (* Verify that fex' does not reference the variables of def_in_this_pat,
         which are defined in this pattern, so will not be defined before fex' *)
      check_no_ref (snd t) def_in_this_pat (fex', tex');
      begin
        match tyopt with
          None -> ()
        | Some ty ->
            if ty != ty' then
              input_error ("pattern is of type " ^ ty'.tname ^ " but should be of type " ^ ty.tname) (snd t);
      end;
      (fex', List.map (fun t' -> PatEqual t') tex', env, def_in_this_pat)
(* Check a list of patterns against a list of optional expected types,
   threading the environment and the bound-variable accumulator. *)
and check_pat_list ext env def_in_this_pat tyl tl =
  match (tl, tyl) with
  | ([], []) -> (no_expand_fun, [[]], env, def_in_this_pat)
  | (pat :: pats, ty :: tys) ->
      let (pfex, plex, env1, defs1) = check_pat env def_in_this_pat ty pat in
      let (rfex, rlex, env2, defs2) = check_pat_list ext env1 defs1 tys pats in
      let (fex, pairs) = pairing_expand (pfex, plex) (rfex, rlex) in
      (fex, List.map (fun (p', ps') -> p' :: ps') pairs, env2, defs2)
  | _ -> input_error "wrong arity for pattern" ext
let event_fun_table = Hashtbl.create 7
(* Declare an event symbol with the given argument types. When key
   compromise is active, a session identifier is prepended to the
   argument types. The symbol is registered in event_fun_table and in
   the global environment. *)
let check_event (name, ext) argtypes =
  let tyarg = List.map get_type argtypes in
  let tyarg = if !Param.key_compromise = 0 then tyarg else Param.sid_type :: tyarg in
  if StringMap.mem name (!global_env) then
    input_error ("identifier " ^ name ^ " already defined (as a free name, a function, a predicate, or a type)") ext;
  let r = { f_name = name;
            f_type = tyarg, Param.event_type;
            f_cat = Eq[];
            f_initial_cat = Eq[];
            f_private = true;
            f_options = 0 }
  in
  Hashtbl.add event_fun_table name r;
  global_env := StringMap.add name (EEvent r) (!global_env)
(* Look up an event symbol by name and check that the argument types
   match its declaration; raises an input error otherwise. *)
let get_event_fun env (s,ext) tl =
  try
    let r = StringMap.find s env in
    match r with
      EEvent p ->
        if not (Terms.eq_lists (fst p.f_type) tl) then
          input_error ("function " ^ s ^ " expects arguments of type " ^
                       (Terms.tl_to_string ", " (fst p.f_type)) ^
                       " but is given arguments of type " ^
                       (Terms.tl_to_string ", " tl)) ext;
        p
    | _ -> input_error (s ^ " should be an event") ext
  with Not_found ->
    input_error ("event " ^ s ^ " not defined") ext
(* Declare a table symbol with the given argument types and register it
   in the global environment. *)
let check_table (name, ext) argtypes =
  let tyarg = List.map get_type argtypes in
  if StringMap.mem name (!global_env) then
    input_error ("identifier " ^ name ^ " already defined (as a free name, a function, a predicate, or a type)") ext;
  let r = { f_name = name;
            f_type = tyarg, Param.table_type;
            f_cat = Eq[];
            f_initial_cat = Eq[];
            f_private = true;
            f_options = 0 }
  in
  global_env := StringMap.add name (ETable r) (!global_env)
(* Look up a table symbol by name and check that the argument types
   match its declaration; raises an input error otherwise. *)
let get_table_fun env (s,ext) tl =
  try
    let r = StringMap.find s env in
    match r with
      ETable p ->
        if not (Terms.eq_lists (fst p.f_type) tl) then
          input_error ("table " ^ s ^ " expects arguments of type " ^
                       (Terms.tl_to_string ", " (fst p.f_type)) ^
                       " but is given arguments of type " ^
                       (Terms.tl_to_string ", " tl)) ext;
        p
    | _ -> input_error (s ^ " should be a table") ext
  with Not_found ->
    (* Bug fix: this message previously said "event" — a copy-paste from
       get_event_fun; the undefined identifier here is a table. *)
    input_error ("table " ^ s ^ " not defined") ext
(* Does the term contain a destructor application? Function symbols of
   category Eq, Tuple, Choice, or Name count as constructors; any other
   category counts as a destructor. *)
let rec has_destr t =
  match t with
  | Var _ -> false
  | FunApp(f, args) ->
      let is_destr =
        match f.f_cat with
        | Eq _ | Tuple | Choice | Name _ -> false
        | _ -> true
      in
      is_destr || List.exists has_destr args
(* Check and translate a parsed process into the internal process
   representation, expanding terms whose evaluation may need
   destructor-application lets. *)
let rec check_process env = function
    PNil -> Nil
  | PPar (p1,p2) ->
      Par(check_process env p1, check_process env p2)
  | PRepl p ->
      Repl(check_process env p, new_occurrence())
  | PTest(c,p1,p2) ->
      (* Conditions are interpreted by recursion on their boolean
         structure; p1 is the then-branch, p2 the else-branch *)
      let rec interpret_cond p1 p2 = function
          (PPIdent pred), ext -> interpret_cond p1 p2 (PPFunApp(pred,[]), ext)
        | (PPTuple _), ext ->
            input_error "tuples allowed in terms, but not at this level of conditions" ext
        | (PPRestr _ | PPTest _ | PPLetIn _ | PPLet _ | PPLetFilter _), ext -> input_error "new, if, let allowed in terms, but not at this position in conditions" ext
        | (PPFunApp((f,fext), l)), ext ->
            match f, l with
              "||", [c1;c2] ->
                (* if c1 || c2 then p1 else p2 becomes
                   if c1 then p1 else (if c2 then p1 else p2) *)
                interpret_cond p1 (PTest(c2,p1,p2)) c1
            | "&&", [c1;c2] ->
                (* if c1 && c2 then p1 else p2
                   is equivalent to
                   if c1 then (if c2 then p1 else p2) else p2 *)
                interpret_cond (PTest(c2,p1,p2)) p2 c1
            | "not", [c] ->
                (* negation swaps the branches *)
                interpret_cond p2 p1 c
            | "=", [t1;t2] ->
                let (fex1',tex1', ty1) = check_term env t1 in
                let (fex2',tex2', ty2) = check_term env t2 in
                if ty1 != ty2 then
                  input_error "the two arguments of an equality test should have the same type" ext;
                let (fex,tex) = pairing_expand (fex1',tex1') (fex2',tex2') in
                fex (List.map (fun (t1',t2') ->
                  Test(t1', t2',
                       check_process env p1,
                       check_process env p2,
                       new_occurrence())) tex)
            | "<>", [t1;t2] ->
                (* disequality is an equality test with swapped branches *)
                let (fex1',tex1', ty1) = check_term env t1 in
                let (fex2',tex2', ty2) = check_term env t2 in
                if ty1 != ty2 then
                  input_error "the two arguments of an inequality test should have the same type" ext;
                let (fex,tex) = pairing_expand (fex1',tex1') (fex2',tex2') in
                fex (List.map (fun (t1',t2') ->
                  Test(t1', t2',
                       check_process env p2,
                       check_process env p1,
                       new_occurrence())) tex)
            | ("||" | "&&" | "=" | "<>" | "not"), _ ->
                internal_error ("Bad arity for special function " ^ f)
            | "choice", _ ->
                input_error "choice allowed in terms, but not at this level of conditions" ext
            | _ ->
                (* predicate call or boolean function used as a condition *)
                let (fex, lex', tl') = check_term_list env l in
                match get_pred_or_fun env (f,fext) tl' with
                  IsPred p' ->
                    fex (List.map (fun f ->
                      LetFilter([], Pred(p', f),
                                check_process env p1,
                                check_process env p2,
                                new_occurrence())) lex')
                | IsFun f' ->
                    fex (List.map (fun f ->
                      Test(FunApp(f', f), FunApp(true_cst, []),
                           check_process env p1,
                           check_process env p2,
                           new_occurrence())) lex')
      in
      interpret_cond p1 p2 c
  | PLetDef ((s,ext), args) ->
      (* Call of a defined process: substitute arguments for parameters;
         arguments containing destructors are bound by a Let instead of
         being substituted directly *)
      let (fex, tlex, tyl) = check_term_list env args in
      begin
        try
          let (param, p') = Hashtbl.find pdeftbl s in
          let ptype = List.map (fun b -> b.btype) param in
          if not (Terms.eq_lists ptype tyl) then
            input_error ("process " ^ s ^ " expects arguments of type " ^
                         (Terms.tl_to_string ", " ptype) ^
                         " but is given arguments of type " ^
                         (Terms.tl_to_string ", " tyl)) ext;
          fex (List.map (fun tl ->
            if !Terms.current_bound_vars != [] then
              Parsing_helper.internal_error "bound vars should be cleaned up (pitsyntax)";
            let p = ref p' in
            List.iter2 (fun t v ->
              if has_destr t then
                p := Let(PatVar v, t, (!p), Nil, new_occurrence())
              else
                Terms.link v (TLink t)) tl param;
            let p'' = copy_process false (!p) in
            Terms.cleanup();
            p'') tlex)
        with Not_found ->
          input_error ("process " ^ s ^ " not defined") ext
      end
  | PRestr((s,ext),t,p) ->
      let ty = get_type t in
      if (StringMap.mem s env) then
        input_warning ("identifier " ^ s ^ " rebound") ext;
      let r = create_name s (Param.tmp_type, ty) false in
      Restr(r, check_process (StringMap.add s (EName r) env) p)
  | PInput(tc,pat,p) ->
      let (ftexc, texc', tyc) = check_term env tc in
      if tyc != Param.channel_type then
        input_error ("this term has type " ^ tyc.tname ^ " but should have type channel") (snd tc);
      let (fpex, patex',env',_) = check_pat env [] None pat in
      let (fex, lex) = pairing_expand (ftexc,texc') (fpex, patex') in
      fex (List.map (fun (tc', pat') ->
        Input(tc', pat', check_process env' p,
              new_occurrence())) lex)
  | POutput(tc,t,p) ->
      let (ftexc, texc', tyc) = check_term env tc in
      if tyc != Param.channel_type then
        input_error ("this term has type " ^ tyc.tname ^ " but should have type channel") (snd tc);
      let (ftex, tex, ty) = check_term env t in
      let (fex, lex) = pairing_expand (ftexc,texc') (ftex, tex) in
      fex (List.map (fun (tc', t) ->
        Output(tc', t, check_process env p,
               new_occurrence())) lex)
  | PLet(pat,t,p,p') ->
      let (ftex, tex', ty) = check_term env t in
      let (fpex, patex', env',_) = check_pat env [] (Some ty) pat in
      let (fex, lex) = pairing_expand (ftex,tex') (fpex, patex') in
      fex (List.map (fun (t', pat') ->
        Let(pat', t', check_process env' p,
            check_process env p',
            new_occurrence())) lex)
  | PLetFilter(identlist,(fact,ext),p,q) ->
      (* "let ... suchthat fact": bind the listed variables, check the
         fact in the extended environment *)
      let (env', vlist) = List.fold_left (fun (env, vlist) ((s,e),t) ->
        if (StringMap.mem s env) then
          input_warning ("identifier " ^ s ^ " rebound") e;
        let ty = get_type t in
        let v = Terms.new_var s ty in
        (StringMap.add s (EVar v) env, v:: vlist)) (env,[]) identlist in
      let vlist = List.rev vlist in
      let (ffex, fex') = check_fact env' (fact,ext) in
      (* Verify that ffex does not reference the variables of vlist *)
      check_no_ref ext vlist (ffex, fex');
      ffex (List.map (fun f' ->
        LetFilter(vlist, f', check_process env' p,
                  check_process env q,
                  new_occurrence())) fex')
  | PEvent((i,ext),l,p) ->
      let (fex, lex', tl) = check_term_list env l in
      if !Param.key_compromise == 0 then
        let f = get_event_fun env (i,ext) tl in
        fex (List.map (fun l' ->
          Event(FunApp(f, l'), check_process env p,
                new_occurrence())) lex')
      else
        (* key compromise: events carry an extra session identifier *)
        let f = get_event_fun env (i,ext) (Param.sid_type :: tl) in
        fex (List.map (fun l' ->
          Event(FunApp(f, (Terms.new_var_def Param.sid_type) :: l'),
                check_process env p,
                new_occurrence())) lex')
  | PInsert((i,ext),l,p) ->
      let (fex, lex', tl) = check_term_list env l in
      let f = get_table_fun env (i,ext) tl in
      fex (List.map (fun l' ->
        Insert(FunApp(f, l'), check_process env p,
               new_occurrence())) lex')
  | PGet((i,ext),patl,t,p) ->
      begin
        try
          match StringMap.find i env with
            ETable f ->
              (* TO DO when check_term will allow &&, ||, <>, = (thanks to destructors
                 with inequality conditions), check_term will be enough instead of interpret_cond *)
              let rec interpret_cond env = function
                  (PPFunApp((f,fext), l)), ext0 when (f = "&&" || f = "=") ->
                    begin
                      match f, l with
                        "&&", [c1;c2] ->
                          let (fex1',tex1', ty1) = interpret_cond env c1 in
                          let (fex2',tex2', ty2) = interpret_cond env c2 in
                          if ty1 != Param.bool_type then
                            input_error "this argument of && should be a boolean" (snd c1);
                          if ty2 != Param.bool_type then
                            input_error "this argument of && should be a boolean" (snd c2);
                          let (fex',lex') = pairing_expand (fex1',tex1') (fex2',tex2') in
                          (fex', List.map (function (t1,t2) -> FunApp(Terms.and_fun, [t1;t2])) lex', Param.bool_type)
                      | "=", [t1;t2] ->
                          let (fex1',tex1', ty1) = check_term env t1 in
                          let (fex2',tex2', ty2) = check_term env t2 in
                          if ty1 != ty2 then
                            input_error "the two arguments of an equality test should have the same type" ext0;
                          let (fex',lex') = pairing_expand (fex1',tex1') (fex2',tex2') in
                          let feq = Terms.equal_fun ty1 in
                          (fex', List.map (function (t1,t2) -> FunApp(feq, [t1;t2])) lex', Param.bool_type)
                      | _ ->
                          internal_error ("Bad arity for special function " ^ f)
                    end
                | t -> check_term env t
              in
              let (fex',lex',env',_) = check_pat_list ext env [] (List.map (fun t -> Some t) (fst f.f_type)) patl in
              let (ftex, tex', ty) = interpret_cond env' t in
              if ty != Param.bool_type then
                input_error ("this term has type " ^ ty.tname ^ " but should have type bool") (snd t);
              let (fex, lex) = pairing_expand (fex', lex') (ftex,tex') in
              fex (List.map (fun (l', t') ->
                Get(PatTuple(f, l'), t', check_process env' p,
                    new_occurrence())) lex)
          | _ ->
              input_error ("only functions can be applied, not " ^ i) ext
        with Not_found ->
          input_error ("function " ^ i ^ " not defined") ext
      end
  | PPhase(n, p) ->
      Phase(n, check_process env p)
(* Accumulators filled while checking declarations; consumed after the
   whole input file has been processed. *)
let query_list = ref ([] : (envdecl * tquery list) list)
let need_vars_in_names = Reduction_helper.need_vars_in_names
let noninterf_list = ref ([] : (funsymb * term list option) list list)
let not_list = ref ([] : (envdecl * gterm_e) list)
let nounif_list = ref ([] : (envdecl * nounif_t) list)
let weaksecret_list = ref ([] : funsymb list)
(* Scan a query term and record in need_vars_in_names the variables
   explicitly bound inside names (PGName bindings), so they are kept as
   name arguments. *)
let rec nvn_t (term, ext0) =
  match term with
    PGIdent _ -> ()
  | PGFunApp(_,l) -> List.iter nvn_t l
  | PGPhase(_,l, _) -> List.iter nvn_t l
  | PGTuple l -> List.iter nvn_t l
  | PGName ((s,ext),bl) ->
      List.iter (fun ((s',ext'),t) ->
        (* bindings whose name starts with '!' are generated session ids *)
        if (s' <> "") && (s'.[0] != '!') then
          begin
            try
              let r = Hashtbl.find glob_table s in
              need_vars_in_names := (r.f_name, s',ext') :: (!need_vars_in_names)
            with Not_found ->
              ()
          end;
        nvn_t t
        ) bl
  | PGLet(_,t,t') -> nvn_t t; nvn_t t'
(* Record needed variables-in-names for one query declaration. *)
let nvn_q q =
  match q with
  | PRealQuery t -> nvn_t t
  | PPutBegin(_, _) -> ()
(* Same traversal as nvn_t, but over query formats (used by not/nounif
   declarations). *)
let rec nvn_f (f,ext0) =
  match f with
    PFGIdent (s,ext) -> ()
  | PFGFunApp((s,ext),l) -> List.iter nvn_f l
  | PFGTuple l -> List.iter nvn_f l
  | PFGName ((s,ext),bl) ->
      List.iter (fun ((s',ext'),t) ->
        (* bindings whose name starts with '!' are generated session ids *)
        if (s' <> "") && (s'.[0] != '!') then
          begin
            try
              let r = Hashtbl.find glob_table s in
              need_vars_in_names := (r.f_name, s',ext') :: (!need_vars_in_names)
            with Not_found ->
              ()
          end;
        nvn_f t
        ) bl
  | PFGAny (s,ext) -> ()
  | PFGLet(_,t,t') -> nvn_f t; nvn_f t'
(* Record needed variables-in-names for a nounif declaration. *)
let rec nvn_nounif nu =
  match nu with
  | BFLet(_, fmt, rest) ->
      nvn_f fmt;
      nvn_nounif rest
  | BFNoUnif((_, fmt_list, _), _) ->
      List.iter nvn_f fmt_list
(* Macro expansion *)
(* Macro-expansion state: the table of defined macros, the
   identifier-renaming map of the current expansion, and a counter that
   makes renamed identifiers unique across expansions. *)
let macrotable = ref StringMap.empty
let rename_table = ref StringMap.empty
let expansion_number = ref 0
(* Rename an identifier during macro expansion: reserved identifiers and
   "!"-prefixed session identifiers are kept unchanged; an identifier
   already seen reuses its recorded renaming; otherwise a fresh
   "@<expansion>_<ident>" name is created and recorded. *)
let rename_ident i =
  match i with
    "=" | "<>" | "not" | "&&" | "||" | "event" | "inj-event" | "==>" | "choice" -> i
  | _ -> if i.[0] = '!' then i else
      try
        StringMap.find i (!rename_table)
      with Not_found ->
        let r = "@" ^ (string_of_int (!expansion_number)) ^ "_" ^ i in
        rename_table := StringMap.add i r (!rename_table);
        r
let rename_ie (i,ext) = (rename_ident i, ext)
(* Rename every identifier occurring in a clause term, preserving the
   extent attached to each node. *)
let rec rename_term (t, ext) =
  match t with
  | PIdent i -> (PIdent (rename_ie i), ext)
  | PFunApp(f, args) -> (PFunApp(rename_ie f, List.map rename_term args), ext)
  | PTuple elts -> (PTuple (List.map rename_term elts), ext)
(* Rename every identifier occurring in a fact format. *)
let rec rename_format fmt =
  match fmt with
  | PFIdent i -> PFIdent (rename_ie i)
  | PFAny i -> PFAny (rename_ie i)
  | PFFunApp(f, args) -> PFFunApp(rename_ie f, List.map rename_format args)
  | PFTuple elts -> PFTuple (List.map rename_format elts)
  | PFName _ -> internal_error "Names not allowed in formats with -in pi"
let rename_format_fact (i,l) = (rename_ie i, List.map rename_format l)
(* Rename every identifier occurring in a query format, preserving
   extents. *)
let rec rename_gformat (t,ext) =
  let t' = match t with
    PFGIdent i -> PFGIdent (rename_ie i)
  | PFGFunApp(f,l) -> PFGFunApp(rename_ie f, List.map rename_gformat l)
  | PFGTuple l -> PFGTuple(List.map rename_gformat l)
  | PFGName(i,l) -> PFGName(rename_ie i, List.map (fun (i,t) -> (rename_ie i, rename_gformat t)) l)
  | PFGAny i -> PFGAny (rename_ie i)
  | PFGLet(i,t,t') -> PFGLet(rename_ie i, rename_gformat t, rename_gformat t')
  in
  (t',ext)
(* Rename every identifier occurring in a nounif declaration. *)
let rec rename_nounif nu =
  match nu with
  | BFLet(id, fmt, rest) ->
      BFLet(rename_ie id, rename_gformat fmt, rename_nounif rest)
  | BFNoUnif((id, fmts, n'), n) ->
      BFNoUnif((rename_ie id, List.map rename_gformat fmts, n'), n)
(* Rename every identifier occurring in a query term, preserving
   extents. *)
let rec rename_gterm (t,ext) =
  let t' = match t with
    PGIdent i -> PGIdent (rename_ie i)
  | PGFunApp(f,l) -> PGFunApp(rename_ie f, List.map rename_gterm l)
  | PGPhase(i,l,n) -> PGPhase(rename_ie i, List.map rename_gterm l, n)
  | PGTuple l -> PGTuple(List.map rename_gterm l)
  | PGName(i,l) -> PGName(rename_ie i, List.map (fun (i,t) -> (rename_ie i, rename_gterm t)) l)
  | PGLet(i,t,t') -> PGLet(rename_ie i, rename_gterm t, rename_gterm t')
  in
  (t',ext)
(* Rename every identifier occurring in a query declaration. *)
let rename_query q =
  match q with
  | PRealQuery t -> PRealQuery (rename_gterm t)
  | PPutBegin(b, ids) -> PPutBegin(b, List.map rename_ie ids)
(* Rename every identifier occurring in a Horn clause declaration. *)
let rename_clause cl =
  match cl with
  | PClause(hyp, concl) -> PClause(rename_term hyp, rename_term concl)
  | PFact f -> PFact (rename_term f)
  | PEquiv(hyp, concl, select) ->
      PEquiv(rename_term hyp, rename_term concl, select)
(* Rename every identifier occurring in a process-calculus term,
   preserving extents. *)
let rec rename_pterm (t,ext) =
  let t' = match t with
    PPIdent i -> PPIdent (rename_ie i)
  | PPFunApp(f,l) -> PPFunApp(rename_ie f, List.map rename_pterm l)
  | PPTuple(l) -> PPTuple(List.map rename_pterm l)
  | PPRestr(i,ty,t) -> PPRestr(rename_ie i, rename_ie ty, rename_pterm t)
  | PPTest(t1,t2,t3) -> PPTest(rename_pterm t1, rename_pterm t2, rename_pterm t3)
  | PPLetIn(pat, t1, t2) -> PPLetIn(rename_pat pat, rename_pterm t1, rename_pterm t2)
  | PPLet(pat, t1, t2, t3) -> PPLet(rename_pat pat, rename_pterm t1, rename_pterm t2, rename_pterm t3)
  | PPLetFilter(l, t1, t2, t3) -> PPLetFilter(List.map(fun (i,ty) -> (rename_ie i, rename_ie ty)) l, rename_pterm t1, rename_pterm t2, rename_pterm t3)
  in
  (t',ext)

(* Rename every identifier occurring in a pattern. *)
and rename_pat = function
    PPatVar(i,tyopt) -> PPatVar(rename_ie i, match tyopt with
      None -> None
    | Some ty -> Some (rename_ie ty))
  | PPatTuple l -> PPatTuple(List.map rename_pat l)
  | PPatFunApp(f,l) -> PPatFunApp(rename_ie f, List.map rename_pat l)
  | PPatEqual t -> PPatEqual (rename_pterm t)
(* Rename every identifier occurring in a process. *)
let rec rename_process = function
    PNil -> PNil
  | PPar(p1,p2) -> PPar(rename_process p1, rename_process p2)
  | PRepl(p) -> PRepl(rename_process p)
  | PRestr(i,ty,p) -> PRestr(rename_ie i, rename_ie ty, rename_process p)
  | PLetDef(i,l) -> PLetDef(rename_ie i, List.map rename_pterm l)
  | PTest(t,p1,p2) -> PTest(rename_pterm t, rename_process p1, rename_process p2)
  | PInput(t,pat,p) -> PInput(rename_pterm t, rename_pat pat, rename_process p)
  | POutput(t1,t2,p) -> POutput(rename_pterm t1, rename_pterm t2, rename_process p)
  | PLet(pat, t, p1, p2) -> PLet(rename_pat pat, rename_pterm t, rename_process p1, rename_process p2)
  | PLetFilter(l, t, p1, p2) -> PLetFilter(List.map (fun (i,ty) -> (rename_ie i, rename_ie ty)) l, rename_pterm t, rename_process p1, rename_process p2)
  | PEvent(i,l,p) -> PEvent(rename_ie i ,List.map rename_pterm l, rename_process p)
  | PInsert(i,l,p) -> PInsert(rename_ie i ,List.map rename_pterm l, rename_process p)
  | PGet(i,patl,t,p) -> PGet(rename_ie i ,List.map rename_pat patl, rename_pterm t, rename_process p)
  | PPhase(n,p) -> PPhase(n, rename_process p)
let rename_env env = List.map (fun (i,ty) -> (rename_ie i, rename_ie ty)) env
(* Rename every identifier occurring in a declaration, for macro
   expansion. "set", macro definitions, and macro expansions must not
   occur inside macro bodies. *)
let rename_decl = function
    TTypeDecl i -> TTypeDecl (rename_ie i)
  | TFunDecl(i,l,ty,opt) -> TFunDecl(rename_ie i, List.map rename_ie l, rename_ie ty, opt)
  | TEventDecl(i,l) -> TEventDecl(rename_ie i, List.map rename_ie l)
  | TTableDecl(i,l) -> TTableDecl(rename_ie i, List.map rename_ie l)
  | TConstDecl(i,ty,opt) -> TConstDecl(rename_ie i, rename_ie ty, opt)
  | TReduc(l,opt) -> TReduc(List.map (fun (env,t1,t2) -> (rename_env env, rename_term t1, rename_term t2)) l, opt)
  | TEquation(env, t1, t2) -> TEquation(rename_env env, rename_term t1, rename_term t2)
  | TPredDecl(i,l,opt) -> TPredDecl(rename_ie i, List.map rename_ie l, opt)
  | TSet ((_,ext),_) ->
      input_error "set is not allowed inside macro definitions" ext
  | TPDef(i,env,p) -> TPDef(rename_ie i, rename_env env, rename_process p)
  | TQuery(env, l) -> TQuery(rename_env env, List.map rename_query l)
  | TNoninterf(env, l) -> TNoninterf(rename_env env, List.map (fun (i,tlopt) ->
      (rename_ie i, match tlopt with
        None -> None
      | Some tl -> Some (List.map rename_term tl))) l)
  | TWeaksecret i -> TWeaksecret (rename_ie i)
  | TNoUnif(env, nounif) -> TNoUnif(rename_env env, rename_nounif nounif)
  | TNot(env, t) -> TNot(rename_env env, rename_gterm t)
  | TElimtrue(env, f) -> TElimtrue(rename_env env, rename_term f)
  | TFree(i,ty, opt) -> TFree(rename_ie i, rename_ie ty, opt)
  | TClauses l -> TClauses (List.map (fun (env, cl) -> (rename_env env, rename_clause cl)) l)
  | TDefine((s1,ext1),argl,def) ->
      input_error "macro definitions are not allowed inside macro definitions" ext1
  | TExpand((s1,ext1),argl) ->
      internal_error "macro-expansion inside a macro should have been expanded at macro definition point"
  | TLetFun(i,env,t) -> TLetFun(rename_ie i, rename_env env, rename_pterm t)
(* Expand a macro body: macro parameters are renamed to the given
   arguments, identifiers already defined at macro-definition time keep
   their names, and every other identifier receives a fresh name via
   rename_decl. The rename table is reset before and after. *)
let apply argl paraml already_def def =
  rename_table := StringMap.empty;
  incr expansion_number;
  List.iter (fun s ->
    rename_table := StringMap.add s s (!rename_table)) already_def;
  List.iter2 (fun (a,_) (p,_) ->
    rename_table := StringMap.add p a (!rename_table)) argl paraml;
  let def' = List.map rename_decl def in
  rename_table := StringMap.empty;
  def'
(* Check a single declaration, updating the global environment and the
   various accumulator lists as a side effect. *)
let rec check_one = function
    TTypeDecl(i) -> check_type_decl i
  | TFunDecl(f,argt,rest,i) -> check_fun_decl f argt rest i
  | TConstDecl(f,rest,i) -> check_fun_decl f [] rest i
  | TEquation(env,t1,t2) -> check_equation env t1 t2
  | TReduc (r,i) -> check_red r i
  | TPredDecl (p, argt, info) -> check_pred p argt info
  | TEventDecl(i, args) -> check_event i args
  | TTableDecl(i, args) -> check_table i args
  | TPDef ((s,ext), args, p) ->
      (* Process definition: bind the parameters as fresh variables,
         check the body, and record it in pdeftbl *)
      let env = ref (!global_env) in
      let arglist = List.map (fun ((s',ext'),ty) ->
        let t = get_type ty in
        begin
          try
            match StringMap.find s' (!env) with
              EVar _ -> input_error ("variable " ^ s' ^ " already defined") ext'
            | _ -> ()
          with Not_found ->
            ()
        end;
        let v = Terms.new_var s' t in
        env := StringMap.add s' (EVar v) (!env);
        v
        ) args
      in
      let p' = check_process (!env) p in
      Hashtbl.add pdeftbl s (arglist, p')
  | TQuery (env,q) ->
      query_list := (env,q) :: (!query_list)
  | TNoninterf (env, lnoninterf) ->
      noninterf_list := (List.map (get_non_interf env) lnoninterf) :: (!noninterf_list);
  | TWeaksecret i ->
      weaksecret_list := (get_non_interf_name (!global_env) i) ::(!weaksecret_list)
  | TNoUnif (env, nounif) ->
      nounif_list := (env, nounif) :: (!nounif_list)
  | TElimtrue(env, fact) ->
      let env = create_env env in
      Param.elim_true := (check_simple_fact env fact) :: (!Param.elim_true)
  | TNot (env, no) ->
      not_list := (env, no) :: (!not_list)
  | TFree (name,ty,i) ->
      add_free_name name ty i
  | TClauses c ->
      List.iter check_clause c
  | TLetFun ((s,ext), args, p) ->
      (* Term-level function definition: bind the parameters, check the
         body term, and store the result in the global environment *)
      let env = ref (!global_env) in
      let arglist = List.map (fun ((s',ext'),ty) ->
        let t = get_type ty in
        begin
          try
            match StringMap.find s' (!env) with
              EVar _ -> input_error ("variable " ^ s' ^ " already defined") ext'
            | _ -> ()
          with Not_found ->
            ()
        end;
        let v = Terms.new_var s' t in
        env := StringMap.add s' (EVar v) (!env);
        v
        ) args
      in
      let (fex, tex, ty) = check_term (!env) p in
      global_env := StringMap.add s (ELetFun(arglist, fex, tex, ty)) (!global_env)
  | TDefine((s1,ext1),argl,def) ->
      (* Macro definition: nested expansions are expanded now, and the
         identifiers defined so far are recorded so the macro body can
         refer to them without renaming *)
      if StringMap.mem s1 (!macrotable) then
        input_error ("Macro " ^ s1 ^ " already defined.") ext1
      else
        let rec expand_inside_macro = function
            TDefine((s,ext),_,_)::l ->
              input_error "macro definitions are not allowed inside macro definitions" ext
          | TExpand((s2,ext2), argl2)::l ->
              begin
                try
                  let (paraml2, def2, already_def2) = StringMap.find s2 (!macrotable) in
                  if List.length argl2 != List.length paraml2 then
                    input_error ("Macro " ^ s2 ^ " expects " ^ (string_of_int (List.length paraml2)) ^
                                 " arguments, but is here given " ^ (string_of_int (List.length argl2)) ^ " arguments.") ext2;
                  (apply argl2 paraml2 already_def2 def2) @ (expand_inside_macro l)
                with Not_found ->
                  input_error ("Macro " ^ s2 ^ " not defined.") ext2
              end
          | a::l -> a::(expand_inside_macro l)
          | [] -> []
        in
        let def = expand_inside_macro def in
        let already_def = ref [] in
        StringMap.iter (fun s _ -> already_def := s :: (!already_def)) (!global_env);
        macrotable := StringMap.add s1 (argl, def, !already_def) (!macrotable)
  | TExpand((s1,ext1),argl) ->
      (* Macro expansion: substitute arguments and check the resulting
         declarations *)
      begin
        try
          let (paraml, def, already_def ) = StringMap.find s1 (!macrotable) in
          if List.length argl != List.length paraml then
            input_error ("Macro " ^ s1 ^ " expects " ^ (string_of_int (List.length paraml)) ^
                         " arguments, but is here given " ^ (string_of_int (List.length argl)) ^ " arguments.") ext1;
          List.iter check_one (apply argl paraml already_def def)
        with Not_found ->
          input_error ("Macro " ^ s1 ^ " not defined.") ext1
      end
  | TSet _ -> internal_error "set declaration should have been handled before"
(* Walk the process and record in Param.max_used_phase the highest
   phase number that occurs. *)
let rec set_max_used_phase = function
    Nil -> ()
  | Par(p1,p2) -> set_max_used_phase p1; set_max_used_phase p2
  | Repl (p,_) -> set_max_used_phase p
  | Restr(n,p) -> set_max_used_phase p
  | Test(_,_,p1,p2,_) -> set_max_used_phase p1; set_max_used_phase p2
  | Input(_,_, p,_) -> set_max_used_phase p
  | Output(_,_,p,_) -> set_max_used_phase p
  | Let(_,_,p1, p2,_) -> set_max_used_phase p1; set_max_used_phase p2
  | LetFilter(_,_,p,q,_) -> set_max_used_phase p; set_max_used_phase q
  | Event(_,p,_) -> set_max_used_phase p
  | Insert(_,p,_) -> set_max_used_phase p
  | Get(_,_,p,_) -> set_max_used_phase p
  | Phase(n,p) ->
      if n > !Param.max_used_phase then
        Param.max_used_phase := n;
      set_max_used_phase p
(* Main entry point: parse the input file (plus library), interpret the
   "set" parameter declarations first (they influence later checking),
   check every other declaration, then check and expand the main
   process. Returns the translated process. *)
let parse_file s =
  init_fun_decl();
  let (decl, proc) = parse_with_lib s in
  (* First pass: handle all settings *)
  List.iter (function
      TSet((p,ext),v) ->
        begin
          match (p,v) with
            "attacker", S ("passive",_) -> Param.active_attacker := false
          | "attacker", S ("active",_) -> Param.active_attacker := true
          | "keyCompromise", S ("strict",_) -> Param.key_compromise := 2
          | "keyCompromise", S ("approx",_) -> Param.key_compromise := 1
          | "keyCompromise", S ("none",_) -> Param.key_compromise := 0
          | "movenew", _ -> Param.boolean_param Param.move_new p ext v
          | "verboseClauses", S ("explained",_) -> Param.verbose_explain_clauses := Param.ExplainedClauses
          | "verboseClauses", S ("short",_) -> Param.verbose_explain_clauses := Param.Clauses
          | "verboseClauses", S ("none",_) -> Param.verbose_explain_clauses := Param.NoClauses
          | "explainDerivation", _ -> Param.boolean_param Param.explain_derivation p ext v
          | "predicatesImplementable", S("check",_) -> Param.check_pred_calls := true
          | "predicatesImplementable", S("nocheck",_) -> Param.check_pred_calls := false
          | "eqInNames", _ ->
              Param.boolean_param Param.eq_in_names p ext v;
              (* trace reconstruction is unsupported with eqInNames *)
              if !Param.eq_in_names then Param.reconstruct_trace := false
          | "reconstructTrace", _ -> Param.boolean_param Param.reconstruct_trace p ext v
          | "traceBacktracking", _ -> Param.boolean_param Param.trace_backtracking p ext v
          | "unifyDerivation", _ -> Param.boolean_param Param.unify_derivation p ext v
          | "traceDisplay", S ("none",_) -> Param.trace_display := Param.NoDisplay
          | "traceDisplay", S ("short",_) -> Param.trace_display := Param.ShortDisplay
          | "traceDisplay", S ("long",_) -> Param.trace_display := Param.LongDisplay
          | "ignoreTypes", S (("all" | "true"), _) -> Param.ignore_types := true
          | "ignoreTypes", S ("attacker", _) -> Param.ignore_types := false; Param.untyped_attacker := true
          | "ignoreTypes", S (("none" | "false"), _) -> Param.ignore_types := false; Param.untyped_attacker := false
          | _,_ -> Param.common_parameters p ext v
        end
    | _ -> ()) decl;
  (* Second pass: check all non-"set" declarations *)
  List.iter (function
      TSet _ -> ()
    | x -> check_one x) decl;
  let p = Terms.auto_cleanup (fun () ->
    copy_process true (check_process (!global_env) proc))
  in
  (* Record which variables must be kept in names for queries/not/nounif *)
  List.iter (fun (_, q) -> List.iter nvn_q q) (!query_list);
  List.iter (fun (_, no) -> nvn_t no) (!not_list);
  List.iter (fun (_, nounif) -> nvn_nounif nounif) (!nounif_list);
  (* With strict key compromise, phase 1 is always used *)
  if !Param.key_compromise = 2 then
    Param.max_used_phase := 1
  else
    set_max_used_phase p;
  p
(* Print the declared function symbols with their argument and result
   types, for debugging. *)
let display () =
  print_string "Functions ";
  let show _ fsymb =
    let args = Terms.tl_to_string ", " (fst fsymb.f_type) in
    let res = (snd fsymb.f_type).tname in
    print_string (fsymb.f_name ^ "(" ^ args ^ "):" ^ res ^ ". ")
  in
  Hashtbl.iter show fun_decls;
  print_string "\n"
let non_compromised_session = FunApp(Param.session1, [])
(* Note: when check_query, get_queries are applied before the
   translation of the process into Horn clauses has been done,
   the arity of names may not be correctly initialized. In this case,
   update_arity_names should be called after the translation of the
   process to update it. *)
(* Resolve a lone identifier (s, ext) in env to a term.
   Accepts variables (following TLink links), free names, and zero-ary
   functions; anything else is reported as an input error. *)
let get_ident_any env (s, ext) =
   try
     match StringMap.find s env with
       EVar b ->
         begin
           match b.link with
             TLink t -> t
           | NoLink -> Var b
           | _ -> internal_error "unexpected link in get_ident_any"
         end
     | EName r ->
         FunApp(r, [])
     | EFun f ->
         if fst f.f_type == [] then
           FunApp(f,[])
         else
           (* Message wording kept consistent with fget_ident_any
              (was the ungrammatical "has expects"). *)
           input_error ("function " ^ s ^ " expects " ^
                        (string_of_int (List.length (fst f.f_type))) ^
                        " arguments but is used without arguments") ext
     | _ -> input_error ("identifier " ^ s ^ " should be a variable, a free name, or a function") ext
   with Not_found ->
     input_error ("identifier " ^ s ^ " not defined") ext
(* Typecheck a term occurring in a query. Returns (term', type).
   Handles identifiers, function applications (constructors and tuples
   only — destructors defined by "reduc" are rejected), tuples, bound
   names with explicit argument bindings, and let-bindings. *)
let rec check_query_term env (term, ext0) =
  match term with
    PGIdent i ->
      let t = get_ident_any env i in
      (t, Terms.get_term_type t)
  | PGPhase _ -> input_error ("phase unexpected in query terms") ext0
  | PGFunApp((s,ext),l) ->
      (* Logical connectives and event keywords are not terms. *)
      if List.mem s ["="; "<>"; "==>"; "&&"; "||"; "event"; "inj-event"] then
        input_error (s ^ " unexpected in query terms") ext;
      begin
        try
          match StringMap.find s env with
            EFun f ->
              (match f.f_cat with
                 Eq _ | Tuple -> ()
               | _ -> input_error ("function " ^ s ^ " is defined by \"reduc\". Such a function should not be used in a query") ext);
              let (l', tl') = List.split (List.map (check_query_term env) l) in
              if Terms.eq_lists (fst f.f_type) tl' then
                (* Type-converter applications vanish when types are ignored. *)
                if (f.f_options land Param.fun_TYPECONVERTER != 0) && (!Param.ignore_types) then
                  match l' with
                    [t] -> (t, snd f.f_type)
                  | _ -> internal_error "type converter functions should always be unary"
                else
                  (FunApp(f, l'), snd f.f_type)
              else
                input_error ("function " ^ s ^ " expects arguments of type " ^
                             (Terms.tl_to_string ", " (fst f.f_type)) ^
                             " but is given arguments of type " ^
                             (Terms.tl_to_string ", " tl')) ext
          | _ -> input_error("only functions can be applied, not " ^ s) ext
        with Not_found ->
          input_error ("function " ^ s ^ " not defined") ext
      end
  | PGTuple l ->
      let (l', tl') = List.split (List.map (check_query_term env) l) in
      (FunApp(Terms.get_tuple_fun tl', l'), Param.bitstring_type)
  | PGName ((s,ext),bl) ->
      begin
        try
          let r = Hashtbl.find glob_table s in
          check_single ext s;
          if fst r.f_type == Param.tmp_type then
            begin
              (* Arity of the name not known yet: record a delayed link,
                 resolved later by update_type_names_t. *)
              let v = Terms.new_var Param.def_var_name (snd r.f_type) in
              v.link <- PGTLink (env, (term,ext0));
              (Var v, snd r.f_type)
            end
          else
            begin
              match r.f_cat with
                Name { prev_inputs_meaning = sl } ->
                  List.iter (fun ((s',ext'),_) ->
                    if not (List.mem s' sl) then
                      input_error ("variable " ^ s' ^ " not defined at restriction " ^ s) ext') bl;
                  let p = List.map2 (fun s'' ty ->
                    (* "!comp" stands for the session-compromise argument. *)
                    if s'' = "!comp" then non_compromised_session else
                    binding_find env s'' ty bl) sl (fst r.f_type)
                  in
                  (FunApp(r, p), snd r.f_type)
              | _ -> internal_error "name expected here"
            end
        with Not_found ->
          input_error (s ^ " should be a name") ext
      end
  | PGLet(id,t,t') -> check_query_term (add_binding env (id,t)) t'

(* Look up the value bound to variable s (expected type ty) in the
   explicit binding list of a PGName; a fresh variable if unbound. *)
and binding_find env s ty = function
    [] -> Terms.new_var_def ty
  | ((s',ext),t)::l ->
      if s' = s then
        begin
          let (t', ty') = check_query_term env t in
          if ty' != ty then
            input_error ("this variable is of type " ^ ty.tname ^ " but is given a value of type " ^ ty'.tname) ext;
          t'
        end
      else
        binding_find env s ty l

(* Extend env with a let-binding i = t; refuses to shadow a variable. *)
and add_binding env ((i,ext),t) =
  begin
    try
      match StringMap.find i env with
        EVar _ -> input_error ("variable " ^ i ^ " already defined") ext
      | _ -> ()
    with Not_found -> ()
  end;
  let (t', ty') = check_query_term env t in
  let v = Terms.new_var i ty' in
  v.link <- TLink t';
  StringMap.add i (EVar v) env
(* Typecheck a mess(t1,t2) fact in phase n (n = -1 means "current max
   phase"). t1 must be a channel. Widens Param.max_used_phase (with a
   warning) if n exceeds it. *)
let check_mess env e tl n =
  match tl with
    [t1;t2] ->
      if n > !Param.max_used_phase then
        begin
          input_warning "phase greater than the maximum phase used in the process.\nIs that really what you want?" e;
          Param.max_used_phase := n;
        end;
      let (t1', ty1) = check_query_term env t1 in
      let (t2', ty2) = check_query_term env t2 in
      if ty1 != Param.channel_type then
        input_error ("First argument of mess is of type " ^ ty1.tname ^ " and should be of type channel") e;
      let mess_n = Param.get_pred (Mess((if n = -1 then (!Param.max_used_phase) else n),
                                        ty2))
      in
      QFact(mess_n, [t1';t2'])
  | _ ->
      input_error "arity of predicate mess should be 2" e
(* Typecheck an attacker(t1) fact in phase n (n = -1 means "current max
   phase"). Widens Param.max_used_phase (with a warning) if needed. *)
let check_attacker env e tl n =
  match tl with
    [t1] ->
      if n > !Param.max_used_phase then
        begin
          input_warning "phase greater than the maximum phase used in the process.\nIs that really what you want?" e;
          Param.max_used_phase := n;
        end;
      let (t1', ty1) = check_query_term env t1 in
      let att_n = Param.get_pred (Attacker((if n = -1 then (!Param.max_used_phase) else n),
                                           ty1))
      in
      QFact(att_n, [t1'])
  | _ ->
      input_error "arity of predicate attacker should be 1" e
(* Typecheck an atomic event of a query: (in)equalities, event/inj-event
   applications, attacker/mess facts (optionally in a phase), and
   user-declared predicate facts. *)
let rec check_event env (f,e) =
  match f with
    PGFunApp(("<>", _), [t1; t2]) ->
      let (t1', ty1) = check_query_term env t1 in
      let (t2', ty2) = check_query_term env t2 in
      if ty1 != ty2 then
        input_error "the two arguments of an inequality test should have the same type" e;
      QNeq(t1', t2')
  | PGFunApp(("=", _), [t1; t2]) ->
      let (t1', ty1) = check_query_term env t1 in
      let (t2', ty2) = check_query_term env t2 in
      if ty1 != ty2 then
        input_error "the two arguments of an equality test should have the same type" e;
      QEq(t1', t2')
  | PGFunApp(("event",e'),tl0) ->
      begin
        match tl0 with
          [PGFunApp((s,e''), tl),_] ->
            let (tl', tyl') = List.split (List.map (check_query_term env) tl) in
            (* With key compromise, events take an extra session-id argument. *)
            if !Param.key_compromise == 0 then
              QSEvent(false, FunApp((get_event_fun env (s, e'') tyl'), tl'))
            else
              QSEvent(false, FunApp((get_event_fun env (s, e'') (Param.sid_type :: tyl')),
                                    (Terms.new_var_def Param.sid_type)::tl'))
        | _ -> input_error "predicate event should have one argument, which is a function application" e'
      end
  | PGFunApp(("inj-event",e'),tl0) ->
      begin
        match tl0 with
          [PGFunApp((s,e''), tl),_] ->
            let (tl', tyl') = List.split (List.map (check_query_term env) tl) in
            if !Param.key_compromise == 0 then
              QSEvent(true, FunApp((get_event_fun env (s, e'') tyl'), tl'))
            else
              QSEvent(true, FunApp((get_event_fun env (s, e'') (Param.sid_type :: tyl')),
                                   (Terms.new_var_def Param.sid_type)::tl'))
        | _ -> input_error "predicate inj-event should have one argument, which is a function application" e'
      end
  | PGFunApp(("attacker",_), tl) ->
      check_attacker env e tl (-1)
  | PGFunApp(("mess",_), tl) ->
      check_mess env e tl (-1)
  | PGFunApp((s, ext) as p, tl) ->
      (* Any other application must be a user-declared predicate. *)
      if List.mem s ["||"; "&&"; "not"; "==>"] then
        input_error (s ^ " unexpected in events") ext;
      let (tl', tyl) = List.split (List.map (check_query_term env) tl) in
      QFact(get_pred env p tyl, tl')
  | PGPhase((s, ext), tl, n) ->
      begin
        match s with
          "mess" -> check_mess env e tl n
        | "attacker" -> check_attacker env e tl n
        | _ -> input_error "phases can be used only with attacker or mess" ext
      end
  | PGIdent p ->
      QFact(get_pred env p [], [])
  | PGLet(id,t,t') -> check_event (add_binding env (id,t)) t'
  | _ -> input_error "an event should be a predicate application" e
(* Typecheck the hypothesis side of a query (right of ==>) into a
   disjunction (outer list) of conjunctions (inner lists) of events and
   nested queries. *)
let rec check_hyp env = function
    PGFunApp(("==>", _), [ev; hypll]), _ ->
      let ev' = check_event env ev in
      (
        match ev' with
          QNeq _ | QEq _ -> input_error "Inequalities or equalities cannot occur before ==> in queries" (snd ev)
        | _ -> ()
      );
      let hypll' = check_hyp env hypll in
      [[NestedQuery(Before(ev', hypll'))]]
  | PGFunApp(("||", _), [he1;he2]), _ ->
      (* Disjunction: concatenate the alternatives. *)
      (check_hyp env he1) @ (check_hyp env he2)
  | PGFunApp(("&&", _), [he1;he2]), _ ->
      (* Conjunction: distribute over the disjunctions of both sides. *)
      let he1' = check_hyp env he1 in
      let he2' = check_hyp env he2 in
      List.concat (List.map (fun e1 -> List.map (fun e2 -> e1 @ e2) he2') he1')
  | PGLet(id,t,t'), _ -> check_hyp (add_binding env (id,t)) t'
  | ev -> [[QEvent(check_event env ev)]]
(* Typecheck a whole query. With key compromise enabled, the conclusion
   event is forced into the non-compromised session. A query without
   ==> gets an empty hypothesis list. *)
let rec check_real_query_top env = function
    PGFunApp(("==>", _), [ev; hypll]), _ ->
      let ev' = check_event env ev in
      let ev'' =
        match ev' with
          QNeq _ | QEq _ -> user_error "Inequalities or equalities cannot occur before ==> in queries\n"
        | QFact _ -> ev'
        | QSEvent _ when !Param.key_compromise == 0 -> ev'
        | QSEvent(inj, FunApp(f, sid::l)) ->
            QSEvent(inj, FunApp(f, non_compromised_session::l))
        | QSEvent(_,_) ->
            internal_error "Bad format for events in queries"
      in
      let hypll' = check_hyp env hypll in
      Before(ev'', hypll')
  | PGLet(id,t,t'), _ -> check_real_query_top (add_binding env (id,t)) t'
  | ev ->
      let ev' = check_event env ev in
      let ev'' =
        match ev' with
          (* Fixed error message: was missing the word "in". *)
          QNeq _ | QEq _ -> user_error "Inequalities or equalities cannot occur alone in queries\n"
        | QFact _ -> ev'
        | QSEvent _ when !Param.key_compromise == 0 -> ev'
        | QSEvent(inj, FunApp(f, sid::l)) ->
            QSEvent(inj, FunApp(f, non_compromised_session::l))
        | QSEvent(_,_) ->
            internal_error "Bad format for events in queries"
      in
      Before(ev'', [])
(* Typecheck a list of query declarations: real queries and putbegin
   declarations (whose identifiers must be declared events). *)
let rec check_query_list env = function
    [] -> []
  | (PRealQuery q)::lq ->
      (RealQuery(check_real_query_top env q))::(check_query_list env lq)
  | (PPutBegin(i, l))::lq ->
      let l' = List.map (fun (s,e) ->
        try
          match StringMap.find s env with
            EEvent r -> r
          | _ -> input_error (s ^ " should be an event") e
        with Not_found ->
          input_error ("unknown event " ^s) e) l
      in
      (PutBegin(i,l'))::(check_query_list env lq)
(* Does the query contain an injective event anywhere in its hypotheses,
   including inside nested queries? *)
let rec has_inj = function
    Before(_, hyps) ->
      let elem_has_inj = function
          NestedQuery sub -> has_inj sub
        | QEvent (QSEvent (inj, _)) -> inj
        | QEvent _ -> false
      in
      List.exists (fun conj -> List.exists elem_has_inj conj) hyps
(* In a query e ==> h where h contains an injective event, e itself must
   be an event; it is then made injective, and nested queries are checked
   recursively. Queries without injective events pass through unchanged.
   Defect fixed: the inner match lacked a QSEvent clause, so the only
   legal case (an event conclusion) raised Match_failure at runtime. *)
let rec check_inj_coherent_r q =
  if has_inj q then
    match q with
      Before(e,ll) ->
        let e' =
          match e with
            QFact _ | QNeq _ | QEq _ -> user_error "In a query e ==> h, if h contains an injective event, then e must be an event or better inj-event\n"
          | QSEvent(_,t) -> QSEvent(true, t) (* force the conclusion event injective *)
        in
        Before(e', List.map (List.map (function
            QEvent e -> QEvent e
          | NestedQuery q' -> NestedQuery (check_inj_coherent_r q'))) ll)
  else q
(* Enforce injectivity coherence on real queries; putbegin declarations
   pass through unchanged. *)
let check_inj_coherent query =
  match query with
    PutBegin(_,_) -> query
  | RealQuery rq -> RealQuery (check_inj_coherent_r rq)
(* Entry point: typecheck the queries of one declaration, enforce
   injectivity coherence, and (re)initialize the event status table so
   event statuses reflect the checked queries. *)
let transl_query (env,q) =
  let q' = check_query_list (create_env env) q in
  let q'' = List.map check_inj_coherent q' in
  Pievent.init_event_status_table event_fun_table;
  List.iter Pievent.set_event_status q'';
  q''
(* Translate the conclusions of queries into the facts the solver must
   derive: end events (with a session id when injective) or plain
   predicate facts. PutBegin declarations contribute nothing. *)
let query_to_facts q =
  let facts = ref [] in
  List.iter (function
      PutBegin(_) -> ()
    | RealQuery(Before(e,_)) -> match e with
          QSEvent(_,(FunApp(f,l) as param)) ->
            facts :=
              (if (Pievent.get_event_status f).end_status = Inj then
                 (* Injective end events carry a fresh session-id variable. *)
                 Pred(Param.end_pred_inj, [Var(Terms.new_var "endsid" Param.sid_type);param])
               else
                 Pred(Param.end_pred, [param])) :: (!facts)
        | QSEvent(_, _) ->
            user_error ("Events should be function applications\n")
        | QFact(p,l) ->
            facts := (Pred(p,l)) :: (!facts)
        | QNeq _ | QEq _ -> internal_error "no Neq/Eq queries"
    ) q;
  !facts
(* Resolve delayed PGTLink links (created when a name's arity was not yet
   known) now that arities are available: re-check the recorded term and
   replace the link with a plain TLink. The *_e/*_r/*_h variants rebuild
   events, real queries, and hypothesis elements respectively. *)
let rec update_type_names_t = function
    Var v ->
      begin
        match v.link with
          PGTLink (env, t) ->
            let (t', _) = check_query_term env t in
            v.link <- TLink t';
            t'
        | TLink t -> t
        | NoLink -> Var v
        | _ -> internal_error "unexpected link in update_type_names_t"
      end
  | FunApp(f,l) -> FunApp(f, List.map update_type_names_t l)

let update_type_names_e = function
    QSEvent(b,t) -> QSEvent(b, update_type_names_t t)
  | QFact(p,tl) -> QFact(p, List.map update_type_names_t tl)
  | QNeq(t1,t2) -> QNeq(update_type_names_t t1, update_type_names_t t2)
  | QEq(t1,t2) -> QEq(update_type_names_t t1, update_type_names_t t2)

let rec update_type_names_r = function
    Before(ev,hypll) -> Before(update_type_names_e ev, List.map (List.map update_type_names_h) hypll)

and update_type_names_h = function
    QEvent(ev) -> QEvent(update_type_names_e ev)
  | NestedQuery(q) -> NestedQuery(update_type_names_r q)

let update_type_names = function
    PutBegin(b,l) -> PutBegin(b,l)
  | RealQuery q -> RealQuery(update_type_names_r q)
(* Noninterf queries *)
(* Accessors for the declaration lists collected during parsing. *)
let get_noninterf_queries () =
  !noninterf_list

let get_weaksecret_queries () =
  !weaksecret_list

(* "not" declarations are typechecked as events on access. *)
let get_not() =
  List.map (fun (env, no) -> check_event (create_env env) no) (!not_list)
(* For nounif. Very similar to queries, except that *v is allowed
   and events are not allowed *)
(* Resolve a lone identifier to a format term for nounif declarations:
   like get_ident_any but returns (format, type) and follows FLink. *)
let fget_ident_any env (s, ext) =
  try
    match StringMap.find s env with
      EVar b ->
        begin
          match b.link with
            FLink t -> (t, b.btype)
          | NoLink -> (FVar b, b.btype)
          | _ -> internal_error "unexpected link in fget_ident_any"
        end
    | EName r ->
        (FFunApp(r, []), snd r.f_type)
    | EFun f ->
        if fst f.f_type == [] then
          (FFunApp(f,[]), snd f.f_type)
        else
          input_error ("function " ^ s ^ " expects " ^
                       (string_of_int (List.length (fst f.f_type))) ^
                       " arguments but is used without arguments") ext
    | _ ->
        input_error ("identifier " ^ s ^ " should be a variable, a function, or a name") ext
  with Not_found ->
    input_error ("identifier " ^ s ^ " not defined") ext
(* Typecheck a format term of a nounif declaration. Like
   check_query_term, but produces formats: *v wildcards (FAny) are
   allowed, names must already have their arity, and no delayed links
   are created. *)
let rec check_gformat env (term, ext0) =
  match term with
    PFGIdent i -> fget_ident_any env i
  | PFGFunApp((s,ext),l) ->
      begin
        try
          match StringMap.find s env with
            EFun f ->
              (match f.f_cat with
                 Eq _ | Tuple -> ()
               | _ -> input_error ("function " ^ s ^ " is defined by \"reduc\". Such a function should not be used in a \"nounif\" declaration") ext);
              let (l', tl') = List.split (List.map (check_gformat env) l) in
              if Terms.eq_lists (fst f.f_type) tl' then
                (* Type-converter applications vanish when types are ignored. *)
                if (f.f_options land Param.fun_TYPECONVERTER != 0) && (!Param.ignore_types) then
                  match l' with
                    [t] -> (t, snd f.f_type)
                  | _ -> internal_error "type converter functions should always be unary"
                else
                  (FFunApp(f, l'), snd f.f_type)
              else
                input_error ("function " ^ s ^ " expects arguments of type " ^
                             (Terms.tl_to_string ", " (fst f.f_type)) ^
                             " but is given arguments of type " ^
                             (Terms.tl_to_string ", " tl')) ext
          | _ -> input_error("only functions can be applied, not " ^ s) ext
        with Not_found ->
          input_error ("function " ^ s ^ " not defined") ext
      end
  | PFGTuple l ->
      let (l', tl') = List.split (List.map (check_gformat env) l) in
      (FFunApp(Terms.get_tuple_fun tl', l'), Param.bitstring_type)
  | PFGAny (s,ext) ->
      (* *v wildcard: matches anything of v's type; v must be an
         unbound variable. *)
      begin
        try
          match StringMap.find s env with
            EVar b ->
              begin
                match b.link with
                  NoLink -> (FAny b, b.btype)
                | FLink _ -> input_error "variables preceded by * must not be defined by a binding" ext
                | _ -> internal_error "unexpected link in check_gformat"
              end
          | _ -> input_error (s ^ " should be a variable") ext
        with Not_found ->
          input_error ("variable " ^ s ^ " is not defined") ext
      end
  | PFGName ((s,ext),bl) ->
      begin
        try
          let r = Hashtbl.find glob_table s in
          check_single ext s;
          if fst r.f_type == Param.tmp_type then
            Parsing_helper.internal_error "Names should have their arity at this point"
          else
            begin
              match r.f_cat with
                Name { prev_inputs_meaning = sl } ->
                  List.iter (fun ((s',ext'),_) ->
                    if not (List.mem s' sl) then
                      input_error ("variable " ^ s' ^ " not defined at restriction " ^ s) ext') bl;
                  let p = List.map2 (fun s'' ty ->
                    fbinding_find env s'' ty bl) sl (fst r.f_type)
                  in
                  (FFunApp(r, p), snd r.f_type)
              | _ -> internal_error "name expected here"
            end
        with Not_found ->
          input_error (s ^ " should be a name") ext
      end
  | PFGLet(id,t,t') -> check_gformat (add_fbinding env (id,t)) t'

(* Look up s in a name's explicit binding list; unbound arguments become
   FAny wildcards. *)
and fbinding_find env s ty = function
    [] -> FAny (Terms.new_var Param.def_var_name ty)
  | ((s',ext),t)::l ->
      if s' = s then
        begin
          let (t', ty') = check_gformat env t in
          if ty' != ty then
            input_error ("this variable is of type " ^ ty.tname ^ " but is given a value of type " ^ ty'.tname) ext;
          t'
        end
      else
        fbinding_find env s ty l

(* Extend env with a let-binding for formats; refuses to shadow. *)
and add_fbinding env ((i,ext),t) =
  begin
    try
      match StringMap.find i env with
        EVar _ -> input_error ("variable " ^ i ^ " already defined") ext
      | _ -> ()
    with Not_found -> ()
  end;
  let (t', ty') = check_gformat env t in
  let v = Terms.new_var i ty' in
  v.link <- FLink t';
  StringMap.add i (EVar v) env
(* Typecheck the fact of a nounif declaration: attacker/mess with an
   optional phase n, or a user-declared (phase-less) predicate. *)
let check_gfact_format env ((s, ext), tl, n) =
  match s with
    "attacker" ->
      begin
        match tl with
          [t1] ->
            if n > !Param.max_used_phase then
              input_warning "nounif declaration for a phase greater than used" ext;
            let (t1', ty1) = check_gformat env t1 in
            let att_n = Param.get_pred (Attacker((if n = -1 then (!Param.max_used_phase) else n), ty1))
            in
            (att_n, [t1'])
        | _ ->
            input_error "arity of predicate attacker should be 1" ext
      end
  | "mess" ->
      begin
        match tl with
          [t1;t2] ->
            if n > !Param.max_used_phase then
              input_warning "nounif declaration for a phase greater than used" ext;
            let (t1', ty1) = check_gformat env t1 in
            let (t2', ty2) = check_gformat env t2 in
            if ty1 != Param.channel_type then
              input_error ("First argument of mess is of type " ^ ty1.tname ^ " and should be of type channel") ext;
            let mess_n = Param.get_pred (Mess((if n = -1 then (!Param.max_used_phase) else n), ty2))
            in
            (mess_n, [t1';t2'])
        | _ ->
            input_error "arity of predicate mess should be 2" ext
      end
  | s ->
      (* User-declared predicates never depend on phases. *)
      if n != -1 then
        input_error "declared predicates do not depend on phases, so no phase should be specified in such facts in queries" ext;
      let (tl', tyl) = List.split (List.map (check_gformat env) tl) in
      (get_pred env (s,ext) tyl, tl')
(* Typecheck a nounif declaration: unfold its let-bindings, then check
   the fact format; n is negated to give the nounif weight. *)
let rec handle_nounif env = function
    BFLet(id,t,nounif) -> handle_nounif (add_fbinding env (id,t)) nounif
  | BFNoUnif(fact,n) -> (check_gfact_format env fact, -n)

let get_nounif() =
  List.map (fun (env, nounif) -> handle_nounif (create_env env) nounif) (!nounif_list)
|
006635eadfdd1d8c73e4bee251575b01dbbf47a26e5595531511c94d10ddd045 | AndreaCrotti/elo | utils.cljs | (ns byf.utils
"Various utility functions for Clojurescript"
(:require [clojure.string :as str]
[goog.object :as object]
[re-frame.core :as rf]))
(def min-width 500)
(defn classes
  "Joins the names of all non-nil entries in `cls` with single spaces,
  producing a CSS class attribute string."
  [cls]
  (->> cls
       (filter some?)
       (map name)
       (str/join " ")))
(defn set-val
  "Returns a DOM event handler that dispatches `handler-key` via re-frame
  with the event target's value, optionally passed through `transform-fn`
  (defaults to `identity`)."
  ([handler-key transform-fn]
   #(rf/dispatch [handler-key (-> % .-target .-value transform-fn)]))
  ([handler-key]
   (set-val handler-key identity)))
(defn mobile?
  "True when the browser's available screen width is below `min-width`."
  []
  (< js/window.screen.availWidth min-width))
(defn comparator-by
  "Returns a comparator that compares two JS objects by the value of the
  given field (keyword or string)."
  [field]
  (let [k (name field)]
    (fn [a b]
      (compare (object/get a k)
               (object/get b k)))))
| null | https://raw.githubusercontent.com/AndreaCrotti/elo/98c4b13b2c4e0605015d5d17a8be6cbb78c3f3f6/src/cljs/byf/utils.cljs | clojure | (ns byf.utils
"Various utility functions for Clojurescript"
(:require [clojure.string :as str]
[goog.object :as object]
[re-frame.core :as rf]))
(def min-width 500)
(defn classes
[cls]
(str/join " "
(map name
(filter some? cls))))
(defn set-val
([handler-key transform-fn]
#(rf/dispatch [handler-key (-> % .-target .-value transform-fn)]))
([handler-key]
(set-val handler-key identity)))
(defn mobile?
[]
(< js/window.screen.availWidth min-width))
(defn comparator-by
"Compares two objects based on field"
[field]
#(compare (object/get %1 (name field))
(object/get %2 (name field))))
| |
eec785ee5643ebfb1ba1147c619f81e60333575f9d390fdcb839a5810d0d90d7 | screenshotbot/screenshotbot-oss | run-builder.lisp | ;;;; Copyright 2018-Present Modern Interpreters Inc.
;;;;
;;;; This Source Code Form is subject to the terms of the Mozilla Public
;;;; License, v. 2.0. If a copy of the MPL was not distributed with this
;;;; file, You can obtain one at https://mozilla.org/MPL/2.0/.
(defpackage :screenshotbot/replay/run-builder
(:use #:cl)
(:nicknames :screenshotbot/pro/replay/run-builder)
(:import-from #:screenshotbot/sdk/bundle
#:local-image
#:list-images)
(:import-from #:screenshotbot/model/image
#:with-tmp-image-file
#:image-blob
#:make-image
#:find-image)
(:import-from #:bknr.datastore
#:blob-pathname)
(:import-from #:screenshotbot/sdk/sdk
#:upload-image-directory)
(:import-from #:util/object-id
#:oid)
(:local-nicknames (#:a #:alexandria))
(:export
#:recorder-run-builder
#:all-screenshots
#:record-screenshot))
(in-package :screenshotbot/replay/run-builder)
;; A single recorded screenshot: its title plus the stored image object.
(defclass screenshot ()
  ((title :initarg :title
          :reader screenshot-title)
   (image :initarg :image
          :reader screenshot-image)))
;; Accumulates the screenshots recorded during a replay run for a company.
(defclass all-screenshots ()
  ((screenshots :initform nil
                :accessor screenshots)
   (company :initarg :company
            :accessor company)))
;; Record one screenshot under TITLE. Reuses an existing image for the
;; company when one with the same MD5 hash exists; otherwise calls FETCH
;; with a temporary pathname to download the content and creates a new
;; verified image from it.
(defmethod record-screenshot ((self all-screenshots)
                              &key title md5 fetch)
  (let ((hash md5))
    (let ((image (or
                  (find-image (company self) hash)
                  (with-tmp-image-file (:pathname tmpfile)
                    ;; FETCH expects to create the file itself, so remove
                    ;; the placeholder first.
                    (delete-file tmpfile)
                    (funcall fetch tmpfile)
                    (make-image :company (company self)
                                :pathname tmpfile
                                :hash hash
                                :verified-p t)))))
      (push (make-instance
             'screenshot
             :title title
             :image image)
            (screenshots self)))))
;; Placeholder methods required by the bundle protocol; not implemented
;; for replay runs.
(defmethod list-all-screenshots ((self all-screenshots))
  (error "unimplemented"))

(defmethod list-images ((self all-screenshots))
  (error "unimplemented"))
;; Convert the recorded screenshots to the alist format the SDK uses for
;; image-directory uploads: one (:id . oid) (:name . title) entry each.
(defmethod upload-image-directory ((self all-screenshots))
  (loop for im in (screenshots self)
        collect `((:id . ,(oid (screenshot-image im)))
                  (:name . ,(screenshot-title im)))))
| null | https://raw.githubusercontent.com/screenshotbot/screenshotbot-oss/b833d9995edc36d8188cd4b7f23a14bed26d6f1f/src/screenshotbot/replay/run-builder.lisp | lisp | Copyright 2018-Present Modern Interpreters Inc.
| This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(defpackage :screenshotbot/replay/run-builder
(:use #:cl)
(:nicknames :screenshotbot/pro/replay/run-builder)
(:import-from #:screenshotbot/sdk/bundle
#:local-image
#:list-images)
(:import-from #:screenshotbot/model/image
#:with-tmp-image-file
#:image-blob
#:make-image
#:find-image)
(:import-from #:bknr.datastore
#:blob-pathname)
(:import-from #:screenshotbot/sdk/sdk
#:upload-image-directory)
(:import-from #:util/object-id
#:oid)
(:local-nicknames (#:a #:alexandria))
(:export
#:recorder-run-builder
#:all-screenshots
#:record-screenshot))
(in-package :screenshotbot/replay/run-builder)
(defclass screenshot ()
((title :initarg :title
:reader screenshot-title)
(image :initarg :image
:reader screenshot-image)))
(defclass all-screenshots ()
((screenshots :initform nil
:accessor screenshots)
(company :initarg :company
:accessor company)))
(defmethod record-screenshot ((self all-screenshots)
&key title md5 fetch)
(let ((hash md5))
(let ((image (or
(find-image (company self) hash)
(with-tmp-image-file (:pathname tmpfile)
(delete-file tmpfile)
(funcall fetch tmpfile)
(make-image :company (company self)
:pathname tmpfile
:hash hash
:verified-p t)))))
(push (make-instance
'screenshot
:title title
:image image)
(screenshots self)))))
(defmethod list-all-screenshots ((self all-screenshots))
(error "unimplemented"))
(defmethod list-images ((self all-screenshots))
(error "unimplemented"))
(defmethod upload-image-directory ((self all-screenshots))
(loop for im in (screenshots self)
collect `((:id . ,(oid (screenshot-image im)))
(:name . ,(screenshot-title im)))))
|
9ef6c7deac4f3b67e10e253d814b0daa3a9281e6e0b3ef056c09e59d9b30db73 | 2600hz/kazoo | cb_media.erl | %%%-----------------------------------------------------------------------------
%%% @copyright (C) 2011-2020, 2600Hz
%%% @doc Account module
%%% Store/retrieve media files
%%%
%%%
%%% @author
%%%
%%% This Source Code Form is subject to the terms of the Mozilla Public
%%% License, v. 2.0. If a copy of the MPL was not distributed with this
%%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%%
%%% @end
%%%-----------------------------------------------------------------------------
-module(cb_media).
-export([init/0
,allowed_methods/0, allowed_methods/1, allowed_methods/2
,resource_exists/0, resource_exists/1, resource_exists/2
,authorize/1, authorize/2, authorize/3
,validate/1, validate/2, validate/3
,content_types_provided/2, content_types_provided/3
,content_types_accepted/2, content_types_accepted/3
,languages_provided/1, languages_provided/2, languages_provided/3
,put/1
,post/2, post/3
,delete/2, delete/3
,acceptable_content_types/0
]).
-include("crossbar.hrl").
-define(SERVER, ?MODULE).
-define(BIN_DATA, <<"raw">>).
-define(LANGUAGES, <<"languages">>).
-define(PROMPTS, <<"prompts">>).
-define(MEDIA_MIME_TYPES
,?AUDIO_CONTENT_TYPES
++ ?VIDEO_CONTENT_TYPES
++ ?BASE64_CONTENT_TYPES
).
-define(CB_LIST, <<"media/crossbar_listing">>).
-define(CB_LIST_BY_LANG, <<"media/listing_by_language">>).
-define(CB_LIST_BY_PROMPT, <<"media/listing_by_prompt">>).
-define(MOD_CONFIG_CAT, <<(?CONFIG_CAT)/binary, ".media">>).
-define(DEFAULT_VOICE
,list_to_binary([kazoo_tts:default_voice(), $/, kazoo_tts:default_language()])
).
-define(NORMALIZATION_FORMAT
,kapps_config:get_ne_binary(?MOD_CONFIG_CAT, <<"normalization_format">>, <<"mp3">>)
).
%%%=============================================================================
%%% API
%%%=============================================================================
%%------------------------------------------------------------------------------
%% @doc
%% @end
%%------------------------------------------------------------------------------
%% Starts the kazoo_media application and registers this module's
%% crossbar bindings.
-spec init() -> 'ok'.
init() ->
    {'ok', _} = application:ensure_all_started('kazoo_media'),
    Bindings = [{<<"*.content_types_provided.media">>, 'content_types_provided'}
               ,{<<"*.content_types_accepted.media">>, 'content_types_accepted'}
               ,{<<"*.allowed_methods.media">>, 'allowed_methods'}
               ,{<<"*.authorize.media">>, 'authorize'}
               ,{<<"*.resource_exists.media">>, 'resource_exists'}
               ,{<<"*.languages_provided.media">>, 'languages_provided'}
               ,{<<"*.validate.media">>, 'validate'}
               ,{<<"*.execute.put.media">>, 'put'}
               ,{<<"*.execute.post.media">>, 'post'}
               ,{<<"*.execute.delete.media">>, 'delete'}
               ],
    lists:foreach(fun({Binding, Callback}) ->
                          _ = crossbar_bindings:bind(Binding, ?MODULE, Callback)
                  end
                 ,Bindings
                 ),
    'ok'.
%%------------------------------------------------------------------------------
%% @doc This function determines the verbs that are appropriate for the
%% given Nouns. For example `/accounts/' can only accept `GET' and `PUT'.
%%
%% Failure here returns `405 Method Not Allowed'.
%% @end
%%------------------------------------------------------------------------------
%% /media: list (GET) or create (PUT).
-spec allowed_methods() -> http_methods().
allowed_methods() ->
    [?HTTP_GET, ?HTTP_PUT].

%% /media/languages and /media/prompts are read-only listings; any other
%% path token is a media document id.
-spec allowed_methods(path_token()) -> http_methods().
allowed_methods(?LANGUAGES) ->
    [?HTTP_GET];
allowed_methods(?PROMPTS) ->
    [?HTTP_GET];
allowed_methods(_MediaId) ->
    [?HTTP_GET, ?HTTP_POST, ?HTTP_DELETE].

%% Second-level paths: language/prompt listings are read-only; the raw
%% binary of a media doc can be fetched or uploaded.
-spec allowed_methods(path_token(), path_token()) -> http_methods().
allowed_methods(?LANGUAGES, _Language) ->
    [?HTTP_GET];
allowed_methods(?PROMPTS, _PromptId) ->
    [?HTTP_GET];
allowed_methods(_MediaId, ?BIN_DATA) ->
    [?HTTP_GET, ?HTTP_POST].
%%------------------------------------------------------------------------------
%% @doc This function determines if the provided list of Nouns are valid.
Failure here returns ` 404 Not Found ' .
%% @end
%%------------------------------------------------------------------------------
%% All advertised paths exist; existence of the actual document is
%% determined during validation.
-spec resource_exists() -> 'true'.
resource_exists() -> 'true'.

-spec resource_exists(path_token()) -> 'true'.
resource_exists(_) -> 'true'.

-spec resource_exists(path_token(), path_token()) -> 'true'.
resource_exists(?LANGUAGES, _Language) -> 'true';
resource_exists(?PROMPTS, _PromptId) -> 'true';
resource_exists(_, ?BIN_DATA) -> 'true'.
%% All arities delegate to authorize_media/3 with the request nouns and
%% the account id (undefined for system media).
-spec authorize(cb_context:context()) -> boolean() | {'stop', cb_context:context()}.
authorize(Context) ->
    authorize_media(Context, cb_context:req_nouns(Context), cb_context:account_id(Context)).

-spec authorize(cb_context:context(), path_token()) -> boolean() | {'stop', cb_context:context()}.
authorize(Context, _) ->
    authorize_media(Context, cb_context:req_nouns(Context), cb_context:account_id(Context)).

-spec authorize(cb_context:context(), path_token(), path_token()) -> boolean() | {'stop', cb_context:context()}.
authorize(Context, _, _) ->
    authorize_media(Context, cb_context:req_nouns(Context), cb_context:account_id(Context)).

%% System media (no account): prompt/language listings are public reads;
%% other system requests require authentication, and anything but GET
%% additionally requires a superduper admin. Account-scoped media defers
%% to cb_simple_authz.
-spec authorize_media(cb_context:context(), req_nouns(), kz_term:api_binary()) -> boolean().
authorize_media(_Context, [{<<"media">>, [?PROMPTS]}], 'undefined') ->
    lager:debug("allowing system prompts request"),
    'true';
authorize_media(_Context, [{<<"media">>, [?LANGUAGES]}], 'undefined') ->
    lager:debug("allowing system languages request"),
    'true';
authorize_media(_Context, [{<<"media">>, [?PROMPTS, _PromptId]}], 'undefined') ->
    lager:debug("allowing system prompt request for ~s", [_PromptId]),
    'true';
authorize_media(_Context, [{<<"media">>, [?LANGUAGES, _Language]}], 'undefined') ->
    lager:debug("allowing system language request for ~s", [_Language]),
    'true';
authorize_media(Context, [{<<"media">>, _}|_], 'undefined') ->
    IsAuthenticated = cb_context:is_authenticated(Context),
    IsSuperDuperAdmin = cb_context:is_superduper_admin(Context),
    IsReqVerbGet = cb_context:req_verb(Context) =:= ?HTTP_GET,
    case IsAuthenticated
        andalso (IsSuperDuperAdmin
                 orelse IsReqVerbGet
                )
    of
        'true' -> 'true';
        'false' -> {'stop', cb_context:add_system_error('forbidden', Context)}
    end;
authorize_media(Context, [{<<"media">>, _}, {<<"accounts">>, [AccountId]}], AccountId) ->
    cb_simple_authz:authorize(Context);
authorize_media(_Context, _Nouns, _AccountId) ->
    'false'.
%%------------------------------------------------------------------------------
%% @doc Add content types accepted and provided by this module
%% @end
%%------------------------------------------------------------------------------
%% The media MIME types this module can accept and serve (audio, video,
%% and base64-wrapped variants).
-spec acceptable_content_types() -> [cowboy_content_type()].
acceptable_content_types() ->
    ?MEDIA_MIME_TYPES.
%% GET /media/{id} with a media Accept header is treated as a binary
%% download; otherwise the context passes through unchanged.
-spec content_types_provided(cb_context:context(), path_token()) ->
          cb_context:context().
content_types_provided(Context, MediaId) ->
    Verb = cb_context:req_verb(Context),
    ContentType = cb_context:req_header(Context, <<"accept">>),
    case ?HTTP_GET =:= Verb
        andalso api_util:content_type_matches(ContentType, acceptable_content_types())
    of
        'false' -> Context;
        'true' ->
            content_types_provided_for_media(Context, MediaId, ?BIN_DATA, ?HTTP_GET)
    end.

-spec content_types_provided(cb_context:context(), path_token(), path_token()) ->
          cb_context:context().
content_types_provided(Context, MediaId, ?BIN_DATA) ->
    content_types_provided_for_media(Context, MediaId, ?BIN_DATA, cb_context:req_verb(Context)).

%% For a GET of the raw data, advertise the content type of the doc's
%% first attachment (if any); non-GET verbs pass through unchanged.
-spec content_types_provided_for_media(cb_context:context(), path_token(), path_token(), http_method()) ->
          cb_context:context().
content_types_provided_for_media(Context, MediaId, ?BIN_DATA, ?HTTP_GET) ->
    Context1 = load_media_meta(Context, MediaId),
    case cb_context:resp_status(Context1) of
        'success' ->
            JObj = cb_context:doc(Context1),
            case kz_doc:attachment_names(JObj) of
                [] -> Context1;
                [Attachment|_] ->
                    CT = kz_doc:attachment_content_type(JObj, Attachment),
                    [Type, SubType] = binary:split(CT, <<"/">>),
                    cb_context:set_content_types_provided(Context, [{'to_binary', [{Type, SubType}]}])
            end;
        _Status -> Context1
    end;
content_types_provided_for_media(Context, _MediaId, ?BIN_DATA, _Verb) ->
    Context.
%% POST /media/{id} with a media Content-Type is accepted as a binary
%% upload; otherwise the context passes through unchanged.
-spec content_types_accepted(cb_context:context(), kz_term:ne_binary()) -> cb_context:context().
content_types_accepted(Context, _MediaId) ->
    Verb = cb_context:req_verb(Context),
    ContentType = cb_context:req_header(Context, <<"content-type">>),
    case ?HTTP_POST =:= Verb
        andalso api_util:content_type_matches(ContentType, acceptable_content_types())
    of
        'false' -> Context;
        'true' ->
            CTA = [{'from_binary', acceptable_content_types()}],
            cb_context:set_content_types_accepted(Context, CTA)
    end.

-spec content_types_accepted(cb_context:context(), path_token(), path_token()) ->
          cb_context:context().
content_types_accepted(Context, _MediaId, ?BIN_DATA) ->
    content_types_accepted_for_upload(Context, cb_context:req_verb(Context)).

%% Only POST uploads need the media content types registered.
-spec content_types_accepted_for_upload(cb_context:context(), http_method()) ->
          cb_context:context().
content_types_accepted_for_upload(Context, ?HTTP_POST) ->
    CTA = [{'from_binary', acceptable_content_types()}],
    cb_context:set_content_types_accepted(Context, CTA);
content_types_accepted_for_upload(Context, _Verb) ->
    Context.
%%------------------------------------------------------------------------------
%% @doc If you provide alternative languages, return a list of languages and optional
%% quality value:
%% `[<<"en">>, <<"en-gb;q=0.7">>, <<"da;q=0.5">>]'
%% @end
%%------------------------------------------------------------------------------
%% No alternative response languages are advertised; the context passes
%% through unchanged at every path depth.
-spec languages_provided(cb_context:context()) -> cb_context:context().
languages_provided(Context) ->
    Context.

-spec languages_provided(cb_context:context(), path_token()) -> cb_context:context().
languages_provided(Context, _Id) ->
    Context.

-spec languages_provided(cb_context:context(), path_token(), path_token()) -> cb_context:context().
languages_provided(Context, _Id, _Path) ->
    Context.
%%------------------------------------------------------------------------------
%% @doc This function determines if the parameters and content are correct
%% for this request.
%%
Failure here returns 400 .
%% @end
%%------------------------------------------------------------------------------
%% @doc Validate the collection endpoint: GET lists media, PUT creates one.
-spec validate(cb_context:context()) -> cb_context:context().
validate(Context) ->
    Verb = cb_context:req_verb(Context),
    validate_media_docs(Context, Verb).
-spec validate(cb_context:context(), path_token()) -> cb_context:context().
%% Dispatch on the first path token: the special `languages'/`prompts'
%% listings first; any other token is treated as a media document id.
validate(Context, ?LANGUAGES) ->
    load_available_languages(Context);
validate(Context, ?PROMPTS) ->
    load_available_prompts(Context);
validate(Context, MediaId) ->
    validate_media_doc(Context, MediaId, cb_context:req_verb(Context)).
-spec validate(cb_context:context(), path_token(), path_token()) -> cb_context:context().
%% Two path tokens: listings under `languages'/`prompts', or the `raw'
%% binary sub-resource of a media document.
validate(Context, ?LANGUAGES, Language) ->
    %% language keys are lower-cased before hitting the view
    load_media_docs_by_language(Context, kz_term:to_lower_binary(Language));
validate(Context, ?PROMPTS, PromptId) ->
    load_media_docs_by_prompt(Context, PromptId);
validate(Context, MediaId, ?BIN_DATA) ->
    lager:debug("uploading binary data to '~s'", [MediaId]),
    validate_media_binary(Context, MediaId, cb_context:req_verb(Context), cb_context:req_files(Context)).
-spec validate_media_docs(cb_context:context(), http_method()) -> cb_context:context().
%% GET lists existing media; PUT validates a brand-new doc ('undefined' id).
validate_media_docs(Context, ?HTTP_GET) ->
    load_media_summary(Context);
validate_media_docs(Context, ?HTTP_PUT) ->
    validate_request('undefined', Context).
-spec validate_media_doc(cb_context:context(), kz_term:ne_binary(), http_method()) -> cb_context:context().
%% GET: when the Accept header asks for a media MIME type, serve the binary
%% attachment; otherwise serve the JSON metadata document.
validate_media_doc(Context, MediaId, ?HTTP_GET) ->
    case api_util:content_type_matches(cb_context:req_header(Context, <<"accept">>)
                                      ,acceptable_content_types()
                                      )
    of
        'false' -> load_media_meta(Context, MediaId);
        'true' -> validate_media_binary(Context, MediaId, ?HTTP_GET, [])
    end;
%% POST: decide between metadata update and binary upload by Content-Type.
validate_media_doc(Context, MediaId, ?HTTP_POST) ->
    validate_media_doc_update(Context, MediaId, cb_context:req_header(Context, <<"content-type">>));
%% DELETE: load the doc so the delete handler has something to act on.
validate_media_doc(Context, MediaId, ?HTTP_DELETE) ->
    load_media_meta(Context, MediaId).
%%------------------------------------------------------------------------------
%% @doc Route a POST on a media doc: a recognized media content type means the
%% request body is a binary upload; anything else is a JSON metadata update.
%% @end
%%------------------------------------------------------------------------------
-spec validate_media_doc_update(cb_context:context(), kz_term:ne_binary(), kz_term:api_ne_binary()) -> cb_context:context().
validate_media_doc_update(Context, MediaId, ContentType) ->
    lager:debug("trying to update doc with content ~s", [ContentType]),
    case api_util:content_type_matches(ContentType, acceptable_content_types()) of
        'true' ->
            Files = cb_context:req_files(Context),
            validate_media_binary(Context, MediaId, ?HTTP_POST, Files);
        'false' ->
            validate_request(MediaId, Context)
    end.
-spec validate_media_binary(cb_context:context(), kz_term:ne_binary(), http_method(), kz_term:proplist()) -> cb_context:context().
%% GET: fetch the stored attachment for streaming back to the client.
validate_media_binary(Context, MediaId, ?HTTP_GET, _Files) ->
    lager:debug("fetch media contents for '~s'", [MediaId]),
    load_media_binary(Context, MediaId);
%% POST with no uploaded file is a validation error.
validate_media_binary(Context, _MediaId, ?HTTP_POST, []) ->
    error_missing_file(Context);
%% POST with exactly one file: load the metadata doc first, then
%% (optionally) normalize and validate the uploaded content.
validate_media_binary(Context, MediaId, ?HTTP_POST, [{_Filename, FileObj}]) ->
    Context1 = load_media_meta(Context, MediaId),
    lager:debug("loaded media meta for '~s'", [MediaId]),
    case cb_context:resp_status(Context1) of
        'success' ->
            maybe_normalize_upload(Context1, MediaId, FileObj);
        _Status -> Context1
    end;
%% POST with more than one file is not supported.
validate_media_binary(Context, _MediaId, ?HTTP_POST, _Files) ->
    cb_context:add_validation_error(<<"file">>
                                   ,<<"maxItems">>
                                   ,kz_json:from_list([{<<"message">>, <<"Please provide a single media file">>}])
                                   ,Context
                                   ).
%%------------------------------------------------------------------------------
%% @doc Add a validation error indicating the required media file is missing
%% from the upload request.
%% @end
%%------------------------------------------------------------------------------
-spec error_missing_file(cb_context:context()) -> cb_context:context().
error_missing_file(Context) ->
    cb_context:add_validation_error(<<"file">>
                                   ,<<"required">>
                                    %% grammar fix in user-facing message: "an media" -> "a media"
                                   ,kz_json:from_list([{<<"message">>, <<"Please provide a media file">>}])
                                   ,Context
                                   ).
%%------------------------------------------------------------------------------
%% @doc Normalize the uploaded media when the `normalize_media' system config
%% flag is enabled; otherwise validate the upload unchanged.
%% @end
%%------------------------------------------------------------------------------
-spec maybe_normalize_upload(cb_context:context(), kz_term:ne_binary(), kz_json:object()) -> cb_context:context().
maybe_normalize_upload(Context, MediaId, FileJObj) ->
    case kapps_config:get_is_true(?MOD_CONFIG_CAT, <<"normalize_media">>, 'false') of
        'false' ->
            lager:debug("normalization not enabled, leaving upload as-is"),
            validate_upload(Context, MediaId, FileJObj);
        'true' ->
            lager:debug("normalizing uploaded media"),
            normalize_upload(Context, MediaId, FileJObj)
    end.
%% @doc Determine the upload's content type from the file headers, then
%% normalize it to the configured format.
-spec normalize_upload(cb_context:context(), kz_term:ne_binary(), kz_json:object()) ->
          cb_context:context().
normalize_upload(Context, MediaId, FileJObj) ->
    ContentType = kz_json:get_ne_binary_value([<<"headers">>, <<"content_type">>], FileJObj),
    normalize_upload(Context, MediaId, FileJObj, ContentType).

%% @doc Convert the uploaded file from its source format to the configured
%% normalization format, then validate the (possibly replaced) file object.
-spec normalize_upload(cb_context:context(), kz_term:ne_binary(), kz_json:object(), kz_term:api_binary()) ->
          cb_context:context().
normalize_upload(Context, MediaId, FileJObj, UploadContentType) ->
    FromExt = kz_mime:to_extension(UploadContentType),
    ToExt = ?NORMALIZATION_FORMAT,
    lager:info("upload is of type '~s', normalizing from ~s to ~s"
              ,[UploadContentType, FromExt, ToExt]
              ),
    {Context1, FileJObj1} =
        cb_modules_util:normalize_media_upload(Context, FromExt, ToExt, FileJObj, []),
    validate_upload(Context1, MediaId, FileJObj1).
%% @doc Stamp the request data with content type, length, and media source
%% derived from the uploaded file, then run schema validation.
-spec validate_upload(cb_context:context(), kz_term:ne_binary(), kz_json:object()) -> cb_context:context().
validate_upload(Context, MediaId, FileJObj) ->
    %% fall back to the actual payload size when no content_length header came in
    DefaultSize = iolist_size(kz_json:get_value(<<"contents">>, FileJObj, <<>>)),
    ContentType = kz_json:get_value([<<"headers">>, <<"content_type">>], FileJObj, <<"application/octet-stream">>),
    ContentLength = kz_json:get_integer_value([<<"headers">>, <<"content_length">>], FileJObj, DefaultSize),
    Props = [{<<"content_type">>, ContentType}
            ,{<<"content_length">>, ContentLength}
            ,{<<"media_source">>, <<"upload">>}
            ],
    Doc = kz_json:set_values(Props, cb_context:doc(Context)),
    validate_request(MediaId, cb_context:set_req_data(Context, Doc)).
%% @doc Create a new media document.
-spec put(cb_context:context()) -> cb_context:context().
put(Context) ->
    AccountId = cb_context:account_id(Context),
    put_media(Context, AccountId).

%% @doc Without an account id, store into the system-wide media database;
%% TTS docs additionally get their audio rendered.
-spec put_media(cb_context:context(), kz_term:api_binary()) -> cb_context:context().
put_media(Context, 'undefined') ->
    put_media(cb_context:set_db_name(Context, ?KZ_MEDIA_DB), <<"ignore">>);
put_media(Context, _AccountId) ->
    case is_tts(cb_context:doc(Context)) of
        'false' -> crossbar_doc:save(Context);
        'true' -> create_update_tts(Context, <<"create">>)
    end.
-spec post(cb_context:context(), path_token()) -> cb_context:context().
%% Update an existing media document (metadata, TTS, or inline binary).
post(Context, MediaId) ->
    post_media_doc(Context, MediaId, cb_context:account_id(Context)).
-spec post_media_doc(cb_context:context(), kz_term:ne_binary(), kz_term:api_binary()) -> cb_context:context().
%% Without an account id, operate on the system media database.
post_media_doc(Context, MediaId, 'undefined') ->
    post_media_doc(cb_context:set_db_name(Context, ?KZ_MEDIA_DB), MediaId, <<"ignore">>);
post_media_doc(Context, MediaId, _AccountId) ->
    case is_tts(cb_context:doc(Context)) of
        'true' -> create_update_tts(Context, <<"update">>);
        %% non-TTS docs drop any stale TTS bookkeeping keys before saving
        'false' -> post_media_doc_or_binary(remove_tts_keys(Context), MediaId, cb_context:req_header(Context, <<"content-type">>))
    end.
-spec post_media_doc_or_binary(cb_context:context(), kz_term:ne_binary(), kz_term:api_ne_binary()) -> cb_context:context().
%% A media content type means the request body is the binary itself;
%% otherwise save the JSON document.
post_media_doc_or_binary(Context, MediaId, ContentType) ->
    case api_util:content_type_matches(ContentType, acceptable_content_types()) of
        'false' -> crossbar_doc:save(Context);
        'true' -> post(Context, MediaId, ?BIN_DATA)
    end.
-spec post(cb_context:context(), path_token(), path_token()) -> cb_context:context().
%% Store the uploaded file(s) as the media document's binary attachment.
post(Context, MediaId, ?BIN_DATA) ->
    post_media_binary(Context, MediaId, cb_context:account_id(Context)).
%% @doc Strip the private TTS bookkeeping keys from the context's document.
-spec remove_tts_keys(cb_context:context()) -> cb_context:context().
remove_tts_keys(Context) ->
    Doc = cb_context:doc(Context),
    Keys = [<<"pvt_previous_tts">>, <<"pvt_previous_voice">>],
    cb_context:set_doc(Context, kz_json:delete_keys(Keys, Doc)).
-spec post_media_binary(cb_context:context(), kz_term:ne_binary(), kz_term:api_binary()) -> cb_context:context().
%% Without an account id, attach into the system media database.
post_media_binary(Context, MediaId, 'undefined') ->
    post_media_binary(cb_context:set_db_name(Context, ?KZ_MEDIA_DB), MediaId, <<"ignore">>);
post_media_binary(Context, MediaId, _AccountId) ->
    update_media_binary(Context, MediaId).
%%------------------------------------------------------------------------------
%% @doc Save a TTS media document and (re)generate its audio attachment.
%% `<<"create">>' saves the doc first, then always renders the audio;
%% `<<"update">>' only re-renders when the TTS text or voice changed.
%% @end
%%------------------------------------------------------------------------------
%% consistency fix: every other function in this module carries a -spec
-spec create_update_tts(cb_context:context(), kz_term:ne_binary()) -> cb_context:context().
create_update_tts(Context, <<"create">>) ->
    C1 = update_and_save_tts_doc(Context),
    maybe_update_media_file(C1, <<"create">>, 'true', cb_context:resp_status(C1));
create_update_tts(Context, <<"update">>) ->
    maybe_update_media_file(Context, <<"update">>, is_tts_changed(cb_context:doc(Context)), cb_context:resp_status(Context)).
-spec maybe_update_media_file(cb_context:context(), kz_term:ne_binary(), boolean(), crossbar_status()) ->
          cb_context:context().
%% Render TTS audio and attach it, but only when the doc saved successfully
%% and the TTS settings require (re)rendering.
maybe_update_media_file(Context, CreateOrUpdate, 'true', 'success') ->
    JObj = cb_context:doc(Context),
    Text = kz_json:get_value([<<"tts">>, <<"text">>], JObj),
    Voice = kz_json:get_value([<<"tts">>, <<"voice">>], JObj, ?DEFAULT_VOICE),
    try kazoo_tts:create(Text, Voice) of
        {'error', Reason} ->
            %% engine refused the request: roll back a fresh create
            %% (updates keep the existing doc) and report the error
            _ = maybe_delete_tts(Context, kz_term:to_binary(Reason), CreateOrUpdate),
            crossbar_util:response('error', kz_term:to_binary(Reason), Context);
        {'error', 'tts_provider_failure', Reason} ->
            _ = maybe_delete_tts(Context, kz_term:to_binary(Reason), CreateOrUpdate);
        {'ok', ContentType, Content} ->
            MediaId = kz_doc:id(JObj),
            %% wrap the rendered audio as though it were an uploaded file
            Headers = kz_json:from_list([{<<"content_type">>, ContentType}
                                        ,{<<"content_length">>, iolist_size(Content)}
                                        ]),
            FileJObj = kz_json:from_list([{<<"headers">>, Headers}
                                        ,{<<"contents">>, Content}
                                        ]),
            FileName = list_to_binary(["text_to_speech_"
                                      ,kz_term:to_binary(kz_time:now_s())
                                      ,".wav"
                                      ]),
            C1 = update_media_binary(cb_context:set_req_files(Context, [{FileName, FileJObj}]), MediaId),
            %% the case value is 'false' when attaching failed, otherwise the
            %% CreateOrUpdate binary itself
            case cb_context:resp_status(C1) =:= 'success'
                andalso CreateOrUpdate
            of
                'false' -> maybe_delete_tts(C1, <<"creating TTS failed unexpectedly">>, CreateOrUpdate);
                <<"create">> -> Context;
                %% for updates, merge the public fields back onto the stored
                %% doc and persist the new pvt_previous_* markers
                <<"update">> ->
                    C2 = crossbar_doc:load_merge(MediaId, kz_doc:public_fields(JObj), Context, ?TYPE_CHECK_OPTION(kzd_media:type())),
                    case cb_context:resp_status(C2) of
                        'success' -> update_and_save_tts_doc(C2);
                        _ -> C2
                    end
            end
    catch
        _E:_R ->
            lager:debug("creating tts failed unexpectedly: ~s: ~p", [_E, _R]),
            maybe_delete_tts(Context, <<"creating TTS failed unexpectedly">>, CreateOrUpdate)
    end;
%% update with unchanged TTS settings: a plain document save suffices
maybe_update_media_file(Context, <<"update">>, 'false', 'success') ->
    crossbar_doc:save(Context);
maybe_update_media_file(Context, _, _, _) ->
    Context.
%% @doc Record the current TTS text/voice as the `pvt_previous_*' markers
%% (used later by is_tts_changed/1) and save the document.
-spec update_and_save_tts_doc(cb_context:context()) -> cb_context:context().
update_and_save_tts_doc(Context) ->
    JObj = cb_context:doc(Context),
    Updates = [{<<"pvt_previous_tts">>, kz_json:get_value([<<"tts">>, <<"text">>], JObj)}
              ,{<<"pvt_previous_voice">>, kz_json:get_value([<<"tts">>, <<"voice">>], JObj, ?DEFAULT_VOICE)}
              ],
    crossbar_doc:save(cb_context:set_doc(Context, kz_json:set_values(Updates, JObj))).
-spec maybe_delete_tts(cb_context:context(), kz_term:ne_binary(), kz_term:ne_binary()) -> cb_context:context().
%% A failed create leaves a half-made doc behind: remove it and respond with
%% the error. A failed update keeps the existing doc untouched.
maybe_delete_tts(Context, Reason, <<"create">>) ->
    _ = crossbar_doc:delete(Context),
    crossbar_util:response('error', Reason, Context);
maybe_delete_tts(Context, _, <<"update">>) ->
    Context.
%% @doc Decide between a hard and a soft delete: prompts, and requests that
%% explicitly set `hard_delete', are removed permanently.
-spec delete_type(boolean() | cb_context:context()) -> ?HARD_DELETE | ?SOFT_DELETE.
delete_type('true') -> ?HARD_DELETE;
delete_type('false') -> ?SOFT_DELETE;
delete_type(Context) ->
    IsPrompt = kzd_media:is_prompt(cb_context:resp_data(Context)),
    WantsHard = kz_json:is_true(<<"hard_delete">>, cb_context:req_data(Context)),
    delete_type(IsPrompt or WantsHard).
-spec delete(cb_context:context(), path_token()) -> cb_context:context().
%% Delete the media document; delete_type/1 picks hard vs soft deletion.
delete(Context, _MediaId) ->
    crossbar_doc:delete(Context, delete_type(Context)).
-spec delete(cb_context:context(), path_token(), path_token()) -> cb_context:context().
%% Delete only the binary attachment, keeping the metadata document.
delete(Context, MediaId, ?BIN_DATA) ->
    delete_media_binary(MediaId, Context, cb_context:account_id(Context)).
%%------------------------------------------------------------------------------
%% @doc Attempt to load a summarized list of media
%% @end
%%------------------------------------------------------------------------------
-spec load_media_summary(cb_context:context()) -> cb_context:context().
load_media_summary(Context) ->
    load_media_summary(Context, cb_context:account_id(Context)).
-spec load_media_summary(cb_context:context(), kz_term:api_binary()) -> cb_context:context().
%% Without an account id, list from the system media database.
load_media_summary(Context, 'undefined') ->
    lager:debug("loading system_config media"),
    Options = [{'databases', [?KZ_MEDIA_DB]}
              ,{'mapper', crossbar_view:get_value_fun()}
              ],
    crossbar_view:load(Context, ?CB_LIST, Options);
%% Within an account, the context's own database is used implicitly.
load_media_summary(Context, _AccountId) ->
    Options = [{'mapper', crossbar_view:get_value_fun()}
              ],
    crossbar_view:load(Context, ?CB_LIST, Options).
%% @doc List the languages media files are available in, with per-language
%% counts (grouped at level 1 by the view).
-spec load_available_languages(cb_context:context()) -> cb_context:context().
load_available_languages(Context) ->
    load_available_languages(Context, cb_context:account_id(Context)).

-spec load_available_languages(cb_context:context(), kz_term:api_binary()) -> cb_context:context().
%% Without an account id, query the system media database.
load_available_languages(Context, 'undefined') ->
    Options = [{'group_level', 1}
               %% bug fix: the 'databases' option takes a LIST of database
               %% names, as every other call site in this module does
              ,{'databases', [?KZ_MEDIA_DB]}
              ,{'mapper', fun normalize_count_results/2}
              ],
    crossbar_view:load(Context, ?CB_LIST_BY_LANG, Options);
load_available_languages(Context, _AccountId) ->
    Options = [{'group_level', 1}
              ,{'mapper', fun normalize_count_results/2}
              ],
    crossbar_view:load(Context, ?CB_LIST_BY_LANG, Options).
%% @doc Fold grouped view rows into a single JSON object mapping each
%% language key to its count; a 'null' key is reported as `missing'.
-spec normalize_count_results(kz_json:object(), kz_json:objects()) -> kz_json:objects().
normalize_count_results(JObj, []) ->
    normalize_count_results(JObj, [kz_json:new()]);
normalize_count_results(JObj, [Acc]) ->
    Count = kz_json:get_integer_value(<<"value">>, JObj),
    Key = case kz_json:get_value(<<"key">>, JObj) of
              ['null'] -> <<"missing">>;
              [Lang] -> Lang
          end,
    [kz_json:set_value(Key, Count, Acc)].
-spec load_media_docs_by_language(cb_context:context(), kz_term:ne_binary()) ->
          cb_context:context().
%% The special `missing' token lists docs that have no language set,
%% which the view indexes under the 'null' key.
load_media_docs_by_language(Context, <<"missing">>) ->
    lager:debug("loading media files missing a language"),
    load_media_docs_by_language(Context, 'null', cb_context:account_id(Context));
load_media_docs_by_language(Context, Language) ->
    lager:debug("loading media files in language ~p", [Language]),
    load_media_docs_by_language(Context, Language, cb_context:account_id(Context)).
-spec load_media_docs_by_language(cb_context:context(), kz_term:ne_binary() | 'null', kz_term:api_binary()) ->
          cb_context:context().
%% Without an account id, list from the system media database.
load_media_docs_by_language(Context, Language, 'undefined') ->
    %% range over [Language, *]; 'reduce' disabled to get individual doc ids
    Options = [{'startkey', [Language]}
              ,{'endkey', [Language, crossbar_view:high_value_key()]}
              ,{'reduce', 'false'}
              ,{'mapper', crossbar_view:get_id_fun()}
              ,{'databases', [?KZ_MEDIA_DB]}
              ],
    crossbar_view:load(Context, ?CB_LIST_BY_LANG, Options);
load_media_docs_by_language(Context, Language, _AccountId) ->
    Options = [{'startkey', [Language]}
              ,{'endkey', [Language, crossbar_view:high_value_key()]}
              ,{'reduce', 'false'}
              ,{'mapper', crossbar_view:get_id_fun()}
              ],
    crossbar_view:load(Context, ?CB_LIST_BY_LANG, Options).
%%------------------------------------------------------------------------------
%% @doc Load prompt listing
%% @end
%%------------------------------------------------------------------------------
-spec load_available_prompts(cb_context:context()) ->
          cb_context:context().
load_available_prompts(Context) ->
    load_available_prompts(Context, cb_context:account_id(Context)).
-spec load_available_prompts(cb_context:context(), kz_term:api_binary()) ->
          cb_context:context().
%% Without an account id, list prompts from the system media database.
load_available_prompts(Context, 'undefined') ->
    Options = [{'group_level', 1}
              ,{'mapper', fun normalize_count_results/2}
              ,{'databases', [?KZ_MEDIA_DB]}
              ],
    crossbar_view:load(Context, ?CB_LIST_BY_PROMPT, Options);
load_available_prompts(Context, _AccountId) ->
    Options = [{'group_level', 1}
              ,{'mapper', fun normalize_count_results/2}
              ],
    crossbar_view:load(Context, ?CB_LIST_BY_PROMPT, Options).
-spec load_media_docs_by_prompt(cb_context:context(), kz_term:ne_binary()) -> cb_context:context().
%% List every media doc belonging to a given prompt id.
load_media_docs_by_prompt(Context, PromptId) ->
    lager:debug("loading media files in prompt ~p", [PromptId]),
    load_media_docs_by_prompt(Context, PromptId, cb_context:account_id(Context)).
-spec load_media_docs_by_prompt(cb_context:context(), kz_term:ne_binary(), kz_term:api_binary()) -> cb_context:context().
%% Without an account id, list from the system media database.
load_media_docs_by_prompt(Context, PromptId, 'undefined') ->
    %% 'include_docs' so the mapper can inspect each doc's attachments
    Options = [{'startkey', [PromptId]}
              ,{'endkey', [PromptId, crossbar_view:high_value_key()]}
              ,{'reduce', 'false'}
              ,{'mapper', fun normalize_prompt_results/2}
              ,{'databases', [?KZ_MEDIA_DB]}
              ,'include_docs'
              ],
    crossbar_view:load(Context, ?CB_LIST_BY_PROMPT, Options);
load_media_docs_by_prompt(Context, PromptId, _AccountId) ->
    Options = [{'startkey', [PromptId]}
              ,{'endkey', [PromptId, crossbar_view:high_value_key()]}
              ,{'reduce', 'false'}
              ,{'mapper', fun normalize_prompt_results/2}
              ,'include_docs'
              ],
    crossbar_view:load(Context, ?CB_LIST_BY_PROMPT, Options).
%% @doc Map each prompt view row to a summary containing the doc id and
%% whether the doc carries any attachments.
-spec normalize_prompt_results(kz_json:object(), kz_term:ne_binaries()) -> kz_term:ne_binaries().
normalize_prompt_results(JObj, Acc) ->
    Attachments = kz_doc:attachments(kz_json:get_value(<<"doc">>, JObj)),
    HasAttachments = Attachments =/= 'undefined'
        andalso not kz_json:is_empty(Attachments),
    Summary = kz_json:from_list([{<<"id">>, kz_doc:id(JObj)}
                                ,{<<"has_attachments">>, HasAttachments}
                                ]),
    [Summary | Acc].
%%------------------------------------------------------------------------------
%% @doc Load a media document from the database
%% @end
%%------------------------------------------------------------------------------
-spec load_media_meta(cb_context:context(), kz_term:ne_binary()) ->
          cb_context:context().
load_media_meta(Context, MediaId) ->
    load_media_meta(Context, MediaId, cb_context:account_id(Context)).
-spec load_media_meta(cb_context:context(), kz_term:ne_binary(), kz_term:api_binary()) ->
          cb_context:context().
%% Without an account id, read from the system media database.
load_media_meta(Context, MediaId, 'undefined') ->
    crossbar_doc:load(MediaId, cb_context:set_db_name(Context, ?KZ_MEDIA_DB), ?TYPE_CHECK_OPTION(kzd_media:type()));
load_media_meta(Context, MediaId, _AccountId) ->
    crossbar_doc:load(MediaId, Context, ?TYPE_CHECK_OPTION(kzd_media:type())).
%%------------------------------------------------------------------------------
%% @doc Validate the request data against the `media' schema, running
%% on_successful_validation/2 when it passes.
%% @end
%%------------------------------------------------------------------------------
-spec validate_request(kz_term:api_binary(), cb_context:context()) -> cb_context:context().
validate_request(MediaId, Context) ->
    cb_context:validate_request_data(<<"media">>
                                    ,Context
                                    ,fun(C) -> on_successful_validation(MediaId, C) end
                                    ).
-spec on_successful_validation(kz_term:api_binary(), cb_context:context()) -> cb_context:context().
%% New document: stamp the private doc type plus any prompt-derived fields.
on_successful_validation('undefined', Context) ->
    Doc = cb_context:doc(Context),
    Props = [{<<"pvt_type">>, kzd_media:type()}
             | maybe_add_prompt_fields(Context)
            ],
    cb_context:set_doc(Context, kz_json:set_values(Props, Doc));
%% Existing document: merge the request onto the stored doc, then check
%% the prompt id was not changed.
on_successful_validation(MediaId, Context) ->
    Context1 = crossbar_doc:load_merge(MediaId, Context, ?TYPE_CHECK_OPTION(kzd_media:type())),
    maybe_validate_prompt(MediaId, Context1, cb_context:resp_status(Context1)).
%% @doc When the merged doc loaded successfully and carries a prompt id,
%% verify the prompt id is consistent with the media id.
-spec maybe_validate_prompt(kz_term:ne_binary(), cb_context:context(), crossbar_status()) ->
          cb_context:context().
maybe_validate_prompt(MediaId, Context, 'success') ->
    PromptId = kzd_media:prompt_id(cb_context:doc(Context)),
    case PromptId =:= 'undefined' of
        'true' -> Context;
        'false' -> validate_prompt(MediaId, Context, PromptId)
    end;
maybe_validate_prompt(_MediaId, Context, _Status) ->
    Context.
-spec validate_prompt(kz_term:ne_binary(), cb_context:context(), kz_term:ne_binary()) ->
          cb_context:context().
%% A prompt's doc id is derived from its prompt id and language; reject the
%% update if the submitted prompt id no longer derives the doc's media id.
validate_prompt(MediaId, Context, PromptId) ->
    Language = kz_term:to_lower_binary(kzd_media:language(cb_context:doc(Context))),
    case kz_media_util:prompt_id(PromptId, Language) of
        MediaId -> Context;
        _OtherId ->
            lager:info("attempt to change prompt id '~s' is not allowed on existing media doc '~s'"
                      ,[PromptId, MediaId]
                      ),
            cb_context:add_validation_error(<<"prompt_id">>
                                           ,<<"invalid">>
                                           ,kz_json:from_list(
                                              [{<<"message">>, <<"Changing the prompt_id on an existing prompt is not allowed">>}
                                              ,{<<"cause">>, PromptId}
                                              ])
                                           ,Context
                                           )
    end.
%% @doc When the new doc declares a prompt id, derive the `_id', `language'
%% and `name' properties from it; otherwise add nothing.
-spec maybe_add_prompt_fields(cb_context:context()) -> kz_term:proplist().
maybe_add_prompt_fields(Context) ->
    JObj = cb_context:doc(Context),
    case kzd_media:prompt_id(JObj) of
        'undefined' -> [];
        PromptId -> prompt_fields(JObj, PromptId)
    end.

%% @doc Build the prompt-derived properties for a new prompt document.
-spec prompt_fields(kz_json:object(), kz_term:ne_binary()) -> kz_term:proplist().
prompt_fields(JObj, PromptId) ->
    DefaultLanguage = kz_media_util:default_prompt_language(),
    Language = kz_term:to_lower_binary(kzd_media:language(JObj, DefaultLanguage)),
    ID = kz_media_util:prompt_id(PromptId, Language),
    lager:debug("creating properties for prompt ~s (~s)", [PromptId, Language]),
    [{<<"_id">>, ID}
    ,{<<"language">>, Language}
    ,{<<"name">>, kz_json:get_value(<<"name">>, JObj, ID)}
    ].
%%------------------------------------------------------------------------------
%% @doc Load the binary attachment of a media doc
%% @end
%%------------------------------------------------------------------------------
-spec load_media_binary(cb_context:context(), path_token()) -> cb_context:context().
%% Serves the FIRST attachment on the doc; a doc with no attachments is
%% reported as a bad identifier.
load_media_binary(Context, MediaId) ->
    Context1 = load_media_meta(Context, MediaId),
    case cb_context:resp_status(Context1) of
        'success' ->
            case kz_doc:attachment_names(cb_context:doc(Context1)) of
                [] -> crossbar_util:response_bad_identifier(MediaId, Context);
                [Attachment|_] ->
                    LoadedContext = crossbar_doc:load_attachment(cb_context:doc(Context1)
                                                                ,Attachment
                                                                ,?TYPE_CHECK_OPTION(kzd_media:type())
                                                                ,Context1
                                                                ),
                    %% set download headers so clients save with the right
                    %% filename and content type
                    cb_context:add_resp_headers(LoadedContext
                                               ,#{<<"content-disposition">> => <<"attachment; filename=", Attachment/binary>>
                                                 ,<<"content-type">> => kz_doc:attachment_content_type(cb_context:doc(Context1), Attachment)
                                                 }
                                               )
            end;
        _Status -> Context1
    end.
%%------------------------------------------------------------------------------
%% @doc Update the binary attachment of a media doc
%% @end
%%------------------------------------------------------------------------------
-spec update_media_binary(cb_context:context(), path_token()) ->
          cb_context:context().
%% Existing attachments are removed first so the uploaded file(s) replace
%% rather than accumulate.
update_media_binary(Context, MediaId) ->
    update_media_binary(crossbar_util:maybe_remove_attachments(Context)
                       ,MediaId
                       ,cb_context:req_files(Context)
                       ).
-spec update_media_binary(cb_context:context(), path_token(), req_files()) ->
          cb_context:context().
%% Save each uploaded file as an attachment in turn, stopping at the first
%% failure.
update_media_binary(Context, _MediaId, []) -> Context;
update_media_binary(Context, MediaId, [{Filename, FileObj}|Files]) ->
    Contents = kz_json:get_value(<<"contents">>, FileObj),
    CT = kz_json:get_value([<<"headers">>, <<"content_type">>], FileObj),
    lager:debug("file content type: ~s", [CT]),
    Opts = [{'content_type', CT} | ?TYPE_CHECK_OPTION(kzd_media:type())],
    AttachmentName = cb_modules_util:attachment_name(Filename, CT),
    Context1 = crossbar_doc:save_attachment(MediaId
                                           ,AttachmentName
                                           ,Contents
                                           ,Context
                                           ,Opts
                                           ),
    case cb_context:resp_status(Context1) of
        'success' -> update_media_binary(Context1, MediaId, Files);
        _Failure ->
            lager:info("failed to save attachment ~s to ~s", [AttachmentName, MediaId]),
            Context1
    end.
%%------------------------------------------------------------------------------
%% @doc Delete the binary attachment of a media doc
%% @end
%%------------------------------------------------------------------------------
-spec delete_media_binary(path_token(), cb_context:context(), kz_term:api_binary()) -> cb_context:context().
%% Without an account id, operate on the system media database.
delete_media_binary(MediaId, Context, 'undefined') ->
    delete_media_binary(MediaId, cb_context:set_db_name(Context, ?KZ_MEDIA_DB), <<"ignore">>);
delete_media_binary(MediaId, Context, _AccountId) ->
    Context1 = crossbar_doc:load(MediaId, Context, ?TYPE_CHECK_OPTION(kzd_media:type())),
    case cb_context:resp_status(Context1) of
        'success' ->
            %% NOTE(review): only the FIRST attachment is removed, and the
            %% delete uses the original Context rather than Context1 —
            %% presumably intentional, but worth confirming.
            case kz_doc:attachment_names(cb_context:doc(Context1)) of
                [] -> crossbar_util:response_bad_identifier(MediaId, Context);
                [AttachmentId|_] ->
                    crossbar_doc:delete_attachment(MediaId, AttachmentId, Context)
            end;
        _Status -> Context1
    end.
%% @doc True when the document's media source marks it as text-to-speech.
-spec is_tts(kz_json:object()) -> boolean().
is_tts(JObj) ->
    <<"tts">> =:= kz_json:get_ne_binary_value(<<"media_source">>, JObj).
%% @doc True when the TTS text or voice differs from the values recorded at
%% the last render (the `pvt_previous_*' markers).
-spec is_tts_changed(kz_json:object()) -> boolean().
is_tts_changed(JObj) ->
    CurrentText = kz_json:get_ne_binary_value([<<"tts">>, <<"text">>], JObj),
    CurrentVoice = kz_json:get_ne_binary_value([<<"tts">>, <<"voice">>], JObj),
    PreviousText = kz_json:get_value(<<"pvt_previous_tts">>, JObj),
    PreviousVoice = kz_json:get_ne_binary_value(<<"pvt_previous_voice">>, JObj),
    CurrentText =/= PreviousText
        orelse CurrentVoice =/= PreviousVoice.
| null | https://raw.githubusercontent.com/2600hz/kazoo/24519b9af9792caa67f7c09bbb9d27e2418f7ad6/applications/crossbar/src/modules/cb_media.erl | erlang | -----------------------------------------------------------------------------
@doc Account module
Store/retrieve media files
@end
-----------------------------------------------------------------------------
=============================================================================
API
=============================================================================
------------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc This function determines the verbs that are appropriate for the
given Nouns. For example `/accounts/' can only accept `GET' and `PUT'.
Failure here returns `405 Method Not Allowed'.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc This function determines if the provided list of Nouns are valid.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Add content types accepted and provided by this module
@end
------------------------------------------------------------------------------
%%%-----------------------------------------------------------------------------
%%% @doc If you provide alternative languages, return a list of languages and
%%% optional quality value:
%%% `[<<"en">>, <<"en-gb;q=0.7">>, <<"da;q=0.5">>]'
%%% @end
%%%-----------------------------------------------------------------------------
%%%-----------------------------------------------------------------------------
%%% @doc This function determines if the parameters and content are correct
%%% for this request.
%%% @end
%%%-----------------------------------------------------------------------------
%%%-----------------------------------------------------------------------------
%%% @doc Attempt to load a summarized list of media
%%% @end
%%%-----------------------------------------------------------------------------
%%%-----------------------------------------------------------------------------
%%% @doc Load prompt listing
%%% @end
%%%-----------------------------------------------------------------------------
%%%-----------------------------------------------------------------------------
%%% @doc Load a media document from the database
%%% @end
%%%-----------------------------------------------------------------------------
%%%-----------------------------------------------------------------------------
%%% @doc
%%% @end
%%%-----------------------------------------------------------------------------
%%%-----------------------------------------------------------------------------
%%% @doc Load the binary attachment of a media doc
%%% @end
%%%-----------------------------------------------------------------------------
%%%-----------------------------------------------------------------------------
%%% @doc Update the binary attachment of a media doc
%%% @end
%%%-----------------------------------------------------------------------------
%%%-----------------------------------------------------------------------------
%%% @doc Delete the binary attachment of a media doc
%%% @end
%%%-----------------------------------------------------------------------------
%%% @copyright (C) 2011-2020, 2600Hz
%%% @author
%%% This Source Code Form is subject to the terms of the Mozilla Public
%%% License, v. 2.0. If a copy of the MPL was not distributed with this
%%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
-module(cb_media).
-export([init/0
,allowed_methods/0, allowed_methods/1, allowed_methods/2
,resource_exists/0, resource_exists/1, resource_exists/2
,authorize/1, authorize/2, authorize/3
,validate/1, validate/2, validate/3
,content_types_provided/2, content_types_provided/3
,content_types_accepted/2, content_types_accepted/3
,languages_provided/1, languages_provided/2, languages_provided/3
,put/1
,post/2, post/3
,delete/2, delete/3
,acceptable_content_types/0
]).
-include("crossbar.hrl").
-define(SERVER, ?MODULE).
-define(BIN_DATA, <<"raw">>).
-define(LANGUAGES, <<"languages">>).
-define(PROMPTS, <<"prompts">>).
-define(MEDIA_MIME_TYPES
,?AUDIO_CONTENT_TYPES
++ ?VIDEO_CONTENT_TYPES
++ ?BASE64_CONTENT_TYPES
).
-define(CB_LIST, <<"media/crossbar_listing">>).
-define(CB_LIST_BY_LANG, <<"media/listing_by_language">>).
-define(CB_LIST_BY_PROMPT, <<"media/listing_by_prompt">>).
-define(MOD_CONFIG_CAT, <<(?CONFIG_CAT)/binary, ".media">>).
-define(DEFAULT_VOICE
,list_to_binary([kazoo_tts:default_voice(), $/, kazoo_tts:default_language()])
).
-define(NORMALIZATION_FORMAT
,kapps_config:get_ne_binary(?MOD_CONFIG_CAT, <<"normalization_format">>, <<"mp3">>)
).
%% @doc Registers this module's handlers on the crossbar binding bus and
%% ensures the kazoo_media application (attachment storage, TTS) is running.
-spec init() -> 'ok'.
init() ->
    %% media attachments and text-to-speech live in the kazoo_media app
    {'ok', _} = application:ensure_all_started('kazoo_media'),
    _ = crossbar_bindings:bind(<<"*.content_types_provided.media">>, ?MODULE, 'content_types_provided'),
    _ = crossbar_bindings:bind(<<"*.content_types_accepted.media">>, ?MODULE, 'content_types_accepted'),
    _ = crossbar_bindings:bind(<<"*.allowed_methods.media">>, ?MODULE, 'allowed_methods'),
    _ = crossbar_bindings:bind(<<"*.authorize.media">>, ?MODULE, 'authorize'),
    _ = crossbar_bindings:bind(<<"*.resource_exists.media">>, ?MODULE, 'resource_exists'),
    _ = crossbar_bindings:bind(<<"*.languages_provided.media">>, ?MODULE, 'languages_provided'),
    _ = crossbar_bindings:bind(<<"*.validate.media">>, ?MODULE, 'validate'),
    _ = crossbar_bindings:bind(<<"*.execute.put.media">>, ?MODULE, 'put'),
    _ = crossbar_bindings:bind(<<"*.execute.post.media">>, ?MODULE, 'post'),
    _ = crossbar_bindings:bind(<<"*.execute.delete.media">>, ?MODULE, 'delete'),
    'ok'.
%% @doc Methods on the /media collection: list (GET) and create (PUT).
-spec allowed_methods() -> http_methods().
allowed_methods() ->
    [?HTTP_GET, ?HTTP_PUT].
%% @doc Methods one level deep: the special /media/languages and
%% /media/prompts listings are read-only; a media doc supports
%% read, update and delete.
-spec allowed_methods(path_token()) -> http_methods().
allowed_methods(?LANGUAGES) ->
    [?HTTP_GET];
allowed_methods(?PROMPTS) ->
    [?HTTP_GET];
allowed_methods(_MediaId) ->
    [?HTTP_GET, ?HTTP_POST, ?HTTP_DELETE].
%% @doc Methods two levels deep: per-language / per-prompt listings are
%% read-only; /media/{id}/raw supports download (GET) and upload (POST).
%% NOTE(review): delete/3 exists for {id}/raw but DELETE is not listed
%% here - confirm whether that endpoint is meant to be reachable.
-spec allowed_methods(path_token(), path_token()) -> http_methods().
allowed_methods(?LANGUAGES, _Language) ->
    [?HTTP_GET];
allowed_methods(?PROMPTS, _PromptId) ->
    [?HTTP_GET];
allowed_methods(_MediaId, ?BIN_DATA) ->
    [?HTTP_GET, ?HTTP_POST].
%% Failure here returns `404 Not Found'.
%% @doc Every media path shape accepted by allowed_methods is a real
%% resource; existence of the actual document is checked in validate.
-spec resource_exists() -> 'true'.
resource_exists() -> 'true'.
-spec resource_exists(path_token()) -> 'true'.
resource_exists(_) -> 'true'.
-spec resource_exists(path_token(), path_token()) -> 'true'.
resource_exists(?LANGUAGES, _Language) -> 'true';
resource_exists(?PROMPTS, _PromptId) -> 'true';
resource_exists(_, ?BIN_DATA) -> 'true'.
%% @doc All arities delegate to authorize_media/3 with the request nouns
%% and the (possibly 'undefined') account id.
-spec authorize(cb_context:context()) -> boolean() | {'stop', cb_context:context()}.
authorize(Context) ->
    authorize_media(Context, cb_context:req_nouns(Context), cb_context:account_id(Context)).
-spec authorize(cb_context:context(), path_token()) -> boolean() | {'stop', cb_context:context()}.
authorize(Context, _) ->
    authorize_media(Context, cb_context:req_nouns(Context), cb_context:account_id(Context)).
-spec authorize(cb_context:context(), path_token(), path_token()) -> boolean() | {'stop', cb_context:context()}.
authorize(Context, _, _) ->
    authorize_media(Context, cb_context:req_nouns(Context), cb_context:account_id(Context)).
%% @doc Authorization policy for media requests.
%% System prompt/language listings need no account and are always allowed.
%% Other account-less /media requests require authentication and either
%% superduper-admin rights or a read-only (GET) verb; failure stops the
%% request with a 403. Account-scoped requests fall back to the standard
%% simple-authz check; anything else is denied.
%% Spec fixed: the account-less clause can return {'stop', Context}, which
%% the previous `boolean()` return type did not cover.
-spec authorize_media(cb_context:context(), req_nouns(), kz_term:api_binary()) ->
          boolean() | {'stop', cb_context:context()}.
authorize_media(_Context, [{<<"media">>, [?PROMPTS]}], 'undefined') ->
    lager:debug("allowing system prompts request"),
    'true';
authorize_media(_Context, [{<<"media">>, [?LANGUAGES]}], 'undefined') ->
    lager:debug("allowing system languages request"),
    'true';
authorize_media(_Context, [{<<"media">>, [?PROMPTS, _PromptId]}], 'undefined') ->
    lager:debug("allowing system prompt request for ~s", [_PromptId]),
    'true';
authorize_media(_Context, [{<<"media">>, [?LANGUAGES, _Language]}], 'undefined') ->
    lager:debug("allowing system language request for ~s", [_Language]),
    'true';
authorize_media(Context, [{<<"media">>, _}|_], 'undefined') ->
    IsAuthenticated = cb_context:is_authenticated(Context),
    IsSuperDuperAdmin = cb_context:is_superduper_admin(Context),
    IsReqVerbGet = cb_context:req_verb(Context) =:= ?HTTP_GET,
    case IsAuthenticated
        andalso (IsSuperDuperAdmin
                 orelse IsReqVerbGet
                )
    of
        'true' -> 'true';
        'false' -> {'stop', cb_context:add_system_error('forbidden', Context)}
    end;
authorize_media(Context, [{<<"media">>, _}, {<<"accounts">>, [AccountId]}], AccountId) ->
    cb_simple_authz:authorize(Context);
authorize_media(_Context, _Nouns, _AccountId) ->
    'false'.
%% @doc The full set of media MIME types this module can serve and accept
%% (audio + video + base64 wrappers, per the ?MEDIA_MIME_TYPES macro).
-spec acceptable_content_types() -> [cowboy_content_type()].
acceptable_content_types() ->
    ?MEDIA_MIME_TYPES.
%% @doc If a GET requests a media MIME type via the Accept header, offer
%% the stored attachment's real content type instead of JSON metadata.
-spec content_types_provided(cb_context:context(), path_token()) ->
          cb_context:context().
content_types_provided(Context, MediaId) ->
    Verb = cb_context:req_verb(Context),
    ContentType = cb_context:req_header(Context, <<"accept">>),
    case ?HTTP_GET =:= Verb
        andalso api_util:content_type_matches(ContentType, acceptable_content_types())
    of
        'false' -> Context;
        'true' ->
            content_types_provided_for_media(Context, MediaId, ?BIN_DATA, ?HTTP_GET)
    end.
-spec content_types_provided(cb_context:context(), path_token(), path_token()) ->
          cb_context:context().
content_types_provided(Context, MediaId, ?BIN_DATA) ->
    content_types_provided_for_media(Context, MediaId, ?BIN_DATA, cb_context:req_verb(Context)).
%% @doc For a raw-binary GET, load the media doc and advertise the first
%% attachment's content type; non-GET verbs leave the context untouched.
-spec content_types_provided_for_media(cb_context:context(), path_token(), path_token(), http_method()) ->
          cb_context:context().
content_types_provided_for_media(Context, MediaId, ?BIN_DATA, ?HTTP_GET) ->
    Context1 = load_media_meta(Context, MediaId),
    case cb_context:resp_status(Context1) of
        'success' ->
            JObj = cb_context:doc(Context1),
            case kz_doc:attachment_names(JObj) of
                [] -> Context1;
                [Attachment|_] ->
                    CT = kz_doc:attachment_content_type(JObj, Attachment),
                    %% NOTE(review): assumes the stored content type always
                    %% contains a '/'; a malformed CT would badmatch here
                    [Type, SubType] = binary:split(CT, <<"/">>),
                    cb_context:set_content_types_provided(Context, [{'to_binary', [{Type, SubType}]}])
            end;
        _Status -> Context1
    end;
content_types_provided_for_media(Context, _MediaId, ?BIN_DATA, _Verb) ->
    Context.
%% @doc A POST whose Content-Type is one of the media MIME types is
%% accepted as a raw binary upload on the doc endpoint.
-spec content_types_accepted(cb_context:context(), kz_term:ne_binary()) -> cb_context:context().
content_types_accepted(Context, _MediaId) ->
    Verb = cb_context:req_verb(Context),
    ContentType = cb_context:req_header(Context, <<"content-type">>),
    case ?HTTP_POST =:= Verb
        andalso api_util:content_type_matches(ContentType, acceptable_content_types())
    of
        'false' -> Context;
        'true' ->
            CTA = [{'from_binary', acceptable_content_types()}],
            cb_context:set_content_types_accepted(Context, CTA)
    end.
%% @doc The explicit /media/{id}/raw endpoint always accepts binary uploads
%% on POST.
-spec content_types_accepted(cb_context:context(), path_token(), path_token()) ->
          cb_context:context().
content_types_accepted(Context, _MediaId, ?BIN_DATA) ->
    content_types_accepted_for_upload(Context, cb_context:req_verb(Context)).
-spec content_types_accepted_for_upload(cb_context:context(), http_method()) ->
          cb_context:context().
content_types_accepted_for_upload(Context, ?HTTP_POST) ->
    CTA = [{'from_binary', acceptable_content_types()}],
    cb_context:set_content_types_accepted(Context, CTA);
content_types_accepted_for_upload(Context, _Verb) ->
    Context.
%% @doc Language negotiation hooks. Currently no alternative languages are
%% offered at any depth, so each arity returns the context unchanged.
-spec languages_provided(cb_context:context()) -> cb_context:context().
languages_provided(Context) ->
    Context.
-spec languages_provided(cb_context:context(), path_token()) -> cb_context:context().
languages_provided(Context, _Id) ->
    Context.
-spec languages_provided(cb_context:context(), path_token(), path_token()) -> cb_context:context().
languages_provided(Context, _Id, _Path) ->
    Context.
%% Failure here returns 400.
%% @doc Collection-level validation; dispatched by verb.
-spec validate(cb_context:context()) -> cb_context:context().
validate(Context) ->
    validate_media_docs(Context, cb_context:req_verb(Context)).
%% @doc One-token validation: the special languages/prompts listings, or
%% a single media document.
-spec validate(cb_context:context(), path_token()) -> cb_context:context().
validate(Context, ?LANGUAGES) ->
    load_available_languages(Context);
validate(Context, ?PROMPTS) ->
    load_available_prompts(Context);
validate(Context, MediaId) ->
    validate_media_doc(Context, MediaId, cb_context:req_verb(Context)).
%% @doc Two-token validation: per-language listing, per-prompt listing,
%% or the raw binary of a media doc.
-spec validate(cb_context:context(), path_token(), path_token()) -> cb_context:context().
validate(Context, ?LANGUAGES, Language) ->
    load_media_docs_by_language(Context, kz_term:to_lower_binary(Language));
validate(Context, ?PROMPTS, PromptId) ->
    load_media_docs_by_prompt(Context, PromptId);
validate(Context, MediaId, ?BIN_DATA) ->
    lager:debug("uploading binary data to '~s'", [MediaId]),
    validate_media_binary(Context, MediaId, cb_context:req_verb(Context), cb_context:req_files(Context)).
%% @doc Collection verbs: GET lists media, PUT validates a new media doc.
-spec validate_media_docs(cb_context:context(), http_method()) -> cb_context:context().
validate_media_docs(Context, ?HTTP_GET) ->
    load_media_summary(Context);
validate_media_docs(Context, ?HTTP_PUT) ->
    validate_request('undefined', Context).
%% @doc Per-document verbs. A GET whose Accept header names a media MIME
%% type is treated as a binary download rather than a metadata fetch.
-spec validate_media_doc(cb_context:context(), kz_term:ne_binary(), http_method()) -> cb_context:context().
validate_media_doc(Context, MediaId, ?HTTP_GET) ->
    case api_util:content_type_matches(cb_context:req_header(Context, <<"accept">>)
                                      ,acceptable_content_types()
                                      )
    of
        'false' -> load_media_meta(Context, MediaId);
        'true' -> validate_media_binary(Context, MediaId, ?HTTP_GET, [])
    end;
validate_media_doc(Context, MediaId, ?HTTP_POST) ->
    validate_media_doc_update(Context, MediaId, cb_context:req_header(Context, <<"content-type">>));
validate_media_doc(Context, MediaId, ?HTTP_DELETE) ->
    load_media_meta(Context, MediaId).
%% @doc A POST with a media Content-Type is a binary upload; any other
%% content type is a JSON metadata update.
-spec validate_media_doc_update(cb_context:context(), kz_term:ne_binary(), kz_term:api_ne_binary()) -> cb_context:context().
validate_media_doc_update(Context, MediaId, ContentType) ->
    lager:debug("trying to update doc with content ~s", [ContentType]),
    case api_util:content_type_matches(ContentType, acceptable_content_types()) of
        'false' -> validate_request(MediaId, Context);
        'true' -> validate_media_binary(Context, MediaId, ?HTTP_POST, cb_context:req_files(Context))
    end.
%% @doc Validates access to the raw attachment: GET fetches the stored
%% binary; POST requires exactly one uploaded file (none -> "required"
%% error, several -> "maxItems" error) and, when present, loads the media
%% metadata before optionally normalizing the upload.
-spec validate_media_binary(cb_context:context(), kz_term:ne_binary(), http_method(), kz_term:proplist()) -> cb_context:context().
validate_media_binary(Context, MediaId, ?HTTP_GET, _Files) ->
    lager:debug("fetch media contents for '~s'", [MediaId]),
    load_media_binary(Context, MediaId);
validate_media_binary(Context, _MediaId, ?HTTP_POST, []) ->
    error_missing_file(Context);
validate_media_binary(Context, MediaId, ?HTTP_POST, [{_Filename, FileObj}]) ->
    Context1 = load_media_meta(Context, MediaId),
    lager:debug("loaded media meta for '~s'", [MediaId]),
    case cb_context:resp_status(Context1) of
        'success' ->
            maybe_normalize_upload(Context1, MediaId, FileObj);
        _Status -> Context1
    end;
validate_media_binary(Context, _MediaId, ?HTTP_POST, _Files) ->
    %% more than one file uploaded
    cb_context:add_validation_error(<<"file">>
                                   ,<<"maxItems">>
                                   ,kz_json:from_list([{<<"message">>, <<"Please provide a single media file">>}])
                                   ,Context
                                   ).
%% @doc Adds a "required" validation error for requests that should have
%% carried a media file but did not.
%% Fixed the user-facing message grammar ("an media file" -> "a media file").
-spec error_missing_file(cb_context:context()) -> cb_context:context().
error_missing_file(Context) ->
    cb_context:add_validation_error(<<"file">>
                                   ,<<"required">>
                                   ,kz_json:from_list([{<<"message">>, <<"Please provide a media file">>}])
                                   ,Context
                                   ).
%% @doc If the "normalize_media" flag is enabled in this module's config
%% category, transcode the uploaded file before validation; otherwise
%% validate the upload as-is.
-spec maybe_normalize_upload(cb_context:context(), kz_term:ne_binary(), kz_json:object()) -> cb_context:context().
maybe_normalize_upload(Context, MediaId, FileJObj) ->
    case kapps_config:get_is_true(?MOD_CONFIG_CAT, <<"normalize_media">>, 'false') of
        'true' ->
            lager:debug("normalizing uploaded media"),
            normalize_upload(Context, MediaId, FileJObj);
        'false' ->
            lager:debug("normalization not enabled, leaving upload as-is"),
            validate_upload(Context, MediaId, FileJObj)
    end.
%% @doc Transcodes the uploaded file from its declared content type to the
%% configured normalization format (default mp3), then validates the
%% (possibly replaced) upload.
-spec normalize_upload(cb_context:context(), kz_term:ne_binary(), kz_json:object()) ->
          cb_context:context().
normalize_upload(Context, MediaId, FileJObj) ->
    normalize_upload(Context, MediaId, FileJObj
                    ,kz_json:get_ne_binary_value([<<"headers">>, <<"content_type">>], FileJObj)
                    ).
-spec normalize_upload(cb_context:context(), kz_term:ne_binary(), kz_json:object(), kz_term:api_binary()) ->
          cb_context:context().
normalize_upload(Context, MediaId, FileJObj, UploadContentType) ->
    FromExt = kz_mime:to_extension(UploadContentType),
    ToExt = ?NORMALIZATION_FORMAT,
    lager:info("upload is of type '~s', normalizing from ~s to ~s"
              ,[UploadContentType, FromExt, ToExt]
              ),
    {UpdatedContext, UpdatedFileJObj}
        = cb_modules_util:normalize_media_upload(Context, FromExt, ToExt, FileJObj, []),
    validate_upload(UpdatedContext
                   ,MediaId
                   ,UpdatedFileJObj
                   ).
%% @doc Copies the upload's content type and size onto the media doc
%% (falling back to the payload byte size when no content_length header is
%% present), marks the media source as "upload", and runs schema validation.
-spec validate_upload(cb_context:context(), kz_term:ne_binary(), kz_json:object()) -> cb_context:context().
validate_upload(Context, MediaId, FileJObj) ->
    CT = kz_json:get_value([<<"headers">>, <<"content_type">>], FileJObj, <<"application/octet-stream">>),
    Size = kz_json:get_integer_value([<<"headers">>, <<"content_length">>]
                                    ,FileJObj
                                    ,iolist_size(kz_json:get_value(<<"contents">>, FileJObj, <<>>))
                                    ),
    Props = [{<<"content_type">>, CT}
            ,{<<"content_length">>, Size}
            ,{<<"media_source">>, <<"upload">>}
            ],
    validate_request(MediaId
                    ,cb_context:set_req_data(Context
                                            ,kz_json:set_values(Props, cb_context:doc(Context))
                                            )
                    ).
%% @doc Executes a PUT (create). Requests without an account operate on the
%% system media database; the <<"ignore">> sentinel just re-enters the
%% second clause after the db has been switched.
-spec put(cb_context:context()) -> cb_context:context().
put(Context) ->
    put_media(Context, cb_context:account_id(Context)).
-spec put_media(cb_context:context(), kz_term:api_binary()) -> cb_context:context().
put_media(Context, 'undefined') ->
    put_media(cb_context:set_db_name(Context, ?KZ_MEDIA_DB), <<"ignore">>);
put_media(Context, _AccountId) ->
    %% TTS-sourced docs also need their audio generated, not just a save
    case is_tts(cb_context:doc(Context)) of
        'true' -> create_update_tts(Context, <<"create">>);
        'false' -> crossbar_doc:save(Context)
    end.
%% @doc Executes a POST (update) on a media doc. Account-less requests are
%% redirected to the system media database first.
-spec post(cb_context:context(), path_token()) -> cb_context:context().
post(Context, MediaId) ->
    post_media_doc(Context, MediaId, cb_context:account_id(Context)).
-spec post_media_doc(cb_context:context(), kz_term:ne_binary(), kz_term:api_binary()) -> cb_context:context().
post_media_doc(Context, MediaId, 'undefined') ->
    post_media_doc(cb_context:set_db_name(Context, ?KZ_MEDIA_DB), MediaId, <<"ignore">>);
post_media_doc(Context, MediaId, _AccountId) ->
    case is_tts(cb_context:doc(Context)) of
        'true' -> create_update_tts(Context, <<"update">>);
        %% no longer TTS-sourced: drop stale TTS bookkeeping before saving
        'false' -> post_media_doc_or_binary(remove_tts_keys(Context), MediaId, cb_context:req_header(Context, <<"content-type">>))
    end.
%% @doc A POST carrying a media Content-Type is a binary upload; otherwise
%% it is a plain document save.
-spec post_media_doc_or_binary(cb_context:context(), kz_term:ne_binary(), kz_term:api_ne_binary()) -> cb_context:context().
post_media_doc_or_binary(Context, MediaId, ContentType) ->
    case api_util:content_type_matches(ContentType, acceptable_content_types()) of
        'false' -> crossbar_doc:save(Context);
        'true' -> post(Context, MediaId, ?BIN_DATA)
    end.
%% @doc Executes a POST on /media/{id}/raw (binary upload).
-spec post(cb_context:context(), path_token(), path_token()) -> cb_context:context().
post(Context, MediaId, ?BIN_DATA) ->
    post_media_binary(Context, MediaId, cb_context:account_id(Context)).
%% @doc Strips the private TTS bookkeeping keys from the context's document,
%% used when a doc's media source is no longer "tts".
-spec remove_tts_keys(cb_context:context()) -> cb_context:context().
remove_tts_keys(Context) ->
    Doc = cb_context:doc(Context),
    Cleaned = kz_json:delete_keys([<<"pvt_previous_tts">>, <<"pvt_previous_voice">>], Doc),
    cb_context:set_doc(Context, Cleaned).
%% @doc Saves the uploaded binary; account-less requests are pointed at the
%% system media database first (the <<"ignore">> sentinel re-enters the
%% second clause).
-spec post_media_binary(cb_context:context(), kz_term:ne_binary(), kz_term:api_binary()) -> cb_context:context().
post_media_binary(Context, MediaId, 'undefined') ->
    post_media_binary(cb_context:set_db_name(Context, ?KZ_MEDIA_DB), MediaId, <<"ignore">>);
post_media_binary(Context, MediaId, _AccountId) ->
    update_media_binary(Context, MediaId).
%% @doc Handles a TTS-sourced media doc. On create the doc is saved first
%% (recording the current text/voice) and the audio is always generated;
%% on update the audio is regenerated only if text or voice changed.
%% Added the missing -spec for consistency with the rest of the module.
-spec create_update_tts(cb_context:context(), kz_term:ne_binary()) -> cb_context:context().
create_update_tts(Context, <<"create">>) ->
    C1 = update_and_save_tts_doc(Context),
    maybe_update_media_file(C1, <<"create">>, 'true', cb_context:resp_status(C1));
create_update_tts(Context, <<"update">>) ->
    maybe_update_media_file(Context, <<"update">>, is_tts_changed(cb_context:doc(Context)), cb_context:resp_status(Context)).
%% @doc Regenerates the TTS audio attachment when needed.
%% Args: CreateOrUpdate is <<"create">> or <<"update">>; the boolean says
%% whether the TTS text/voice changed; the last arg is the current resp
%% status. Audio is only (re)generated on 'success' with a 'true' flag.
-spec maybe_update_media_file(cb_context:context(), kz_term:ne_binary(), boolean(), crossbar_status()) ->
          cb_context:context().
maybe_update_media_file(Context, CreateOrUpdate, 'true', 'success') ->
    JObj = cb_context:doc(Context),
    Text = kz_json:get_value([<<"tts">>, <<"text">>], JObj),
    Voice = kz_json:get_value([<<"tts">>, <<"voice">>], JObj, ?DEFAULT_VOICE),
    try kazoo_tts:create(Text, Voice) of
        {'error', Reason} ->
            %% engine rejected the request: roll back a freshly created doc
            _ = maybe_delete_tts(Context, kz_term:to_binary(Reason), CreateOrUpdate),
            crossbar_util:response('error', kz_term:to_binary(Reason), Context);
        {'error', 'tts_provider_failure', Reason} ->
            _ = maybe_delete_tts(Context, kz_term:to_binary(Reason), CreateOrUpdate);
        {'ok', ContentType, Content} ->
            MediaId = kz_doc:id(JObj),
            Headers = kz_json:from_list([{<<"content_type">>, ContentType}
                                        ,{<<"content_length">>, iolist_size(Content)}
                                        ]),
            FileJObj = kz_json:from_list([{<<"headers">>, Headers}
                                         ,{<<"contents">>, Content}
                                         ]),
            FileName = list_to_binary(["text_to_speech_"
                                      ,kz_term:to_binary(kz_time:now_s())
                                      ,".wav"
                                      ]),
            C1 = update_media_binary(cb_context:set_req_files(Context, [{FileName, FileJObj}]), MediaId),
            %% 'andalso' yields 'false' on failure, or the CreateOrUpdate
            %% binary tag on success - hence the three case branches below
            case cb_context:resp_status(C1) =:= 'success'
                andalso CreateOrUpdate
            of
                'false' -> maybe_delete_tts(C1, <<"creating TTS failed unexpectedly">>, CreateOrUpdate);
                <<"create">> -> Context;
                <<"update">> ->
                    %% re-merge the client's fields and record the new
                    %% text/voice as the "previous" TTS parameters
                    C2 = crossbar_doc:load_merge(MediaId, kz_doc:public_fields(JObj), Context, ?TYPE_CHECK_OPTION(kzd_media:type())),
                    case cb_context:resp_status(C2) of
                        'success' -> update_and_save_tts_doc(C2);
                        _ -> C2
                    end
            end
    catch
        _E:_R ->
            lager:debug("creating tts failed unexpectedly: ~s: ~p", [_E, _R]),
            maybe_delete_tts(Context, <<"creating TTS failed unexpectedly">>, CreateOrUpdate)
    end;
maybe_update_media_file(Context, <<"update">>, 'false', 'success') ->
    %% TTS parameters unchanged: plain save, keep the existing audio
    crossbar_doc:save(Context);
maybe_update_media_file(Context, _, _, _) ->
    Context.
%% @doc Records the current TTS text and voice into the private
%% pvt_previous_* keys (so later updates can detect changes) and saves the
%% document.
-spec update_and_save_tts_doc(cb_context:context()) -> cb_context:context().
update_and_save_tts_doc(Context) ->
    Doc = cb_context:doc(Context),
    Bookkeeping = [{<<"pvt_previous_tts">>, kz_json:get_value([<<"tts">>, <<"text">>], Doc)}
                  ,{<<"pvt_previous_voice">>, kz_json:get_value([<<"tts">>, <<"voice">>], Doc, ?DEFAULT_VOICE)}
                  ],
    Updated = kz_json:set_values(Bookkeeping, Doc),
    crossbar_doc:save(cb_context:set_doc(Context, Updated)).
%% @doc After a failed TTS generation: a freshly created doc is deleted and
%% an error response returned; an update leaves the existing doc in place.
-spec maybe_delete_tts(cb_context:context(), kz_term:ne_binary(), kz_term:ne_binary()) -> cb_context:context().
maybe_delete_tts(Context, _Reason, <<"update">>) ->
    Context;
maybe_delete_tts(Context, Reason, <<"create">>) ->
    _ = crossbar_doc:delete(Context),
    crossbar_util:response('error', Reason, Context).
%% @doc Chooses hard vs soft delete for a media doc: prompts, or requests
%% explicitly asking for "hard_delete", are removed permanently; everything
%% else is soft-deleted.
-spec delete_type(boolean() | cb_context:context()) -> ?HARD_DELETE | ?SOFT_DELETE.
delete_type('true') ->
    ?HARD_DELETE;
delete_type('false') ->
    ?SOFT_DELETE;
delete_type(Context) ->
    Prompt = kzd_media:is_prompt(cb_context:resp_data(Context)),
    Hard = kz_json:is_true(<<"hard_delete">>, cb_context:req_data(Context)),
    delete_type(Prompt or Hard).
%% @doc Executes DELETE on a media doc, using the hard/soft policy from
%% delete_type/1.
-spec delete(cb_context:context(), path_token()) -> cb_context:context().
delete(Context, _MediaId) ->
    crossbar_doc:delete(Context, delete_type(Context)).
%% @doc Executes DELETE on /media/{id}/raw: removes only the attachment.
-spec delete(cb_context:context(), path_token(), path_token()) -> cb_context:context().
delete(Context, MediaId, ?BIN_DATA) ->
    delete_media_binary(MediaId, Context, cb_context:account_id(Context)).
%% @doc Loads a summarized media listing; account-less requests read the
%% system media database instead of the account database.
-spec load_media_summary(cb_context:context()) -> cb_context:context().
load_media_summary(Context) ->
    load_media_summary(Context, cb_context:account_id(Context)).
-spec load_media_summary(cb_context:context(), kz_term:api_binary()) -> cb_context:context().
load_media_summary(Context, 'undefined') ->
    lager:debug("loading system_config media"),
    Options = [{'databases', [?KZ_MEDIA_DB]}
              ,{'mapper', crossbar_view:get_value_fun()}
              ],
    crossbar_view:load(Context, ?CB_LIST, Options);
load_media_summary(Context, _AccountId) ->
    Options = [{'mapper', crossbar_view:get_value_fun()}
              ],
    crossbar_view:load(Context, ?CB_LIST, Options).
%% @doc Lists the languages in use, with per-language media counts.
-spec load_available_languages(cb_context:context()) -> cb_context:context().
load_available_languages(Context) ->
    load_available_languages(Context, cb_context:account_id(Context)).
%% @doc Runs the by-language reduce view (grouped at level 1 so each row is
%% one language with its media count). Account-less requests read the
%% system media database.
%% Fixed: the 'databases' option must be a list of database names -
%% `[?KZ_MEDIA_DB]` - as in every other system clause in this module; the
%% bare binary was inconsistent with those call sites.
-spec load_available_languages(cb_context:context(), kz_term:api_binary()) -> cb_context:context().
load_available_languages(Context, 'undefined') ->
    Options = [{'group_level', 1}
              ,{'databases', [?KZ_MEDIA_DB]}
              ,{'mapper', fun normalize_count_results/2}
              ],
    crossbar_view:load(Context, ?CB_LIST_BY_LANG, Options);
load_available_languages(Context, _AccountId) ->
    Options = [{'group_level', 1}
              ,{'mapper', fun normalize_count_results/2}
              ],
    crossbar_view:load(Context, ?CB_LIST_BY_LANG, Options).
%% @doc View mapper that folds grouped reduce rows into a single JSON
%% object of {Language => Count}; rows keyed 'null' (media without a
%% language) are reported under the <<"missing">> key.
-spec normalize_count_results(kz_json:object(), kz_json:objects()) -> kz_json:objects().
normalize_count_results(JObj, []) ->
    %% first row: seed the single accumulator object
    normalize_count_results(JObj, [kz_json:new()]);
normalize_count_results(JObj, [Acc]) ->
    case kz_json:get_value(<<"key">>, JObj) of
        ['null'] ->
            [kz_json:set_value(<<"missing">>, kz_json:get_integer_value(<<"value">>, JObj), Acc)];
        [Lang] ->
            [kz_json:set_value(Lang, kz_json:get_integer_value(<<"value">>, JObj), Acc)]
    end.
%% @doc Lists media doc ids for a given language; the special token
%% <<"missing">> selects docs with no language (view key 'null').
-spec load_media_docs_by_language(cb_context:context(), kz_term:ne_binary()) ->
          cb_context:context().
load_media_docs_by_language(Context, <<"missing">>) ->
    lager:debug("loading media files missing a language"),
    load_media_docs_by_language(Context, 'null', cb_context:account_id(Context));
load_media_docs_by_language(Context, Language) ->
    lager:debug("loading media files in language ~p", [Language]),
    load_media_docs_by_language(Context, Language, cb_context:account_id(Context)).
%% @doc Same view as the language counts, but with 'reduce' disabled so the
%% raw per-doc rows come back; keyed [Language, ...] so the start/end keys
%% cover exactly one language. Account-less requests use the system db.
-spec load_media_docs_by_language(cb_context:context(), kz_term:ne_binary() | 'null', kz_term:api_binary()) ->
          cb_context:context().
load_media_docs_by_language(Context, Language, 'undefined') ->
    Options = [{'startkey', [Language]}
              ,{'endkey', [Language, crossbar_view:high_value_key()]}
              ,{'reduce', 'false'}
              ,{'mapper', crossbar_view:get_id_fun()}
              ,{'databases', [?KZ_MEDIA_DB]}
              ],
    crossbar_view:load(Context, ?CB_LIST_BY_LANG, Options);
load_media_docs_by_language(Context, Language, _AccountId) ->
    Options = [{'startkey', [Language]}
              ,{'endkey', [Language, crossbar_view:high_value_key()]}
              ,{'reduce', 'false'}
              ,{'mapper', crossbar_view:get_id_fun()}
              ],
    crossbar_view:load(Context, ?CB_LIST_BY_LANG, Options).
%% @doc Lists available prompts with per-prompt counts (grouped reduce at
%% level 1); account-less requests read the system media database.
-spec load_available_prompts(cb_context:context()) ->
          cb_context:context().
load_available_prompts(Context) ->
    load_available_prompts(Context, cb_context:account_id(Context)).
-spec load_available_prompts(cb_context:context(), kz_term:api_binary()) ->
          cb_context:context().
load_available_prompts(Context, 'undefined') ->
    Options = [{'group_level', 1}
              ,{'mapper', fun normalize_count_results/2}
              ,{'databases', [?KZ_MEDIA_DB]}
              ],
    crossbar_view:load(Context, ?CB_LIST_BY_PROMPT, Options);
load_available_prompts(Context, _AccountId) ->
    Options = [{'group_level', 1}
              ,{'mapper', fun normalize_count_results/2}
              ],
    crossbar_view:load(Context, ?CB_LIST_BY_PROMPT, Options).
%% @doc Lists the media docs backing a given prompt id. 'include_docs' is
%% needed because the mapper inspects each doc's attachments; 'reduce' is
%% disabled to get the raw rows. Account-less requests use the system db.
-spec load_media_docs_by_prompt(cb_context:context(), kz_term:ne_binary()) -> cb_context:context().
load_media_docs_by_prompt(Context, PromptId) ->
    lager:debug("loading media files in prompt ~p", [PromptId]),
    load_media_docs_by_prompt(Context, PromptId, cb_context:account_id(Context)).
-spec load_media_docs_by_prompt(cb_context:context(), kz_term:ne_binary(), kz_term:api_binary()) -> cb_context:context().
load_media_docs_by_prompt(Context, PromptId, 'undefined') ->
    Options = [{'startkey', [PromptId]}
              ,{'endkey', [PromptId, crossbar_view:high_value_key()]}
              ,{'reduce', 'false'}
              ,{'mapper', fun normalize_prompt_results/2}
              ,{'databases', [?KZ_MEDIA_DB]}
              ,'include_docs'
              ],
    crossbar_view:load(Context, ?CB_LIST_BY_PROMPT, Options);
load_media_docs_by_prompt(Context, PromptId, _AccountId) ->
    Options = [{'startkey', [PromptId]}
              ,{'endkey', [PromptId, crossbar_view:high_value_key()]}
              ,{'reduce', 'false'}
              ,{'mapper', fun normalize_prompt_results/2}
              ,'include_docs'
              ],
    crossbar_view:load(Context, ?CB_LIST_BY_PROMPT, Options).
%% @doc View mapper for prompt listings: reduces each row to its doc id
%% plus a flag saying whether the underlying doc (from 'include_docs')
%% actually carries an attachment.
%% Spec fixed: the accumulator holds kz_json objects built with
%% kz_json:from_list/1, not bare binaries.
-spec normalize_prompt_results(kz_json:object(), kz_json:objects()) -> kz_json:objects().
normalize_prompt_results(JObj, Acc) ->
    HasAttachments =
        case kz_doc:attachments(kz_json:get_value(<<"doc">>, JObj)) of
            'undefined' -> 'false';
            As -> not kz_json:is_empty(As)
        end,
    [kz_json:from_list(
       [{<<"id">>, kz_doc:id(JObj)}
       ,{<<"has_attachments">>, HasAttachments}
       ])
     | Acc
    ].
%% @doc Loads a media document's metadata, switching to the system media
%% database when the request carries no account.
-spec load_media_meta(cb_context:context(), kz_term:ne_binary()) ->
          cb_context:context().
load_media_meta(Context, MediaId) ->
    load_media_meta(Context, MediaId, cb_context:account_id(Context)).
-spec load_media_meta(cb_context:context(), kz_term:ne_binary(), kz_term:api_binary()) ->
          cb_context:context().
load_media_meta(Context, MediaId, 'undefined') ->
    crossbar_doc:load(MediaId, cb_context:set_db_name(Context, ?KZ_MEDIA_DB), ?TYPE_CHECK_OPTION(kzd_media:type()));
load_media_meta(Context, MediaId, _AccountId) ->
    crossbar_doc:load(MediaId, Context, ?TYPE_CHECK_OPTION(kzd_media:type())).
%% @doc Validates the request payload against the "media" JSON schema;
%% on success the doc is prepared for create ('undefined') or merge.
-spec validate_request(kz_term:api_binary(), cb_context:context()) -> cb_context:context().
validate_request(MediaId, Context) ->
    OnSuccess = fun(C) -> on_successful_validation(MediaId, C) end,
    cb_context:validate_request_data(<<"media">>, Context, OnSuccess).
%% @doc Post-schema hook. For a new doc ('undefined'), stamps the private
%% type and any prompt-derived fields; for an existing doc, merges the
%% request onto the stored doc and checks prompt-id immutability.
-spec on_successful_validation(kz_term:api_binary(), cb_context:context()) -> cb_context:context().
on_successful_validation('undefined', Context) ->
    Doc = cb_context:doc(Context),
    Props = [{<<"pvt_type">>, kzd_media:type()}
             | maybe_add_prompt_fields(Context)
            ],
    cb_context:set_doc(Context, kz_json:set_values(Props, Doc));
on_successful_validation(MediaId, Context) ->
    Context1 = crossbar_doc:load_merge(MediaId, Context, ?TYPE_CHECK_OPTION(kzd_media:type())),
    maybe_validate_prompt(MediaId, Context1, cb_context:resp_status(Context1)).
%% @doc If the merged doc carries a prompt_id, verify it is still
%% consistent with the doc id; docs without a prompt_id (or failed loads)
%% pass through unchanged.
-spec maybe_validate_prompt(kz_term:ne_binary(), cb_context:context(), crossbar_status()) ->
          cb_context:context().
maybe_validate_prompt(MediaId, Context, 'success') ->
    case kzd_media:prompt_id(cb_context:doc(Context)) of
        'undefined' -> Context;
        PromptId ->
            validate_prompt(MediaId, Context, PromptId)
    end;
maybe_validate_prompt(_MediaId, Context, _Status) ->
    Context.
%% @doc Rejects changes to the prompt_id of an existing prompt doc: the
%% doc id is derived from prompt_id + language, so the recomputed id must
%% equal the doc's actual id.
-spec validate_prompt(kz_term:ne_binary(), cb_context:context(), kz_term:ne_binary()) ->
          cb_context:context().
validate_prompt(MediaId, Context, PromptId) ->
    Language = kz_term:to_lower_binary(kzd_media:language(cb_context:doc(Context))),
    case kz_media_util:prompt_id(PromptId, Language) of
        MediaId -> Context;
        _OtherId ->
            lager:info("attempt to change prompt id '~s' is not allowed on existing media doc '~s'"
                      ,[PromptId, MediaId]
                      ),
            cb_context:add_validation_error(<<"prompt_id">>
                                           ,<<"invalid">>
                                           ,kz_json:from_list(
                                              [{<<"message">>, <<"Changing the prompt_id on an existing prompt is not allowed">>}
                                              ,{<<"cause">>, PromptId}
                                              ])
                                           ,Context
                                           )
    end.
%% @doc For a new doc that declares a prompt_id, derive the document id,
%% language and default name from the prompt id + language; non-prompt
%% docs contribute no extra properties.
-spec maybe_add_prompt_fields(cb_context:context()) -> kz_term:proplist().
maybe_add_prompt_fields(Context) ->
    JObj = cb_context:doc(Context),
    case kzd_media:prompt_id(JObj) of
        'undefined' -> [];
        PromptId ->
            Language = kz_term:to_lower_binary(kzd_media:language(JObj, kz_media_util:default_prompt_language())),
            ID = kz_media_util:prompt_id(PromptId, Language),
            lager:debug("creating properties for prompt ~s (~s)", [PromptId, Language]),
            [{<<"_id">>, ID}
            ,{<<"language">>, Language}
            ,{<<"name">>, kz_json:get_value(<<"name">>, JObj, ID)}
            ]
    end.
%% @doc Streams the media doc's first attachment, setting download headers
%% (content-disposition filename + the stored content type). A doc with no
%% attachments yields a bad-identifier response.
-spec load_media_binary(cb_context:context(), path_token()) -> cb_context:context().
load_media_binary(Context, MediaId) ->
    Context1 = load_media_meta(Context, MediaId),
    case cb_context:resp_status(Context1) of
        'success' ->
            case kz_doc:attachment_names(cb_context:doc(Context1)) of
                [] -> crossbar_util:response_bad_identifier(MediaId, Context);
                [Attachment|_] ->
                    LoadedContext = crossbar_doc:load_attachment(cb_context:doc(Context1)
                                                                ,Attachment
                                                                ,?TYPE_CHECK_OPTION(kzd_media:type())
                                                                ,Context1
                                                                ),
                    cb_context:add_resp_headers(LoadedContext
                                               ,#{<<"content-disposition">> => <<"attachment; filename=", Attachment/binary>>
                                                 ,<<"content-type">> => kz_doc:attachment_content_type(cb_context:doc(Context1), Attachment)
                                                 }
                                               )
            end;
        _Status -> Context1
    end.
%% @doc Replaces the doc's binary contents: any existing attachments are
%% removed first, then each uploaded file is saved in turn.
-spec update_media_binary(cb_context:context(), path_token()) ->
          cb_context:context().
update_media_binary(Context, MediaId) ->
    update_media_binary(crossbar_util:maybe_remove_attachments(Context)
                       ,MediaId
                       ,cb_context:req_files(Context)
                       ).
%% @doc Saves each uploaded file as an attachment on MediaId, recursing on
%% success and stopping at the first failed save.
-spec update_media_binary(cb_context:context(), path_token(), req_files()) ->
          cb_context:context().
update_media_binary(Context, _MediaId, []) -> Context;
update_media_binary(Context, MediaId, [{Filename, FileObj}|Files]) ->
    Contents = kz_json:get_value(<<"contents">>, FileObj),
    CT = kz_json:get_value([<<"headers">>, <<"content_type">>], FileObj),
    lager:debug("file content type: ~s", [CT]),
    Opts = [{'content_type', CT} | ?TYPE_CHECK_OPTION(kzd_media:type())],
    AttachmentName = cb_modules_util:attachment_name(Filename, CT),
    Context1 = crossbar_doc:save_attachment(MediaId
                                           ,AttachmentName
                                           ,Contents
                                           ,Context
                                           ,Opts
                                           ),
    case cb_context:resp_status(Context1) of
        'success' -> update_media_binary(Context1, MediaId, Files);
        _Failure ->
            lager:info("failed to save attachment ~s to ~s", [AttachmentName, MediaId]),
            Context1
    end.
%% @doc Removes the first attachment from a media doc; account-less
%% requests operate on the system media database. A doc without
%% attachments yields a bad-identifier response.
-spec delete_media_binary(path_token(), cb_context:context(), kz_term:api_binary()) -> cb_context:context().
delete_media_binary(MediaId, Context, 'undefined') ->
    delete_media_binary(MediaId, cb_context:set_db_name(Context, ?KZ_MEDIA_DB), <<"ignore">>);
delete_media_binary(MediaId, Context, _AccountId) ->
    Context1 = crossbar_doc:load(MediaId, Context, ?TYPE_CHECK_OPTION(kzd_media:type())),
    case cb_context:resp_status(Context1) of
        'success' ->
            case kz_doc:attachment_names(cb_context:doc(Context1)) of
                [] -> crossbar_util:response_bad_identifier(MediaId, Context);
                [AttachmentId|_] ->
                    %% NOTE(review): deletes against the original Context,
                    %% not the loaded Context1 - confirm this is intended
                    crossbar_doc:delete_attachment(MediaId, AttachmentId, Context)
            end;
        _Status -> Context1
    end.
%% @doc True when the doc's media_source marks it as text-to-speech.
-spec is_tts(kz_json:object()) -> boolean().
is_tts(JObj) ->
    case kz_json:get_ne_binary_value(<<"media_source">>, JObj) of
        <<"tts">> -> 'true';
        _Other -> 'false'
    end.
%% @doc True when the TTS text or voice differs from the values recorded
%% in the private pvt_previous_* keys at the last generation.
-spec is_tts_changed(kz_json:object()) -> boolean().
is_tts_changed(JObj) ->
    Current = {kz_json:get_ne_binary_value([<<"tts">>, <<"text">>], JObj)
              ,kz_json:get_ne_binary_value([<<"tts">>, <<"voice">>], JObj)
              },
    Previous = {kz_json:get_value(<<"pvt_previous_tts">>, JObj)
               ,kz_json:get_ne_binary_value(<<"pvt_previous_voice">>, JObj)
               },
    Current =/= Previous.
|
0da2bf9b54f91c573ce2b6416250f5e29a5c41d3f6aa8087ba2017444960ab69 | cjay/vulkyrie | Instance.hs | {-# LANGUAGE Strict #-}
module Vulkyrie.Vulkan.Instance
( createVulkanInstance
) where
import Data.Text (pack)
import qualified Data.Text as Text
import Foreign.C.String (peekCString)
import Graphics.Vulkan
import Graphics.Vulkan.Core_1_0
import Graphics.Vulkan.Marshal.Create
import Vulkyrie.Program
import Vulkyrie.Program.Foreign
import Vulkyrie.Resource
-- | Run an action with vulkan instance
createVulkanInstance :: String -- ^ application name
                     -> String -- ^ engine name
                     -> [CString]
                        -- ^ required extensions, passed as a list of
                        -- CStrings because they are available either via
                        -- vulkan-api pattern synonyms, or from GLFW
                     -> [String]
                        -- ^ required layer names
                     -> MetaResource VkInstance
createVulkanInstance progName engineName extensions layers =
  metaResource destroyVulkanInstance $ do
    extStrings <- liftIO $ mapM (fmap pack . peekCString) extensions
    logDebug $ Text.unlines
      $ "Enabling instance extensions: " : map (" " <>) extStrings
    logDebug $ Text.unlines
      $ "Enabling instance layers: " : map ((" " <>) . pack) layers
    withVkPtr iCreateInfo $ \iciPtr ->
      allocaPeek $ runVk . vkCreateInstance iciPtr VK_NULL
  where
    appInfo = createVk @VkApplicationInfo
      $ set @"sType" VK_STRUCTURE_TYPE_APPLICATION_INFO
      &* set @"pNext" VK_NULL
      &* setStrRef @"pApplicationName" progName
      &* set @"applicationVersion" (_VK_MAKE_VERSION 1 0 0)
      &* setStrRef @"pEngineName" engineName
      &* set @"engineVersion" (_VK_MAKE_VERSION 1 0 0)
      &* set @"apiVersion" (_VK_MAKE_VERSION 1 0 68)
    iCreateInfo = createVk @VkInstanceCreateInfo
      $ set @"sType" VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO
      &* set @"pNext" VK_NULL
      &* setVkRef @"pApplicationInfo" appInfo
      &* set @"enabledLayerCount" (fromIntegral $ length layers)
      &* setStrListRef @"ppEnabledLayerNames" layers
      &* set @"enabledExtensionCount" (fromIntegral $ length extensions)
      &* setListRef @"ppEnabledExtensionNames" extensions
-- | Destroys the given Vulkan instance and logs the teardown.
destroyVulkanInstance :: VkInstance -> Prog r ()
destroyVulkanInstance vkInstance =
  liftIO (vkDestroyInstance vkInstance VK_NULL) >> logDebug "Destroyed vkInstance."
| null | https://raw.githubusercontent.com/cjay/vulkyrie/7ec31181bd456c863da96743c216ff6610d3cd00/src/Vulkyrie/Vulkan/Instance.hs | haskell | # LANGUAGE Strict #
^ application name
^ engine name
^ required extensions
^ required layer names | module Vulkyrie.Vulkan.Instance
( createVulkanInstance
) where
import Data.Text (pack)
import qualified Data.Text as Text
import Foreign.C.String (peekCString)
import Graphics.Vulkan
import Graphics.Vulkan.Core_1_0
import Graphics.Vulkan.Marshal.Create
import Vulkyrie.Program
import Vulkyrie.Program.Foreign
import Vulkyrie.Resource
| Run an action with vulkan instance
-> [CString]
passed as a list of , because they are
available either via vulkan - api pattern synonyms ,
or from GLFW
-> [String]
-> MetaResource VkInstance
createVulkanInstance progName engineName extensions layers =
metaResource destroyVulkanInstance $ do
extStrings <- liftIO $ mapM (fmap pack . peekCString) extensions
logDebug $ Text.unlines
$ "Enabling instance extensions: " : map (" " <>) extStrings
logDebug $ Text.unlines
$ "Enabling instance layers: " : map ((" " <>) . pack) layers
withVkPtr iCreateInfo $ \iciPtr ->
allocaPeek $ runVk . vkCreateInstance iciPtr VK_NULL
where
appInfo = createVk @VkApplicationInfo
$ set @"sType" VK_STRUCTURE_TYPE_APPLICATION_INFO
&* set @"pNext" VK_NULL
&* setStrRef @"pApplicationName" progName
&* set @"applicationVersion" (_VK_MAKE_VERSION 1 0 0)
&* setStrRef @"pEngineName" engineName
&* set @"engineVersion" (_VK_MAKE_VERSION 1 0 0)
&* set @"apiVersion" (_VK_MAKE_VERSION 1 0 68)
iCreateInfo = createVk @VkInstanceCreateInfo
$ set @"sType" VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO
&* set @"pNext" VK_NULL
&* setVkRef @"pApplicationInfo" appInfo
&* set @"enabledLayerCount" (fromIntegral $ length layers)
&* setStrListRef @"ppEnabledLayerNames" layers
&* set @"enabledExtensionCount" (fromIntegral $ length extensions)
&* setListRef @"ppEnabledExtensionNames" extensions
destroyVulkanInstance :: VkInstance -> Prog r ()
destroyVulkanInstance vkInstance
= liftIO (vkDestroyInstance vkInstance VK_NULL) >> (logDebug "Destroyed vkInstance.")
|
74d789f963fe7ede8d1648efeba7e861986edcf1238bb87d2ea1e4efa1b28ec1 | simonmichael/shelltestrunner | Print.hs | -- Print tests in any of the supported formats.
-- Useful for debugging and for migrating between formats.
-- Issues:
-- converting v1 -> v2/v3
a > > > = 0 often gets converted to a > > > 2 // or > 2 // , when > = or nothing would be preferred ( but semantically less accurate , therefore risky to choose automatically )
-- converting v3 -> v3
-- loses comments at the top of the file, even above an explicit < delimiter
-- may lose other data
module Print
where
import Safe (lastMay)
import Import
import Types
-- | Print a shell test. See CLI documentation for details.
-- For v3 (the preferred, lightweight format), avoid printing most unnecessary things
-- (stdout delimiter, 0 exit status value).
printShellTest
:: String -- ^ Shelltest format. Value of option @--print[=FORMAT]@.
-> ShellTest -- ^ Test to print
-> IO ()
printShellTest format ShellTest{command=c,stdin=i,comments=comments,trailingComments=trailingComments,
stdoutExpected=o_expected,stderrExpected=e_expected,exitCodeExpected=x_expected}
= do
case format of
"v1" -> do
printComments comments
printCommand "" c
printStdin "<<<" i
printStdouterr False ">>>" o_expected
printStdouterr False ">>>2" e_expected
printExitStatus True True ">>>=" x_expected
printComments trailingComments
"v2" -> do
printComments comments
printStdin "<<<" i
printCommand "$$$ " c
printStdouterr True ">>>" o_expected
printStdouterr True ">>>2" e_expected
printExitStatus trailingblanklines True ">>>=" x_expected
printComments trailingComments
"v3" -> do
printComments comments
printStdin "<" i
printCommand "$ " c
printStdouterr True ">" o_expected
printStdouterr True ">2" e_expected
printExitStatus trailingblanklines False ">=" x_expected
printComments trailingComments
_ -> fail $ "Unsupported --print format: " ++ format
where
trailingblanklines = case (o_expected, e_expected) of
(Just (Lines _ o), Just (Lines _ e)) -> hasblanks $ if null e then o else e
_ -> False
where hasblanks s = maybe False null $ lastMay $ lines s
printComments :: [String] -> IO ()
printComments = mapM_ putStrLn
printStdin :: String -> Maybe String -> IO ()
printStdin _ (Just "") = return ()
printStdin _ Nothing = return ()
printStdin prefix (Just s) = printf "%s\n%s" prefix s
printCommand :: String -> TestCommand -> IO ()
printCommand prefix (ReplaceableCommand s) = printf "%s%s\n" prefix s
printCommand prefix (FixedCommand s) = printf "%s %s\n" prefix s
-- Print an expected stdout or stderr test, prefixed with the given delimiter.
If no expected value is specified , print nothing if first argument is true
( for format 1 , which ignores unspecified out / err ) , otherwise print a dummy test .
printStdouterr :: Bool -> String -> Maybe Matcher -> IO ()
printStdouterr alwaystest prefix Nothing = when alwaystest $ printf "%s //\n" prefix
printStdouterr _ _ (Just (Lines _ "")) = return ()
printStdouterr _ _ (Just (Numeric _)) = fail "FATAL: Cannot handle Matcher (Numeric) for stdout/stderr."
printStdouterr _ _ (Just (NegativeNumeric _)) = fail "FATAL: Cannot handle Matcher (NegativeNumeric) for stdout/stderr."
printStdouterr _ prefix (Just (Lines _ s)) | prefix==">" = printf "%s" s -- omit v3's > delimiter, really no need for it
printStdouterr _ prefix (Just (Lines _ s)) = printf "%s\n%s" prefix s
printStdouterr _ prefix (Just regex) = printf "%s %s\n" prefix (show regex)
-- | Print an expected exit status clause, prefixed with the given delimiter.
If zero is expected :
if the first argument is not true , nothing will be printed ;
otherwise if the second argument is not true , only the delimiter will be printed .
printExitStatus :: Bool -> Bool -> String -> Matcher -> IO ()
printExitStatus _ _ _ (Lines _ _) = fail "FATAL: Cannot handle Matcher (Lines) for exit status."
printExitStatus always showzero prefix (Numeric "0") = when always $ printf "%s %s\n" prefix (if showzero then "0" else "")
printExitStatus _ _ prefix s = printf "%s %s\n" prefix (show s)
| null | https://raw.githubusercontent.com/simonmichael/shelltestrunner/10084f69dc0e39588535dce9c39211cc198414b3/src/Print.hs | haskell | Print tests in any of the supported formats.
Useful for debugging and for migrating between formats.
Issues:
converting v1 -> v2/v3
converting v3 -> v3
loses comments at the top of the file, even above an explicit < delimiter
may lose other data
| Print a shell test. See CLI documentation for details.
For v3 (the preferred, lightweight format), avoid printing most unnecessary things
(stdout delimiter, 0 exit status value).
^ Shelltest format. Value of option @--print[=FORMAT]@.
^ Test to print
Print an expected stdout or stderr test, prefixed with the given delimiter.
omit v3's > delimiter, really no need for it
| Print an expected exit status clause, prefixed with the given delimiter. | a > > > = 0 often gets converted to a > > > 2 // or > 2 // , when > = or nothing would be preferred ( but semantically less accurate , therefore risky to choose automatically )
module Print
where
import Safe (lastMay)
import Import
import Types
printShellTest
-> IO ()
printShellTest format ShellTest{command=c,stdin=i,comments=comments,trailingComments=trailingComments,
stdoutExpected=o_expected,stderrExpected=e_expected,exitCodeExpected=x_expected}
= do
case format of
"v1" -> do
printComments comments
printCommand "" c
printStdin "<<<" i
printStdouterr False ">>>" o_expected
printStdouterr False ">>>2" e_expected
printExitStatus True True ">>>=" x_expected
printComments trailingComments
"v2" -> do
printComments comments
printStdin "<<<" i
printCommand "$$$ " c
printStdouterr True ">>>" o_expected
printStdouterr True ">>>2" e_expected
printExitStatus trailingblanklines True ">>>=" x_expected
printComments trailingComments
"v3" -> do
printComments comments
printStdin "<" i
printCommand "$ " c
printStdouterr True ">" o_expected
printStdouterr True ">2" e_expected
printExitStatus trailingblanklines False ">=" x_expected
printComments trailingComments
_ -> fail $ "Unsupported --print format: " ++ format
where
trailingblanklines = case (o_expected, e_expected) of
(Just (Lines _ o), Just (Lines _ e)) -> hasblanks $ if null e then o else e
_ -> False
where hasblanks s = maybe False null $ lastMay $ lines s
printComments :: [String] -> IO ()
printComments = mapM_ putStrLn
printStdin :: String -> Maybe String -> IO ()
printStdin _ (Just "") = return ()
printStdin _ Nothing = return ()
printStdin prefix (Just s) = printf "%s\n%s" prefix s
printCommand :: String -> TestCommand -> IO ()
printCommand prefix (ReplaceableCommand s) = printf "%s%s\n" prefix s
printCommand prefix (FixedCommand s) = printf "%s %s\n" prefix s
If no expected value is specified , print nothing if first argument is true
( for format 1 , which ignores unspecified out / err ) , otherwise print a dummy test .
printStdouterr :: Bool -> String -> Maybe Matcher -> IO ()
printStdouterr alwaystest prefix Nothing = when alwaystest $ printf "%s //\n" prefix
printStdouterr _ _ (Just (Lines _ "")) = return ()
printStdouterr _ _ (Just (Numeric _)) = fail "FATAL: Cannot handle Matcher (Numeric) for stdout/stderr."
printStdouterr _ _ (Just (NegativeNumeric _)) = fail "FATAL: Cannot handle Matcher (NegativeNumeric) for stdout/stderr."
printStdouterr _ prefix (Just (Lines _ s)) = printf "%s\n%s" prefix s
printStdouterr _ prefix (Just regex) = printf "%s %s\n" prefix (show regex)
If zero is expected :
if the first argument is not true , nothing will be printed ;
otherwise if the second argument is not true , only the delimiter will be printed .
printExitStatus :: Bool -> Bool -> String -> Matcher -> IO ()
printExitStatus _ _ _ (Lines _ _) = fail "FATAL: Cannot handle Matcher (Lines) for exit status."
printExitStatus always showzero prefix (Numeric "0") = when always $ printf "%s %s\n" prefix (if showzero then "0" else "")
printExitStatus _ _ prefix s = printf "%s %s\n" prefix (show s)
|
ed6e1ef8bc2a6f2fb5d5322054121f5db647585151839cd474038bc1d48864fd | jyh/metaprl | top_conversionals.ml | doc <:doc<
@spelling{th}
@module[Top_conversionals]
@emph{Conversions} and @emph{conversionals} are analogs of tactics and tacticals
(Section~@refmodule[Top_tacticals])
for rewriting. Conversions are used extensively in Computational Type Theory
(Section @refmodule[Itt_theory]) to express and
apply computational equivalences. The @tt{Top_conversionals}
module defines the basic conversionals provided by the @MetaPRL
prover.
Each @bf{rewrite} definition in a module defines a conversion.
For example, the definition of beta reduction in the Type
Theory (Section @refmodule[Itt_dfun]), is defined as follows:
@begin[center]
@bf{rewrite} unfold_beta : $(@lambda x. b[x])@space a @longleftrightarrow b[a]$
@end[center]
This declaration defines a conversion called @tt[unfold_beta] that can
be applied with the function @tt[rwh], which searches for the outermost
valid applications of the rewrite. Here is an example proof step:
$$
@rulebox{rwh; @tt[unfold_beta]@space 0;
<<sequent [dummy_arg] { <H> >- <:doc< ((@lambda v. v + 1)@space 2) = 3 @in @int>>}>>;
<<sequent [dummy_arg] { <H> >- <:doc< 2 + 1 = 3 @in @int>> }>> }
$$
@docoff
----------------------------------------------------------------
@begin[license]
This file is part of MetaPRL, a modular, higher order
logical framework that provides a logical programming
environment for OCaml and other languages.
See the file doc/htmlman/default.html or visit /
for more information.
Copyright (C) 1998-2006 MetaPRL Group, Cornell University and
California Institute of Technology
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
Author: Jason Hickey @email{}
Modified by: Aleksey Nogin @email{}
@end[license]
>>
doc <:doc<
@parents
>>
extends Perv
extends Mptop
doc docoff
open Lm_debug
open Lm_printf
open Rewrite_sig
open Refiner.Refiner
open Refiner.Refiner.TermType
open Refiner.Refiner.Term
open Refiner.Refiner.TermAddr
open Refiner.Refiner.TermMan
open Refiner.Refiner.RefineError
open Refiner.Refiner.Rewrite
open Term_match_table
open Tactic_type.Tacticals
open Tactic_type.Tactic
open Tactic_type.Conversionals
open Tactic_type.Sequent
open Options_boot
open Top_options
(*
* Debugging.
*)
let _debug_conv =
create_debug (**)
{ debug_name = "conv";
debug_description = "display conversion operation";
debug_value = false
}
let debug_reduce =
create_debug (**)
{ debug_name = "reduce";
debug_description = "display reductions";
debug_value = false
}
doc <:doc<
@modsection{Conversion application}
@begin[description]
@item{@conv[rw];
Conversions are not tactics: they have a different type @tt[conv]
and they are applied differently. The basic method for applying
a conversion is to use @tt[rw], which converts a conversion to
a tactic applied to a specific clause in a sequent (these functions
are defined only for a sequent calculus). The (@tt[rw] @it[conv] $i$)
tactic applies the conversion @it[conv] to clause $i$ in
the current goal sequent.}
@item{@conv[rwc];
Conversions may be applied also to assumptions.
The (@tt[rwc] @it[conv] $a$ $c$) tactic applies the
conversion @it[conv] to the $c$-th clause in the $a$-th assumption.}
@item{@conv[rwAll] @conv[rwcAll] @conv[rwAllAll];
The (@tt[rwAll] @it[conv]) tactic applies the
conversion @it[conv] to the whole goal sequent.
The (@tt[rwcAll] @it[conv] $a$) tactic applies the
conversion @it[conv] to the whole $a$-th assumption.
The (@tt[rwAllAll] @it[conv]) tactic applies the
conversion @it[conv] to all assumptions and to the goal sequent.}
@end[description]
@docoff
>>
let rw = Tactic_type.Conversionals.rw
let rwc = Tactic_type.Conversionals.rwc
let rwAll = Tactic_type.Conversionals.rwAll
let rwcAll = Tactic_type.Conversionals.rwcAll
let rwAllAll = Tactic_type.Conversionals.rwAllAll
let rwh = Tactic_type.Conversionals.rwh
let rwch = Tactic_type.Conversionals.rwch
let rwhAll = Tactic_type.Conversionals.rwhAll
let rwchAll = Tactic_type.Conversionals.rwchAll
let rwhAllAll = Tactic_type.Conversionals.rwhAllAll
let rwa = Tactic_type.Conversionals.rwa
let rwca = Tactic_type.Conversionals.rwca
let rwaAll = Tactic_type.Conversionals.rwaAll
let rwcaAll = Tactic_type.Conversionals.rwcaAll
let rwaAllAll = Tactic_type.Conversionals.rwaAllAll
doc <:doc<
@modsection{Primitive conversions}
@begin[description]
@item{@conv[idC], @conv[failC];
The @tt[idC] conversion is the identity conversion: no rewriting
is performed. The @tt[failC] conversion always fails.}
@end[description]
@docoff
>>
let idC = Tactic_type.Conversionals.idC
let failC = Tactic_type.Conversionals.failC
let failWithC = Tactic_type.Conversionals.failWithC
let forceC = Tactic_type.Conversionals.forceC
doc <:doc<
@modsection{Conversionals}
@begin[description]
@item{@conv[thenC], @conv[orelseC];
Conversionals can be combined in the same manner as tactics.
The (@tt{$c_1$ thenC $c_2$}) conversion first applies conversion
$c_1$, and then applies $c_2$ to the result term. The (@tt{$c_1$ orelseC $c_2$})
conversion first applies $c_1$@; if $c_1$ fails (because the conversion does not
match the term being rewritten, or because of a call to @tt[failC]), $c_2$ is
applied instead.}
@item{@conv[tryC], @conv[firstC];
There are several variations on @tt[orelseC]. The (@tt[tryC] $c$) conversion
is equivalent to (@tt{$c$ orelseC idC}). The @tt[firstC] conversion takes a list of
conversions to try in order until the first one succeeds. The conversion (@tt[firstC]
$[c_1; @cdots; c_n]$) is equivalent to @tt{$c_1$ orelseC $@cdots$ orelseC $c_n$}.}
@item{@conv[progressC];
The (@tt[progressTC] $@i[conv]$) conversion applies its argument and fails
if either $@i[conv]$ fails, or $@i[conv]$ convert the term to the alpha-equal term.}
@item{@conv[untilFailC];
The (@tt[untilFailC] $c$) conversion applies conversion $c$ repeatedly
until it fails. It catches all exception and never fails itself.}
@item{@conv[repeatC];
The (@tt[repeatC] $c$) conversion applies conversion $c$ repeatedly
until it fails, or until it fails to make progress.}
@item{@conv[ifEqualC];
The (@tt[ifEqualC] $t$ $c1$ $c2$) conversion applies conversion $c1$ if the term its applied to is alpha equal to $t$ and
$c2$ otherwise.}
@item{@conv[replaceUsingC];
The (@tt[replaceUsingC] $t$ $c$) conversion applies conversion $c$ to the term $t$ and fails on any other term.}
@end[description]
@docoff
>>
let prefix_thenC = Tactic_type.Conversionals.prefix_thenC
let prefix_orelseC = Tactic_type.Conversionals.prefix_orelseC
let tryC = Tactic_type.Conversionals.tryC
let firstC = Tactic_type.Conversionals.firstC
let untilFailC = Tactic_type.Conversionals.untilFailC
let repeatC = Tactic_type.Conversionals.repeatC
let repeatForC = Tactic_type.Conversionals.repeatForC
let ifEqualC = Tactic_type.Conversionals.ifEqualC
let progressC = Tactic_type.Conversionals.progressC
let replaceUsingC = Tactic_type.Conversionals.replaceUsingC
let allSubThenC = Tactic_type.Conversionals.allSubThenC
let prefix_thenTC = Tactic_type.Conversionals.prefix_thenTC
infix thenC
infix orelseC
infix thenTC
doc <:doc<
@modsection{Addressing and search}
Generally, the terms to be rewritten do not occur at the outermost
level of a clause. The following conversionals recursively search
through the subterms of a clause for applicable rewrites.
@begin[description]
@item{@conv[someSubC], @conv[allSubC];
The most general of these is the (@tt[someSubC] $c$) conversion,
which tries applying conversion $c$ to all of the immediate subterms of
the clause. It succeeds if $c$ succeeds on any of the subterms@; it
fails otherwise. The conversion @tt[allSubC] requires success on
@emph{all} of the immediate subterms.}
@item{@conv[allSubThenC];
@tt[allSubThenC] $c1$ $c2$ tries to apply $c1$ to every immediate subterm.
If it succeed in at least one case then applies $c2$, otherwise fails .}
@item{@conv[addrC];
Subterms can also be addressed explicitly with the (@tt{addrC @it[addr] $c$})
conversion. The address is an integer list
that describes the @emph{path} leading to the term to be rewritten. For
example, the address $[ ]$ is the identity address, $[0]$ is its leftmost
subterm, $[0; 1]$ is the second subterm of the first subterm, @i[etc].
However addresses are somewhat fragile, and correct addresses can be difficult
to discover. For this reason, the use of @tt[addrC] is discouraged.}
@item{@conv[higherC];
The (@tt[higherC] $c$) conversion searches for the outermost
occurrences of subterms in the clause where conversion $c$
applies. Its definition uses @tt[someSubC].
@begin[center]
@code{let rec higherC c = c orelseC (someSubC (higherC c))}
@end[center]}
@item{@conv[lowerC], @conv[sweepDnC];
The @tt[lowerC] conversional searches for the @emph{innermost}
rewrite occurrences. The (@tt[sweepDnC] $c$) conversion applies
$c$ from the outermost to the innermost subterms.
@begin[center]
@code{let rec sweepDnC c = (tryC c) andalsoC (someSubC (sweepDnC c))}
@end[center]}
@item{@conv[sweepUpC];
The @tt[sweepUpC] conversion works from the innermost to outermost subterms.
Note that these conversions never fail@; however they may fail to
make progress if the conversion $c$ never succeeds.}
@item{@conv[findThenC];
The @tt[findThenC] conversion find the outermost term that matches a predicate
and applies a conversion at that point.}
@item{@conv[applyAllC];
The @tt[applyAllC] conversion takes a list of conversions
and applies them to all subterms possible from outermost to
innermost (it applies at most one conversion from the list at most once
to each subterm).
@begin[center]
@code{let applyAllC convs = sweepUpC (firstC convs)}
@end[center]}
@item{@conv[rwh], @conv[rwch], @conv[rwhAll], @conv[rwchAll], @conv[rwhAllAll];
For convenience, the @tt[rwh], @tt[rwch], @tt[rwhAll], @tt[rwchAll],
@tt[rwhAllAll] functions automatically
apply the @tt[higherC] conversion. For example, the tactic (@tt{rwh $conv$ $i$})
is equivalent to (@tt{rw (higherC $conv$) $i$}).}
@item{@conv[rwa], @conv[rwca], @conv[rwaAll], @conv[rwcaAll], @conv[rwaAllAll];
The @tt[rwa], @tt[rwca], @tt[rwaAll], @tt[rwcaAll],
@tt[rwaAllAll] functions take a list of conversions and
apply the @tt[applyAllC] conversion. For example, the tactic (@tt{rwa $convs$ $i$})
is equivalent to (@tt{rw (applyAllC $convs$) $i$}).}
@end[description]
@docoff
>>
let someSubC = Tactic_type.Conversionals.someSubC
let allSubC = Tactic_type.Conversionals.allSubC
let higherC = Tactic_type.Conversionals.higherC
let lowerC = Tactic_type.Conversionals.lowerC
let sweepUpC = Tactic_type.Conversionals.sweepUpC
let sweepDnC = Tactic_type.Conversionals.sweepDnC
let applyAllC = Tactic_type.Conversionals.applyAllC
let findThenC = Tactic_type.Conversionals.findThenC
doc <:doc<
@modsection{Conversion reversal}
Computational rewrites define a congruence, and all equivalence relations
in the congruence closure hold, including reversing the application of
the rewrite. However, reversed rewrites are often incompletely specified.
@begin[description]
@item{@conv[foldC], @conv[cutC];
The (@tt[foldC] $t$ $c$) takes a term $t$ and a conversion that
rewrites the term in the @emph{forward} direction, and generates
reversed conversion. For example, here is a reverse application of
the beta rewrite.
$$
@rulebox{rwh; (@tt[foldC]@space (@lambda v. v + 1)@space 2@space @tt[unfold_beta])@space 0;
<<sequent [dummy_arg] { <H> >- <:doc<2 + 1 = 3 @in @int>>}>>;
<<sequent [dummy_arg] { <H> >- <:doc< ((@lambda v. v + 1)@space 2) = 3 @in @int>> }>>}
$$
@noindent
The @tt[cutC] conversion is used to replace a term and generate a
rewrite obligation.
$$
@rulebox{rw; (@tt[addrC]@space{} [1]@space (@tt[cutC]@space 3))@space 0;
<<sequent [dummy_arg] { <H> >- <:doc< 3 = 3 @in @int>> }>> @cr
<<sequent [dummy_arg] { <H> >- <:doc< ((@lambda v. v + 1)@space 2) @longleftrightarrow 3>>}>>;
<<sequent [dummy_arg] { <H> >- <:doc< ((@lambda v. v + 1)@space 2) = 3 @in @int>>}>>}
$$}
@end[description]
@docoff
>>
let addrC = Tactic_type.Conversionals.addrC
let foldC = Tactic_type.Conversionals.foldC
let makeFoldC = Tactic_type.Conversionals.makeFoldC
let cutC = Tactic_type.Conversionals.cutC
(************************************************************************
* REDUCTION RESOURCE *
************************************************************************)
doc <:doc<
@resources
@bf{The @Comment!resource[reduce] resource}
The @tt{reduce} resource provides a generic method for
defining @emph{evaluation}. The @conv[reduceTopC] conversion
can be used to apply this evaluator.
The @conv[reduceC] conversion repeatedly applies @tt[reduceTopC] to any subterm.
The @tactic[reduceT] tactic applies @tt[reduceC] to the goal sequent.
For example, the @Nuprl type theory describes several
generic reductions:
@begin[description]
@item{beta; $(@lambda v. b[v])@space a @longleftrightarrow b[a]$}
@item{pair; $(@bf{match}@space (a, b)@space @bf{with}@space u, v @rightarrow c[u, v]) @longleftrightarrow c[a, b]$}
@item{union; $(@bf{match}@space @i[inl](a)@space @bf{with}@space
@i[inl](u) @rightarrow b[u]
| @i[inr](v) @rightarrow c[v]) @longleftrightarrow b[a]$}
@end[description]
Each of the modules for functions (Section @refmodule[Itt_dfun]),
tuples (Section @refmodule[Itt_dprod]), and union (Section @refmodule[Itt_union]),
defines an addition to the @hrefresource[reduce] resource: the @hrefmodule[Itt_dfun] adds
the @hrefrewrite[reduce_beta] rewrite with redex $(@lambda v. b[v])@space a$@; the
@hrefmodule[Itt_dprod] adds the @hrefrewrite[reduceSpread] rewrite with redex
$(@bf{match}@space (a, b)@space @bf{with}@space u, v @rightarrow c[u, v])$@; and the
@hrefmodule[Itt_union] adds the @hrefrewrite[reduceDecideInl] rewrite with
redex $(@bf{match}@space @i[inl](a)@space @bf{with}@space
@i[inl](u) @rightarrow b[u]
| @i[inr](v) @rightarrow c[v])$.
In modules that @tt{extends} these three theories, the @tt[reduceC]
conversion will recursively search for applications of these three
rewrites in an attempt to fully reduce the term.
The implementation of the @tt[reduce] resource and the @tt[reduceC]
conversion relies on tables to store the shape of redices, together with the
conversions for the reduction.
@docoff
>>
type reduce_conv = conv * (option_table -> conv)
type reduce_info = rule_labels * conv
type reduce_entry = term * reduce_info
(* unused
let opnames_of_terms options =
List.fold_left (fun options t -> OpnameSet.add options (opname_of_term t)) OpnameSet.empty options
*)
let wrap_reduce ?labels conv =
rule_labels_of_opt_terms labels, conv
let wrap_reduce_crw ?labels conv =
let labels =
match labels with
None -> Perv.crw_labels
| Some labels -> select_crw :: labels
in
rule_labels_of_opt_terms (Some labels), conv
let extract_data =
let rec mapsnd recrw = function
[] -> recrw
| (_, h) :: tl -> h orelseC (mapsnd recrw tl)
in
let select_option options (opts, _) =
rule_labels_are_allowed options opts
in
let rw tbl =
funC (fun e -> (**)
let t = env_term e in
let p = env_arg e in
let options = get_options p in
(* Find and apply the right tactic *)
if !debug_reduce then
eprintf "Conversionals: lookup %a%t" debug_print t eflush;
match Term_match_table.lookup_bucket tbl (select_option options) t with
Some convs ->
if !debug_reduce then
eprintf "Conversionals: applying %a%t" debug_print t eflush;
firstC (List.map snd convs)
| None ->
raise (RefineError ("Conversionals.extract_data", StringTermError ("no reduction for", t))))
in
let hrw tbl options =
let rec hrw t =
let recrw = allSubC (termC hrw) in
(* Find and apply the right tactic *)
if !debug_reduce then
eprintf "Conversionals: lookup %a%t" debug_print t eflush;
match Term_match_table.lookup_bucket tbl (select_option options) t with
Some convs ->
if !debug_reduce then
eprintf "Conversionals: applying %a%t" debug_print t eflush;
mapsnd recrw convs
| None ->
recrw
in termC hrw
in
(fun tbl -> rw tbl, hrw tbl)
(*
* Resource.
*)
let resource (reduce_entry, reduce_conv) reduce =
table_resource_info extract_data
let reduceTopC_env e =
fst (get_resource_arg (env_arg e) get_reduce_resource)
let reduceTopC = funC reduceTopC_env
let reduceC =
funC (fun e ->
let p = env_arg e in
repeatC (snd (get_resource_arg p get_reduce_resource) (get_options p)))
let reduceT = funT (fun p ->
let reduceHigherC = snd (get_resource_arg p get_reduce_resource) (get_options p) in
rwAll (repeatC reduceHigherC))
let reduceHypsT = funT (fun p ->
let reduceHigherC = snd (get_resource_arg p get_reduce_resource) (get_options p) in
onAllMHypsT (rw (repeatC reduceHigherC)))
let simpleReduceTopC = withOptionInfoC Perv.select_crw OptionExclude reduceTopC
let simpleReduceC = withOptionInfoC Perv.select_crw OptionExclude reduceC
let simpleReduceT = withOptionInfoT Perv.select_crw OptionExclude reduceT
let rec wrap_addrs conv = function
[] -> conv
| addr :: addrs -> addrLiteralC addr reduceC thenC wrap_addrs conv addrs
let cound_vars tbl t =
if is_so_var_term t then
let v, _, _ = dest_so_var t in
if Hashtbl.mem tbl v then
Hashtbl.replace tbl v ((Hashtbl.find tbl v) + 1)
else
Hashtbl.add tbl v 1
let find_conds tbl t _ =
is_so_var_term t &&
let v, _, _ = dest_so_var t in
Hashtbl.mem tbl v && ((Hashtbl.find tbl v) > 1)
let process_reduce_resource_rw_annotation ?labels name redex contractum assums addrs args loc rw =
let conv = rewrite_of_pre_rewrite rw empty_rw_args [] in
(*
* If the contractum is potentially an instance of the redex,
* add progressC to make sure we don't stop too early.
*)
let conv =
let instanceof =
try
let redex = compile_redex Strict addrs redex in
test_redex_applicability redex empty_rw_args contractum [];
true
with
RefineError _
| Not_found ->
false
in
if instanceof then begin
if !debug_reduce then
eprintf "%s: contractum is an instance of the redex@." name;
progressC conv
end
else
conv
in
match addrs, args with
{ spec_ints = [||]; spec_addrs = [||] }, [] ->
(*
* Before executing conv, run recursive reduceC on all the subterms
* that will be copied more than once to reduce the possibility
* for an exponential blow-up.
*)
let vars = Hashtbl.create 19 in
let () = List.iter (TermOp.iter_down (cound_vars vars)) (contractum :: assums) in
let addrs = find_subterm redex (find_conds vars) in
let labels = rule_labels_of_opt_terms labels in
[redex, (labels, wrap_addrs conv addrs)]
| _ ->
raise (Invalid_argument ((Simple_print.string_of_loc loc) ^ ": reduce resource annotation:
rewrite " ^ name ^": rewrites that take arguments are not supported"))
(*
* Debugging.
*)
let apply_rewrite p t =
get_resource_arg p Tactic_type.Conversionals.apply_rewrite t
(*
* -*-
* Local Variables:
* End:
* -*-
*)
| null | https://raw.githubusercontent.com/jyh/metaprl/51ba0bbbf409ecb7f96f5abbeb91902fdec47a19/support/tactics/top_conversionals.ml | ocaml |
* Debugging.
***********************************************************************
* REDUCTION RESOURCE *
***********************************************************************
unused
let opnames_of_terms options =
List.fold_left (fun options t -> OpnameSet.add options (opname_of_term t)) OpnameSet.empty options
Find and apply the right tactic
Find and apply the right tactic
* Resource.
* If the contractum is potentially an instance of the redex,
* add progressC to make sure we don't stop too early.
* Before executing conv, run recursive reduceC on all the subterms
* that will be copied more than once to reduce the possibility
* for an exponential blow-up.
* Debugging.
* -*-
* Local Variables:
* End:
* -*-
| doc <:doc<
@spelling{th}
@module[Top_conversionals]
@emph{Conversions} and @emph{conversionals} are analogs of tactics and tacticals
(Section~@refmodule[Top_tacticals])
for rewriting. Conversions are used extensively in Computational Type Theory
(Section @refmodule[Itt_theory]) to express and
apply computational equivalences. The @tt{Top_conversionals}
module defines the basic conversionals provided by the @MetaPRL
prover.
Each @bf{rewrite} definition in a module defines a conversion.
For example, the definition of beta reduction in the Type
Theory (Section @refmodule[Itt_dfun]), is defined as follows:
@begin[center]
@bf{rewrite} unfold_beta : $(@lambda x. b[x])@space a @longleftrightarrow b[a]$
@end[center]
This declaration defines a conversion called @tt[unfold_beta] that can
be applied with the function @tt[rwh], which searches for the outermost
valid applications of the rewrite. Here is an example proof step:
$$
@rulebox{rwh; @tt[unfold_beta]@space 0;
<<sequent [dummy_arg] { <H> >- <:doc< ((@lambda v. v + 1)@space 2) = 3 @in @int>>}>>;
<<sequent [dummy_arg] { <H> >- <:doc< 2 + 1 = 3 @in @int>> }>> }
$$
@docoff
----------------------------------------------------------------
@begin[license]
This file is part of MetaPRL, a modular, higher order
logical framework that provides a logical programming
environment for OCaml and other languages.
See the file doc/htmlman/default.html or visit /
for more information.
Copyright (C) 1998-2006 MetaPRL Group, Cornell University and
California Institute of Technology
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
Author: Jason Hickey @email{}
Modified by: Aleksey Nogin @email{}
@end[license]
>>
doc <:doc<
@parents
>>
extends Perv
extends Mptop
doc docoff
open Lm_debug
open Lm_printf
open Rewrite_sig
open Refiner.Refiner
open Refiner.Refiner.TermType
open Refiner.Refiner.Term
open Refiner.Refiner.TermAddr
open Refiner.Refiner.TermMan
open Refiner.Refiner.RefineError
open Refiner.Refiner.Rewrite
open Term_match_table
open Tactic_type.Tacticals
open Tactic_type.Tactic
open Tactic_type.Conversionals
open Tactic_type.Sequent
open Options_boot
open Top_options
let _debug_conv =
{ debug_name = "conv";
debug_description = "display conversion operation";
debug_value = false
}
let debug_reduce =
{ debug_name = "reduce";
debug_description = "display reductions";
debug_value = false
}
doc <:doc<
@modsection{Conversion application}
@begin[description]
@item{@conv[rw];
Conversions are not tactics: they have a different type @tt[conv]
and they are applied differently. The basic method for applying
a conversion is to use @tt[rw], which converts a conversion to
a tactic applied to a specific clause in a sequent (these functions
are defined only for a sequent calculus). The (@tt[rw] @it[conv] $i$)
tactic applies the conversion @it[conv] to clause $i$ in
the current goal sequent.}
@item{@conv[rwc];
Conversions may be applied also to assumptions.
The (@tt[rwc] @it[conv] $a$ $c$) tactic applies the
conversion @it[conv] to the $c$-th clause in the $a$-th assumption.}
@item{@conv[rwAll] @conv[rwcAll] @conv[rwAllAll];
The (@tt[rwAll] @it[conv]) tactic applies the
conversion @it[conv] to the whole goal sequent.
The (@tt[rwcAll] @it[conv] $a$) tactic applies the
conversion @it[conv] to the whole $a$-th assumption.
The (@tt[rwAllAll] @it[conv]) tactic applies the
conversion @it[conv] to all assumptions and to the goal sequent.}
@end[description]
@docoff
>>
(* Re-export the clause-rewriting tacticals from [Tactic_type.Conversionals]
   so they are available at the top level of this theory.  Per the section
   doc above: the [h]-variants ([rwh], [rwch], ...) wrap the conversion in
   [higherC]; the [a]-variants ([rwa], [rwca], ...) take a list of
   conversions and wrap it in [applyAllC].  The [c]-variants target a
   clause of an assumption, and the [All] variants apply to whole
   sequents/assumptions. *)
let rw = Tactic_type.Conversionals.rw
let rwc = Tactic_type.Conversionals.rwc
let rwAll = Tactic_type.Conversionals.rwAll
let rwcAll = Tactic_type.Conversionals.rwcAll
let rwAllAll = Tactic_type.Conversionals.rwAllAll
let rwh = Tactic_type.Conversionals.rwh
let rwch = Tactic_type.Conversionals.rwch
let rwhAll = Tactic_type.Conversionals.rwhAll
let rwchAll = Tactic_type.Conversionals.rwchAll
let rwhAllAll = Tactic_type.Conversionals.rwhAllAll
let rwa = Tactic_type.Conversionals.rwa
let rwca = Tactic_type.Conversionals.rwca
let rwaAll = Tactic_type.Conversionals.rwaAll
let rwcaAll = Tactic_type.Conversionals.rwcaAll
let rwaAllAll = Tactic_type.Conversionals.rwaAllAll
doc <:doc<
@modsection{Primitive conversions}
@begin[description]
@item{@conv[idC], @conv[failC];
The @tt[idC] conversion is the identity conversion: no rewriting
is performed. The @tt[failC] conversion always fails.}
@end[description]
@docoff
>>
let idC = Tactic_type.Conversionals.idC
let failC = Tactic_type.Conversionals.failC
let failWithC = Tactic_type.Conversionals.failWithC
let forceC = Tactic_type.Conversionals.forceC
doc <:doc<
@modsection{Conversionals}
@begin[description]
@item{@conv[thenC], @conv[orelseC];
Conversionals can be combined in the same manner as tactics.
The (@tt{$c_1$ thenC $c_2$}) conversion first applies conversion
$c_1$, and then applies $c_2$ to the result term. The (@tt{$c_1$ orelseC $c_2$})
conversion first applies $c_1$@; if $c_1$ fails (because the conversion does not
match the term being rewritten, or because of a call to @tt[failC]), $c_2$ is
applied instead.}
@item{@conv[tryC], @conv[firstC];
There are several variations on @tt[orelseC]. The (@tt[tryC] $c$) conversion
is equivalent to (@tt{$c$ orelseC idC}). The @tt[firstC] conversion takes a list of
conversions to try in order until the first one succeeds. The conversion (@tt[firstC]
$[c_1; @cdots; c_n]$) is equivalent to @tt{$c_1$ orelseC $@cdots$ orelseC $c_n$}.}
@item{@conv[progressC];
The (@tt[progressTC] $@i[conv]$) conversion applies its argument and fails
if either $@i[conv]$ fails, or $@i[conv]$ convert the term to the alpha-equal term.}
@item{@conv[untilFailC];
The (@tt[untilFailC] $c$) conversion applies conversion $c$ repeatedly
until it fails. It catches all exception and never fails itself.}
@item{@conv[repeatC];
The (@tt[repeatC] $c$) conversion applies conversion $c$ repeatedly
until it fails, or until it fails to make progress.}
@item{@conv[ifEqualC];
The (@tt[ifEqualC] $t$ $c1$ $c2$) conversion applies conversion $c1$ if the term its applied to is alpha equal to $t$ and
$c2$ otherwise.}
@item{@conv[replaceUsingC];
The (@tt[replaceUsingC] $t$ $c$) conversion applies conversion $c$ to the term $t$ and fails on any other term.}
@end[description]
@docoff
>>
let prefix_thenC = Tactic_type.Conversionals.prefix_thenC
let prefix_orelseC = Tactic_type.Conversionals.prefix_orelseC
let tryC = Tactic_type.Conversionals.tryC
let firstC = Tactic_type.Conversionals.firstC
let untilFailC = Tactic_type.Conversionals.untilFailC
let repeatC = Tactic_type.Conversionals.repeatC
let repeatForC = Tactic_type.Conversionals.repeatForC
let ifEqualC = Tactic_type.Conversionals.ifEqualC
let progressC = Tactic_type.Conversionals.progressC
let replaceUsingC = Tactic_type.Conversionals.replaceUsingC
let allSubThenC = Tactic_type.Conversionals.allSubThenC
let prefix_thenTC = Tactic_type.Conversionals.prefix_thenTC
infix thenC
infix orelseC
infix thenTC
doc <:doc<
@modsection{Addressing and search}
Generally, the terms to be rewritten do not occur at the outermost
level of a clause. The following conversionals recursively search
through the subterms of a clause for applicable rewrites.
@begin[description]
@item{@conv[someSubC], @conv[allSubC];
The most general of these is the (@tt[someSubC] $c$) conversion,
which tries applying conversion $c$ to all of the immediate subterms of
the clause. It succeeds if $c$ succeeds on any of the subterms@; it
fails otherwise. The conversion @tt[allSubC] requires success on
@emph{all} of the immediate subterms.}
@item{@conv[allSubThenC];
@tt[allSubThenC] $c1$ $c2$ tries to apply $c1$ to every immediate subterm.
If it succeed in at least one case then applies $c2$, otherwise fails .}
@item{@conv[addrC];
Subterms can also be addressed explicitly with the (@tt{addrC @it[addr] $c$})
conversion. The address is an integer list
that describes the @emph{path} leading to the term to be rewritten. For
example, the address $[ ]$ is the identity address, $[0]$ is its leftmost
subterm, $[0; 1]$ is the second subterm of the first subterm, @i[etc].
However addresses are somewhat fragile, and correct addresses can be difficult
to discover. For this reason, the use of @tt[addrC] is discouraged.}
@item{@conv[higherC];
The (@tt[higherC] $c$) conversion searches for the outermost
occurrences of subterms in the clause where conversion $c$
applies. Its definition uses @tt[someSubC].
@begin[center]
@code{let rec higherC c = c orelseC (someSubC (higherC c))}
@end[center]}
@item{@conv[lowerC], @conv[sweepDnC];
The @tt[lowerC] conversional searches for the @emph{innermost}
rewrite occurrences. The (@tt[sweepDnC] $c$) conversion applies
$c$ from the outermost to the innermost subterms.
@begin[center]
@code{let rec sweepDnC c = (tryC c) andalsoC (someSubC (sweepDnC c))}
@end[center]}
@item{@conv[sweepUpC];
The @tt[sweepUpC] conversion works from the innermost to outermost subterms.
Note that these conversions never fail@; however they may fail to
make progress if the conversion $c$ never succeeds.}
@item{@conv[findThenC];
The @tt[findThenC] conversion find the outermost term that matches a predicate
and applies a conversion at that point.}
@item{@conv[applyAllC];
The @tt[applyAllC] conversion takes a list of conversions
and applies them to all subterms possible from outermost to
innermost (it applies at most one conversion from the list at most once
to each subterm).
@begin[center]
@code{let applyAllC convs = sweepUpC (firstC convs)}
@end[center]}
@item{@conv[rwh], @conv[rwch], @conv[rwhAll], @conv[rwchAll], @conv[rwhAllAll];
For convenience, the @tt[rwh], @tt[rwch], @tt[rwhAll], @tt[rwchAll],
@tt[rwhAllAll] functions automatically
apply the @tt[higherC] conversion. For example, the tactic (@tt{rwh $conv$ $i$})
is equivalent to (@tt{rw (higherC $conv$) $i$}).}
@item{@conv[rwa], @conv[rwca], @conv[rwaAll], @conv[rwcaAll], @conv[rwaAllAll];
The @tt[rwa], @tt[rwca], @tt[rwaAll], @tt[rwcaAll],
@tt[rwaAllAll] functions take a list of conversions and
apply the @tt[applyAllC] conversion. For example, the tactic (@tt{rwa $convs$ $i$})
is equivalent to (@tt{rw (applyAllC $convs$) $i$}).}
@end[description]
@docoff
>>
let someSubC = Tactic_type.Conversionals.someSubC
let allSubC = Tactic_type.Conversionals.allSubC
let higherC = Tactic_type.Conversionals.higherC
let lowerC = Tactic_type.Conversionals.lowerC
let sweepUpC = Tactic_type.Conversionals.sweepUpC
let sweepDnC = Tactic_type.Conversionals.sweepDnC
let applyAllC = Tactic_type.Conversionals.applyAllC
let findThenC = Tactic_type.Conversionals.findThenC
doc <:doc<
@modsection{Conversion reversal}
Computational rewrites define a congruence, and all equivalence relations
in the congruence closure hold, including reversing the application of
the rewrite. However, reversed rewrites are often incompletely specified.
@begin[description]
@item{@conv[foldC], @conv[cutC];
The (@tt[foldC] $t$ $c$) takes a term $t$ and a conversion that
rewrites the term in the @emph{forward} direction, and generates
reversed conversion. For example, here is a reverse application of
the beta rewrite.
$$
@rulebox{rwh; (@tt[foldC]@space (@lambda v. v + 1)@space 2@space @tt[unfold_beta])@space 0;
<<sequent [dummy_arg] { <H> >- <:doc<2 + 1 = 3 @in @int>>}>>;
<<sequent [dummy_arg] { <H> >- <:doc< ((@lambda v. v + 1)@space 2) = 3 @in @int>> }>>}
$$
@noindent
The @tt[cutC] conversion is used to replace a term and generate a
rewrite obligation.
$$
@rulebox{rw; (@tt[addrC]@space{} [1]@space (@tt[cutC]@space 3))@space 0;
<<sequent [dummy_arg] { <H> >- <:doc< 3 = 3 @in @int>> }>> @cr
<<sequent [dummy_arg] { <H> >- <:doc< ((@lambda v. v + 1)@space 2) @longleftrightarrow 3>>}>>;
<<sequent [dummy_arg] { <H> >- <:doc< ((@lambda v. v + 1)@space 2) = 3 @in @int>>}>>}
$$}
@end[description]
@docoff
>>
let addrC = Tactic_type.Conversionals.addrC
let foldC = Tactic_type.Conversionals.foldC
let makeFoldC = Tactic_type.Conversionals.makeFoldC
let cutC = Tactic_type.Conversionals.cutC
doc <:doc<
@resources
@bf{The @Comment!resource[reduce] resource}
The @tt{reduce} resource provides a generic method for
defining @emph{evaluation}. The @conv[reduceTopC] conversion
can be used to apply this evaluator.
The @conv[reduceC] conversion repeatedly applies @tt[reduceTopC] to any subterm.
The @tactic[reduceT] tactic applies @tt[reduceC] to the goal sequent.
For example, the @Nuprl type theory describes several
generic reductions:
@begin[description]
@item{beta; $(@lambda v. b[v])@space a @longleftrightarrow b[a]$}
@item{pair; $(@bf{match}@space (a, b)@space @bf{with}@space u, v @rightarrow c[u, v]) @longleftrightarrow c[a, b]$}
@item{union; $(@bf{match}@space @i[inl](a)@space @bf{with}@space
@i[inl](u) @rightarrow b[u]
| @i[inr](v) @rightarrow c[v]) @longleftrightarrow b[a]$}
@end[description]
Each of the modules for functions (Section @refmodule[Itt_dfun]),
tuples (Section @refmodule[Itt_dprod]), and union (Section @refmodule[Itt_union]),
defines an addition to the @hrefresource[reduce] resource: the @hrefmodule[Itt_dfun] adds
the @hrefrewrite[reduce_beta] rewrite with redex $(@lambda v. b[v])@space a$@; the
@hrefmodule[Itt_dprod] adds the @hrefrewrite[reduceSpread] rewrite with redex
$(@bf{match}@space (a, b)@space @bf{with}@space u, v @rightarrow c[u, v])$@; and the
@hrefmodule[Itt_union] adds the @hrefrewrite[reduceDecideInl] rewrite with
redex $(@bf{match}@space @i[inl](a)@space @bf{with}@space
@i[inl](u) @rightarrow b[u]
| @i[inr](v) @rightarrow c[v])$.
In modules that @tt{extends} these three theories, the @tt[reduceC]
conversion will recursively search for applications of these three
rewrites in an attempt to fully reduce the term.
The implementation of the @tt[reduce] resource and the @tt[reduceC]
conversion relies on tables to store the shape of redices, together with the
conversions for the reduction.
@docoff
>>
type reduce_conv = conv * (option_table -> conv)
type reduce_info = rule_labels * conv
type reduce_entry = term * reduce_info
let wrap_reduce ?labels conv =
rule_labels_of_opt_terms labels, conv
let wrap_reduce_crw ?labels conv =
let labels =
match labels with
None -> Perv.crw_labels
| Some labels -> select_crw :: labels
in
rule_labels_of_opt_terms (Some labels), conv
let extract_data =
let rec mapsnd recrw = function
[] -> recrw
| (_, h) :: tl -> h orelseC (mapsnd recrw tl)
in
let select_option options (opts, _) =
rule_labels_are_allowed options opts
in
let rw tbl =
let t = env_term e in
let p = env_arg e in
let options = get_options p in
if !debug_reduce then
eprintf "Conversionals: lookup %a%t" debug_print t eflush;
match Term_match_table.lookup_bucket tbl (select_option options) t with
Some convs ->
if !debug_reduce then
eprintf "Conversionals: applying %a%t" debug_print t eflush;
firstC (List.map snd convs)
| None ->
raise (RefineError ("Conversionals.extract_data", StringTermError ("no reduction for", t))))
in
let hrw tbl options =
let rec hrw t =
let recrw = allSubC (termC hrw) in
if !debug_reduce then
eprintf "Conversionals: lookup %a%t" debug_print t eflush;
match Term_match_table.lookup_bucket tbl (select_option options) t with
Some convs ->
if !debug_reduce then
eprintf "Conversionals: applying %a%t" debug_print t eflush;
mapsnd recrw convs
| None ->
recrw
in termC hrw
in
(fun tbl -> rw tbl, hrw tbl)
let resource (reduce_entry, reduce_conv) reduce =
table_resource_info extract_data
let reduceTopC_env e =
fst (get_resource_arg (env_arg e) get_reduce_resource)
let reduceTopC = funC reduceTopC_env
let reduceC =
funC (fun e ->
let p = env_arg e in
repeatC (snd (get_resource_arg p get_reduce_resource) (get_options p)))
let reduceT = funT (fun p ->
let reduceHigherC = snd (get_resource_arg p get_reduce_resource) (get_options p) in
rwAll (repeatC reduceHigherC))
let reduceHypsT = funT (fun p ->
let reduceHigherC = snd (get_resource_arg p get_reduce_resource) (get_options p) in
onAllMHypsT (rw (repeatC reduceHigherC)))
let simpleReduceTopC = withOptionInfoC Perv.select_crw OptionExclude reduceTopC
let simpleReduceC = withOptionInfoC Perv.select_crw OptionExclude reduceC
let simpleReduceT = withOptionInfoT Perv.select_crw OptionExclude reduceT
let rec wrap_addrs conv = function
[] -> conv
| addr :: addrs -> addrLiteralC addr reduceC thenC wrap_addrs conv addrs
(* Count occurrences of each second-order variable: if [t] is an SO
   variable, increment its entry in the hash table [tbl] (var -> count),
   creating the entry at 1 on first sight.  Intended for use with
   [TermOp.iter_down] over contractum/assumption terms (see below).
   NOTE(review): the name looks like a typo for "count_vars"; kept
   as-is because it is referenced by name further down the file. *)
let cound_vars tbl t =
   if is_so_var_term t then
      let v, _, _ = dest_so_var t in
         if Hashtbl.mem tbl v then
            Hashtbl.replace tbl v ((Hashtbl.find tbl v) + 1)
         else
            Hashtbl.add tbl v 1
(* Subterm predicate (passed to [find_subterm] below): true exactly when
   [t] is a second-order variable that occurs more than once according to
   the counts collected by [cound_vars] in [tbl]. *)
let find_conds tbl t _ =
   is_so_var_term t &&
   let v, _, _ = dest_so_var t in
      Hashtbl.mem tbl v && ((Hashtbl.find tbl v) > 1)
let process_reduce_resource_rw_annotation ?labels name redex contractum assums addrs args loc rw =
let conv = rewrite_of_pre_rewrite rw empty_rw_args [] in
let conv =
let instanceof =
try
let redex = compile_redex Strict addrs redex in
test_redex_applicability redex empty_rw_args contractum [];
true
with
RefineError _
| Not_found ->
false
in
if instanceof then begin
if !debug_reduce then
eprintf "%s: contractum is an instance of the redex@." name;
progressC conv
end
else
conv
in
match addrs, args with
{ spec_ints = [||]; spec_addrs = [||] }, [] ->
let vars = Hashtbl.create 19 in
let () = List.iter (TermOp.iter_down (cound_vars vars)) (contractum :: assums) in
let addrs = find_subterm redex (find_conds vars) in
let labels = rule_labels_of_opt_terms labels in
[redex, (labels, wrap_addrs conv addrs)]
| _ ->
raise (Invalid_argument ((Simple_print.string_of_loc loc) ^ ": reduce resource annotation:
rewrite " ^ name ^": rewrites that take arguments are not supported"))
(* Apply the [apply_rewrite] resource function (looked up from the proof
   argument [p]'s resources) to the term [t]. *)
let apply_rewrite p t =
   get_resource_arg p Tactic_type.Conversionals.apply_rewrite t
|
c6fd224a20341bde19b24ff2277b7270bcea2099b871d98eee7d62868e939500 | kappelmann/eidi2_repetitorium_tum | ColUtils.mli | open Collection
(* Utility operations parameterised over an abstract [Collection]
   implementation.  This interface was damaged in extraction: several
   comment delimiters were stripped, leaving bare prose lines that make
   the signature syntactically invalid; they are restored here.  All
   declarations are unchanged. *)
module ColUtils (C: Collection) : sig
  (* keep only the elements of C satisfying the predicate *)
  val filter : ('a -> bool) -> 'a C.t -> 'a C.t
  (* apply function to every element of C *)
  val for_all : ('a -> 'b) -> 'a C.t -> 'b C.t
  (* number of elements in the collection *)
  val length : 'a C.t -> int
  (* check if an element of C satisfies the predicate *)
  val exists : ('a -> bool) -> 'a C.t -> 'a option
  (* merge two collections into one *)
  val merge : 'a C.t -> 'a C.t -> 'a C.t
  (* Make a new collection of tuples from the two given collections, e.g. [1;2;3] [a;b;c]
   * would give [(1,a);(2,b);(3,c)] for a list collection. If the collections have a
   * different size, throw a Different_Length exception *)
  exception Different_Length
  val tuple : 'a C.t -> 'b C.t -> ('a * 'b) C.t
  (* Pull out every nth element of the collection. A call with 0 as a second
   * parameter returns C.empty *)
  val every_nth : 'a C.t -> int -> 'a C.t
end
| null | https://raw.githubusercontent.com/kappelmann/eidi2_repetitorium_tum/1d16bbc498487a85960e0d83152249eb13944611/2016/functors/solutions/ColUtils.mli | ocaml | apply function to every element of C
check if an element of C satifies the predicate | open Collection
module ColUtils (C: Collection) : sig
val filter : ('a -> bool) -> 'a C.t -> 'a C.t
val for_all : ('a -> 'b) -> 'a C.t -> 'b C.t
val length : 'a C.t -> int
val exists : ('a -> bool) -> 'a C.t -> 'a option
merge two collections into one
val merge : 'a C.t -> 'a C.t -> 'a C.t
Make a new collection of tuples from the two given collections e.g. [ 1;2;3 ] [ a;b;c ]
* would give [ ( 1,a);(2,b);(3,c ) ] for a list collection . If the collections have a different size , throw a Different_Length exception
* would give [(1,a);(2,b);(3,c)] for a list collection. If the collections have a different size, throw a Different_Length exception*)
exception Different_Length
val tuple : 'a C.t -> 'b C.t -> ('a * 'b) C.t
Pull out every nth element of the collection . A call with 0 as a second parameter returns C.empty
val every_nth : 'a C.t -> int -> 'a C.t
end
|
90463204ed8ef252af0da9216cc63116542167c18c7365a981fd49310376fd0a | nextjournal/clerk | example.clj | ;; # 🔭 Clerk Examples
;; Hide this ns form's code cell in the rendered Clerk notebook.
^{:nextjournal.clerk/visibility {:code :hide}}
(ns example
  (:require [nextjournal.clerk :as clerk]))
;; Outside of Clerk, the `example` macro evaluates to `nil`, just like `clojure.core/comment`. Try this in your editor!
;; But when used in the context of Clerk, it renders the expressions with their resulting values.
;; A grab-bag of expressions demonstrating `clerk/example`: arithmetic,
;; macroexpansion, hiccup HTML, an infinite lazy seq, and loading an image
;; from a URL.
(clerk/example
 (+ 1 2)
 (+ 41 1)
 (-> 42 range shuffle)
 (macroexpand '(example (+ 1 2)))
 (clerk/html [:h1 "👋"])
 (range)
 (javax.imageio.ImageIO/read (java.net.URL. "-type=image/gif")))
| null | https://raw.githubusercontent.com/nextjournal/clerk/5620618c378be87b6b7c695c145424dded1ef080/notebooks/example.clj | clojure | # 🔭 Clerk Examples
Outside of Clerk, the `example` macro evaluates to `nil`, just like `clojure.core/comment`. Try this in your editor!
But when used in the context of Clerk, it renders the expressions with thier resulting values. | ^{:nextjournal.clerk/visibility {:code :hide}}
(ns example
(:require [nextjournal.clerk :as clerk]))
(clerk/example
(+ 1 2)
(+ 41 1)
(-> 42 range shuffle)
(macroexpand '(example (+ 1 2)))
(clerk/html [:h1 "👋"])
(range)
(javax.imageio.ImageIO/read (java.net.URL. "-type=image/gif")))
|
3a77f03d245798349f34ee8159e1bd2942740e39cad29c5655293cf572ae2485 | alex-gutev/cl-environments | package.lisp | package.lisp
;;;;
Copyright 2018
;;;;
;;;; Permission is hereby granted, free of charge, to any person
;;;; obtaining a copy of this software and associated documentation
files ( the " Software " ) , to deal in the Software without
;;;; restriction, including without limitation the rights to use,
;;;; copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the
;;;; Software is furnished to do so, subject to the following
;;;; conditions:
;;;;
;;;; The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software .
;;;;
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
;;;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
;;;; OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
;;;; NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
;;;; HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
;;;; WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
;;;; FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
;;;; OTHER DEALINGS IN THE SOFTWARE.
;;; Package definition for the cl-environments utility code.
;;; Pulls in ALEXANDRIA, ANAPHORA and OPTIMA alongside COMMON-LISP and
;;; exports the general-purpose helpers defined by this system.
(defpackage :cl-environments.util
  (:use :common-lisp
        :alexandria
        :anaphora
        :optima)
  (:export :defmacro!
           :let-if
           :slot-values
           :match-state
           :reexport-all-symbols))
| null | https://raw.githubusercontent.com/alex-gutev/cl-environments/0b22154c5afefef23d1eba9a4fae11d73580ef41/src/common/package.lisp | lisp |
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE. | package.lisp
Copyright 2018
files ( the " Software " ) , to deal in the Software without
copies of the Software , and to permit persons to whom the
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
(defpackage :cl-environments.util
(:use :common-lisp
:alexandria
:anaphora
:optima)
(:export :defmacro!
:let-if
:slot-values
:match-state
:reexport-all-symbols))
|
8390ca17ec82b2b2111bdc12bbf23cc4c4a19830dfea603be82108c3c97d6402 | metosin/reitit | core.cljs | (ns frontend.core
(:require [reagent.core :as r]
[reitit.frontend :as rf]
[reitit.frontend.easy :as rfe]
[reitit.coercion.spec :as rss]
[spec-tools.data-spec :as ds]
[fipp.edn :as fedn]))
(defn home-page []
[:div
[:h2 "Welcome to frontend"]
[:button
{:type "button"
:on-click #(rfe/push-state ::item {:id 3})}
"Item 3"]
[:button
{:type "button"
:on-click #(rfe/replace-state ::item {:id 4})}
"Replace State Item 4"]])
(defn about-page []
[:div
[:h2 "About frontend"]
[:ul
[:li [:a {:href ""} "external link"]]
[:li [:a {:href (rfe/href ::foobar)} "Missing route"]]
[:li [:a {:href (rfe/href ::item)} "Missing route params"]]]
[:div
{:content-editable true
:suppressContentEditableWarning true}
[:p "Link inside contentEditable element is ignored."]
[:a {:href (rfe/href ::frontpage)} "Link"]]])
(defn item-page
  "Detail view for a single item. Reads the :id path parameter from the
  route match, and renders the optional :foo query parameter when present."
  [match]
  (let [{{:keys [id]} :path, {:keys [foo]} :query} (:parameters match)]
    [:div
     [:h2 "Selected item " id]
     (when foo
       [:p "Optional foo query param: " foo])]))
(defonce match (r/atom nil))
(defn current-page []
[:div
[:ul
[:li [:a {:href (rfe/href ::frontpage)} "Frontpage"]]
[:li [:a {:href (rfe/href ::about)} "About"]]
[:li [:a {:href (rfe/href ::item {:id 1})} "Item 1"]]
[:li [:a {:href (rfe/href ::item {:id 2} {:foo "bar"})} "Item 2"]]]
(if @match
(let [view (:view (:data @match))]
[view @match]))
[:pre (with-out-str (fedn/pprint @match))]])
(def routes
[["/"
{:name ::frontpage
:view home-page}]
["/about"
{:name ::about
:view about-page}]
["/item/:id"
{:name ::item
:view item-page
:parameters {:path {:id int?}
:query {(ds/opt :foo) keyword?}}}]])
(defn init! []
(rfe/start!
(rf/router routes {:data {:coercion rss/coercion}})
(fn [m] (reset! match m))
set to false to enable HistoryAPI
{:use-fragment true})
(r/render [current-page] (.getElementById js/document "app")))
(init!)
| null | https://raw.githubusercontent.com/metosin/reitit/1ab075bd353966636f154ac36ae9b7990efeb008/examples/frontend/src/frontend/core.cljs | clojure | (ns frontend.core
(:require [reagent.core :as r]
[reitit.frontend :as rf]
[reitit.frontend.easy :as rfe]
[reitit.coercion.spec :as rss]
[spec-tools.data-spec :as ds]
[fipp.edn :as fedn]))
(defn home-page []
[:div
[:h2 "Welcome to frontend"]
[:button
{:type "button"
:on-click #(rfe/push-state ::item {:id 3})}
"Item 3"]
[:button
{:type "button"
:on-click #(rfe/replace-state ::item {:id 4})}
"Replace State Item 4"]])
(defn about-page []
[:div
[:h2 "About frontend"]
[:ul
[:li [:a {:href ""} "external link"]]
[:li [:a {:href (rfe/href ::foobar)} "Missing route"]]
[:li [:a {:href (rfe/href ::item)} "Missing route params"]]]
[:div
{:content-editable true
:suppressContentEditableWarning true}
[:p "Link inside contentEditable element is ignored."]
[:a {:href (rfe/href ::frontpage)} "Link"]]])
(defn item-page [match]
(let [{:keys [path query]} (:parameters match)
{:keys [id]} path]
[:div
[:h2 "Selected item " id]
(if (:foo query)
[:p "Optional foo query param: " (:foo query)])]))
(defonce match (r/atom nil))
(defn current-page []
[:div
[:ul
[:li [:a {:href (rfe/href ::frontpage)} "Frontpage"]]
[:li [:a {:href (rfe/href ::about)} "About"]]
[:li [:a {:href (rfe/href ::item {:id 1})} "Item 1"]]
[:li [:a {:href (rfe/href ::item {:id 2} {:foo "bar"})} "Item 2"]]]
(if @match
(let [view (:view (:data @match))]
[view @match]))
[:pre (with-out-str (fedn/pprint @match))]])
(def routes
[["/"
{:name ::frontpage
:view home-page}]
["/about"
{:name ::about
:view about-page}]
["/item/:id"
{:name ::item
:view item-page
:parameters {:path {:id int?}
:query {(ds/opt :foo) keyword?}}}]])
(defn init! []
(rfe/start!
(rf/router routes {:data {:coercion rss/coercion}})
(fn [m] (reset! match m))
set to false to enable HistoryAPI
{:use-fragment true})
(r/render [current-page] (.getElementById js/document "app")))
(init!)
| |
0b96a8bf3c873a8678f981ef7cd0d7bc3c511428c76dae0dc6811cc4abf3e745 | district0x/district-ui-web3 | subs.cljs | (ns district.ui.web3.subs
(:require
[district.ui.web3.queries :as queries]
[re-frame.core :refer [reg-sub]]))
(reg-sub
::web3
queries/web3)
(reg-sub
::web3-injected?
queries/web3-injected?)
(reg-sub
::web3-legacy?
queries/web3-legacy?)
| null | https://raw.githubusercontent.com/district0x/district-ui-web3/f0c1bab3a9ef71e5cbb88e81ed6eddee7a4ed344/src/district/ui/web3/subs.cljs | clojure | (ns district.ui.web3.subs
(:require
[district.ui.web3.queries :as queries]
[re-frame.core :refer [reg-sub]]))
(reg-sub
::web3
queries/web3)
(reg-sub
::web3-injected?
queries/web3-injected?)
(reg-sub
::web3-legacy?
queries/web3-legacy?)
| |
9e2fc8eae9d116d6efdfffae49496d012a9549a57aeb681f8e8932d65caa42cd | Clozure/ccl-tests | prin1-to-string.lsp | ;-*- Mode: Lisp -*-
Author :
Created : Mon Jul 26 12:18:22 2004
;;;; Contains: Tests of PRIN1-TO-STRING
(in-package :cl-test)
(compile-and-load "printer-aux.lsp")
;; Round-trip property test: the helper (from printer-aux.lsp) checks
;; PRIN1-TO-STRING output against its READ-back form, 5 random iterations;
;; NIL means no failures were collected.
(deftest prin1-to-string.1
  (random-prin1-to-string-test 5)
  nil)
;; Under standard IO syntax an integer prints as its plain decimal digits.
(deftest prin1-to-string.2
  (with-standard-io-syntax (prin1-to-string 2))
  "2")
;;; Error tests
;; Too few arguments must signal PROGRAM-ERROR.
(deftest prin1-to-string.error.1
  (signals-error (prin1-to-string) program-error)
  t)
;; Too many arguments must signal PROGRAM-ERROR.
(deftest prin1-to-string.error.2
  (signals-error (prin1-to-string nil nil) program-error)
  t)
| null | https://raw.githubusercontent.com/Clozure/ccl-tests/0478abddb34dbc16487a1975560d8d073a988060/ansi-tests/prin1-to-string.lsp | lisp | -*- Mode: Lisp -*-
Contains: Tests of PRIN1-TO-STRING
Error tests | Author :
Created : Mon Jul 26 12:18:22 2004
(in-package :cl-test)
(compile-and-load "printer-aux.lsp")
(deftest prin1-to-string.1
(random-prin1-to-string-test 5)
nil)
(deftest prin1-to-string.2
(with-standard-io-syntax (prin1-to-string 2))
"2")
(deftest prin1-to-string.error.1
(signals-error (prin1-to-string) program-error)
t)
(deftest prin1-to-string.error.2
(signals-error (prin1-to-string nil nil) program-error)
t)
|
6618c911ea471e4eaccbde33f294000ed01366671e8935e888156b227257e688 | janestreet/bonsai | bonsai_web_ui_partial_render_table_protocol.ml | open Core
module Stable = struct
open Core.Core_stable
module Dir = struct
module V1 = struct
type t =
[ `Asc
| `Desc
]
[@@deriving sexp, bin_io, equal, compare]
end
end
module Order = struct
module V1 = struct
type 'col_id t = ('col_id * Dir.V1.t) list [@@deriving sexp, bin_io, equal, compare]
end
end
end
module type Col_id = sig
type t [@@deriving equal, sexp, bin_io]
end
module Dir = struct
include Stable.Dir.V1
end
module Order = struct
include Stable.Order.V1
module Action = struct
type 'col_id t =
| Set_sort of 'col_id
| Add_sort of 'col_id
[@@deriving sexp_of]
end
  (* Advance the sort order [t] in response to a column-header action.
     [cycle_sort_direction] steps column [id] through
     unsorted -> [`Asc] -> [`Desc] -> unsorted.
     [Set_sort id] replaces the whole ordering with the cycled entry for
     [id]; [Add_sort id] makes [id] the highest-priority sort key while
     keeping the remaining columns as lower-priority tie-breakers. *)
  let apply_action
    (type col_id)
    t
    (module Col_id : Col_id with type t = col_id)
    (action : col_id Action.t)
    =
    let equal = Col_id.equal in
    let cycle_sort_direction id =
      match List.Assoc.find ~equal t id with
      | None -> [ id, `Asc ]
      | Some `Asc -> [ id, `Desc ]
      | Some `Desc -> []
    in
    match action with
    | Set_sort id -> cycle_sort_direction id
    | Add_sort id -> cycle_sort_direction id @ List.Assoc.remove ~equal t id
  ;;
  (* Build the [Incr_map_collate] comparator for the current ordering [t].
     With no column sorts, fall back to [default_sort] (or leave the map
     order [Unchanged]).  Otherwise each column's comparator is looked up
     in [sorters] (columns with no sorter are silently skipped), reversed
     for [`Desc], and the comparators are combined lexicographically with
     [default_sort] as the final tie-breaker. *)
  let to_compare t ~sorters ~default_sort : _ Incr_map_collate.Compare.t =
    match t, default_sort with
    | [], None -> Unchanged
    | [], Some compare -> Custom_by_key_and_value { compare }
    | t, default_sort ->
      let l =
        List.filter_map t ~f:(fun (id, direction) ->
          let open Option.Let_syntax in
          let%map compare = Map.find sorters id in
          match direction with
          | `Asc -> compare
          | `Desc -> fun a b -> Comparable.reverse compare a b)
      in
      let compare =
        List.append l (Option.to_list default_sort)
        |> fun cmps a b -> Comparable.lexicographic cmps a b
      in
      Custom_by_key_and_value { compare }
  ;;
let default = []
end
| null | https://raw.githubusercontent.com/janestreet/bonsai/782fecd000a1f97b143a3f24b76efec96e36a398/web_ui/partial_render_table/protocol/bonsai_web_ui_partial_render_table_protocol.ml | ocaml | open Core
module Stable = struct
open Core.Core_stable
module Dir = struct
module V1 = struct
type t =
[ `Asc
| `Desc
]
[@@deriving sexp, bin_io, equal, compare]
end
end
module Order = struct
module V1 = struct
type 'col_id t = ('col_id * Dir.V1.t) list [@@deriving sexp, bin_io, equal, compare]
end
end
end
module type Col_id = sig
type t [@@deriving equal, sexp, bin_io]
end
module Dir = struct
include Stable.Dir.V1
end
module Order = struct
include Stable.Order.V1
module Action = struct
type 'col_id t =
| Set_sort of 'col_id
| Add_sort of 'col_id
[@@deriving sexp_of]
end
let apply_action
(type col_id)
t
(module Col_id : Col_id with type t = col_id)
(action : col_id Action.t)
=
let equal = Col_id.equal in
let cycle_sort_direction id =
match List.Assoc.find ~equal t id with
| None -> [ id, `Asc ]
| Some `Asc -> [ id, `Desc ]
| Some `Desc -> []
in
match action with
| Set_sort id -> cycle_sort_direction id
| Add_sort id -> cycle_sort_direction id @ List.Assoc.remove ~equal t id
;;
let to_compare t ~sorters ~default_sort : _ Incr_map_collate.Compare.t =
match t, default_sort with
| [], None -> Unchanged
| [], Some compare -> Custom_by_key_and_value { compare }
| t, default_sort ->
let l =
List.filter_map t ~f:(fun (id, direction) ->
let open Option.Let_syntax in
let%map compare = Map.find sorters id in
match direction with
| `Asc -> compare
| `Desc -> fun a b -> Comparable.reverse compare a b)
in
let compare =
List.append l (Option.to_list default_sort)
|> fun cmps a b -> Comparable.lexicographic cmps a b
in
Custom_by_key_and_value { compare }
;;
let default = []
end
| |
7b6ff278de128fa8d982731cf838a94f9d42835efbe337b45a065744ef0274e9 | ChicagoBoss/ChicagoBoss | types.erl | %%-------------------------------------------------------------------
@author
ChicagoBoss Team and contributors , see file in root directory
%% @end
This file is part of ChicagoBoss project .
See file in root directory
%% for license information, see LICENSE file in root directory
%% @end
%% @doc
%%-------------------------------------------------------------------
-module(types).
-type execution_mode() :: 'development' | 'production'.
-type application() :: atom().
-type language() :: any().
-type webserver() :: any().
-type cb_node() :: node().
-type controller() :: any().
-type compiler_adapters() :: 'boss_compiler_adapter_elixir' | 'boss_compiler_adapter_erlang' | 'boss_compiler_adapter_lfe'.
-export_type([execution_mode/0, application/0, language/0, webserver/0, cb_node/0]).
-export_type([controller/0, compiler_adapters/0]).
| null | https://raw.githubusercontent.com/ChicagoBoss/ChicagoBoss/113bac70c2f835c1e99c757170fd38abf09f5da2/src/boss/types.erl | erlang | -------------------------------------------------------------------
@end
for license information, see LICENSE file in root directory
@end
@doc
------------------------------------------------------------------- | @author
ChicagoBoss Team and contributors , see file in root directory
This file is part of ChicagoBoss project .
See file in root directory
-module(types).
-type execution_mode() :: 'development' | 'production'.
-type application() :: atom().
-type language() :: any().
-type webserver() :: any().
-type cb_node() :: node().
-type controller() :: any().
-type compiler_adapters() :: 'boss_compiler_adapter_elixir' | 'boss_compiler_adapter_erlang' | 'boss_compiler_adapter_lfe'.
-export_type([execution_mode/0, application/0, language/0, webserver/0, cb_node/0]).
-export_type([controller/0, compiler_adapters/0]).
|
cb84aa519048170e3fbc9e1ecde3cb4763a50fac235e632e163b17fd38f9cdeb | nixeagle/cl-irc | package.lisp | $ Id$
$ URL$
;;;; See the LICENSE file for licensing information.
(in-package :cl-user)
;; the exports list needs some cleanup/clarification/categorization
(eval-when (:execute :load-toplevel :compile-toplevel)
(defpackage :cl-irc
(:use :cl)
(:nicknames :irc)
(:export :read-message-loop
:read-message
:irc-message-event
:start-background-message-handler
:stop-background-message-handler
:destructuring-arguments
:&req
:socket-connect
:server-name
:server-port
:no-such-reply
:irc-mode
:boolean-value-mode
:single-value-mode
:list-value-mode
:add-mode
:remove-mode
:has-mode-p
:has-mode-value-p
:get-mode
:set-mode
:unset-mode
:parse-mode-arguments
:parse-raw-message
:normalize-nickname
:normalize-channel-name
:name
:normalized-name
:topic
:modes
:visibility
:user-count
:users
:network-stream
:output-stream
:client-stream
:channels
:add-hook
:remove-hook
:remove-hooks
:remove-all-hooks
:add-default-hooks
:get-hooks
:make-user
:nickname
:normalized-nickname
:username
:hostname
:realname
:change-nickname
:irc-message
:source
:user
:password
:host
:command
:arguments
:trailing-argument
:connection
:received-time
:raw-message-string
:make-connection
:make-channel
:channel
:client-log
:find-channel
:find-reply-name
:remove-channel
:remove-all-channels
:add-channel
:find-user
:add-user
:remove-all-users
:remove-user
:self-message-p
:pass
:nick
:user-
:oper
:mode
:op
:deop
:voice
:devoice
:ban
:unban
:service
:quit
:squit
:join
:part
:part-all
:topic-
:names
:list-
:invite
:kick
:privmsg
:notice
:motd-
:lusers
:version
:stats
:links
:time-
:connect
:trace-
:admin
:info
:servlist
:squery
:who
:whois
:whowas
:kill
:ping
:pong
:error-
:away
:rehash
:die
:restart-
:summon
:users-
:wallops
:userhost
:ison)))
| null | https://raw.githubusercontent.com/nixeagle/cl-irc/efaea15f2962107ea9b1a2fad5cd9db492b4247b/tags/0.8.0/package.lisp | lisp | See the LICENSE file for licensing information.
the exports list needs some cleanup/clarification/categorization | $ Id$
$ URL$
(in-package :cl-user)
(eval-when (:execute :load-toplevel :compile-toplevel)
(defpackage :cl-irc
(:use :cl)
(:nicknames :irc)
(:export :read-message-loop
:read-message
:irc-message-event
:start-background-message-handler
:stop-background-message-handler
:destructuring-arguments
:&req
:socket-connect
:server-name
:server-port
:no-such-reply
:irc-mode
:boolean-value-mode
:single-value-mode
:list-value-mode
:add-mode
:remove-mode
:has-mode-p
:has-mode-value-p
:get-mode
:set-mode
:unset-mode
:parse-mode-arguments
:parse-raw-message
:normalize-nickname
:normalize-channel-name
:name
:normalized-name
:topic
:modes
:visibility
:user-count
:users
:network-stream
:output-stream
:client-stream
:channels
:add-hook
:remove-hook
:remove-hooks
:remove-all-hooks
:add-default-hooks
:get-hooks
:make-user
:nickname
:normalized-nickname
:username
:hostname
:realname
:change-nickname
:irc-message
:source
:user
:password
:host
:command
:arguments
:trailing-argument
:connection
:received-time
:raw-message-string
:make-connection
:make-channel
:channel
:client-log
:find-channel
:find-reply-name
:remove-channel
:remove-all-channels
:add-channel
:find-user
:add-user
:remove-all-users
:remove-user
:self-message-p
:pass
:nick
:user-
:oper
:mode
:op
:deop
:voice
:devoice
:ban
:unban
:service
:quit
:squit
:join
:part
:part-all
:topic-
:names
:list-
:invite
:kick
:privmsg
:notice
:motd-
:lusers
:version
:stats
:links
:time-
:connect
:trace-
:admin
:info
:servlist
:squery
:who
:whois
:whowas
:kill
:ping
:pong
:error-
:away
:rehash
:die
:restart-
:summon
:users-
:wallops
:userhost
:ison)))
|
b21f8f6cff73c78cd2ea2522797a476c4442beda1a88e2567d79f0d355b69635 | OCamlPro/ez_api | ezCurl.ml | (**************************************************************************)
(* *)
Copyright 2018 - 2022 OCamlPro
(* *)
(* All rights reserved. This file is distributed under the terms of the *)
GNU Lesser General Public License version 2.1 , with the special
(* exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
let make ?meth ?headers ?content ?content_type ?msg url f =
EzCurl_common.log ?meth url msg;
if !Verbose.v land 2 <> 0 then Format.printf "[ez_api] sent:\n%s@." (Option.value ~default:"" content);
let rc, data =
try
let r, c = EzCurl_common.init ?meth ?headers ?content ?content_type url in
Curl.perform c;
let rc = Curl.get_responsecode c in
Curl.cleanup c;
rc, Buffer.contents r
with
| Curl.CurlException (_, i, s) -> i, s
| exn -> -1, Printexc.to_string exn in
EzCurl_common.log ~meth:("RECV " ^ string_of_int rc) url msg;
if !Verbose.v land 1 <> 0 then Format.printf "[ez_api] received:\n%s@." data;
if rc >= 200 && rc < 300 then try f (Ok data) with _ -> ()
else try f (Error (rc, Some data)) with _ -> ()
module Interface = struct
let get ?(meth="GET") ?headers ?msg url f =
make ~meth ?headers ?msg url f
let post ?(meth="POST") ?(content_type="application/json") ?(content="{}")
?headers ?msg url f =
make ~meth ?headers ?msg ~content_type ~content url f
end
include EzRequest.Make(Interface)
| null | https://raw.githubusercontent.com/OCamlPro/ez_api/2debe0bf97586fa231c855526fd54d69f72d1546/src/request/unix/curl/ezCurl.ml | ocaml | ************************************************************************
All rights reserved. This file is distributed under the terms of the
exception on linking described in the file LICENSE.
************************************************************************ | Copyright 2018 - 2022 OCamlPro
GNU Lesser General Public License version 2.1 , with the special
let make ?meth ?headers ?content ?content_type ?msg url f =
EzCurl_common.log ?meth url msg;
if !Verbose.v land 2 <> 0 then Format.printf "[ez_api] sent:\n%s@." (Option.value ~default:"" content);
let rc, data =
try
let r, c = EzCurl_common.init ?meth ?headers ?content ?content_type url in
Curl.perform c;
let rc = Curl.get_responsecode c in
Curl.cleanup c;
rc, Buffer.contents r
with
| Curl.CurlException (_, i, s) -> i, s
| exn -> -1, Printexc.to_string exn in
EzCurl_common.log ~meth:("RECV " ^ string_of_int rc) url msg;
if !Verbose.v land 1 <> 0 then Format.printf "[ez_api] received:\n%s@." data;
if rc >= 200 && rc < 300 then try f (Ok data) with _ -> ()
else try f (Error (rc, Some data)) with _ -> ()
module Interface = struct
let get ?(meth="GET") ?headers ?msg url f =
make ~meth ?headers ?msg url f
let post ?(meth="POST") ?(content_type="application/json") ?(content="{}")
?headers ?msg url f =
make ~meth ?headers ?msg ~content_type ~content url f
end
include EzRequest.Make(Interface)
|
11e0cc21bb668ba1e4ab05437aab35b91f87ae7435e398fc8783e5874aac94c3 | marigold-dev/mankavar | main.ml | let () = () | null | https://raw.githubusercontent.com/marigold-dev/mankavar/51761e67f6e443766eed1f827bddb83cf777d2f9/src/sfsm/test/main.ml | ocaml | let () = () | |
320ad41058af823fa17c84a5db538833914a3c8450eeb2d2753bed09828d7437 | mbacarella/opws | cbc.ml | A CBC abstraction layer
Copyright ( C ) 2008 < >
This program is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
Copyright (C) 2008 Michael Bacarella <>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*)
type state = { mutable prev_v : bytes }
let init iv = { prev_v = iv }
let xor_strings b c =
let int c = int_of_char c in
let a = Bytes.copy b in
for i = 0 to Bytes.length a - 1 do
Bytes.set a i (Bin.chr (int (Bytes.get b i) lxor int (Bytes.get c i)))
done;
a
let encrypt cbc enc p =
let p' = xor_strings p cbc.prev_v in
let c = enc p' in
cbc.prev_v <- c;
c
let decrypt cbc dec c =
let p = dec c in
let p' = xor_strings p cbc.prev_v in
cbc.prev_v <- c;
p'
| null | https://raw.githubusercontent.com/mbacarella/opws/946b1e3e8e62712cfcdf22663ebfd857c025d5c5/cbc.ml | ocaml | A CBC abstraction layer
Copyright ( C ) 2008 < >
This program is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
Copyright (C) 2008 Michael Bacarella <>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*)
type state = { mutable prev_v : bytes }
let init iv = { prev_v = iv }
let xor_strings b c =
let int c = int_of_char c in
let a = Bytes.copy b in
for i = 0 to Bytes.length a - 1 do
Bytes.set a i (Bin.chr (int (Bytes.get b i) lxor int (Bytes.get c i)))
done;
a
let encrypt cbc enc p =
let p' = xor_strings p cbc.prev_v in
let c = enc p' in
cbc.prev_v <- c;
c
let decrypt cbc dec c =
let p = dec c in
let p' = xor_strings p cbc.prev_v in
cbc.prev_v <- c;
p'
| |
4f361e2964956bf7b8698579cc2ef8301be99509b45ecc0c55636ea7a1a55b91 | ocsigen/ocaml-eliom | int32.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
* 32 - bit integers .
This module provides operations on the type [ int32 ]
of signed 32 - bit integers . Unlike the built - in [ int ] type ,
the type [ int32 ] is guaranteed to be exactly 32 - bit wide on all
platforms . All arithmetic operations over [ int32 ] are taken
modulo 2{^32 } .
Performance notice : values of type [ int32 ] occupy more memory
space than values of type [ int ] , and arithmetic operations on
[ int32 ] are generally slower than those on [ int ] . Use [ int32 ]
only when the application requires exact 32 - bit arithmetic .
This module provides operations on the type [int32]
of signed 32-bit integers. Unlike the built-in [int] type,
the type [int32] is guaranteed to be exactly 32-bit wide on all
platforms. All arithmetic operations over [int32] are taken
modulo 2{^32}.
Performance notice: values of type [int32] occupy more memory
space than values of type [int], and arithmetic operations on
[int32] are generally slower than those on [int]. Use [int32]
only when the application requires exact 32-bit arithmetic. *)
val zero : int32
* The 32 - bit integer 0 .
val one : int32
* The 32 - bit integer 1 .
val minus_one : int32
* The 32 - bit integer -1 .
external neg : int32 -> int32 = "%int32_neg"
(** Unary negation. *)
external add : int32 -> int32 -> int32 = "%int32_add"
(** Addition. *)
external sub : int32 -> int32 -> int32 = "%int32_sub"
(** Subtraction. *)
external mul : int32 -> int32 -> int32 = "%int32_mul"
(** Multiplication. *)
external div : int32 -> int32 -> int32 = "%int32_div"
* Integer division . Raise [ Division_by_zero ] if the second
argument is zero . This division rounds the real quotient of
its arguments towards zero , as specified for { ! Pervasives.(/ ) } .
argument is zero. This division rounds the real quotient of
its arguments towards zero, as specified for {!Pervasives.(/)}. *)
external rem : int32 -> int32 -> int32 = "%int32_mod"
* Integer remainder . If [ y ] is not zero , the result
of [ Int32.rem x y ] satisfies the following property :
[ x = Int32.add ( ( Int32.div x y ) y ) ( Int32.rem x y ) ] .
If [ y = 0 ] , [ x y ] raises [ Division_by_zero ] .
of [Int32.rem x y] satisfies the following property:
[x = Int32.add (Int32.mul (Int32.div x y) y) (Int32.rem x y)].
If [y = 0], [Int32.rem x y] raises [Division_by_zero]. *)
val succ : int32 -> int32
(** Successor. [Int32.succ x] is [Int32.add x Int32.one]. *)
val pred : int32 -> int32
(** Predecessor. [Int32.pred x] is [Int32.sub x Int32.one]. *)
val abs : int32 -> int32
(** Return the absolute value of its argument. *)
val max_int : int32
* The greatest representable 32 - bit integer , 2{^31 } - 1 .
val min_int : int32
* The smallest representable 32 - bit integer , -2{^31 } .
external logand : int32 -> int32 -> int32 = "%int32_and"
(** Bitwise logical and. *)
external logor : int32 -> int32 -> int32 = "%int32_or"
(** Bitwise logical or. *)
external logxor : int32 -> int32 -> int32 = "%int32_xor"
(** Bitwise logical exclusive or. *)
val lognot : int32 -> int32
(** Bitwise logical negation *)
external shift_left : int32 -> int -> int32 = "%int32_lsl"
* [ Int32.shift_left x y ] shifts [ x ] to the left by [ y ] bits .
The result is unspecified if [ y < 0 ] or [ y > = 32 ] .
The result is unspecified if [y < 0] or [y >= 32]. *)
external shift_right : int32 -> int -> int32 = "%int32_asr"
* [ Int32.shift_right x y ] shifts [ x ] to the right by [ y ] bits .
This is an arithmetic shift : the sign bit of [ x ] is replicated
and inserted in the vacated bits .
The result is unspecified if [ y < 0 ] or [ y > = 32 ] .
This is an arithmetic shift: the sign bit of [x] is replicated
and inserted in the vacated bits.
The result is unspecified if [y < 0] or [y >= 32]. *)
external shift_right_logical : int32 -> int -> int32 = "%int32_lsr"
* [ Int32.shift_right_logical x y ] shifts [ x ] to the right by [ y ] bits .
This is a logical shift : zeroes are inserted in the vacated bits
regardless of the sign of [ x ] .
The result is unspecified if [ y < 0 ] or [ y > = 32 ] .
This is a logical shift: zeroes are inserted in the vacated bits
regardless of the sign of [x].
The result is unspecified if [y < 0] or [y >= 32]. *)
external of_int : int -> int32 = "%int32_of_int"
* Convert the given integer ( type [ int ] ) to a 32 - bit integer
( type [ int32 ] ) .
(type [int32]). *)
external to_int : int32 -> int = "%int32_to_int"
* Convert the given 32 - bit integer ( type [ int32 ] ) to an
integer ( type [ int ] ) . On 32 - bit platforms , the 32 - bit integer
is taken modulo 2{^31 } , i.e. the high - order bit is lost
during the conversion . On 64 - bit platforms , the conversion
is exact .
integer (type [int]). On 32-bit platforms, the 32-bit integer
is taken modulo 2{^31}, i.e. the high-order bit is lost
during the conversion. On 64-bit platforms, the conversion
is exact. *)
external of_float : float -> int32
= "caml_int32_of_float" "caml_int32_of_float_unboxed"
[@@unboxed] [@@noalloc]
* Convert the given floating - point number to a 32 - bit integer ,
discarding the fractional part ( truncate towards 0 ) .
The result of the conversion is undefined if , after truncation ,
the number is outside the range \[{!Int32.min_int } , { ! Int32.max_int}\ ] .
discarding the fractional part (truncate towards 0).
The result of the conversion is undefined if, after truncation,
the number is outside the range \[{!Int32.min_int}, {!Int32.max_int}\]. *)
external to_float : int32 -> float
= "caml_int32_to_float" "caml_int32_to_float_unboxed"
[@@unboxed] [@@noalloc]
* Convert the given 32 - bit integer to a floating - point number .
external of_string : string -> int32 = "caml_int32_of_string"
* Convert the given string to a 32 - bit integer .
The string is read in decimal ( by default ) or in hexadecimal ,
octal or binary if the string begins with [ 0x ] , [ 0o ] or [ 0b ]
respectively .
Raise [ Failure " int_of_string " ] if the given string is not
a valid representation of an integer , or if the integer represented
exceeds the range of integers representable in type [ int32 ] .
The string is read in decimal (by default) or in hexadecimal,
octal or binary if the string begins with [0x], [0o] or [0b]
respectively.
Raise [Failure "int_of_string"] if the given string is not
a valid representation of an integer, or if the integer represented
exceeds the range of integers representable in type [int32]. *)
val to_string : int32 -> string
(** Return the string representation of its argument, in signed decimal. *)
external bits_of_float : float -> int32
= "caml_int32_bits_of_float" "caml_int32_bits_of_float_unboxed"
[@@unboxed] [@@noalloc]
* Return the internal representation of the given float according
to the IEEE 754 floating - point ' single format ' bit layout .
Bit 31 of the result represents the sign of the float ;
bits 30 to 23 represent the ( biased ) exponent ; bits 22 to 0
represent the mantissa .
to the IEEE 754 floating-point 'single format' bit layout.
Bit 31 of the result represents the sign of the float;
bits 30 to 23 represent the (biased) exponent; bits 22 to 0
represent the mantissa. *)
external float_of_bits : int32 -> float
= "caml_int32_float_of_bits" "caml_int32_float_of_bits_unboxed"
[@@unboxed] [@@noalloc]
(** Return the floating-point number whose internal representation,
according to the IEEE 754 floating-point 'single format' bit layout,
is the given [int32]. *)
type t = int32
* An alias for the type of 32 - bit integers .
val compare: t -> t -> int
* The comparison function for 32 - bit integers , with the same specification as
{ ! Pervasives.compare } . Along with the type [ t ] , this function [ compare ]
allows the module [ Int32 ] to be passed as argument to the functors
{ ! Set . Make } and { ! Map . Make } .
{!Pervasives.compare}. Along with the type [t], this function [compare]
allows the module [Int32] to be passed as argument to the functors
{!Set.Make} and {!Map.Make}. *)
val equal: t -> t -> bool
* The equal function for int32s .
@since 4.03.0
@since 4.03.0 *)
(**/**)
* { 6 Deprecated functions }
external format : string -> int32 -> string = "caml_int32_format"
(** Do not use this deprecated function. Instead,
used {!Printf.sprintf} with a [%l...] format. *)
| null | https://raw.githubusercontent.com/ocsigen/ocaml-eliom/497c6707f477cb3086dc6d8124384e74a8c379ae/stdlib/int32.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* Unary negation.
* Addition.
* Subtraction.
* Multiplication.
* Successor. [Int32.succ x] is [Int32.add x Int32.one].
* Predecessor. [Int32.pred x] is [Int32.sub x Int32.one].
* Return the absolute value of its argument.
* Bitwise logical and.
* Bitwise logical or.
* Bitwise logical exclusive or.
* Bitwise logical negation
* Return the string representation of its argument, in signed decimal.
* Return the floating-point number whose internal representation,
according to the IEEE 754 floating-point 'single format' bit layout,
is the given [int32].
*/*
* Do not use this deprecated function. Instead,
used {!Printf.sprintf} with a [%l...] format. | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
* 32 - bit integers .
This module provides operations on the type [ int32 ]
of signed 32 - bit integers . Unlike the built - in [ int ] type ,
the type [ int32 ] is guaranteed to be exactly 32 - bit wide on all
platforms . All arithmetic operations over [ int32 ] are taken
modulo 2{^32 } .
Performance notice : values of type [ int32 ] occupy more memory
space than values of type [ int ] , and arithmetic operations on
[ int32 ] are generally slower than those on [ int ] . Use [ int32 ]
only when the application requires exact 32 - bit arithmetic .
This module provides operations on the type [int32]
of signed 32-bit integers. Unlike the built-in [int] type,
the type [int32] is guaranteed to be exactly 32-bit wide on all
platforms. All arithmetic operations over [int32] are taken
modulo 2{^32}.
Performance notice: values of type [int32] occupy more memory
space than values of type [int], and arithmetic operations on
[int32] are generally slower than those on [int]. Use [int32]
only when the application requires exact 32-bit arithmetic. *)
val zero : int32
* The 32 - bit integer 0 .
val one : int32
* The 32 - bit integer 1 .
val minus_one : int32
* The 32 - bit integer -1 .
external neg : int32 -> int32 = "%int32_neg"
external add : int32 -> int32 -> int32 = "%int32_add"
external sub : int32 -> int32 -> int32 = "%int32_sub"
external mul : int32 -> int32 -> int32 = "%int32_mul"
external div : int32 -> int32 -> int32 = "%int32_div"
* Integer division . Raise [ Division_by_zero ] if the second
argument is zero . This division rounds the real quotient of
its arguments towards zero , as specified for { ! Pervasives.(/ ) } .
argument is zero. This division rounds the real quotient of
its arguments towards zero, as specified for {!Pervasives.(/)}. *)
external rem : int32 -> int32 -> int32 = "%int32_mod"
* Integer remainder . If [ y ] is not zero , the result
of [ Int32.rem x y ] satisfies the following property :
[ x = Int32.add ( ( Int32.div x y ) y ) ( Int32.rem x y ) ] .
If [ y = 0 ] , [ x y ] raises [ Division_by_zero ] .
of [Int32.rem x y] satisfies the following property:
[x = Int32.add (Int32.mul (Int32.div x y) y) (Int32.rem x y)].
If [y = 0], [Int32.rem x y] raises [Division_by_zero]. *)
val succ : int32 -> int32
val pred : int32 -> int32
val abs : int32 -> int32
val max_int : int32
* The greatest representable 32 - bit integer , 2{^31 } - 1 .
val min_int : int32
* The smallest representable 32 - bit integer , -2{^31 } .
external logand : int32 -> int32 -> int32 = "%int32_and"
external logor : int32 -> int32 -> int32 = "%int32_or"
external logxor : int32 -> int32 -> int32 = "%int32_xor"
val lognot : int32 -> int32
external shift_left : int32 -> int -> int32 = "%int32_lsl"
* [ Int32.shift_left x y ] shifts [ x ] to the left by [ y ] bits .
The result is unspecified if [ y < 0 ] or [ y > = 32 ] .
The result is unspecified if [y < 0] or [y >= 32]. *)
external shift_right : int32 -> int -> int32 = "%int32_asr"
* [ Int32.shift_right x y ] shifts [ x ] to the right by [ y ] bits .
This is an arithmetic shift : the sign bit of [ x ] is replicated
and inserted in the vacated bits .
The result is unspecified if [ y < 0 ] or [ y > = 32 ] .
This is an arithmetic shift: the sign bit of [x] is replicated
and inserted in the vacated bits.
The result is unspecified if [y < 0] or [y >= 32]. *)
external shift_right_logical : int32 -> int -> int32 = "%int32_lsr"
* [ Int32.shift_right_logical x y ] shifts [ x ] to the right by [ y ] bits .
This is a logical shift : zeroes are inserted in the vacated bits
regardless of the sign of [ x ] .
The result is unspecified if [ y < 0 ] or [ y > = 32 ] .
This is a logical shift: zeroes are inserted in the vacated bits
regardless of the sign of [x].
The result is unspecified if [y < 0] or [y >= 32]. *)
external of_int : int -> int32 = "%int32_of_int"
* Convert the given integer ( type [ int ] ) to a 32 - bit integer
( type [ int32 ] ) .
(type [int32]). *)
external to_int : int32 -> int = "%int32_to_int"
* Convert the given 32 - bit integer ( type [ int32 ] ) to an
integer ( type [ int ] ) . On 32 - bit platforms , the 32 - bit integer
is taken modulo 2{^31 } , i.e. the high - order bit is lost
during the conversion . On 64 - bit platforms , the conversion
is exact .
integer (type [int]). On 32-bit platforms, the 32-bit integer
is taken modulo 2{^31}, i.e. the high-order bit is lost
during the conversion. On 64-bit platforms, the conversion
is exact. *)
external of_float : float -> int32
= "caml_int32_of_float" "caml_int32_of_float_unboxed"
[@@unboxed] [@@noalloc]
* Convert the given floating - point number to a 32 - bit integer ,
discarding the fractional part ( truncate towards 0 ) .
The result of the conversion is undefined if , after truncation ,
the number is outside the range \[{!Int32.min_int } , { ! Int32.max_int}\ ] .
discarding the fractional part (truncate towards 0).
The result of the conversion is undefined if, after truncation,
the number is outside the range \[{!Int32.min_int}, {!Int32.max_int}\]. *)
external to_float : int32 -> float
= "caml_int32_to_float" "caml_int32_to_float_unboxed"
[@@unboxed] [@@noalloc]
* Convert the given 32 - bit integer to a floating - point number .
external of_string : string -> int32 = "caml_int32_of_string"
* Convert the given string to a 32 - bit integer .
The string is read in decimal ( by default ) or in hexadecimal ,
octal or binary if the string begins with [ 0x ] , [ 0o ] or [ 0b ]
respectively .
Raise [ Failure " int_of_string " ] if the given string is not
a valid representation of an integer , or if the integer represented
exceeds the range of integers representable in type [ int32 ] .
The string is read in decimal (by default) or in hexadecimal,
octal or binary if the string begins with [0x], [0o] or [0b]
respectively.
Raise [Failure "int_of_string"] if the given string is not
a valid representation of an integer, or if the integer represented
exceeds the range of integers representable in type [int32]. *)
val to_string : int32 -> string
external bits_of_float : float -> int32
= "caml_int32_bits_of_float" "caml_int32_bits_of_float_unboxed"
[@@unboxed] [@@noalloc]
* Return the internal representation of the given float according
to the IEEE 754 floating - point ' single format ' bit layout .
Bit 31 of the result represents the sign of the float ;
bits 30 to 23 represent the ( biased ) exponent ; bits 22 to 0
represent the mantissa .
to the IEEE 754 floating-point 'single format' bit layout.
Bit 31 of the result represents the sign of the float;
bits 30 to 23 represent the (biased) exponent; bits 22 to 0
represent the mantissa. *)
external float_of_bits : int32 -> float
= "caml_int32_float_of_bits" "caml_int32_float_of_bits_unboxed"
[@@unboxed] [@@noalloc]
type t = int32
* An alias for the type of 32 - bit integers .
val compare: t -> t -> int
* The comparison function for 32 - bit integers , with the same specification as
{ ! Pervasives.compare } . Along with the type [ t ] , this function [ compare ]
allows the module [ Int32 ] to be passed as argument to the functors
{ ! Set . Make } and { ! Map . Make } .
{!Pervasives.compare}. Along with the type [t], this function [compare]
allows the module [Int32] to be passed as argument to the functors
{!Set.Make} and {!Map.Make}. *)
val equal: t -> t -> bool
* The equal function for int32s .
@since 4.03.0
@since 4.03.0 *)
* { 6 Deprecated functions }
external format : string -> int32 -> string = "caml_int32_format"
|
29a77c8b16a3a3e3471b33ef7721a64adcd5004580531c1dbfb4223733797f59 | facebook/duckling | Corpus.hs | Copyright ( c ) 2016 - present , Facebook , Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Time.HE.Corpus
( corpus
) where
import Data.String
import Prelude
import Duckling.Locale
import Duckling.Resolve
import Duckling.Time.Corpus
import Duckling.Time.Types hiding (Month)
import Duckling.TimeGrain.Types hiding (add)
import Duckling.Testing.Types hiding (examples)
corpus :: Corpus
corpus = (testContext {locale = makeLocale HE Nothing}, testOptions, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (datetime (2013, 2, 12, 4, 30, 0) Second)
[ "עכשיו"
, "מייד"
]
, examples (datetime (2013, 2, 12, 0, 0, 0) Day)
[ "היום"
]
, examples (datetime (2013, 2, 11, 0, 0, 0) Day)
[ "אתמול"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "מחר"
]
, examples (datetime (2013, 2, 17, 0, 0, 0) Day)
[ "ראשון"
, "יום ראשון"
, "בראשון הזה"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "יום שני"
, "שני"
, "שני הזה"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
-- "שני השמונה עשרה לפברואר"
[ "שני 18 לפברואר"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
" ה19 "
[ "שלישי"
, "יום שלישי התשעה עשר"
]
, examples (datetime (2013, 2, 14, 0, 0, 0) Day)
[ "חמישי"
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "שישי"
]
, examples (datetime (2013, 2, 16, 0, 0, 0) Day)
[ "שבת"
]
, examples (datetime (2013, 2, 17, 0, 0, 0) Day)
[ "ראשון"
]
, examples (datetime (2013, 3, 1, 0, 0, 0) Day)
-- "הראשון למרץ"
-- "ה1 למרץ"
[ "1 למרץ"
]
, examples (datetime (2013, 3, 3, 0, 0, 0) Day)
[ "במרץ 3"
]
, examples (datetime (2013, 3, 15, 0, 0, 0) Day)
[ "באמצע מרץ"
]
, examples (datetime (2015, 3, 3, 0, 0, 0) Day)
" השלישי למרץ 2015 "
[ "3 למרץ 2015"
, "שלושה במרץ 2015"
, "3/3/2015"
, "3/3/15"
, "2015-3-3"
, "2015-03-03"
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
-- "חמש עשרה לחודש"
-- "ב15 לחודש"
-- "ב15 החודש"
[
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "ה15 בפברואר"
, "15 לפברואר"
, "2/15"
, "ב 2/15"
, "פברואר 15"
]
, examples (datetime (2013, 8, 8, 0, 0, 0) Day)
[ "אוגוסט 8"
]
, examples (datetime (2014, 10, 0, 0, 0, 0) Month)
[ "אוקטובר 2014"
]
, examples (datetime (1974, 10, 31, 0, 0, 0) Day)
[ "10/31/1974"
, "10/31/74"
, "10-31-74"
]
, examples (datetime (2015, 4, 14, 0, 0, 0) Day)
[ "14 לאפריל 2015"
, "אפריל 14, 2015"
]
, examples (datetime (2013, 2, 22, 0, 0, 0) Day)
[ "שישי הבא"
]
, examples (datetime (2013, 3, 0, 0, 0, 0) Month)
[ "מרץ הבא"
]
, examples (datetime (2013, 2, 10, 0, 0, 0) Day)
[ "ראשון, 10 לפברואר"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
-- "שני, השמונה עשרה לפברואר"
-- "יום שני, ה18 לפברואר"
[
]
, examples (datetime (2013, 2, 11, 0, 0, 0) Week)
[ "בשבוע הזה"
]
, examples (datetime (2013, 2, 4, 0, 0, 0) Week)
[ "שבוע שעבר"
, "שבוע האחרון"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Week)
[ "שבוע הבא"
]
, examples (datetime (2013, 1, 0, 0, 0, 0) Month)
[ "חודש שעבר"
]
, examples (datetime (2013, 3, 0, 0, 0, 0) Month)
[ "חודש הבא"
]
, examples (datetime (2012, 0, 0, 0, 0, 0) Year)
-- "שנה שעברה"
[
]
, examples (datetime (2014, 0, 0, 0, 0, 0) Year)
[ "שנה הבאה"
]
, examples (datetime (2013, 2, 10, 0, 0, 0) Day)
[ "ראשון בשבוע שעבר"
]
, examples (datetime (2013, 2, 5, 0, 0, 0) Day)
[ "שלישי האחרון"
]
, examples (datetime (2013, 2, 20, 0, 0, 0) Day)
[ "רביעי שבוע הבא"
, "רביעי הבא"
]
, examples (datetime (2013, 2, 22, 0, 0, 0) Day)
[ "שישי הבא"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "רביעי הזה"
]
, examples (datetime (2013, 3, 25, 0, 0, 0) Day)
[ "שני האחרון של מרץ"
]
, examples (datetime (2014, 3, 30, 0, 0, 0) Day)
[ "ראשון האחרון של מרץ 2014"
]
, examples (datetime (2013, 10, 3, 0, 0, 0) Day)
[ "השלישי באוקטובר"
]
, examples (datetime (2013, 10, 1, 0, 0, 0) Day)
" יום שלישי הראשון של אוקטובר "
[
]
, examples (datetime (2013, 2, 13, 3, 18, 0) Minute)
[ "3:18am"
, "3:18a"
]
, examples (datetime (2013, 2, 12, 15, 0, 0) Hour)
" @ 3 pm "
[ "ב 3pm"
, "3PM"
, "3pm"
]
, examples (datetime (2013, 2, 12, 15, 0, 0) Hour)
" באיזור שלוש בצהריים "
[
]
, examples (datetime (2013, 2, 12, 15, 15, 0) Minute)
-- "3:15 בצהריים"
-- "בשלוש ורבע בצהריים"
[ "15:15"
, "3:15pm"
, "3:15PM"
, "3:15p"
]
, examples (datetime (2013, 2, 12, 15, 20, 0) Minute)
-- "3:20 בצהריים"
-- "3:20 צהריים"
" עשרים אחרי שלוש בצהריים "
[ "3:20p"
]
, examples (datetime (2013, 2, 12, 15, 30, 0) Minute)
-- "בשלוש וחצי בערב"
-- "שלוש וחצי בצהריים"
[ "15:30"
, "3:30pm"
, "3:30PM"
, "330 p.m."
, "3:30 p m"
]
, examples (datetime (2013, 2, 12, 15, 23, 24) Second)
[ "15:23:24"
]
, examples (datetime (2013, 2, 12, 11, 45, 0) Minute)
[ "רבע ל12"
, "11:45am"
]
, examples (datetime (2013, 9, 20, 19, 30, 0) Minute)
" בשבע וחצי בערב ביום "
[
]
, examples (datetime (2013, 2, 16, 9, 0, 0) Hour)
[ "בתשע בבוקר בשבת"
]
, examples (datetime (2014, 7, 18, 19, 0, 0) Minute)
[ "שישי, יולי 18, 2014 07:00 PM"
]
, examples (datetime (2013, 2, 12, 4, 32, 0) Second)
[ "בעוד 2 דקות"
]
, examples (datetime (2013, 2, 12, 5, 30, 0) Second)
[ "בעוד 60 דקות"
]
, examples (datetime (2013, 2, 12, 4, 45, 0) Second)
[ "בעוד רבע שעה"
]
, examples (datetime (2013, 2, 12, 5, 0, 0) Second)
[ "בעוד חצי שעה"
]
, examples (datetime (2013, 2, 13, 4, 30, 0) Minute)
[ "בעוד 24 שעות"
, "בעוד עשרים וארבע שעות"
]
, examples (datetime (2013, 2, 19, 4, 0, 0) Hour)
[ "בעוד שבעה ימים"
]
, examples (datetime (2013, 2, 5, 4, 0, 0) Hour)
[ "לפני שבעה ימים"
]
, examples (datetime (2012, 11, 12, 0, 0, 0) Day)
" לפני שלושה "
[
]
, examples (datetime (1954, 0, 0, 0, 0, 0) Year)
[ "1954"
]
, examples (datetimeInterval ((2013, 2, 12, 18, 0, 0), (2013, 2, 13, 0, 0, 0)) Hour)
[ "הערב"
, "היום בערב"
]
, examples (datetimeInterval ((2013, 2, 8, 18, 0, 0), (2013, 2, 11, 0, 0, 0)) Hour)
[ "בסופ״ש האחרון"
]
, examples (datetimeInterval ((2013, 2, 13, 18, 0, 0), (2013, 2, 14, 0, 0, 0)) Hour)
[ "מחר בערב"
]
, examples (datetimeInterval ((2013, 2, 13, 12, 0, 0), (2013, 2, 13, 14, 0, 0)) Hour)
[ "מחר בצהריים"
, "מחר צהריים"
]
, examples (datetimeInterval ((2013, 2, 11, 18, 0, 0), (2013, 2, 12, 0, 0, 0)) Hour)
[ "אתמול בערב"
]
, examples (datetimeInterval ((2013, 2, 15, 18, 0, 0), (2013, 2, 18, 0, 0, 0)) Hour)
[ "בסופ״ש הזה"
]
, examples (datetimeInterval ((2013, 2, 18, 4, 0, 0), (2013, 2, 18, 12, 0, 0)) Hour)
[ "שני בבוקר"
]
]
| null | https://raw.githubusercontent.com/facebook/duckling/72f45e8e2c7385f41f2f8b1f063e7b5daa6dca94/Duckling/Time/HE/Corpus.hs | haskell | All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
# LANGUAGE OverloadedStrings #
"שני השמונה עשרה לפברואר"
"הראשון למרץ"
"ה1 למרץ"
"חמש עשרה לחודש"
"ב15 לחודש"
"ב15 החודש"
"שני, השמונה עשרה לפברואר"
"יום שני, ה18 לפברואר"
"שנה שעברה"
"3:15 בצהריים"
"בשלוש ורבע בצהריים"
"3:20 בצהריים"
"3:20 צהריים"
"בשלוש וחצי בערב"
"שלוש וחצי בצהריים" | Copyright ( c ) 2016 - present , Facebook , Inc.
module Duckling.Time.HE.Corpus
( corpus
) where
import Data.String
import Prelude
import Duckling.Locale
import Duckling.Resolve
import Duckling.Time.Corpus
import Duckling.Time.Types hiding (Month)
import Duckling.TimeGrain.Types hiding (add)
import Duckling.Testing.Types hiding (examples)
corpus :: Corpus
corpus = (testContext {locale = makeLocale HE Nothing}, testOptions, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (datetime (2013, 2, 12, 4, 30, 0) Second)
[ "עכשיו"
, "מייד"
]
, examples (datetime (2013, 2, 12, 0, 0, 0) Day)
[ "היום"
]
, examples (datetime (2013, 2, 11, 0, 0, 0) Day)
[ "אתמול"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "מחר"
]
, examples (datetime (2013, 2, 17, 0, 0, 0) Day)
[ "ראשון"
, "יום ראשון"
, "בראשון הזה"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "יום שני"
, "שני"
, "שני הזה"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "שני 18 לפברואר"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
" ה19 "
[ "שלישי"
, "יום שלישי התשעה עשר"
]
, examples (datetime (2013, 2, 14, 0, 0, 0) Day)
[ "חמישי"
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "שישי"
]
, examples (datetime (2013, 2, 16, 0, 0, 0) Day)
[ "שבת"
]
, examples (datetime (2013, 2, 17, 0, 0, 0) Day)
[ "ראשון"
]
, examples (datetime (2013, 3, 1, 0, 0, 0) Day)
[ "1 למרץ"
]
, examples (datetime (2013, 3, 3, 0, 0, 0) Day)
[ "במרץ 3"
]
, examples (datetime (2013, 3, 15, 0, 0, 0) Day)
[ "באמצע מרץ"
]
, examples (datetime (2015, 3, 3, 0, 0, 0) Day)
" השלישי למרץ 2015 "
[ "3 למרץ 2015"
, "שלושה במרץ 2015"
, "3/3/2015"
, "3/3/15"
, "2015-3-3"
, "2015-03-03"
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "ה15 בפברואר"
, "15 לפברואר"
, "2/15"
, "ב 2/15"
, "פברואר 15"
]
, examples (datetime (2013, 8, 8, 0, 0, 0) Day)
[ "אוגוסט 8"
]
, examples (datetime (2014, 10, 0, 0, 0, 0) Month)
[ "אוקטובר 2014"
]
, examples (datetime (1974, 10, 31, 0, 0, 0) Day)
[ "10/31/1974"
, "10/31/74"
, "10-31-74"
]
, examples (datetime (2015, 4, 14, 0, 0, 0) Day)
[ "14 לאפריל 2015"
, "אפריל 14, 2015"
]
, examples (datetime (2013, 2, 22, 0, 0, 0) Day)
[ "שישי הבא"
]
, examples (datetime (2013, 3, 0, 0, 0, 0) Month)
[ "מרץ הבא"
]
, examples (datetime (2013, 2, 10, 0, 0, 0) Day)
[ "ראשון, 10 לפברואר"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[
]
, examples (datetime (2013, 2, 11, 0, 0, 0) Week)
[ "בשבוע הזה"
]
, examples (datetime (2013, 2, 4, 0, 0, 0) Week)
[ "שבוע שעבר"
, "שבוע האחרון"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Week)
[ "שבוע הבא"
]
, examples (datetime (2013, 1, 0, 0, 0, 0) Month)
[ "חודש שעבר"
]
, examples (datetime (2013, 3, 0, 0, 0, 0) Month)
[ "חודש הבא"
]
, examples (datetime (2012, 0, 0, 0, 0, 0) Year)
[
]
, examples (datetime (2014, 0, 0, 0, 0, 0) Year)
[ "שנה הבאה"
]
, examples (datetime (2013, 2, 10, 0, 0, 0) Day)
[ "ראשון בשבוע שעבר"
]
, examples (datetime (2013, 2, 5, 0, 0, 0) Day)
[ "שלישי האחרון"
]
, examples (datetime (2013, 2, 20, 0, 0, 0) Day)
[ "רביעי שבוע הבא"
, "רביעי הבא"
]
, examples (datetime (2013, 2, 22, 0, 0, 0) Day)
[ "שישי הבא"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "רביעי הזה"
]
, examples (datetime (2013, 3, 25, 0, 0, 0) Day)
[ "שני האחרון של מרץ"
]
, examples (datetime (2014, 3, 30, 0, 0, 0) Day)
[ "ראשון האחרון של מרץ 2014"
]
, examples (datetime (2013, 10, 3, 0, 0, 0) Day)
[ "השלישי באוקטובר"
]
, examples (datetime (2013, 10, 1, 0, 0, 0) Day)
" יום שלישי הראשון של אוקטובר "
[
]
, examples (datetime (2013, 2, 13, 3, 18, 0) Minute)
[ "3:18am"
, "3:18a"
]
, examples (datetime (2013, 2, 12, 15, 0, 0) Hour)
" @ 3 pm "
[ "ב 3pm"
, "3PM"
, "3pm"
]
, examples (datetime (2013, 2, 12, 15, 0, 0) Hour)
" באיזור שלוש בצהריים "
[
]
, examples (datetime (2013, 2, 12, 15, 15, 0) Minute)
[ "15:15"
, "3:15pm"
, "3:15PM"
, "3:15p"
]
, examples (datetime (2013, 2, 12, 15, 20, 0) Minute)
" עשרים אחרי שלוש בצהריים "
[ "3:20p"
]
, examples (datetime (2013, 2, 12, 15, 30, 0) Minute)
[ "15:30"
, "3:30pm"
, "3:30PM"
, "330 p.m."
, "3:30 p m"
]
, examples (datetime (2013, 2, 12, 15, 23, 24) Second)
[ "15:23:24"
]
, examples (datetime (2013, 2, 12, 11, 45, 0) Minute)
[ "רבע ל12"
, "11:45am"
]
, examples (datetime (2013, 9, 20, 19, 30, 0) Minute)
" בשבע וחצי בערב ביום "
[
]
, examples (datetime (2013, 2, 16, 9, 0, 0) Hour)
[ "בתשע בבוקר בשבת"
]
, examples (datetime (2014, 7, 18, 19, 0, 0) Minute)
[ "שישי, יולי 18, 2014 07:00 PM"
]
, examples (datetime (2013, 2, 12, 4, 32, 0) Second)
[ "בעוד 2 דקות"
]
, examples (datetime (2013, 2, 12, 5, 30, 0) Second)
[ "בעוד 60 דקות"
]
, examples (datetime (2013, 2, 12, 4, 45, 0) Second)
[ "בעוד רבע שעה"
]
, examples (datetime (2013, 2, 12, 5, 0, 0) Second)
[ "בעוד חצי שעה"
]
, examples (datetime (2013, 2, 13, 4, 30, 0) Minute)
[ "בעוד 24 שעות"
, "בעוד עשרים וארבע שעות"
]
, examples (datetime (2013, 2, 19, 4, 0, 0) Hour)
[ "בעוד שבעה ימים"
]
, examples (datetime (2013, 2, 5, 4, 0, 0) Hour)
[ "לפני שבעה ימים"
]
, examples (datetime (2012, 11, 12, 0, 0, 0) Day)
" לפני שלושה "
[
]
, examples (datetime (1954, 0, 0, 0, 0, 0) Year)
[ "1954"
]
, examples (datetimeInterval ((2013, 2, 12, 18, 0, 0), (2013, 2, 13, 0, 0, 0)) Hour)
[ "הערב"
, "היום בערב"
]
, examples (datetimeInterval ((2013, 2, 8, 18, 0, 0), (2013, 2, 11, 0, 0, 0)) Hour)
[ "בסופ״ש האחרון"
]
, examples (datetimeInterval ((2013, 2, 13, 18, 0, 0), (2013, 2, 14, 0, 0, 0)) Hour)
[ "מחר בערב"
]
, examples (datetimeInterval ((2013, 2, 13, 12, 0, 0), (2013, 2, 13, 14, 0, 0)) Hour)
[ "מחר בצהריים"
, "מחר צהריים"
]
, examples (datetimeInterval ((2013, 2, 11, 18, 0, 0), (2013, 2, 12, 0, 0, 0)) Hour)
[ "אתמול בערב"
]
, examples (datetimeInterval ((2013, 2, 15, 18, 0, 0), (2013, 2, 18, 0, 0, 0)) Hour)
[ "בסופ״ש הזה"
]
, examples (datetimeInterval ((2013, 2, 18, 4, 0, 0), (2013, 2, 18, 12, 0, 0)) Hour)
[ "שני בבוקר"
]
]
|
e7bcbea1bbaed7f6e54be50b136b4e7a018bd5bd6b3c773af20db514785d72f1 | stchang/macrotypes | lang.rkt | #lang racket/base
# % module - begin is from macrotypes / typecheck - core
(provide (all-from-out macrotypes/typecheck-core
"turnstile.rkt")
(for-syntax (all-from-out racket syntax/parse))
(for-meta 2 (all-from-out racket/base syntax/parse racket/syntax)))
(require (except-in macrotypes/typecheck-core define-syntax-category)
"turnstile.rkt"
(for-syntax (except-in racket extends)
syntax/parse)
(for-meta 2 racket/base syntax/parse racket/syntax))
| null | https://raw.githubusercontent.com/stchang/macrotypes/05ec31f2e1fe0ddd653211e041e06c6c8071ffa6/turnstile-lib/turnstile/lang.rkt | racket | #lang racket/base
# % module - begin is from macrotypes / typecheck - core
(provide (all-from-out macrotypes/typecheck-core
"turnstile.rkt")
(for-syntax (all-from-out racket syntax/parse))
(for-meta 2 (all-from-out racket/base syntax/parse racket/syntax)))
(require (except-in macrotypes/typecheck-core define-syntax-category)
"turnstile.rkt"
(for-syntax (except-in racket extends)
syntax/parse)
(for-meta 2 racket/base syntax/parse racket/syntax))
| |
4cfd1543c34ee9b0f09427815e635f5dc4b0c8e327546a955a27ae3c3de3bf5c | footprintanalytics/footprint-web | honeysql_extensions_test.clj | (ns metabase.util.honeysql-extensions-test
(:require [clojure.test :refer :all]
[honeysql.core :as hsql]
[honeysql.format :as hformat]
[metabase.test :as mt]
[metabase.util.honeysql-extensions :as hx])
(:import metabase.util.honeysql_extensions.Identifier))
(deftest ^:parallel format-test
(testing "Basic format test not including a specific quoting option"
(is (= ["setting"]
(hformat/format :setting))))
(testing "`:h2` quoting will uppercase and quote the identifier"
(is (= ["\"SETTING\""]
(hformat/format :setting :quoting :h2)))))
(deftest ^:parallel literal-test
(testing "`literal` should be compiled to a single-quoted literal"
(is (= ["WHERE name = 'Cam'"]
(hsql/format {:where [:= :name (hx/literal "Cam")]}))))
(testing (str "`literal` should properly escape single-quotes inside the literal string double-single-quotes is how "
"to escape them in SQL")
(is (= ["WHERE name = 'Cam''s'"]
(hsql/format {:where [:= :name (hx/literal "Cam's")]}))))
(testing "`literal` should only escape single quotes that aren't already escaped -- with two single quotes..."
(is (= ["WHERE name = 'Cam''s'"]
(hsql/format {:where [:= :name (hx/literal "Cam''s")]}))))
(testing "...or with a slash"
(is (= ["WHERE name = 'Cam\\'s'"]
(hsql/format {:where [:= :name (hx/literal "Cam\\'s")]}))))
(testing "`literal` should escape strings that start with a single quote"
(is (= ["WHERE name = '''s'"]
(hsql/format {:where [:= :name (hx/literal "'s")]}))))
(testing "`literal` should handle namespaced keywords correctly"
(is (= ["WHERE name = 'ab/c'"]
(hsql/format {:where [:= :name (hx/literal :ab/c)]}))))
(testing "make sure `identifier` properly handles components with dots and both strings & keywords"
(is (= ["`A`.`B`.`C.D`.`E.F`"]
(hsql/format (hx/identifier :field "A" :B "C.D" :E.F)
:quoting :mysql))))
(testing "`identifer` should handle slashes"
(is (= ["`A/B`.`C\\D`.`E/F`"]
(hsql/format (hx/identifier :field "A/B" "C\\D" :E/F)
:quoting :mysql))))
(testing "`identifier` should also handle strings with quotes in them (ANSI)"
two double - quotes to escape , e.g. " A""B "
(is (= ["\"A\"\"B\""]
(hsql/format (hx/identifier :field "A\"B")
:quoting :ansi))))
(testing "`identifier` should also handle strings with quotes in them (MySQL)"
;; double-backticks to escape backticks seems to be the way to do it
(is (= ["`A``B`"]
(hsql/format (hx/identifier :field "A`B")
:quoting :mysql))))
(testing "`identifier` shouldn't try to change `lisp-case` to `snake-case` or vice-versa"
(is (= ["A-B.c-d.D_E.f_g"]
(hsql/format (hx/identifier :field "A-B" :c-d "D_E" :f_g))))
(is (= ["\"A-B\".\"c-d\".\"D_E\".\"f_g\""]
(hsql/format (hx/identifier :field "A-B" :c-d "D_E" :f_g)
:quoting :ansi))))
(testing "`identifier` should ignore `nil` or empty components."
(is (= ["A.B.C"]
(hsql/format (hx/identifier :field "A" "B" nil "C")))))
(testing "`identifier` should handle nested identifiers"
(is (= (hx/identifier :field "A" "B" "C" "D")
(hx/identifier :field "A" (hx/identifier :field "B" "C") "D")))
(is (= ["A.B.C.D"]
(hsql/format (hx/identifier :field "A" (hx/identifier :field "B" "C") "D")))))
(testing "the `identifier` function should unnest identifiers for you so drivers that manipulate `:components` don't need to worry about that"
(is (= (Identifier. :field ["A" "B" "C" "D"])
(hx/identifier :field "A" (hx/identifier :field "B" "C") "D"))))
(testing "the `identifier` function should remove nils so drivers that manipulate `:components` don't need to worry about that"
(is (= (Identifier. :field ["table" "field"])
(hx/identifier :field nil "table" "field"))))
(testing "the `identifier` function should convert everything to strings so drivers that manipulate `:components` don't need to worry about that"
(is (= (Identifier. :field ["keyword" "qualified/keyword"])
(hx/identifier :field :keyword :qualified/keyword)))))
(deftest h2-quoting-test
(testing (str "We provide our own quoting function for `:h2` databases. We quote and uppercase the identifier. Using "
"Java's toUpperCase method is surprisingly locale dependent. When uppercasing a string in a language "
"like Turkish, it can turn an i into an İ. This test converts a keyword with an `i` in it to verify "
"that we convert the identifier correctly using the english locale even when the user has changed the "
"locale to Turkish")
(mt/with-locale "tr"
(is (= ["\"SETTING\""]
(hformat/format :setting :quoting :h2))))))
(deftest ^:parallel ratios-test
(testing (str "test ToSql behavior for Ratios (#9246). Should convert to a double rather than leaving it as a "
"division operation. The double itself should get converted to a numeric literal")
(is (= ["SELECT 0.1 AS one_tenth"]
(hsql/format {:select [[(/ 1 10) :one_tenth]]})))))
(defn- ->sql [expr]
(hsql/format {:select [expr]}))
(deftest ^:parallel maybe-cast-test
(testing "maybe-cast should only cast things that need to be cast"
(letfn [(maybe-cast [expr]
(->sql (hx/maybe-cast "text" expr)))]
(is (= ["SELECT CAST(field AS text)"]
(maybe-cast :field)))
(testing "cast should return a typed form"
(is (= ["SELECT CAST(field AS text)"]
(maybe-cast (hx/cast "text" :field)))))
(testing "should not cast something that's already typed"
(let [typed-expr (hx/with-type-info :field {::hx/database-type "text"})]
(is (= ["SELECT field"]
(maybe-cast typed-expr)))
(testing "should work with different string/keyword and case combos"
(is (= typed-expr
(hx/maybe-cast :text typed-expr)
(hx/maybe-cast "TEXT" typed-expr)
(hx/maybe-cast :TEXT typed-expr)))))
(testing "multiple calls to maybe-cast should only cast at most once"
(is (= (hx/maybe-cast "text" :field)
(hx/maybe-cast "text" (hx/maybe-cast "text" :field))))
(is (= ["SELECT CAST(field AS text)"]
(maybe-cast (hx/maybe-cast "text" :field)))))))))
(deftest ^:parallel cast-unless-type-in-test
(letfn [(cast-unless-type-in [expr]
(first (->sql (hx/cast-unless-type-in "timestamp" #{"timestamp" "timestamptz"} expr))))]
(is (= "SELECT field"
(cast-unless-type-in (hx/with-type-info :field {::hx/database-type "timestamp"}))))
(is (= "SELECT field"
(cast-unless-type-in (hx/with-type-info :field {::hx/database-type "timestamptz"}))))
(is (= "SELECT CAST(field AS timestamp)"
(cast-unless-type-in (hx/with-type-info :field {::hx/database-type "date"}))))))
(def ^:private typed-form (hx/with-type-info :field {::hx/database-type "text"}))
(deftest ^:parallel TypedHoneySQLForm-test
(testing "should generate readable output"
(is (= (pr-str `(hx/with-type-info :field {::hx/database-type "text"}))
(pr-str typed-form)))))
(deftest ^:parallel type-info-test
(testing "should let you get info"
(is (= {::hx/database-type "text"}
(hx/type-info typed-form)))
(is (= nil
(hx/type-info :field)
(hx/type-info nil)))))
(deftest ^:parallel with-type-info-test
(testing "should let you update info"
(is (= (hx/with-type-info :field {::hx/database-type "date"})
(hx/with-type-info typed-form {::hx/database-type "date"})))
(testing "should normalize :database-type"
(is (= (hx/with-type-info :field {::hx/database-type "date"})
(hx/with-type-info typed-form {::hx/database-type "date"}))))))
(deftest ^:parallel with-database-type-info-test
(testing "should be the same as calling `with-type-info` with `::hx/database-type`"
(is (= (hx/with-type-info :field {::hx/database-type "date"})
(hx/with-database-type-info :field "date"))))
(testing "Passing `nil` should"
(testing "return untyped clause as-is"
(is (= :field
(hx/with-database-type-info :field nil))))
(testing "unwrap a typed clause"
(is (= :field
(hx/with-database-type-info (hx/with-database-type-info :field "date") nil))))))
(deftest ^:parallel is-of-type?-test
(are [expr tyype expected] (= expected (hx/is-of-type? expr tyype))
typed-form "text" true
typed-form "TEXT" true
typed-form :text true
typed-form :TEXT true
typed-form :te/xt false
typed-form "date" false
typed-form nil false
nil "date" false
:%current_date "date" false
;; I guess this behavior makes sense? I guess untyped = "is of type nil"
nil nil true
:%current_date nil true))
(deftest ^:parallel unwrap-typed-honeysql-form-test
(testing "should be able to unwrap"
(is (= :field
(hx/unwrap-typed-honeysql-form typed-form)
(hx/unwrap-typed-honeysql-form :field)))
(is (= nil
(hx/unwrap-typed-honeysql-form nil)))))
(deftest ^:parallel math-operators-propagate-type-info-test
(testing "Math operators like `+` should propagate the type info of their args\n"
just pass along type info of the first arg with type info .
(doseq [f [#'hx/+ #'hx/- #'hx/* #'hx// #'hx/mod]
x [(hx/with-database-type-info 1 "int") 1]
y [(hx/with-database-type-info 2 "INT") 2]]
(testing (str (pr-str (list f x y)) \newline)
(let [expr (f x y)]
(testing (pr-str expr)
(is (= (if (some hx/type-info [x y])
"int"
nil)
(hx/type-info->db-type (hx/type-info expr))))))))))
| null | https://raw.githubusercontent.com/footprintanalytics/footprint-web/d3090d943dd9fcea493c236f79e7ef8a36ae17fc/test/metabase/util/honeysql_extensions_test.clj | clojure | double-backticks to escape backticks seems to be the way to do it
I guess this behavior makes sense? I guess untyped = "is of type nil" | (ns metabase.util.honeysql-extensions-test
(:require [clojure.test :refer :all]
[honeysql.core :as hsql]
[honeysql.format :as hformat]
[metabase.test :as mt]
[metabase.util.honeysql-extensions :as hx])
(:import metabase.util.honeysql_extensions.Identifier))
(deftest ^:parallel format-test
(testing "Basic format test not including a specific quoting option"
(is (= ["setting"]
(hformat/format :setting))))
(testing "`:h2` quoting will uppercase and quote the identifier"
(is (= ["\"SETTING\""]
(hformat/format :setting :quoting :h2)))))
(deftest ^:parallel literal-test
(testing "`literal` should be compiled to a single-quoted literal"
(is (= ["WHERE name = 'Cam'"]
(hsql/format {:where [:= :name (hx/literal "Cam")]}))))
(testing (str "`literal` should properly escape single-quotes inside the literal string double-single-quotes is how "
"to escape them in SQL")
(is (= ["WHERE name = 'Cam''s'"]
(hsql/format {:where [:= :name (hx/literal "Cam's")]}))))
(testing "`literal` should only escape single quotes that aren't already escaped -- with two single quotes..."
(is (= ["WHERE name = 'Cam''s'"]
(hsql/format {:where [:= :name (hx/literal "Cam''s")]}))))
(testing "...or with a slash"
(is (= ["WHERE name = 'Cam\\'s'"]
(hsql/format {:where [:= :name (hx/literal "Cam\\'s")]}))))
(testing "`literal` should escape strings that start with a single quote"
(is (= ["WHERE name = '''s'"]
(hsql/format {:where [:= :name (hx/literal "'s")]}))))
(testing "`literal` should handle namespaced keywords correctly"
(is (= ["WHERE name = 'ab/c'"]
(hsql/format {:where [:= :name (hx/literal :ab/c)]}))))
(testing "make sure `identifier` properly handles components with dots and both strings & keywords"
(is (= ["`A`.`B`.`C.D`.`E.F`"]
(hsql/format (hx/identifier :field "A" :B "C.D" :E.F)
:quoting :mysql))))
(testing "`identifer` should handle slashes"
(is (= ["`A/B`.`C\\D`.`E/F`"]
(hsql/format (hx/identifier :field "A/B" "C\\D" :E/F)
:quoting :mysql))))
(testing "`identifier` should also handle strings with quotes in them (ANSI)"
two double - quotes to escape , e.g. " A""B "
(is (= ["\"A\"\"B\""]
(hsql/format (hx/identifier :field "A\"B")
:quoting :ansi))))
(testing "`identifier` should also handle strings with quotes in them (MySQL)"
(is (= ["`A``B`"]
(hsql/format (hx/identifier :field "A`B")
:quoting :mysql))))
(testing "`identifier` shouldn't try to change `lisp-case` to `snake-case` or vice-versa"
(is (= ["A-B.c-d.D_E.f_g"]
(hsql/format (hx/identifier :field "A-B" :c-d "D_E" :f_g))))
(is (= ["\"A-B\".\"c-d\".\"D_E\".\"f_g\""]
(hsql/format (hx/identifier :field "A-B" :c-d "D_E" :f_g)
:quoting :ansi))))
(testing "`identifier` should ignore `nil` or empty components."
(is (= ["A.B.C"]
(hsql/format (hx/identifier :field "A" "B" nil "C")))))
(testing "`identifier` should handle nested identifiers"
(is (= (hx/identifier :field "A" "B" "C" "D")
(hx/identifier :field "A" (hx/identifier :field "B" "C") "D")))
(is (= ["A.B.C.D"]
(hsql/format (hx/identifier :field "A" (hx/identifier :field "B" "C") "D")))))
(testing "the `identifier` function should unnest identifiers for you so drivers that manipulate `:components` don't need to worry about that"
(is (= (Identifier. :field ["A" "B" "C" "D"])
(hx/identifier :field "A" (hx/identifier :field "B" "C") "D"))))
(testing "the `identifier` function should remove nils so drivers that manipulate `:components` don't need to worry about that"
(is (= (Identifier. :field ["table" "field"])
(hx/identifier :field nil "table" "field"))))
(testing "the `identifier` function should convert everything to strings so drivers that manipulate `:components` don't need to worry about that"
(is (= (Identifier. :field ["keyword" "qualified/keyword"])
(hx/identifier :field :keyword :qualified/keyword)))))
(deftest h2-quoting-test
(testing (str "We provide our own quoting function for `:h2` databases. We quote and uppercase the identifier. Using "
"Java's toUpperCase method is surprisingly locale dependent. When uppercasing a string in a language "
"like Turkish, it can turn an i into an İ. This test converts a keyword with an `i` in it to verify "
"that we convert the identifier correctly using the english locale even when the user has changed the "
"locale to Turkish")
(mt/with-locale "tr"
(is (= ["\"SETTING\""]
(hformat/format :setting :quoting :h2))))))
(deftest ^:parallel ratios-test
(testing (str "test ToSql behavior for Ratios (#9246). Should convert to a double rather than leaving it as a "
"division operation. The double itself should get converted to a numeric literal")
(is (= ["SELECT 0.1 AS one_tenth"]
(hsql/format {:select [[(/ 1 10) :one_tenth]]})))))
(defn- ->sql [expr]
(hsql/format {:select [expr]}))
(deftest ^:parallel maybe-cast-test
(testing "maybe-cast should only cast things that need to be cast"
(letfn [(maybe-cast [expr]
(->sql (hx/maybe-cast "text" expr)))]
(is (= ["SELECT CAST(field AS text)"]
(maybe-cast :field)))
(testing "cast should return a typed form"
(is (= ["SELECT CAST(field AS text)"]
(maybe-cast (hx/cast "text" :field)))))
(testing "should not cast something that's already typed"
(let [typed-expr (hx/with-type-info :field {::hx/database-type "text"})]
(is (= ["SELECT field"]
(maybe-cast typed-expr)))
(testing "should work with different string/keyword and case combos"
(is (= typed-expr
(hx/maybe-cast :text typed-expr)
(hx/maybe-cast "TEXT" typed-expr)
(hx/maybe-cast :TEXT typed-expr)))))
(testing "multiple calls to maybe-cast should only cast at most once"
(is (= (hx/maybe-cast "text" :field)
(hx/maybe-cast "text" (hx/maybe-cast "text" :field))))
(is (= ["SELECT CAST(field AS text)"]
(maybe-cast (hx/maybe-cast "text" :field)))))))))
(deftest ^:parallel cast-unless-type-in-test
(letfn [(cast-unless-type-in [expr]
(first (->sql (hx/cast-unless-type-in "timestamp" #{"timestamp" "timestamptz"} expr))))]
(is (= "SELECT field"
(cast-unless-type-in (hx/with-type-info :field {::hx/database-type "timestamp"}))))
(is (= "SELECT field"
(cast-unless-type-in (hx/with-type-info :field {::hx/database-type "timestamptz"}))))
(is (= "SELECT CAST(field AS timestamp)"
(cast-unless-type-in (hx/with-type-info :field {::hx/database-type "date"}))))))
(def ^:private typed-form (hx/with-type-info :field {::hx/database-type "text"}))
(deftest ^:parallel TypedHoneySQLForm-test
(testing "should generate readable output"
(is (= (pr-str `(hx/with-type-info :field {::hx/database-type "text"}))
(pr-str typed-form)))))
(deftest ^:parallel type-info-test
(testing "should let you get info"
(is (= {::hx/database-type "text"}
(hx/type-info typed-form)))
(is (= nil
(hx/type-info :field)
(hx/type-info nil)))))
(deftest ^:parallel with-type-info-test
(testing "should let you update info"
(is (= (hx/with-type-info :field {::hx/database-type "date"})
(hx/with-type-info typed-form {::hx/database-type "date"})))
(testing "should normalize :database-type"
(is (= (hx/with-type-info :field {::hx/database-type "date"})
(hx/with-type-info typed-form {::hx/database-type "date"}))))))
(deftest ^:parallel with-database-type-info-test
(testing "should be the same as calling `with-type-info` with `::hx/database-type`"
(is (= (hx/with-type-info :field {::hx/database-type "date"})
(hx/with-database-type-info :field "date"))))
(testing "Passing `nil` should"
(testing "return untyped clause as-is"
(is (= :field
(hx/with-database-type-info :field nil))))
(testing "unwrap a typed clause"
(is (= :field
(hx/with-database-type-info (hx/with-database-type-info :field "date") nil))))))
(deftest ^:parallel is-of-type?-test
(are [expr tyype expected] (= expected (hx/is-of-type? expr tyype))
typed-form "text" true
typed-form "TEXT" true
typed-form :text true
typed-form :TEXT true
typed-form :te/xt false
typed-form "date" false
typed-form nil false
nil "date" false
:%current_date "date" false
nil nil true
:%current_date nil true))
(deftest ^:parallel unwrap-typed-honeysql-form-test
(testing "should be able to unwrap"
(is (= :field
(hx/unwrap-typed-honeysql-form typed-form)
(hx/unwrap-typed-honeysql-form :field)))
(is (= nil
(hx/unwrap-typed-honeysql-form nil)))))
(deftest ^:parallel math-operators-propagate-type-info-test
(testing "Math operators like `+` should propagate the type info of their args\n"
just pass along type info of the first arg with type info .
(doseq [f [#'hx/+ #'hx/- #'hx/* #'hx// #'hx/mod]
x [(hx/with-database-type-info 1 "int") 1]
y [(hx/with-database-type-info 2 "INT") 2]]
(testing (str (pr-str (list f x y)) \newline)
(let [expr (f x y)]
(testing (pr-str expr)
(is (= (if (some hx/type-info [x y])
"int"
nil)
(hx/type-info->db-type (hx/type-info expr))))))))))
|
444a90864e5c54110daa6453f83af3c58f24a1d72bbd1027667c3d6b3559d680 | rwmjones/guestfs-tools | utils.mli | virt - sparsify
* Copyright ( C ) 2011 - 2023 Red Hat Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License along
* with this program ; if not , write to the Free Software Foundation , Inc. ,
* 51 Franklin Street , Fifth Floor , Boston , USA .
* Copyright (C) 2011-2023 Red Hat Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*)
(** Utilities/common functions used in virt-sparsify only. *)
val is_read_only_lv : Guestfs.guestfs -> string -> bool
(* Return true if the filesystem is a read-only LV (RHBZ#1185561). *)
| null | https://raw.githubusercontent.com/rwmjones/guestfs-tools/57423d907270526ea664ff15601cce956353820e/sparsify/utils.mli | ocaml | * Utilities/common functions used in virt-sparsify only.
Return true if the filesystem is a read-only LV (RHBZ#1185561). | virt - sparsify
* Copyright ( C ) 2011 - 2023 Red Hat Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License along
* with this program ; if not , write to the Free Software Foundation , Inc. ,
* 51 Franklin Street , Fifth Floor , Boston , USA .
* Copyright (C) 2011-2023 Red Hat Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*)
val is_read_only_lv : Guestfs.guestfs -> string -> bool
|
9f8404069026a93f2763291ae8f820b4f587ee3167699b3d7ac32e81dd9c68b6 | dhleong/spade | defaults.clj | (ns spade.runtime.defaults
(:require [spade.container.atom :refer [->AtomStyleContainer]]))
(defonce shared-styles-atom (atom nil))
(defn create-container []
(->AtomStyleContainer shared-styles-atom))
| null | https://raw.githubusercontent.com/dhleong/spade/d77c2adcf451aa9c0b55bd0a835d53f95c7becf4/src/spade/runtime/defaults.clj | clojure | (ns spade.runtime.defaults
(:require [spade.container.atom :refer [->AtomStyleContainer]]))
(defonce shared-styles-atom (atom nil))
(defn create-container []
(->AtomStyleContainer shared-styles-atom))
| |
e097a27d18e485140a70839e72be808a98d528e1d6361492838fa16e2bbb6960 | kelamg/HtDP2e-workthrough | ex494.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex494) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
Q - Does the insertion sort > function from Auxiliary Functions that Recur need
;; an accumulator? If so, why? If not, why not?
;; A - Yes it does. It is very similar to invert in that it also uses an auxiliary
;; function (insert in this case) which recurses on the entire list as well.
| null | https://raw.githubusercontent.com/kelamg/HtDP2e-workthrough/ec05818d8b667a3c119bea8d1d22e31e72e0a958/HtDP/Accumulators/ex494.rkt | racket | about the language level of this file in a form that our tools can easily process.
an accumulator? If so, why? If not, why not?
A - Yes it does. It is very similar to invert in that it also uses an auxiliary
function (insert in this case) which recurses on the entire list as well. | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex494) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
Q - Does the insertion sort > function from Auxiliary Functions that Recur need
|
b5b744a55e0c2c503b64ab9c8c073158fbb46e1cdcc72d9fd91dbdbc074ee28b | nowl/detome | world-macros.lisp | (in-package #:detome)
(defmacro with-gensyms (syms &body body)
`(let ,(loop for s in syms collect `(,s (gensym)))
,@body))
(defmacro set-predefined-level (map)
(let ((h (length map))
(w (length (car map))))
(with-gensyms (i j)
`(progn
(setf *level-type* :predefined
*level-width* ,w
*level-height* ,h
*level* (make-array '(,h ,w)))
(loop for ,i below ,h do
(loop for ,j below ,w do
(setf (aref *level* ,i ,j)
(list (nth ,j (nth ,i ',map))))))
(clear-intensity-map)
(clear-explored-map)))))
(defmacro set-perlin-level (func)
`(progn
(setf *level-type* :perlin
*level-width* nil
*level-height* nil
*level* ,func)
(clear-intensity-map)
(clear-explored-map)))
(defmacro place-monster (name x y)
(with-gensyms (mt)
`(let ((,mt (lookup-monster-type ,name)))
(push (make-instance 'monster
:x ,x :y ,y
:name (symbol-name (gensym (name ,mt)))
:image-name (image-name ,mt)
:mon-type ,mt
:level (level ,mt)
:hp (funcall (hp-gen ,mt))
:att-r (funcall (att-r-gen ,mt))
:dmg-r (funcall (dmg-r-gen ,mt))
:def-r (funcall (def-r-gen ,mt))
:update-cb (ai-cb ,mt)
:update-cb-control '(:turns 0))
*monsters-in-level*))))
(defmacro place-item (name location)
(with-gensyms (it)
`(let ((,it (get-item-type ,name)))
(push (make-instance 'item
:location ,location
:item-type ,it)
*items-in-level*))))
(defmacro place-random-monster (min-level max-level x y)
(with-gensyms (mon)
`(let ((,mon (get-random-monster ,x ,y ,min-level ,max-level)))
(push ,mon *monsters-in-level*))))
(defmacro make-scenery (image x y)
(with-gensyms (obj)
`(let ((,obj (make-instance 'scenery :name (symbol-name (gensym ,image)))))
(setf (aref *level* ,y ,x)
(append (aref *level* ,y ,x)
(list (map-cell-number (gethash ,image *map-cells-by-name*)))))
(set-meta :image ,image ,obj)
(set-meta :x ,x ,obj)
(set-meta :y ,y ,obj)
(push ,obj *scenery-in-level*))))
(defmacro place-player (x y)
`(progn
(setf (x *player*) ,x
(y *player*) ,y)
(update-intensity-map ,x ,y 1.0)
(move-map-window-if-needed)))
| null | https://raw.githubusercontent.com/nowl/detome/108ed05427a34a7d42de8ecac384aca63a8ab9e0/world-macros.lisp | lisp | (in-package #:detome)
(defmacro with-gensyms (syms &body body)
`(let ,(loop for s in syms collect `(,s (gensym)))
,@body))
(defmacro set-predefined-level (map)
(let ((h (length map))
(w (length (car map))))
(with-gensyms (i j)
`(progn
(setf *level-type* :predefined
*level-width* ,w
*level-height* ,h
*level* (make-array '(,h ,w)))
(loop for ,i below ,h do
(loop for ,j below ,w do
(setf (aref *level* ,i ,j)
(list (nth ,j (nth ,i ',map))))))
(clear-intensity-map)
(clear-explored-map)))))
(defmacro set-perlin-level (func)
`(progn
(setf *level-type* :perlin
*level-width* nil
*level-height* nil
*level* ,func)
(clear-intensity-map)
(clear-explored-map)))
(defmacro place-monster (name x y)
(with-gensyms (mt)
`(let ((,mt (lookup-monster-type ,name)))
(push (make-instance 'monster
:x ,x :y ,y
:name (symbol-name (gensym (name ,mt)))
:image-name (image-name ,mt)
:mon-type ,mt
:level (level ,mt)
:hp (funcall (hp-gen ,mt))
:att-r (funcall (att-r-gen ,mt))
:dmg-r (funcall (dmg-r-gen ,mt))
:def-r (funcall (def-r-gen ,mt))
:update-cb (ai-cb ,mt)
:update-cb-control '(:turns 0))
*monsters-in-level*))))
(defmacro place-item (name location)
(with-gensyms (it)
`(let ((,it (get-item-type ,name)))
(push (make-instance 'item
:location ,location
:item-type ,it)
*items-in-level*))))
(defmacro place-random-monster (min-level max-level x y)
(with-gensyms (mon)
`(let ((,mon (get-random-monster ,x ,y ,min-level ,max-level)))
(push ,mon *monsters-in-level*))))
(defmacro make-scenery (image x y)
(with-gensyms (obj)
`(let ((,obj (make-instance 'scenery :name (symbol-name (gensym ,image)))))
(setf (aref *level* ,y ,x)
(append (aref *level* ,y ,x)
(list (map-cell-number (gethash ,image *map-cells-by-name*)))))
(set-meta :image ,image ,obj)
(set-meta :x ,x ,obj)
(set-meta :y ,y ,obj)
(push ,obj *scenery-in-level*))))
(defmacro place-player (x y)
`(progn
(setf (x *player*) ,x
(y *player*) ,y)
(update-intensity-map ,x ,y 1.0)
(move-map-window-if-needed)))
| |
dd60a802d3dfa9bb0bf4df1ed7cef5278796f240f7d0f15cff479bdf2969556b | Metaxal/bazaar | keyword.rkt | #lang racket/base
(require define2
racket/dict
racket/list)
(provide keyword-apply/dict
keyword-apply/simple
list->pos+kw)
(module+ test
(require rackunit))
;; proc : procedure? ; procedure to apply
;; kw-dict : dict? ; dictionary of keywords and values
;; A key can be either a keyword or a symbol that is turned into a keyword.
;; largs : list? ; positional arguments
;; Returns the result of the application of proc to the positional arguments
;; and to the keyword arguments.
(define (keyword-apply/dict proc kw-dict largs)
(define alist
(sort
(for/list ([(k v) (in-dict kw-dict)])
(cons
(cond [(keyword? k) k]
[(symbol? k) (string->keyword (symbol->string k))]
[else (error "Not a keyword or symbol:" k)])
v))
keyword<? #:key car))
(keyword-apply proc (map car alist) (map cdr alist) largs))
;; Turns a flat list of arguments to a list of positional arguments and a dictionary of keyword/value
(define (list->pos+kw l)
(let loop ([l l] [pos-args '()] [kw-dict '()])
(if (empty? l)
(values (reverse pos-args) kw-dict)
(let ([x (first l)]
[l (rest l)])
(if (keyword? x)
(if (empty? l)
(error "Keyword must be followed by a value")
(let ([y (first l)]
[l (rest l)])
(loop l pos-args (cons (cons x y) kw-dict))))
(loop l (cons x pos-args) kw-dict))))))
(define (keyword-apply/simple proc args)
(let-values ([(pos kw) (list->pos+kw args)])
(keyword-apply/dict proc kw pos)))
(module+ test
(define (f a b #:c c #:d [d 10])
(list a b c d))
(check-equal?
(keyword-apply/dict f '((#:d . 4) (c . 3)) '(1 2))
'(1 2 3 4))
(check-equal?
(keyword-apply/dict f (hash '#:d 4 'c 3) '(1 2))
'(1 2 3 4))
(let ([kw-dict '((d . 4))]
[largs '(2)])
(check-equal?
(keyword-apply/dict f
(list* '(c . 3) kw-dict)
(list* 1 largs))
'(1 2 3 4)))
(check-equal?
(keyword-apply/simple f '(1 #:d 4 #:c 3 2))
'(1 2 3 4))
)
;; TODO: Pass-through function that takes a function f, a set of arguments and keyword arguments
;; and returns a specialized version of f for these arguments
| null | https://raw.githubusercontent.com/Metaxal/bazaar/2968178aa62288c8ff41018a42dd306afb458046/keyword.rkt | racket | proc : procedure? ; procedure to apply
kw-dict : dict? ; dictionary of keywords and values
A key can be either a keyword or a symbol that is turned into a keyword.
largs : list? ; positional arguments
Returns the result of the application of proc to the positional arguments
and to the keyword arguments.
Turns a flat list of arguments to a list of positional arguments and a dictionary of keyword/value
TODO: Pass-through function that takes a function f, a set of arguments and keyword arguments
and returns a specialized version of f for these arguments | #lang racket/base
(require define2
racket/dict
racket/list)
(provide keyword-apply/dict
keyword-apply/simple
list->pos+kw)
(module+ test
(require rackunit))
(define (keyword-apply/dict proc kw-dict largs)
(define alist
(sort
(for/list ([(k v) (in-dict kw-dict)])
(cons
(cond [(keyword? k) k]
[(symbol? k) (string->keyword (symbol->string k))]
[else (error "Not a keyword or symbol:" k)])
v))
keyword<? #:key car))
(keyword-apply proc (map car alist) (map cdr alist) largs))
(define (list->pos+kw l)
(let loop ([l l] [pos-args '()] [kw-dict '()])
(if (empty? l)
(values (reverse pos-args) kw-dict)
(let ([x (first l)]
[l (rest l)])
(if (keyword? x)
(if (empty? l)
(error "Keyword must be followed by a value")
(let ([y (first l)]
[l (rest l)])
(loop l pos-args (cons (cons x y) kw-dict))))
(loop l (cons x pos-args) kw-dict))))))
(define (keyword-apply/simple proc args)
(let-values ([(pos kw) (list->pos+kw args)])
(keyword-apply/dict proc kw pos)))
(module+ test
(define (f a b #:c c #:d [d 10])
(list a b c d))
(check-equal?
(keyword-apply/dict f '((#:d . 4) (c . 3)) '(1 2))
'(1 2 3 4))
(check-equal?
(keyword-apply/dict f (hash '#:d 4 'c 3) '(1 2))
'(1 2 3 4))
(let ([kw-dict '((d . 4))]
[largs '(2)])
(check-equal?
(keyword-apply/dict f
(list* '(c . 3) kw-dict)
(list* 1 largs))
'(1 2 3 4)))
(check-equal?
(keyword-apply/simple f '(1 #:d 4 #:c 3 2))
'(1 2 3 4))
)
|
ab288be2aa80b9e68c20b150ee68b765b5b47a87f16086db3bec47c9c309bbc5 | mumuki/mulang | GenericSpec.hs | {-# LANGUAGE QuasiQuotes, OverloadedStrings #-}
module GenericSpec (spec) where
import Test.Hspec
import Language.Mulang.Ast
import Language.Mulang.Ast.Operator
import Language.Mulang.Identifier
import Language.Mulang.Inspector.Combiner
import Language.Mulang.Inspector.Contextualized
import Language.Mulang.Inspector.Generic
import Language.Mulang.Inspector.Literal
import Language.Mulang.Inspector.Matcher
import Language.Mulang.Inspector.Smell
import Language.Mulang.Normalizers.Haskell (haskellNormalizationOptions)
import Language.Mulang.Parsers.Haskell
import Language.Mulang.Parsers.Java (java)
import Language.Mulang.Parsers.JavaScript
import Language.Mulang.Parsers.Python (py2, py3)
import Language.Mulang.Transform.Normalizer
nhs = normalize haskellNormalizationOptions . hs
spec :: Spec
spec = do
describe "declaresEntryPoint" $ do
describe "with program declarations" $ do
it "is True when program is declared" $ do
let code = EntryPoint "main" None
declaresEntryPoint anyone code `shouldBe` True
it "is False when program is not declared" $ do
let code = js "function(){}"
declaresEntryPoint anyone code `shouldBe` False
describe "declaresVariable" $ do
it "is True when declare a variable" $ do
let code = js "function f(){ let x = 2}"
declaresVariable (named "x") code `shouldBe` True
it "is False when variable is not declared" $ do
let code = js "function f(){ let x = 2}"
declaresVariable (named "y") code `shouldBe` False
describe "assigns" $ do
it "is True when initializes a variable" $ do
assigns (named "x") (Variable "x" (MuTrue)) `shouldBe` True
it "is True when declares an attribute" $ do
assigns (named "x") (MuObject (Attribute "x" (MuTrue))) `shouldBe` True
it "is True when assigns a variable" $ do
assigns (named "x") (Assignment "x" (MuTrue)) `shouldBe` True
it "is False otherwise" $ do
assigns (named "x") (Assignment "y" (MuTrue)) `shouldBe` False
assigns (named "x") (Other Nothing Nothing) `shouldBe` False
assigns (named "x") (MuFalse) `shouldBe` False
describe "declaresFunction" $ do
describe "with function declarations, hs" $ do
it "is True when functions is declared" $ do
declaresFunction (named "f") (hs "f x = 1") `shouldBe` True
describe "with constants, hs" $ do
it "is False when constant is declared with a non lambda literal" $ do
declaresFunction (named "f") (hs "f = 2") `shouldBe` False
it "is True when constant is declared with a lambda literal" $ do
declaresFunction (named "f") (nhs "f = \\x -> x + 1") `shouldBe` True
it "is False when constant is declared with a number literal" $ do
declaresFunction (named "f") (hs "f = 3") `shouldBe` False
it "is False when constant is declared with a list literal" $ do
declaresFunction (named "f") (hs "f = []") `shouldBe` False
it "is False when constant is declared with a variable literal" $ do
declaresFunction (named "f") (hs "f = snd") `shouldBe` False
describe "with function declarations, js" $ do
it "is True when functions is declared" $ do
declaresFunction (named "f") (js "function f(x) {return 1}") `shouldBe` True
it "is True when functions is declared" $ do
declaresFunction (named "f") (js "function f(x) {return 1}") `shouldBe` True
it "is True when any functions is declared" $ do
declaresFunction anyone (js "function f(x) {return 1}") `shouldBe` True
it "is False when functions is not declared" $ do
declaresFunction (named "g") (js "function f(x) {return 1}") `shouldBe` False
describe "with variables, js" $ do
it "is False when constant is declared with a non lambda literal" $ do
declaresFunction (named "f") (js "let f = 2") `shouldBe` False
it "is True when constant is declared with a lambda literal" $ do
declaresFunction (named "f") (js "let f = function(x) {}") `shouldBe` True
it "is False when constant is declared with a number literal" $ do
declaresFunction (named "f") (js "let f = 3") `shouldBe` False
it "is False when constant is declared with a list literal" $ do
declaresFunction (named "f") (js "let f = []") `shouldBe` False
it "is False when is a method" $ do
declaresFunction (named "f") (js "let o = {f: function(){}}") `shouldBe` False
describe "with matcher" $ do
it "is True when using a matcher that matches" $ do
(declaresFunctionMatching (with isSelf) anyone) (js "function f(x) { this; return 0; }") `shouldBe` True
it "is False when using a matcher that does not match" $ do
(declaresFunctionMatching (with isSelf) anyone) (js "function f(x) { return 0; }") `shouldBe` False
it "is True when using a non literal matcher that matches" $ do
(declaresFunctionMatching (with (returnsMatching (with (isNumber 2)))) anyone) (js "function f() { return 2; }") `shouldBe` True
it "is False when using a non literal matcher that doesn't match" $ do
(declaresFunctionMatching (with (returnsMatching (with (isNumber 2)))) anyone) (js "function f() { return 3; }") `shouldBe` False
it "is False when using a literal matcher and it does not match literally" $ do
(declaresFunctionMatching (with . isNumber $ 2) anyone) (js "function f() { return 2; }") `shouldBe` False
describe "declaresComputationWithArity" $ do
describe "with function declarations, hs" $ do
it "is True when function is declared with the given arity" $ do
(declaresComputationWithArity 1) (named "f") (hs "f x = x + 1") `shouldBe` True
it "is False when function is declared with another arity" $ do
(declaresComputationWithArity 2) (named "f") (hs "f x = x + 1") `shouldBe` False
describe "with constant declaration, hs" $ do
it "is True when constant is declared with lambda of given arity" $ do
(declaresComputationWithArity 2) (named "f") (nhs "f = \\x y -> x + y") `shouldBe` True
it "is False when constant is declared with lambda of given arity" $ do
(declaresComputationWithArity 3) (named "f") (nhs "f = \\x y -> x + y") `shouldBe` False
it "is False if it is a variable" $ do
(declaresComputationWithArity 1) (named "f") (hs "f = snd") `shouldBe` False
describe "with function declarations, js" $ do
it "is True when function is declared with the given arity" $ do
(declaresComputationWithArity 1) (named "f") (js "function f(x) { return x + 1 }") `shouldBe` True
it "is False when function is declared with another arity" $ do
(declaresComputationWithArity 2) (named "f") (js "function f(x) { x + 1}") `shouldBe` False
describe "with constant declaration, js" $ do
it "is True when constant is declared with lambda of given arity" $ do
(declaresComputationWithArity 2) (named "f") (js "let f = function(x, y) { return x + y }") `shouldBe` True
describe "isLongCode" $ do
it "is False when the program has less than 16 nodes" $ do
isLongCode (js "function f() { while(true) { console.log('foo') } }") `shouldBe` False
it "is True when the program contains 16 or more nodes" $ do
isLongCode (js "function f(){Poner(Verde); Mover(Norte); Poner(Verde); Mover(Norte); Poner(Verde); Mover(Este); Poner(Verde); Mover(Este); Poner(Verde); Mover(Este); Poner(Verde); Mover(Este); Poner(Verde); Mover(Sur); Poner(Verde); Mover(Sur); Poner(Verde); Mover(Oeste); Poner(Verde); Mover(Oeste); Poner(Verde); Mover(Oeste); Poner(Verde); Poner(Verde); Mover(Norte); Poner(Verde); Mover(Norte); Poner(Verde); Mover(Este); Poner(Verde); Mover(Este); Poner(Verde); Mover(Este); Poner(Verde); Mover(Este); Poner(Verde); Mover(Sur); Poner(Verde); Mover(Sur); Poner(Verde); Mover(Oeste); Poner(Verde); Mover(Oeste); Poner(Verde); Mover(Oeste); Poner(Verde); }") `shouldBe` True
describe "uses" $ do
it "is True on direct usage in entry point" $ do
uses (named "m") (EntryPoint "main" (Reference "m")) `shouldBe` True
it "is False if there is no usage" $ do
uses (named "m") (EntryPoint "main" (Reference "f")) `shouldBe` False
describe "delegates'" $ do
it "is True when used with a scope" $ do
decontextualize (contextualized (scoped "main") (delegates' anyone)) (
Sequence [
EntryPoint "main" (Application (Reference "m") []),
SimpleProcedure "m" [] (Return (MuNumber 4))]) `shouldBe` True
describe "delegates" $ do
context "when subroutine is declared" $ do
it "is False when used with a scope" $ do
scoped "main" (delegates anyone) (
Sequence [
EntryPoint "main" (Application (Reference "m") []),
SimpleProcedure "m" [] (Return (MuNumber 4))]) `shouldBe` False
it "is True on function application in entry point" $ do
delegates (named "m") (Sequence [
EntryPoint "main" (Application (Reference "m") []),
SimpleProcedure "m" [] (Return (MuNumber 4))]) `shouldBe` True
it "is True on message send in entry point" $ do
delegates (named "m") (Sequence [
EntryPoint "main" (Send Self (Reference "m") []),
SimpleMethod "m" [] (Return (MuNumber 4))]) `shouldBe` True
it "is False on message send in entry point to an empty method" $ do
delegates (named "m") (Sequence [
EntryPoint "main" (Send Self (Reference "m") []),
SimpleMethod "m" [] None]) `shouldBe` False
it "is False on direct usage in entry point" $ do
delegates (named "m") (Sequence [
EntryPoint "main" (Reference "m"),
Class "m" Nothing (Return (MuNumber 4))]) `shouldBe` False
it "is False if there is no usage" $ do
delegates (named "m") (Sequence [
EntryPoint "main" (Reference "f"),
SimpleProcedure "m" [] (Return (MuNumber 4))]) `shouldBe` False
it "is True when delegated and a wildcard is used" $ do
delegates anyone (Sequence [
EntryPoint "main" (Application (Reference "m") []),
SimpleProcedure "m" [] (Return (MuNumber 4))]) `shouldBe` True
it "is False when not delegated and a wildcard is used" $ do
delegates anyone (Sequence [
EntryPoint "main" (Application (Reference "g") []),
SimpleProcedure "m" [] (Return (MuNumber 4))]) `shouldBe` False
context "when subroutine is not declared" $ do
it "is False on function application in entry point" $ do
delegates (named "m") (EntryPoint "main" (Application (Reference "m") [])) `shouldBe` False
it "is False on message send application in entry point" $ do
delegates (named "m") (EntryPoint "main" (Send Self (Reference "m") [])) `shouldBe` False
it "is False on direct usage in entry point" $ do
delegates (named "m") (EntryPoint "main" (Reference "m")) `shouldBe` False
it "is False if there is no usage" $ do
delegates (named "m") (EntryPoint "main" (Reference "f")) `shouldBe` False
describe "calls" $ do
it "is True on function application in entry point" $ do
calls (named "m") (EntryPoint "main" (Application (Reference "m") [])) `shouldBe` True
it "is True on message send application in entry point" $ do
calls (named "m") (EntryPoint "main" (Send Self (Reference "m") [])) `shouldBe` True
it "is False on direct usage in entry point" $ do
calls (named "m") (EntryPoint "main" (Reference "m")) `shouldBe` False
it "is False if there is no usage" $ do
calls (named "m") (EntryPoint "main" (Reference "f")) `shouldBe` False
it "is True when using a matcher that matches" $ do
(callsMatching (with . isNumber $ 1) anyone) (hs "f = g 1") `shouldBe` True
it "is False when using a matcher that does not match" $ do
(callsMatching (with . isNumber $ 1) anyone) (hs "f = g 2") `shouldBe` False
describe "callsPrimitive" $ do
it "is True on direct usage in entry point" $ do
callsPrimitive GetAt (py3 "x[5]") `shouldBe` True
callsPrimitive SetAt (py3 "x[5] = 0") `shouldBe` True
callsPrimitive Slice (py3 "x[5:6]") `shouldBe` True
callsPrimitive Size (py3 "len(x)") `shouldBe` True
it "is False if there is no usage" $ do
callsPrimitive SetAt (py3 "x[5]") `shouldBe` False
callsPrimitive Slice (py3 "x[5] = 0") `shouldBe` False
callsPrimitive Size (py3 "x[5:6]") `shouldBe` False
callsPrimitive GetAt (py3 "len(x)") `shouldBe` False
it "is True when using a matcher that matches" $ do
(callsPrimitiveMatching (with . isString $ "hello") Size) (py3 "len('hello')") `shouldBe` True
(callsPrimitiveMatching (with isLiteral) Size) (py3 "len('hello')") `shouldBe` True
(callsPrimitiveMatching (with isLiteral) Size) (py3 "len([])") `shouldBe` True
(callsPrimitiveMatching (withEvery [isAnything, isAnything, isNumber 0]) SetAt) (py3 "x['i'] = 0") `shouldBe` True
(callsPrimitiveMatching (withEvery [isAnything, isString "i", isNumber 0]) SetAt) (py3 "x['i'] = 0") `shouldBe` True
it "is False when using a matcher that does not match" $ do
(callsPrimitiveMatching (with . isString $ "hello") Size) (py3 "len('bye!!')") `shouldBe` False
(callsPrimitiveMatching (with isLiteral) Size) (py3 "len(greet)") `shouldBe` False
(callsPrimitiveMatching (with . isString $ "hello") Size) (py3 "len([])") `shouldBe` False
(callsPrimitiveMatching (withEvery [isAnything, isAnything, isNumber 0]) SetAt) (py3 "x['i'] = 5") `shouldBe` False
(callsPrimitiveMatching (withEvery [isAnything, isString "i", isNumber 0]) SetAt) (py3 "x['j'] = 0") `shouldBe` False
describe "usesLogic" $ do
it "is when it is used" $ do
usesLogic (hs "f x y = x || y") `shouldBe` True
usesLogic (hs "f x y = x && y") `shouldBe` True
usesLogic (hs "f x y = not x") `shouldBe` True
usesLogic (hs "f x y = (not) x") `shouldBe` True
usesLogic (hs "f x y = (&&) x y") `shouldBe` True
it "is is not used otherwise" $ do
usesLogic (hs "f x y = x + y") `shouldBe` False
usesLogic (hs "f x y = x") `shouldBe` False
usesLogic (hs "f x y = and x") `shouldBe` False
usesLogic (hs "f x y = or x") `shouldBe` False
describe "usesMath" $ do
it "is True when it is used in function bodies" $ do
usesMath (hs "f x y = x + y") `shouldBe` True
usesMath (hs "f x y = x * y") `shouldBe` True
usesMath (hs "f x y = x / x") `shouldBe` True
usesMath (hs "f x y = div x z") `shouldBe` True
usesMath (hs "f x y = x - y") `shouldBe` True
it "is True when it is used in named arguments" $ do
usesMath (py3 "f(x = 4 + 5)") `shouldBe` True
usesMath (py3 "f(x = 4)") `shouldBe` False
it "is True when it is used in default parameters" $ do
usesMath (py3 "def f(x = 4 + 5): pass") `shouldBe` True
usesMath (py3 "def f(x = 4): pass") `shouldBe` False
it "is True when it is used in composite literals" $ do
usesMath (py3 "{'hello': 4 + 5}") `shouldBe` True
usesMath (py3 "{'hello': 4}") `shouldBe` False
usesMath (js "{x: 4 + 5}") `shouldBe` True
usesMath (js "{x: 4}") `shouldBe` False
usesMath (js "[4+5, 0]") `shouldBe` True
usesMath (js "[9, 0]") `shouldBe` False
it "is True is not used otherwise" $ do
usesMath (hs "f x y = x") `shouldBe` False
usesMath (hs "f x y = plus x") `shouldBe` False
usesMath (hs "f x y = minus x") `shouldBe` False
usesMath (hs "f x y = x || y") `shouldBe` False
describe "usesExceptions" $ do
it "is True when a raise is used, java" $ do
usesExceptions (java "class Sample { void aMethod() { throw new RuntimeException(); } }") `shouldBe` True
it "is True when a raise is used, js" $ do
usesExceptions (js "throw new Error()") `shouldBe` True
it "is True when undefined is used, hs" $ do
usesExceptions (hs "f = undefined") `shouldBe` True
it "is True when error is used, hs" $ do
usesExceptions (hs "f = error \"ups\"") `shouldBe` True
it "is False when no raise is used, java" $ do
usesExceptions (java "class Sample { void aMethod() {} }") `shouldBe` False
it "is False when a raise is used, js" $ do
usesExceptions (js "new Error()") `shouldBe` False
it "is False when no raise is used, hs" $ do
usesExceptions (hs "f = 4") `shouldBe` False
describe "raises" $ do
it "is True when raises an expected instance exception" $ do
raises (named "RuntimeException") (java "class Sample { void aMethod() { throw new RuntimeException(); } }") `shouldBe` True
it "is True when raises an expected exception class" $ do
raises (named "Exception") (py2 "raise Exception") `shouldBe` True
it "is True when raises an expected exception application, python" $ do
raises (named "Exception") (py3 "raise Exception('ups')") `shouldBe` True
it "is True when raises an expected exception application, js" $ do
raises (named "Error") (js "throw Error('ups')") `shouldBe` True
describe "rescues" $ do
it "is True when rescues an expected exception" $ do
rescues (named "RuntimeException") (java "class Sample { void aMethod() { try { foo(); } catch (RuntimeException e) { } } }") `shouldBe` True
it "is False when rescues an unexpected exception" $ do
rescues (named "RuntimeException") (java "class Sample { void aMethod() { try { foo(); } catch (Exception e) { } } }") `shouldBe` False
describe "uses, hs" $ do
it "is True when required function is used on application" $ do
uses (named "m") (hs "y x = m x") `shouldBe` True
it "is True when required function is used as argument" $ do
uses (named "m") (hs "y x = x m") `shouldBe` True
it "is False with primitives" $ do
uses (named "&&") (hs "y x = x && z") `shouldBe` False
it "is True when required function is used as operator" $ do
uses (named "<>") (hs "y x = x <> z") `shouldBe` True
it "is False when required function is not used in constant" $ do
uses (named "m") (hs "y = 3") `shouldBe` False
it "is False when required function is not used in function" $ do
uses (named "m") (hs "y = x 3") `shouldBe` False
it "is False when reference is not present, scoped" $ do
scoped "p" (uses (named "m")) (hs "z = m 3") `shouldBe` False
it "is False when required function is blank" $ do
uses (named "" )(hs "y = m 3") `shouldBe` False
it "is False when not present in enum" $ do
uses (named "h") (hs "y = [a..b]") `shouldBe` False
it "is True when is present in enum" $ do
uses (named "h") (hs "y = [a..h]") `shouldBe` True
it "is True when required constructor is used on application" $ do
uses (named "Foo") (hs "y x = Foo x") `shouldBe` True
it "is False when required constructor is not used on application" $ do
uses (named "Foo") (hs "y x = Bar x") `shouldBe` False
it "is True when required function is used on list comprehension" $ do
uses (named "f") (hs "y x = [ f m | m <- ms ]") `shouldBe` True
it "is False when required function is not used on list comprehension" $ do
uses (named "f") (hs "y x = [ g m | m <- ms ]") `shouldBe` False
it "is True when an identifier is used within a New expression" $ do
uses (named "LinkedList") (New (Reference "LinkedList") []) `shouldBe` True
it "is True when an identifier is used within an Include expression" $ do
uses (named "Enumerable") (Include (Reference "Enumerable")) `shouldBe` True
it "is True when an identifier is used within an Implement expression" $ do
uses (named "Iterator") (Implement (Reference "Iterator")) `shouldBe` True
it "is False when variable is defined within scope" $ do
uses ( named " m " ) ( hs " y x = [ g m | m < - ms ] " ) ` shouldBe ` False
pending
it "is False when there is variable hiding in list comprehension generator" $ do
uses ( named " m " ) ( hs " y x = [ g x | m < - ms , x < - f m ] " ) ` shouldBe ` False
pending
it "is True when a function is used in a list comprehension generator" $ do
uses (named "f") (hs "y x = [ g x | m <- ms, x <- f m]") `shouldBe` True
describe "uses, js" $ do
it "is True on direct usage in function" $ do
uses (named "m") (js "function f(x) { m }") `shouldBe` True
it "is True on direct call in function" $ do
uses (named "m") (js "function f(x) { m() }") `shouldBe` True
it "is True on negated call in function" $ do
uses (named "m") (js "function f(x) { !m() }") `shouldBe` True
it "is True on direct usage of something like it in function" $ do
uses (like "m") (js "function f(x) { m2 }") `shouldBe` True
it "is True on direct usage in method" $ do
uses (named "m") (js "let o = {z: function(x) { m }}") `shouldBe` True
it "is True on direct usage in method, scoped" $ do
scoped "o" (uses (named "m")) (js "let o = {z: function(x) { m }}") `shouldBe` True
it "is False on missing usage in method, scoped" $ do
scoped "o" (uses (named "p")) (js "let o = {z: function(x) { m }}") `shouldBe` False
it "is True on usage in method, scoped twice" $ do
scopedList ["o", "z"] (uses (named "m")) (js "let o = {z: function(x) { m }}") `shouldBe` True
it "is False on missing usage in method, scoped twice" $ do
scopedList ["o", "z"] (uses (named "p")) (js "let o = {z: function(x) { m }}") `shouldBe` False
it "is False on usage in wrong method, scoped twice" $ do
scopedList ["o", "z"] (uses (named "m")) (js "let o = {p: function(x) { m }}") `shouldBe` False
it "is True on usage in method, scoped twice" $ do
transitiveList ["o", "z"] (uses (named "m")) (js "let o = {z: function(x) { m }}") `shouldBe` True
it "is False on missing usage in method, scoped twice" $ do
transitiveList ["o", "z"] (uses (named "p")) (js "let o = {z: function(x) { m }}") `shouldBe` False
it "is False on usage in wrong method, scoped twice" $ do
transitiveList ["o", "z"] (uses (named "m")) (js "let o = {p: function(x) { m }}") `shouldBe` False
it "is True through function application in function" $ do
transitive "f" (uses (named "m")) (js "function g() { m }; function f(x) { g() }") `shouldBe` True
it "is True through function application in function" $ do
transitive "f" (uses (named "m")) (js "function g(p) { return m }; function f(x) { return g(2) }") `shouldBe` True
it "is False through function application in function" $ do
transitive "f" (uses (named "m")) (js "function g() { m }; function f(x) { k() }") `shouldBe` False
it "is True through message send in function" $ do
transitive "f" (uses (named "m")) (js "let o = {g: function(){ m }}; function f(x) { o.g() }") `shouldBe` True
it "is True through message send in objects" $ do
transitive "p" (uses (named "m")) (js "let o = {g: function(){ m }}\n\
\let p = {n: function() { o.g() }}") `shouldBe` True
describe "usesPrimitive, hs" $ do
it "is True when required primitive is used on application" $ do
usesPrimitive And (hs "y x = x && z") `shouldBe` True
usesPrimitive BackwardComposition (hs "y x = x . z") `shouldBe` True
usesPrimitive Negation (hs "y x = not z") `shouldBe` True
it "is True when required primitive is used as argument" $ do
usesPrimitive And (hs "y x = f (&&) y z") `shouldBe` True
it "is False when primitive is just apparently used" $ do
usesPrimitive And (hs "y x = and x") `shouldBe` False
it "is False when primitive is not used" $ do
usesPrimitive Negation (hs "y x = m x") `shouldBe` False
describe "usesPrimitive, js" $ do
it "is True when required primitive is used on application" $ do
usesPrimitive And (js "x && z") `shouldBe` True
usesPrimitive Negation (js "function () { return !z }") `shouldBe` True
it "is False when primitive is just apparently used" $ do
usesPrimitive Or (js "or(x)") `shouldBe` False
it "is False when primitive is not used" $ do
usesPrimitive ForwardComposition (js "f(g(x))") `shouldBe` False
describe "declaresComputation" $ do
describe "with constants" $ do
it "is False when exists" $ do
declaresComputation (named "x") (hs "x = 1") `shouldBe` False
describe "with type declarations" $ do
it "is False when exists" $ do
declaresComputation (named "x") (hs "x :: Int -> Int") `shouldBe` False
describe "with function declarations" $ do
it "is True when exists" $ do
declaresComputation (named "x") (hs "x _ = True") `shouldBe` True
describe "declares" $ do
describe "with constants" $ do
it "is True when exists" $ do
declares (named "x") (hs "x = 1") `shouldBe` True
it "is False when reference doesnt exists" $ do
declares (named "y") (hs "x = 1") `shouldBe` False
describe "with types signatures" $ do
it "is False when just type signature exists" $ do
declares (named "x") (hs "x :: Int") `shouldBe` False
describe "with functions" $ do
it "is True when exists" $ do
declares (named "x") (hs "x m = 1") `shouldBe` True
it "is False when reference doesnt exists" $ do
declares (named "y") (hs "x m = 1") `shouldBe` False
describe "parses" $ do
it "is True when similar" $ do
parses hs "x = map f . map g" (hs "x = map f.map g") `shouldBe` True
it "is False when differ" $ do
parses hs "x = map g . map f" (hs "x = map f . map g") `shouldBe` False
describe "declaresRecursively" $ do
it "is True when has direct recursion in unguarded expresion" $ do
declaresRecursively (named "y") (hs "y x = y x") `shouldBe` True
it "is True when has direct recursion in guarded expresion" $ do
declaresRecursively (named "y") (hs "y x | c x = y m\n\
\ | otherwise = 0") `shouldBe` True
it "is False when there is no named recursion" $ do
declaresRecursively (named "y") (hs "y = 3") `shouldBe` False
it "is False when there is no named recursion, scoped" $ do
declaresRecursively (named "y") (hs "y = 3\nf x = f 4") `shouldBe` False
it "is True when there is any recursion" $ do
declaresRecursively anyone (hs "y x = y 3") `shouldBe` True
it "is False when there is no recursion" $ do
declaresRecursively anyone (hs "y x = 3") `shouldBe` False
describe "usesIf, hs" $ do
it "is True when present" $ do
usesIf (hs "f x = if c x then 2 else 3") `shouldBe` True
it "is False when not present" $ do
usesIf (hs "f x = x") `shouldBe` False
describe "usesIf, js" $ do
it "is True when present in function" $ do
let code = js "function f(){if(true){}else{}; return x;}"
usesIf code `shouldBe` True
it "is False when not present in function" $ do
let code = js "function f(x){return 1;}"
usesIf code `shouldBe` False
describe "returns, js" $ do
it "is True when returns with value" $ do
returns (js "function f(){ return 4 }") `shouldBe` True
it "is True when returns without value" $ do
returns (js "function f(){ return }") `shouldBe` True
it "is False when not present" $ do
returns (js "function f(x){ }") `shouldBe` False
describe "subordinatesDeclarationsTo" $ do
it "is True when procedure is declared and there are no other declarations" $ do
subordinatesDeclarationsTo (named "init") (js "function init() {}") `shouldBe` True
it "is True when function is declared and there are no other declarations" $ do
subordinatesDeclarationsTo (named "main") (js "function main() { return 0; }") `shouldBe` True
it "is False when there is no such computation" $ do
subordinatesDeclarationsTo (named "init") (js "function main() { return 0; }") `shouldBe` False
it "is True when variable is declared and there are no other declarations" $ do
subordinatesDeclarationsTo (named "init") (js "let init = 0") `shouldBe` True
it "is True when variable is declared and all other variables are used from it" $ do
subordinatesDeclarationsTo (named "init") (js "let init = x; let x = 2;") `shouldBe` True
it "is True when variable is declared and all other functions are used from it" $ do
subordinatesDeclarationsTo (named "init") (js "let init = x(); function x() { return 2 }") `shouldBe` True
it "is False when variable is declared and there are other variables not used from it" $ do
subordinatesDeclarationsTo (named "init") (js "let init = y; let x = 2;") `shouldBe` False
it "is True when procedure is declared and there are other declarations directly called from it" $ do
subordinatesDeclarationsTo (named "interact") (js "function interact() { askForName(); askForAge() } \n\
\function askForAge() {}\n\
\function askForName() {}") `shouldBe` True
it "is True when procedure is declared and all declarations are transitively called from it" $ do
subordinatesDeclarationsTo (named "interact") (js "function interact() { askForName(); askForAge() } \n\
\function askForAge() {}\n\
\function askForName() { read() }\n\
\function read() {}") `shouldBe` True
it "is False when procedure is declared and there are other declarations not called from it" $ do
subordinatesDeclarationsTo (named "interact") (js "function interact() { askForName() } \n\
\function askForAge() {}\n\
\function askForName() {}") `shouldBe` False
it "is False when procedure is declared and not all declarations are transitively called from it" $ do
subordinatesDeclarationsTo (named "interact") (js "function interact() { askForName(); askForAge() } \n\
\function askForAge() {}\n\
\function askForName() {}\n\
\function read() {}") `shouldBe` False
| null | https://raw.githubusercontent.com/mumuki/mulang/e3c7da1f3191ac3741e6916d5c78477f145bbaa9/spec/GenericSpec.hs | haskell | # LANGUAGE QuasiQuotes, OverloadedStrings # |
module GenericSpec (spec) where
import Test.Hspec
import Language.Mulang.Ast
import Language.Mulang.Ast.Operator
import Language.Mulang.Identifier
import Language.Mulang.Inspector.Combiner
import Language.Mulang.Inspector.Contextualized
import Language.Mulang.Inspector.Generic
import Language.Mulang.Inspector.Literal
import Language.Mulang.Inspector.Matcher
import Language.Mulang.Inspector.Smell
import Language.Mulang.Normalizers.Haskell (haskellNormalizationOptions)
import Language.Mulang.Parsers.Haskell
import Language.Mulang.Parsers.Java (java)
import Language.Mulang.Parsers.JavaScript
import Language.Mulang.Parsers.Python (py2, py3)
import Language.Mulang.Transform.Normalizer
-- | Parse a Haskell snippet and normalize the resulting AST with the
-- Haskell normalization options (used by tests that need canonical form).
nhs code = normalize haskellNormalizationOptions (hs code)
-- | Examples for the generic inspectors: each @describe@ group exercises one
-- inspector ('declares', 'uses', 'calls', 'delegates', ...) against small
-- Haskell, JavaScript, Java and Python snippets.
--
-- NOTE(review): the layout below restores the do-block indentation that was
-- stripped from this chunk; all snippet strings are unchanged. Two assertions
-- under "uses, hs" were originally commented out next to 'pending' and are
-- restored as comments.
spec :: Spec
spec = do
  describe "declaresEntryPoint" $ do
    describe "with program declarations" $ do
      it "is True when program is declared" $ do
        let code = EntryPoint "main" None
        declaresEntryPoint anyone code `shouldBe` True
      it "is False when program is not declared" $ do
        let code = js "function(){}"
        declaresEntryPoint anyone code `shouldBe` False
  describe "declaresVariable" $ do
    it "is True when declare a variable" $ do
      let code = js "function f(){ let x = 2}"
      declaresVariable (named "x") code `shouldBe` True
    it "is False when variable is not declared" $ do
      let code = js "function f(){ let x = 2}"
      declaresVariable (named "y") code `shouldBe` False
  describe "assigns" $ do
    it "is True when initializes a variable" $ do
      assigns (named "x") (Variable "x" (MuTrue)) `shouldBe` True
    it "is True when declares an attribute" $ do
      assigns (named "x") (MuObject (Attribute "x" (MuTrue))) `shouldBe` True
    it "is True when assigns a variable" $ do
      assigns (named "x") (Assignment "x" (MuTrue)) `shouldBe` True
    it "is False otherwise" $ do
      assigns (named "x") (Assignment "y" (MuTrue)) `shouldBe` False
      assigns (named "x") (Other Nothing Nothing) `shouldBe` False
      assigns (named "x") (MuFalse) `shouldBe` False
  describe "declaresFunction" $ do
    describe "with function declarations, hs" $ do
      it "is True when functions is declared" $ do
        declaresFunction (named "f") (hs "f x = 1") `shouldBe` True
    describe "with constants, hs" $ do
      it "is False when constant is declared with a non lambda literal" $ do
        declaresFunction (named "f") (hs "f = 2") `shouldBe` False
      it "is True when constant is declared with a lambda literal" $ do
        declaresFunction (named "f") (nhs "f = \\x -> x + 1") `shouldBe` True
      it "is False when constant is declared with a number literal" $ do
        declaresFunction (named "f") (hs "f = 3") `shouldBe` False
      it "is False when constant is declared with a list literal" $ do
        declaresFunction (named "f") (hs "f = []") `shouldBe` False
      it "is False when constant is declared with a variable literal" $ do
        declaresFunction (named "f") (hs "f = snd") `shouldBe` False
    describe "with function declarations, js" $ do
      it "is True when functions is declared" $ do
        declaresFunction (named "f") (js "function f(x) {return 1}") `shouldBe` True
      it "is True when functions is declared" $ do
        declaresFunction (named "f") (js "function f(x) {return 1}") `shouldBe` True
      it "is True when any functions is declared" $ do
        declaresFunction anyone (js "function f(x) {return 1}") `shouldBe` True
      it "is False when functions is not declared" $ do
        declaresFunction (named "g") (js "function f(x) {return 1}") `shouldBe` False
    describe "with variables, js" $ do
      it "is False when constant is declared with a non lambda literal" $ do
        declaresFunction (named "f") (js "let f = 2") `shouldBe` False
      it "is True when constant is declared with a lambda literal" $ do
        declaresFunction (named "f") (js "let f = function(x) {}") `shouldBe` True
      it "is False when constant is declared with a number literal" $ do
        declaresFunction (named "f") (js "let f = 3") `shouldBe` False
      it "is False when constant is declared with a list literal" $ do
        declaresFunction (named "f") (js "let f = []") `shouldBe` False
      it "is False when is a method" $ do
        declaresFunction (named "f") (js "let o = {f: function(){}}") `shouldBe` False
    describe "with matcher" $ do
      it "is True when using a matcher that matches" $ do
        (declaresFunctionMatching (with isSelf) anyone) (js "function f(x) { this; return 0; }") `shouldBe` True
      it "is False when using a matcher that does not match" $ do
        (declaresFunctionMatching (with isSelf) anyone) (js "function f(x) { return 0; }") `shouldBe` False
      it "is True when using a non literal matcher that matches" $ do
        (declaresFunctionMatching (with (returnsMatching (with (isNumber 2)))) anyone) (js "function f() { return 2; }") `shouldBe` True
      it "is False when using a non literal matcher that doesn't match" $ do
        (declaresFunctionMatching (with (returnsMatching (with (isNumber 2)))) anyone) (js "function f() { return 3; }") `shouldBe` False
      it "is False when using a literal matcher and it does not match literally" $ do
        (declaresFunctionMatching (with . isNumber $ 2) anyone) (js "function f() { return 2; }") `shouldBe` False
  describe "declaresComputationWithArity" $ do
    describe "with function declarations, hs" $ do
      it "is True when function is declared with the given arity" $ do
        (declaresComputationWithArity 1) (named "f") (hs "f x = x + 1") `shouldBe` True
      it "is False when function is declared with another arity" $ do
        (declaresComputationWithArity 2) (named "f") (hs "f x = x + 1") `shouldBe` False
    describe "with constant declaration, hs" $ do
      it "is True when constant is declared with lambda of given arity" $ do
        (declaresComputationWithArity 2) (named "f") (nhs "f = \\x y -> x + y") `shouldBe` True
      it "is False when constant is declared with lambda of given arity" $ do
        (declaresComputationWithArity 3) (named "f") (nhs "f = \\x y -> x + y") `shouldBe` False
      it "is False if it is a variable" $ do
        (declaresComputationWithArity 1) (named "f") (hs "f = snd") `shouldBe` False
    describe "with function declarations, js" $ do
      it "is True when function is declared with the given arity" $ do
        (declaresComputationWithArity 1) (named "f") (js "function f(x) { return x + 1 }") `shouldBe` True
      it "is False when function is declared with another arity" $ do
        (declaresComputationWithArity 2) (named "f") (js "function f(x) { x + 1}") `shouldBe` False
    describe "with constant declaration, js" $ do
      it "is True when constant is declared with lambda of given arity" $ do
        (declaresComputationWithArity 2) (named "f") (js "let f = function(x, y) { return x + y }") `shouldBe` True
  describe "isLongCode" $ do
    it "is False when the program has less than 16 nodes" $ do
      isLongCode (js "function f() { while(true) { console.log('foo') } }") `shouldBe` False
    it "is True when the program contains 16 or more nodes" $ do
      isLongCode (js "function f(){Poner(Verde); Mover(Norte); Poner(Verde); Mover(Norte); Poner(Verde); Mover(Este); Poner(Verde); Mover(Este); Poner(Verde); Mover(Este); Poner(Verde); Mover(Este); Poner(Verde); Mover(Sur); Poner(Verde); Mover(Sur); Poner(Verde); Mover(Oeste); Poner(Verde); Mover(Oeste); Poner(Verde); Mover(Oeste); Poner(Verde); Poner(Verde); Mover(Norte); Poner(Verde); Mover(Norte); Poner(Verde); Mover(Este); Poner(Verde); Mover(Este); Poner(Verde); Mover(Este); Poner(Verde); Mover(Este); Poner(Verde); Mover(Sur); Poner(Verde); Mover(Sur); Poner(Verde); Mover(Oeste); Poner(Verde); Mover(Oeste); Poner(Verde); Mover(Oeste); Poner(Verde); }") `shouldBe` True
  describe "uses" $ do
    it "is True on direct usage in entry point" $ do
      uses (named "m") (EntryPoint "main" (Reference "m")) `shouldBe` True
    it "is False if there is no usage" $ do
      uses (named "m") (EntryPoint "main" (Reference "f")) `shouldBe` False
  describe "delegates'" $ do
    it "is True when used with a scope" $ do
      decontextualize (contextualized (scoped "main") (delegates' anyone)) (
        Sequence [
          EntryPoint "main" (Application (Reference "m") []),
          SimpleProcedure "m" [] (Return (MuNumber 4))]) `shouldBe` True
  describe "delegates" $ do
    context "when subroutine is declared" $ do
      it "is False when used with a scope" $ do
        scoped "main" (delegates anyone) (
          Sequence [
            EntryPoint "main" (Application (Reference "m") []),
            SimpleProcedure "m" [] (Return (MuNumber 4))]) `shouldBe` False
      it "is True on function application in entry point" $ do
        delegates (named "m") (Sequence [
          EntryPoint "main" (Application (Reference "m") []),
          SimpleProcedure "m" [] (Return (MuNumber 4))]) `shouldBe` True
      it "is True on message send in entry point" $ do
        delegates (named "m") (Sequence [
          EntryPoint "main" (Send Self (Reference "m") []),
          SimpleMethod "m" [] (Return (MuNumber 4))]) `shouldBe` True
      it "is False on message send in entry point to an empty method" $ do
        delegates (named "m") (Sequence [
          EntryPoint "main" (Send Self (Reference "m") []),
          SimpleMethod "m" [] None]) `shouldBe` False
      it "is False on direct usage in entry point" $ do
        delegates (named "m") (Sequence [
          EntryPoint "main" (Reference "m"),
          Class "m" Nothing (Return (MuNumber 4))]) `shouldBe` False
      it "is False if there is no usage" $ do
        delegates (named "m") (Sequence [
          EntryPoint "main" (Reference "f"),
          SimpleProcedure "m" [] (Return (MuNumber 4))]) `shouldBe` False
      it "is True when delegated and a wildcard is used" $ do
        delegates anyone (Sequence [
          EntryPoint "main" (Application (Reference "m") []),
          SimpleProcedure "m" [] (Return (MuNumber 4))]) `shouldBe` True
      it "is False when not delegated and a wildcard is used" $ do
        delegates anyone (Sequence [
          EntryPoint "main" (Application (Reference "g") []),
          SimpleProcedure "m" [] (Return (MuNumber 4))]) `shouldBe` False
    context "when subroutine is not declared" $ do
      it "is False on function application in entry point" $ do
        delegates (named "m") (EntryPoint "main" (Application (Reference "m") [])) `shouldBe` False
      it "is False on message send application in entry point" $ do
        delegates (named "m") (EntryPoint "main" (Send Self (Reference "m") [])) `shouldBe` False
      it "is False on direct usage in entry point" $ do
        delegates (named "m") (EntryPoint "main" (Reference "m")) `shouldBe` False
      it "is False if there is no usage" $ do
        delegates (named "m") (EntryPoint "main" (Reference "f")) `shouldBe` False
  describe "calls" $ do
    it "is True on function application in entry point" $ do
      calls (named "m") (EntryPoint "main" (Application (Reference "m") [])) `shouldBe` True
    it "is True on message send application in entry point" $ do
      calls (named "m") (EntryPoint "main" (Send Self (Reference "m") [])) `shouldBe` True
    it "is False on direct usage in entry point" $ do
      calls (named "m") (EntryPoint "main" (Reference "m")) `shouldBe` False
    it "is False if there is no usage" $ do
      calls (named "m") (EntryPoint "main" (Reference "f")) `shouldBe` False
    it "is True when using a matcher that matches" $ do
      (callsMatching (with . isNumber $ 1) anyone) (hs "f = g 1") `shouldBe` True
    it "is False when using a matcher that does not match" $ do
      (callsMatching (with . isNumber $ 1) anyone) (hs "f = g 2") `shouldBe` False
  describe "callsPrimitive" $ do
    it "is True on direct usage in entry point" $ do
      callsPrimitive GetAt (py3 "x[5]") `shouldBe` True
      callsPrimitive SetAt (py3 "x[5] = 0") `shouldBe` True
      callsPrimitive Slice (py3 "x[5:6]") `shouldBe` True
      callsPrimitive Size (py3 "len(x)") `shouldBe` True
    it "is False if there is no usage" $ do
      callsPrimitive SetAt (py3 "x[5]") `shouldBe` False
      callsPrimitive Slice (py3 "x[5] = 0") `shouldBe` False
      callsPrimitive Size (py3 "x[5:6]") `shouldBe` False
      callsPrimitive GetAt (py3 "len(x)") `shouldBe` False
    it "is True when using a matcher that matches" $ do
      (callsPrimitiveMatching (with . isString $ "hello") Size) (py3 "len('hello')") `shouldBe` True
      (callsPrimitiveMatching (with isLiteral) Size) (py3 "len('hello')") `shouldBe` True
      (callsPrimitiveMatching (with isLiteral) Size) (py3 "len([])") `shouldBe` True
      (callsPrimitiveMatching (withEvery [isAnything, isAnything, isNumber 0]) SetAt) (py3 "x['i'] = 0") `shouldBe` True
      (callsPrimitiveMatching (withEvery [isAnything, isString "i", isNumber 0]) SetAt) (py3 "x['i'] = 0") `shouldBe` True
    it "is False when using a matcher that does not match" $ do
      (callsPrimitiveMatching (with . isString $ "hello") Size) (py3 "len('bye!!')") `shouldBe` False
      (callsPrimitiveMatching (with isLiteral) Size) (py3 "len(greet)") `shouldBe` False
      (callsPrimitiveMatching (with . isString $ "hello") Size) (py3 "len([])") `shouldBe` False
      (callsPrimitiveMatching (withEvery [isAnything, isAnything, isNumber 0]) SetAt) (py3 "x['i'] = 5") `shouldBe` False
      (callsPrimitiveMatching (withEvery [isAnything, isString "i", isNumber 0]) SetAt) (py3 "x['j'] = 0") `shouldBe` False
  describe "usesLogic" $ do
    it "is when it is used" $ do
      usesLogic (hs "f x y = x || y") `shouldBe` True
      usesLogic (hs "f x y = x && y") `shouldBe` True
      usesLogic (hs "f x y = not x") `shouldBe` True
      usesLogic (hs "f x y = (not) x") `shouldBe` True
      usesLogic (hs "f x y = (&&) x y") `shouldBe` True
    it "is is not used otherwise" $ do
      usesLogic (hs "f x y = x + y") `shouldBe` False
      usesLogic (hs "f x y = x") `shouldBe` False
      usesLogic (hs "f x y = and x") `shouldBe` False
      usesLogic (hs "f x y = or x") `shouldBe` False
  describe "usesMath" $ do
    it "is True when it is used in function bodies" $ do
      usesMath (hs "f x y = x + y") `shouldBe` True
      usesMath (hs "f x y = x * y") `shouldBe` True
      usesMath (hs "f x y = x / x") `shouldBe` True
      usesMath (hs "f x y = div x z") `shouldBe` True
      usesMath (hs "f x y = x - y") `shouldBe` True
    it "is True when it is used in named arguments" $ do
      usesMath (py3 "f(x = 4 + 5)") `shouldBe` True
      usesMath (py3 "f(x = 4)") `shouldBe` False
    it "is True when it is used in default parameters" $ do
      usesMath (py3 "def f(x = 4 + 5): pass") `shouldBe` True
      usesMath (py3 "def f(x = 4): pass") `shouldBe` False
    it "is True when it is used in composite literals" $ do
      usesMath (py3 "{'hello': 4 + 5}") `shouldBe` True
      usesMath (py3 "{'hello': 4}") `shouldBe` False
      usesMath (js "{x: 4 + 5}") `shouldBe` True
      usesMath (js "{x: 4}") `shouldBe` False
      usesMath (js "[4+5, 0]") `shouldBe` True
      usesMath (js "[9, 0]") `shouldBe` False
    it "is True is not used otherwise" $ do
      usesMath (hs "f x y = x") `shouldBe` False
      usesMath (hs "f x y = plus x") `shouldBe` False
      usesMath (hs "f x y = minus x") `shouldBe` False
      usesMath (hs "f x y = x || y") `shouldBe` False
  describe "usesExceptions" $ do
    it "is True when a raise is used, java" $ do
      usesExceptions (java "class Sample { void aMethod() { throw new RuntimeException(); } }") `shouldBe` True
    it "is True when a raise is used, js" $ do
      usesExceptions (js "throw new Error()") `shouldBe` True
    it "is True when undefined is used, hs" $ do
      usesExceptions (hs "f = undefined") `shouldBe` True
    it "is True when error is used, hs" $ do
      usesExceptions (hs "f = error \"ups\"") `shouldBe` True
    it "is False when no raise is used, java" $ do
      usesExceptions (java "class Sample { void aMethod() {} }") `shouldBe` False
    it "is False when a raise is used, js" $ do
      usesExceptions (js "new Error()") `shouldBe` False
    it "is False when no raise is used, hs" $ do
      usesExceptions (hs "f = 4") `shouldBe` False
  describe "raises" $ do
    it "is True when raises an expected instance exception" $ do
      raises (named "RuntimeException") (java "class Sample { void aMethod() { throw new RuntimeException(); } }") `shouldBe` True
    it "is True when raises an expected exception class" $ do
      raises (named "Exception") (py2 "raise Exception") `shouldBe` True
    it "is True when raises an expected exception application, python" $ do
      raises (named "Exception") (py3 "raise Exception('ups')") `shouldBe` True
    it "is True when raises an expected exception application, js" $ do
      raises (named "Error") (js "throw Error('ups')") `shouldBe` True
  describe "rescues" $ do
    it "is True when rescues an expected exception" $ do
      rescues (named "RuntimeException") (java "class Sample { void aMethod() { try { foo(); } catch (RuntimeException e) { } } }") `shouldBe` True
    it "is False when rescues an unexpected exception" $ do
      rescues (named "RuntimeException") (java "class Sample { void aMethod() { try { foo(); } catch (Exception e) { } } }") `shouldBe` False
  describe "uses, hs" $ do
    it "is True when required function is used on application" $ do
      uses (named "m") (hs "y x = m x") `shouldBe` True
    it "is True when required function is used as argument" $ do
      uses (named "m") (hs "y x = x m") `shouldBe` True
    it "is False with primitives" $ do
      uses (named "&&") (hs "y x = x && z") `shouldBe` False
    it "is True when required function is used as operator" $ do
      uses (named "<>") (hs "y x = x <> z") `shouldBe` True
    it "is False when required function is not used in constant" $ do
      uses (named "m") (hs "y = 3") `shouldBe` False
    it "is False when required function is not used in function" $ do
      uses (named "m") (hs "y = x 3") `shouldBe` False
    it "is False when reference is not present, scoped" $ do
      scoped "p" (uses (named "m")) (hs "z = m 3") `shouldBe` False
    it "is False when required function is blank" $ do
      uses (named "") (hs "y = m 3") `shouldBe` False
    it "is False when not present in enum" $ do
      uses (named "h") (hs "y = [a..b]") `shouldBe` False
    it "is True when is present in enum" $ do
      uses (named "h") (hs "y = [a..h]") `shouldBe` True
    it "is True when required constructor is used on application" $ do
      uses (named "Foo") (hs "y x = Foo x") `shouldBe` True
    it "is False when required constructor is not used on application" $ do
      uses (named "Foo") (hs "y x = Bar x") `shouldBe` False
    it "is True when required function is used on list comprehension" $ do
      uses (named "f") (hs "y x = [ f m | m <- ms ]") `shouldBe` True
    it "is False when required function is not used on list comprehension" $ do
      uses (named "f") (hs "y x = [ g m | m <- ms ]") `shouldBe` False
    it "is True when an identifier is used within a New expression" $ do
      uses (named "LinkedList") (New (Reference "LinkedList") []) `shouldBe` True
    it "is True when an identifier is used within an Include expression" $ do
      uses (named "Enumerable") (Include (Reference "Enumerable")) `shouldBe` True
    it "is True when an identifier is used within an Implement expression" $ do
      uses (named "Iterator") (Implement (Reference "Iterator")) `shouldBe` True
    it "is False when variable is defined within scope" $ do
      -- uses (named "m") (hs "y x = [ g m | m <- ms ]") `shouldBe` False
      pending
    it "is False when there is variable hiding in list comprehension generator" $ do
      -- uses (named "m") (hs "y x = [ g x | m <- ms, x <- f m]") `shouldBe` False
      pending
    it "is True when a function is used in a list comprehension generator" $ do
      uses (named "f") (hs "y x = [ g x | m <- ms, x <- f m]") `shouldBe` True
  describe "uses, js" $ do
    it "is True on direct usage in function" $ do
      uses (named "m") (js "function f(x) { m }") `shouldBe` True
    it "is True on direct call in function" $ do
      uses (named "m") (js "function f(x) { m() }") `shouldBe` True
    it "is True on negated call in function" $ do
      uses (named "m") (js "function f(x) { !m() }") `shouldBe` True
    it "is True on direct usage of something like it in function" $ do
      uses (like "m") (js "function f(x) { m2 }") `shouldBe` True
    it "is True on direct usage in method" $ do
      uses (named "m") (js "let o = {z: function(x) { m }}") `shouldBe` True
    it "is True on direct usage in method, scoped" $ do
      scoped "o" (uses (named "m")) (js "let o = {z: function(x) { m }}") `shouldBe` True
    it "is False on missing usage in method, scoped" $ do
      scoped "o" (uses (named "p")) (js "let o = {z: function(x) { m }}") `shouldBe` False
    it "is True on usage in method, scoped twice" $ do
      scopedList ["o", "z"] (uses (named "m")) (js "let o = {z: function(x) { m }}") `shouldBe` True
    it "is False on missing usage in method, scoped twice" $ do
      scopedList ["o", "z"] (uses (named "p")) (js "let o = {z: function(x) { m }}") `shouldBe` False
    it "is False on usage in wrong method, scoped twice" $ do
      scopedList ["o", "z"] (uses (named "m")) (js "let o = {p: function(x) { m }}") `shouldBe` False
    it "is True on usage in method, scoped twice" $ do
      transitiveList ["o", "z"] (uses (named "m")) (js "let o = {z: function(x) { m }}") `shouldBe` True
    it "is False on missing usage in method, scoped twice" $ do
      transitiveList ["o", "z"] (uses (named "p")) (js "let o = {z: function(x) { m }}") `shouldBe` False
    it "is False on usage in wrong method, scoped twice" $ do
      transitiveList ["o", "z"] (uses (named "m")) (js "let o = {p: function(x) { m }}") `shouldBe` False
    it "is True through function application in function" $ do
      transitive "f" (uses (named "m")) (js "function g() { m }; function f(x) { g() }") `shouldBe` True
    it "is True through function application in function" $ do
      transitive "f" (uses (named "m")) (js "function g(p) { return m }; function f(x) { return g(2) }") `shouldBe` True
    it "is False through function application in function" $ do
      transitive "f" (uses (named "m")) (js "function g() { m }; function f(x) { k() }") `shouldBe` False
    it "is True through message send in function" $ do
      transitive "f" (uses (named "m")) (js "let o = {g: function(){ m }}; function f(x) { o.g() }") `shouldBe` True
    it "is True through message send in objects" $ do
      transitive "p" (uses (named "m")) (js "let o = {g: function(){ m }}\n\
                                           \let p = {n: function() { o.g() }}") `shouldBe` True
  describe "usesPrimitive, hs" $ do
    it "is True when required primitive is used on application" $ do
      usesPrimitive And (hs "y x = x && z") `shouldBe` True
      usesPrimitive BackwardComposition (hs "y x = x . z") `shouldBe` True
      usesPrimitive Negation (hs "y x = not z") `shouldBe` True
    it "is True when required primitive is used as argument" $ do
      usesPrimitive And (hs "y x = f (&&) y z") `shouldBe` True
    it "is False when primitive is just apparently used" $ do
      usesPrimitive And (hs "y x = and x") `shouldBe` False
    it "is False when primitive is not used" $ do
      usesPrimitive Negation (hs "y x = m x") `shouldBe` False
  describe "usesPrimitive, js" $ do
    it "is True when required primitive is used on application" $ do
      usesPrimitive And (js "x && z") `shouldBe` True
      usesPrimitive Negation (js "function () { return !z }") `shouldBe` True
    it "is False when primitive is just apparently used" $ do
      usesPrimitive Or (js "or(x)") `shouldBe` False
    it "is False when primitive is not used" $ do
      usesPrimitive ForwardComposition (js "f(g(x))") `shouldBe` False
  describe "declaresComputation" $ do
    describe "with constants" $ do
      it "is False when exists" $ do
        declaresComputation (named "x") (hs "x = 1") `shouldBe` False
    describe "with type declarations" $ do
      it "is False when exists" $ do
        declaresComputation (named "x") (hs "x :: Int -> Int") `shouldBe` False
    describe "with function declarations" $ do
      it "is True when exists" $ do
        declaresComputation (named "x") (hs "x _ = True") `shouldBe` True
  describe "declares" $ do
    describe "with constants" $ do
      it "is True when exists" $ do
        declares (named "x") (hs "x = 1") `shouldBe` True
      it "is False when reference doesnt exists" $ do
        declares (named "y") (hs "x = 1") `shouldBe` False
    describe "with types signatures" $ do
      it "is False when just type signature exists" $ do
        declares (named "x") (hs "x :: Int") `shouldBe` False
    describe "with functions" $ do
      it "is True when exists" $ do
        declares (named "x") (hs "x m = 1") `shouldBe` True
      it "is False when reference doesnt exists" $ do
        declares (named "y") (hs "x m = 1") `shouldBe` False
  describe "parses" $ do
    it "is True when similar" $ do
      parses hs "x = map f . map g" (hs "x = map f.map g") `shouldBe` True
    it "is False when differ" $ do
      parses hs "x = map g . map f" (hs "x = map f . map g") `shouldBe` False
  describe "declaresRecursively" $ do
    it "is True when has direct recursion in unguarded expresion" $ do
      declaresRecursively (named "y") (hs "y x = y x") `shouldBe` True
    it "is True when has direct recursion in guarded expresion" $ do
      declaresRecursively (named "y") (hs "y x | c x = y m\n\
                                          \    | otherwise = 0") `shouldBe` True
    it "is False when there is no named recursion" $ do
      declaresRecursively (named "y") (hs "y = 3") `shouldBe` False
    it "is False when there is no named recursion, scoped" $ do
      declaresRecursively (named "y") (hs "y = 3\nf x = f 4") `shouldBe` False
    it "is True when there is any recursion" $ do
      declaresRecursively anyone (hs "y x = y 3") `shouldBe` True
    it "is False when there is no recursion" $ do
      declaresRecursively anyone (hs "y x = 3") `shouldBe` False
  describe "usesIf, hs" $ do
    it "is True when present" $ do
      usesIf (hs "f x = if c x then 2 else 3") `shouldBe` True
    it "is False when not present" $ do
      usesIf (hs "f x = x") `shouldBe` False
  describe "usesIf, js" $ do
    it "is True when present in function" $ do
      let code = js "function f(){if(true){}else{}; return x;}"
      usesIf code `shouldBe` True
    it "is False when not present in function" $ do
      let code = js "function f(x){return 1;}"
      usesIf code `shouldBe` False
  describe "returns, js" $ do
    it "is True when returns with value" $ do
      returns (js "function f(){ return 4 }") `shouldBe` True
    it "is True when returns without value" $ do
      returns (js "function f(){ return }") `shouldBe` True
    it "is False when not present" $ do
      returns (js "function f(x){ }") `shouldBe` False
  describe "subordinatesDeclarationsTo" $ do
    it "is True when procedure is declared and there are no other declarations" $ do
      subordinatesDeclarationsTo (named "init") (js "function init() {}") `shouldBe` True
    it "is True when function is declared and there are no other declarations" $ do
      subordinatesDeclarationsTo (named "main") (js "function main() { return 0; }") `shouldBe` True
    it "is False when there is no such computation" $ do
      subordinatesDeclarationsTo (named "init") (js "function main() { return 0; }") `shouldBe` False
    it "is True when variable is declared and there are no other declarations" $ do
      subordinatesDeclarationsTo (named "init") (js "let init = 0") `shouldBe` True
    it "is True when variable is declared and all other variables are used from it" $ do
      subordinatesDeclarationsTo (named "init") (js "let init = x; let x = 2;") `shouldBe` True
    it "is True when variable is declared and all other functions are used from it" $ do
      subordinatesDeclarationsTo (named "init") (js "let init = x(); function x() { return 2 }") `shouldBe` True
    it "is False when variable is declared and there are other variables not used from it" $ do
      subordinatesDeclarationsTo (named "init") (js "let init = y; let x = 2;") `shouldBe` False
    it "is True when procedure is declared and there are other declarations directly called from it" $ do
      subordinatesDeclarationsTo (named "interact") (js "function interact() { askForName(); askForAge() } \n\
                                                       \function askForAge() {}\n\
                                                       \function askForName() {}") `shouldBe` True
    it "is True when procedure is declared and all declarations are transitively called from it" $ do
      subordinatesDeclarationsTo (named "interact") (js "function interact() { askForName(); askForAge() } \n\
                                                       \function askForAge() {}\n\
                                                       \function askForName() { read() }\n\
                                                       \function read() {}") `shouldBe` True
    it "is False when procedure is declared and there are other declarations not called from it" $ do
      subordinatesDeclarationsTo (named "interact") (js "function interact() { askForName() } \n\
                                                       \function askForAge() {}\n\
                                                       \function askForName() {}") `shouldBe` False
    it "is False when procedure is declared and not all declarations are transitively called from it" $ do
      subordinatesDeclarationsTo (named "interact") (js "function interact() { askForName(); askForAge() } \n\
                                                       \function askForAge() {}\n\
                                                       \function askForName() {}\n\
                                                       \function read() {}") `shouldBe` False
|
0faa6b541ce9df9366d9f6f6f07e9bff33f76bd0c4efabcef52d2672afa60e70 | glguy/tries | GenericTrie.hs | {-# LANGUAGE Safe #-}
{- |
This module implements an interface for working with maps.

For primitive types, like 'Int', the library automatically selects
an efficient implementation (e.g., an "IntMap").

For complex structured types, the library uses an implementation
based on tries: this is useful when using large and similar keys where
comparing for order may become expensive, and storing the distinct
keys would be inefficient.

The 'OrdKey' type allows for maps with complex keys,
where the keys are compared based on order, rather than using the
trie implementation.

All methods of 'TrieKey' can be derived automatically using
a 'GHC.Generics.Generic' instance.

@
data Demo = DemoC1 'Int' | DemoC2 'Int' 'Char' deriving 'GHC.Generics.Generic'
instance 'TrieKey' Demo
@
-}
module Data.GenericTrie
  (
  -- * Trie interface
    Trie
  , TrieKey
  -- ** Construction
  , empty
  , singleton
  , fromList
  , fromListWith
  , fromListWith'
  -- ** Updates
  , alter
  , insert
  , insertWith
  , insertWith'
  , delete
  , at
  -- ** Queries
  , member
  , notMember
  , null
  , lookup
  -- ** Folding
  , foldWithKey
  , fold
  , toList
  -- ** Traversing
  , traverseWithKey
  , traverseMaybeWithKey
  , mapMaybe
  , mapMaybeWithKey
  , filter
  , filterWithKey
  -- ** Combining maps
  , union
  , unionWith
  , unionWithKey
  , intersection
  , intersectionWith
  , intersectionWithKey
  , difference
  , differenceWith
  , differenceWithKey
  -- * Keys using 'Ord'
  , OrdKey(..)
  , EnumKey(..)
  , IntLikeKey(..)
  ) where
import Prelude ()
import Data.GenericTrie.Prelude hiding (lookup, null, filter)
import Data.List (foldl')
import Data.Maybe (isNothing, isJust)
import Data.GenericTrie.Internal
------------------------------------------------------------------------------
-- Various helpers
------------------------------------------------------------------------------
-- | Build a trie from a list of key-value pairs. When a key occurs
-- more than once, the value from the later pair wins.
fromList :: TrieKey k => [(k,v)] -> Trie k v
fromList = foldl' step empty
  where
  step acc (k,v) = insert k v acc
-- | Construct a trie from a list of key-value pairs.
-- The given function is used to combine values at the
-- same key.
fromListWith :: TrieKey k => (v -> v -> v) -> [(k,v)] -> Trie k v
fromListWith f = foldl' (\acc (k,v) -> insertWith f k v acc) empty
-- | Version of 'fromListWith' which is strict in the result of
-- the combining function.
fromListWith' :: TrieKey k => (v -> v -> v) -> [(k,v)] -> Trie k v
fromListWith' f = foldl' (\acc (k,v) -> insertWith' f k v acc) empty
-- | Construct an empty trie
empty :: TrieKey k => Trie k a
empty = trieEmpty
{-# INLINE empty #-}
-- | Test for an empty trie
null :: TrieKey k => Trie k a -> Bool
null = trieNull
{-# INLINE null #-}
-- | Lookup an element from a trie
lookup :: TrieKey k => k -> Trie k a -> Maybe a
lookup = trieLookup
{-# INLINE lookup #-}
-- | Lens for the value at a given key, in the shape expected by the
-- @lens@ library's @at@ combinator: the functorial callback receives
-- the current value ('Nothing' when the key is absent) and its result
-- decides the update ('Nothing' deletes, 'Just' inserts or replaces).
at :: (Functor f, TrieKey k) => k -> (Maybe a -> f (Maybe a)) -> Trie k a -> f (Trie k a)
at k f m = fmap aux (f mv)
  where
  -- value currently stored at the key, if any
  mv = lookup k m
  aux r = case r of
    -- callback asked for removal; delete only when a value was present,
    -- otherwise return the trie untouched
    Nothing -> maybe m (const (delete k m)) mv
    Just v' -> insert k v' m
-- | Insert an element into a trie
insert :: TrieKey k => k -> a -> Trie k a -> Trie k a
insert = trieInsert
# INLINE insert #
-- | Delete an element from a trie
delete :: TrieKey k => k -> Trie k a -> Trie k a
delete = trieDelete
{-# INLINE delete #-}
-- | Construct a trie holding a single value
singleton :: TrieKey k => k -> a -> Trie k a
singleton = trieSingleton
# INLINE singleton #
-- | Apply a function to the values of a trie and keep the elements
-- of the trie that result in a 'Just' value.
mapMaybeWithKey :: TrieKey k => (k -> a -> Maybe b) -> Trie k a -> Trie k b
mapMaybeWithKey = trieMapMaybeWithKey
# INLINE mapMaybeWithKey #
-- | Perform an action for each value in a trie and keep the elements
-- of the trie that result in a 'Just' value.
traverseMaybeWithKey :: (TrieKey k, Applicative f)
=> (k -> a -> f (Maybe b)) -> Trie k a -> f (Trie k b)
traverseMaybeWithKey = trieTraverseMaybeWithKey
# INLINE traverseMaybeWithKey #
-- | Filter the values of a trie with the given predicate.
filter :: TrieKey k => (a -> Bool) -> Trie k a -> Trie k a
filter p = filterWithKey (const p)
-- | Version of 'filter' where the predicate also gets the key.
filterWithKey :: TrieKey k => (k -> a -> Bool) -> Trie k a -> Trie k a
filterWithKey p =
  mapMaybeWithKey (\k x -> if p k x then Just x else Nothing)
-- | Fold a trie with a function of the value
fold :: TrieKey k => (a -> r -> r) -> r -> Trie k a -> r
fold = trieFoldWithKey . const
# INLINE fold #
-- | Fold a trie with a function of both key and value
foldWithKey :: TrieKey k => (k -> a -> r -> r) -> r -> Trie k a -> r
foldWithKey = trieFoldWithKey
# INLINE foldWithKey #
-- | Traverse a trie with a function of both key and value
traverseWithKey :: (TrieKey k, Applicative f) => (k -> a -> f b) -> Trie k a -> f (Trie k b)
traverseWithKey = trieTraverseWithKey
# INLINE traverseWithKey #
mergeWithKey ::
TrieKey k =>
(k -> a -> b -> Maybe c) ->
(Trie k a -> Trie k c) ->
(Trie k b -> Trie k c) ->
Trie k a -> Trie k b -> Trie k c
mergeWithKey = trieMergeWithKey
# INLINE mergeWithKey #
-- | Alter the value at the given key location.
-- The parameter function takes the value stored
-- at the given key, if one exists, and should return a value to insert at
-- that location, or 'Nothing' to delete from that location.
alter :: TrieKey k => k -> (Maybe a -> Maybe a) -> Trie k a -> Trie k a
alter = trieAlter
# INLINE alter #
-- | Insert a value at the given key. The combining function is used
-- when a value is already stored at that key. The new value is the
-- first argument to the combining function.
insertWith :: TrieKey k => (v -> v -> v) -> k -> v -> Trie k v -> Trie k v
insertWith f k v = alter k $ \mb ->
  case mb of
    Just v0 -> Just (f v v0)
    Nothing -> Just v
-- | Version of 'insertWith' that is strict in the result of combining
-- two elements.
insertWith' :: TrieKey k => (v -> v -> v) -> k -> v -> Trie k v -> Trie k v
insertWith' f k v = alter k $ \mb ->
  case mb of
    Just v0 -> Just $! f v v0
    Nothing -> Just v
-- | Returns 'True' when the 'Trie' has a value stored at the given key.
member :: TrieKey k => k -> Trie k a -> Bool
member k = isJust . lookup k
-- | Returns 'False' when the 'Trie' has a value stored at the given key.
notMember :: TrieKey k => k -> Trie k a -> Bool
notMember k = isNothing . lookup k
-- | Left-biased union of two tries
union :: TrieKey k => Trie k a -> Trie k a -> Trie k a
union = mergeWithKey (\_ a _ -> Just a) id id
-- | Union of two tries with function used to merge overlapping elements
unionWith :: TrieKey k => (a -> a -> a) -> Trie k a -> Trie k a -> Trie k a
unionWith f = mergeWithKey (\_ a b -> Just (f a b)) id id
| Union of two tries with function used to merge overlapping elements along with key
unionWithKey :: TrieKey k => (k -> a -> a -> a) -> Trie k a -> Trie k a -> Trie k a
unionWithKey f = mergeWithKey (\k a b -> Just (f k a b)) id id
| Left - biased intersection of two tries
intersection :: TrieKey k => Trie k a -> Trie k b -> Trie k a
intersection = mergeWithKey (\_ a _ -> Just a) (const empty) (const empty)
| Intersection of two tries parameterized by a combining function of the
-- values at overlapping keys
intersectionWith :: TrieKey k => (a -> b -> c) -> Trie k a -> Trie k b -> Trie k c
intersectionWith f = mergeWithKey (\_ a b -> Just (f a b)) (const empty) (const empty)
| Intersection of two tries parameterized by a combining function of the
-- key and the values at overlapping keys
intersectionWithKey :: TrieKey k => (k -> a -> b -> c) -> Trie k a -> Trie k b -> Trie k c
intersectionWithKey f = mergeWithKey (\k a b -> Just (f k a b)) (const empty) (const empty)
-- | Remove the keys of the right trie from the left trie
difference :: TrieKey k => Trie k a -> Trie k b -> Trie k a
difference = mergeWithKey (\_ _ _ -> Nothing) id (const empty)
-- | Parameterized 'difference' using a custom merge function.
-- Return 'Just' to change the value stored in left trie, or
-- 'Nothing' to remove from the left trie.
differenceWith :: TrieKey k => (a -> b -> Maybe a) -> Trie k a -> Trie k b -> Trie k a
differenceWith f = mergeWithKey (\_ -> f) id (const empty)
-- | 'differenceWith' where function also has access to the key
differenceWithKey :: TrieKey k => (k -> a -> b -> Maybe a) -> Trie k a -> Trie k b -> Trie k a
differenceWithKey f = mergeWithKey f id (const empty)
-- | Map a function over a trie filtering out elements where function returns 'Nothing'
mapMaybe :: TrieKey k => (a -> Maybe b) -> Trie k a -> Trie k b
mapMaybe f = mapMaybeWithKey (\_ -> f)
| null | https://raw.githubusercontent.com/glguy/tries/f644bd3920ffff52096a2a417233a87f113dad82/src/Data/GenericTrie.hs | haskell | # LANGUAGE Safe #
** Construction
** Updates
** Queries
** Folding
** Traversing
** Combining maps
----------------------------------------------------------------------------
Various helpers
----------------------------------------------------------------------------
| Construct a trie from a list of key-value pairs
| Construct a trie from a list of key-value pairs.
The given function is used to combine values at the
same key.
| Version of 'fromListWith' which is strict in the result of
the combining function.
| Construct an empty trie
# INLINE empty #
| Test for an empty trie
| Lookup an element from a trie
# INLINE lookup #
| Lens for the value at a given key
| Insert an element into a trie
| Delete an element from a trie
# INLINE delete #
| Construct a trie holding a single value
| Apply a function to the values of a trie and keep the elements
of the trie that result in a 'Just' value.
| Perform an action for each value in a trie and keep the elements
of the trie that result in a 'Just' value.
| Filter the values of a trie with the given predicate.
| Version of 'filter' where the predicate also gets the key.
| Fold a trie with a function of the value
| Fold a trie with a function of both key and value
| Traverse a trie with a function of both key and value
| Alter the value at the given key location.
The parameter function takes the value stored
at the given key, if one exists, and should return a value to insert at
that location, or 'Nothing' to delete from that location.
| Insert a value at the given key. The combining function is used
when a value is already stored at that key. The new value is the
| Version of 'insertWith' that is strict in the result of combining
| Returns 'True' when the 'Trie' has a value stored at the given key.
| Returns 'False' when the 'Trie' has a value stored at the given key.
values at overlapping keys
key and the values at overlapping keys
| Remove the keys of the right trie from the left trie
| Parameterized 'difference' using a custom merge function.
Return 'Just' to change the value stored in left trie, or
'Nothing' to remove from the left trie.
| 'differenceWith' where function also has access to the key
| Map a function over a trie filtering out elements where function returns 'Nothing' |
|
This module implements an interface for working with maps .
For primitive types , like ' Int ' , the library automatically selects
an efficient implementation ( e.g. , an " IntMap " ) .
For complex structured types , the library uses an implementation
based on tries : this is useful when using large and similar keys where
comparing for order may become expensive , and storing the distinct
keys would be inefficient .
The ' OrdKey ' type allows for maps with complex keys ,
where the keys are compared based on order , rather than using the
trie implementation .
All methods of ' ' can be derived automatically using
a ' GHC.Generics . Generic ' instance .
@
data Demo = DemoC1 ' Int ' | DemoC2 ' Int ' ' ' deriving ' GHC.Generics . Generic '
instance ' ' Demo
@
This module implements an interface for working with maps.
For primitive types, like 'Int', the library automatically selects
an efficient implementation (e.g., an "IntMap").
For complex structured types, the library uses an implementation
based on tries: this is useful when using large and similar keys where
comparing for order may become expensive, and storing the distinct
keys would be inefficient.
The 'OrdKey' type allows for maps with complex keys,
where the keys are compared based on order, rather than using the
trie implementation.
All methods of 'TrieKey' can be derived automatically using
a 'GHC.Generics.Generic' instance.
@
data Demo = DemoC1 'Int' | DemoC2 'Int' 'Char' deriving 'GHC.Generics.Generic'
instance 'TrieKey' Demo
@
-}
module Data.GenericTrie
(
* Trie interface
Trie
, TrieKey
, empty
, singleton
, fromList
, fromListWith
, fromListWith'
, alter
, insert
, insertWith
, insertWith'
, delete
, at
, member
, notMember
, null
, lookup
, foldWithKey
, fold
, toList
, traverseWithKey
, traverseMaybeWithKey
, mapMaybe
, mapMaybeWithKey
, filter
, filterWithKey
, union
, unionWith
, unionWithKey
, intersection
, intersectionWith
, intersectionWithKey
, difference
, differenceWith
, differenceWithKey
* Keys using ' '
, OrdKey(..)
, EnumKey(..)
, IntLikeKey(..)
) where
import Prelude ()
import Data.GenericTrie.Prelude hiding (lookup, null, filter)
import Data.List (foldl')
import Data.Maybe (isNothing, isJust)
import Data.GenericTrie.Internal
fromList :: TrieKey k => [(k,v)] -> Trie k v
fromList = foldl' (\acc (k,v) -> insert k v acc) empty
fromListWith :: TrieKey k => (v -> v -> v) -> [(k,v)] -> Trie k v
fromListWith f = foldl' (\acc (k,v) -> insertWith f k v acc) empty
fromListWith' :: TrieKey k => (v -> v -> v) -> [(k,v)] -> Trie k v
fromListWith' f = foldl' (\acc (k,v) -> insertWith' f k v acc) empty
empty :: TrieKey k => Trie k a
empty = trieEmpty
null :: TrieKey k => Trie k a -> Bool
null = trieNull
# INLINE null #
lookup :: TrieKey k => k -> Trie k a -> Maybe a
lookup = trieLookup
at :: (Functor f, TrieKey k) => k -> (Maybe a -> f (Maybe a)) -> Trie k a -> f (Trie k a)
at k f m = fmap aux (f mv)
where
mv = lookup k m
aux r = case r of
Nothing -> maybe m (const (delete k m)) mv
Just v' -> insert k v' m
insert :: TrieKey k => k -> a -> Trie k a -> Trie k a
insert = trieInsert
# INLINE insert #
delete :: TrieKey k => k -> Trie k a -> Trie k a
delete = trieDelete
singleton :: TrieKey k => k -> a -> Trie k a
singleton = trieSingleton
# INLINE singleton #
mapMaybeWithKey :: TrieKey k => (k -> a -> Maybe b) -> Trie k a -> Trie k b
mapMaybeWithKey = trieMapMaybeWithKey
# INLINE mapMaybeWithKey #
traverseMaybeWithKey :: (TrieKey k, Applicative f)
=> (k -> a -> f (Maybe b)) -> Trie k a -> f (Trie k b)
traverseMaybeWithKey = trieTraverseMaybeWithKey
# INLINE traverseMaybeWithKey #
filter :: TrieKey k => (a -> Bool) -> Trie k a -> Trie k a
filter p = filterWithKey (const p)
filterWithKey :: TrieKey k => (k -> a -> Bool) -> Trie k a -> Trie k a
filterWithKey p = mapMaybeWithKey aux
where
aux k x
| p k x = Just x
| otherwise = Nothing
fold :: TrieKey k => (a -> r -> r) -> r -> Trie k a -> r
fold = trieFoldWithKey . const
# INLINE fold #
foldWithKey :: TrieKey k => (k -> a -> r -> r) -> r -> Trie k a -> r
foldWithKey = trieFoldWithKey
# INLINE foldWithKey #
traverseWithKey :: (TrieKey k, Applicative f) => (k -> a -> f b) -> Trie k a -> f (Trie k b)
traverseWithKey = trieTraverseWithKey
# INLINE traverseWithKey #
mergeWithKey ::
TrieKey k =>
(k -> a -> b -> Maybe c) ->
(Trie k a -> Trie k c) ->
(Trie k b -> Trie k c) ->
Trie k a -> Trie k b -> Trie k c
mergeWithKey = trieMergeWithKey
# INLINE mergeWithKey #
alter :: TrieKey k => k -> (Maybe a -> Maybe a) -> Trie k a -> Trie k a
alter = trieAlter
# INLINE alter #
first argument to the combining function .
insertWith :: TrieKey k => (v -> v -> v) -> k -> v -> Trie k v -> Trie k v
insertWith f k v = alter k $ \mb ->
case mb of
Just v0 -> Just (f v v0)
Nothing -> Just v
two elements .
insertWith' :: TrieKey k => (v -> v -> v) -> k -> v -> Trie k v -> Trie k v
insertWith' f k v = alter k $ \mb ->
case mb of
Just v0 -> Just $! f v v0
Nothing -> Just v
member :: TrieKey k => k -> Trie k a -> Bool
member k t = isJust (lookup k t)
notMember :: TrieKey k => k -> Trie k a -> Bool
notMember k t = isNothing (lookup k t)
| Left - biased union of two tries
union :: TrieKey k => Trie k a -> Trie k a -> Trie k a
union = mergeWithKey (\_ a _ -> Just a) id id
| Union of two tries with function used to merge overlapping elements
unionWith :: TrieKey k => (a -> a -> a) -> Trie k a -> Trie k a -> Trie k a
unionWith f = mergeWithKey (\_ a b -> Just (f a b)) id id
| Union of two tries with function used to merge overlapping elements along with key
unionWithKey :: TrieKey k => (k -> a -> a -> a) -> Trie k a -> Trie k a -> Trie k a
unionWithKey f = mergeWithKey (\k a b -> Just (f k a b)) id id
| Left - biased intersection of two tries
intersection :: TrieKey k => Trie k a -> Trie k b -> Trie k a
intersection = mergeWithKey (\_ a _ -> Just a) (const empty) (const empty)
| Intersection of two tries parameterized by a combining function of the
intersectionWith :: TrieKey k => (a -> b -> c) -> Trie k a -> Trie k b -> Trie k c
intersectionWith f = mergeWithKey (\_ a b -> Just (f a b)) (const empty) (const empty)
| Intersection of two tries parameterized by a combining function of the
intersectionWithKey :: TrieKey k => (k -> a -> b -> c) -> Trie k a -> Trie k b -> Trie k c
intersectionWithKey f = mergeWithKey (\k a b -> Just (f k a b)) (const empty) (const empty)
difference :: TrieKey k => Trie k a -> Trie k b -> Trie k a
difference = mergeWithKey (\_ _ _ -> Nothing) id (const empty)
differenceWith :: TrieKey k => (a -> b -> Maybe a) -> Trie k a -> Trie k b -> Trie k a
differenceWith f = mergeWithKey (\_ -> f) id (const empty)
differenceWithKey :: TrieKey k => (k -> a -> b -> Maybe a) -> Trie k a -> Trie k b -> Trie k a
differenceWithKey f = mergeWithKey f id (const empty)
mapMaybe :: TrieKey k => (a -> Maybe b) -> Trie k a -> Trie k b
mapMaybe f = mapMaybeWithKey (\_ -> f)
|
6f7b2917db945ba45f97816f5e2f18730a02e6b4a5b6ec33149192da5a28af38 | onaio/milia | dataset.cljc | (ns milia.api.dataset
#? (:clj (:import [java.net URLEncoder]
[java.nio.charset StandardCharsets]))
(:refer-clojure :exclude [clone update])
(:require [chimera.seq :refer [has-keys? in?]]
[chimera.core :refer [not-nil?]]
[chimera.string :refer [get-query-params-str]]
[clojure.string :refer [join split]]
[milia.api.http :refer [parse-http]]
[milia.utils.metadata :refer [metadata-files]]
[milia.utils.remote
:refer [make-j2x-url make-client-url make-url]]
#?@(:clj [[milia.api.io :refer [multipart-options]]
[milia.utils.file :as file-utils]
[milia.utils.metadata :refer [upload-metadata-file]]
[cheshire.core :refer [generate-string]]])))
(defmulti type->endpoint (fn [datatype & _] datatype))
(defmethod type->endpoint :default [_ & {:keys [async] :or {async true}}]
(if async "forms" "data"))
(defmethod type->endpoint :filtered-dataset [_ & _] "dataviews")
(defn all
"Return all the datasets for an account."
[username]
(let [url (make-url (str "forms.json?owner=" username))]
(parse-http :get url)))
(defn public
"Return all public datasets for a specific user."
[username]
(let [url (make-url "forms" (str username ".json"))]
(when (seq username)
(parse-http :get url))))
#?(:clj
(defn- send-file-or-params
"Send request with file or params"
[method
url
{:keys [xls_file xml_file] :as params}
suppress-4xx-exceptions?]
(let [options
(cond
(not-nil? xls_file) (multipart-options xls_file "xls_file")
(not-nil? xml_file) (multipart-options xml_file "xml_file")
:else {:form-params params})]
(parse-http method url :http-options options
:suppress-4xx-exceptions? suppress-4xx-exceptions?))))
#?(:clj
(defn create
"Create a new dataset from a file."
([params]
(create params nil))
([params project-id]
(let [url (apply make-url (if project-id
["projects" project-id "forms.json"]
["forms.json"]))]
(send-file-or-params :post url params false)))))
#?(:clj
(defn patch
"Set the metadata for a dataset using PATCH. Only a subset of the
required parameters are needed."
[dataset-id params & {:keys [suppress-4xx-exceptions?]
:or {suppress-4xx-exceptions? true}}]
(let [url (make-url "forms" (str dataset-id ".json"))]
(when dataset-id
(send-file-or-params :patch url params suppress-4xx-exceptions?)))))
(defn clone
  "Clone the dataset given by ID into the account with the given username.
   Optionally accepts :project-id to clone the form into that project.
   Returns nil when dataset-id is falsy or username is blank."
  [dataset-id username & {:keys [project-id]}]
  (when (and dataset-id (seq username))
    (let [url (make-url "forms" dataset-id "clone.json")
          ;; only attach :project_id when a project was requested
          data (cond-> {:form-params {:username username}}
                 project-id (assoc-in [:form-params :project_id] project-id))]
      (parse-http
       :post url
       :http-options data
       :suppress-4xx-exceptions? true))))
(defn update
  "Set the metadata for a dataset using PUT. All parameters must be passed.
   Returns nil when dataset-id is falsy."
  [dataset-id params]
  {:pre [(has-keys? params [:created_by
                            :description
                            :downloadable
                            :owner
                            :project
                            :public
                            :public_data
                            :title
                            :uuid])]}
  ;; the original nested a second (when dataset-id ...) inside this one;
  ;; a single guard is sufficient
  (when dataset-id
    (let [url (make-url "forms" (str dataset-id ".json"))]
      (parse-http :put url :http-options {:form-params params}))))
(defn update-form-name
"Update the title of a form"
[dataset-id params]
(let [url (make-url "forms" dataset-id)]
(when dataset-id
(parse-http :put url :http-options {:form-params params}))))
(defn ^:export data
"Return the data associated with a dataset."
[dataset-id &
{:keys [format raw? must-revalidate? accept-header query-params
data-id auth-token] #?@(:cljs [:or {format "json"}])}]
(let [dataset-suffix (if format
(str dataset-id (when data-id (str "/" data-id))
"." format)
dataset-id)
url (make-url "data" dataset-suffix)
options {:query-params query-params}]
(parse-http :get url
:http-options options
:raw-response? raw?
:must-revalidate? must-revalidate?
:accept-header accept-header
:auth-token auth-token)))
(defn record
"Retrieve a record from the dataset."
[dataset-id record-id]
(let [url (make-url "data" dataset-id (str record-id ".json"))]
(parse-http :get url)))
(defn tags
"Returns tags for a dataset"
[dataset-id]
(let [url (make-url "forms" dataset-id "labels.json")]
(when dataset-id
(parse-http :get url))))
(defn add-tags
"Add tags to a dataset"
[dataset-id tags]
(let [url (make-url "forms" dataset-id "labels.json")]
(when dataset-id
(parse-http :post url :http-options {:form-params tags}))))
(defn filename-for-format
  "Build the download filename for a dataset export; csvzip exports
   are delivered as .zip archives, every other format keeps its name."
  [dataset-id format]
  (let [extension (if (= "csvzip" format) "zip" format)]
    (str dataset-id "." extension)))
(defn- options-for-format
"Return options needed to handle format."
[format]
(if (in? ["csvzip" "sav" "xls" "xlsx" "zip"] format) {:as :byte-array} {}))
;; Dispatch on the dataset type to build the URL path segments used by
;; `download`.
(defmulti type->download-path (fn [datatype & _] datatype))
(defmethod type->download-path :default
  ;; Plain forms: one "<id>.<format>?k=v&..." segment assembled from
  ;; export-options (a map of option keyword -> value).
  [_ dataset-id format export-options]
  [(str dataset-id "." format
        (when export-options
          (str "?"
               (join "&"
                     (for [[option val] export-options]
                       (str (name option) "="
                            ;; NOTE(review): on the JVM only sequential
                            ;; (seq?) option values are URL-encoded here;
                            ;; plain scalars — and every value in cljs —
                            ;; go out unencoded. Presumably callers
                            ;; pre-encode those; confirm before relying
                            ;; on it.
                            #?(:clj (if (seq? val)
                                      (java.net.URLEncoder/encode
                                       (str val)
                                       (.toString (StandardCharsets/UTF_8)))
                                      val)
                               :cljs val)))))))])
(defmethod type->download-path :filtered-dataset
  ;; Filtered dataviews: "<id>/data.<format>"; export-options are not
  ;; supported for dataview downloads and are ignored here.
  [_ dataset-id format export-options]
  [dataset-id (str "data." format)])
#?(:clj
(defn download
"Download dataset in specified format."
[dataset-id format & [async data-type export-options]]
(let [options (options-for-format format)
endpoint (type->endpoint data-type :async async)
url (apply make-url
(cons endpoint
(type->download-path data-type
dataset-id
format
export-options)))
filename (filename-for-format dataset-id format)]
(parse-http :get url :http-options options :filename filename))))
(defn download-synchronously
"Download form data in specified format. The synchronicity here refers to the
server side. This will still return a channel, not data, in CLJS.
The options map (last parameter) has the following keys:
:accept-header Defaults to application/json
:submission-id The id of the submission whose data the client requires. The
function returns data for all submissions if this is not provided.
:dataview? Boolean flag indicating whether the data belongs to a filtered
dataview"
[dataset-id format
& {:keys [accept-header submission-id dataview?]}]
(let [url (cond
dataview? (make-url "dataviews" dataset-id (str "data." format))
submission-id (make-url "data"
dataset-id (str submission-id "." format))
:default (make-url "data" (str dataset-id "." format)))]
(parse-http :get url
:accept-header accept-header
:http-options (options-for-format format))))
(defn form
"Download form as JSON string or file in specified format if format passed."
([dataset-id]
(let [url (make-url "forms" dataset-id "form.json")]
(when dataset-id
(parse-http :get url))))
([dataset-id format]
(let [suffix (str "form." format)
options (options-for-format format)
url (make-url "forms" dataset-id suffix)
filename (str dataset-id "_" suffix)]
(when dataset-id
(parse-http :get url :http-options options :filename filename)))))
(defn metadata
  "Show dataset metadata. Returns nil when dataset-id is falsy.
   Pass :no-cache? true to bypass any HTTP caching layer."
  [dataset-id & {:keys [no-cache?]}]
  ;; the original repeated the (when dataset-id ...) guard inside itself;
  ;; one check is enough
  (when dataset-id
    (let [url (make-url "forms" (str dataset-id ".json"))]
      (parse-http :get url :no-cache? no-cache?))))
(defn online-data-entry-link
"Return link to online data entry."
[dataset-id]
(let [url (make-url "forms" dataset-id "enketo.json")]
#?(:clj
(parse-http :get url :suppress-4xx-exceptions? true)
:cljs
(parse-http :get url))))
(defn edit-link
"Return link to online data entry."
[username project-id dataset-id instance-id]
(let [return-url (make-client-url username
project-id
dataset-id
"submission-editing-complete")
url (make-url "data" dataset-id instance-id
(str "enketo.json?return_url=" return-url))]
(:url (parse-http :get url))))
(defn delete
"Delete a dataset by ID."
[dataset-id]
(let [url (make-url "forms" dataset-id "delete_async.json")]
(parse-http :delete url)))
(defn move-to-project
"Move a dataset to a project use account if no owner passed."
[dataset-id project-id]
(let [url (make-url "projects" project-id "forms.json")]
(parse-http :post url :http-options {:form-params {:formid dataset-id}})))
(defn new-form-owner
  "Transfer ownership of a form to the user named `new-owner`.
   Returns nil when dataset-id is falsy."
  [dataset-id new-owner]
  ;; the original shadowed `new-owner` with its URL form and nested a
  ;; second (when dataset-id ...); use a distinct binding and one guard
  (when dataset-id
    (let [url (make-url "forms" (str dataset-id ".json"))
          owner-url (make-url "users" new-owner)]
      (parse-http :patch url
                  :http-options {:form-params {:owner owner-url}}))))
(defn update-sharing
"Share dataset with specific user"
[dataset-id username role]
(let [url (make-url "forms" dataset-id "share.json")
data {:username username :role role}]
(when dataset-id
(parse-http :post url :http-options {:form-params data}))))
#?(:clj
(defn upload-media
"Upload media for a form"
[datasetd-id media-file]
(let [url (make-url "metadata.json")
data-file (file-utils/uploaded->file media-file)
muiltipart [{:name "data_value"
:content (:filename media-file)}
{:name "data_type"
:content "media"}
{:name "xform"
:content datasetd-id}
{:name "data_file"
:content data-file}]]
(parse-http :post url
:http-options {:multipart muiltipart}
:suppress-4xx-exceptions? true))))
(defn link-xform-or-dataview-as-media
"Link xform or dataview as media"
[object-type object-id media-filename xform-id]
(let [url (make-url "metadata.json")
form-params {:data_type "media"
:data_value
(str (join " " [object-type object-id media-filename]))
:xform xform-id}]
(parse-http :post url
:http-options {:form-params form-params}
:suppress-4xx-exceptions? true)))
(defn add-xls-report
"Add xls report link to dataset"
[dataset-id uuid filename]
(let [xls-url (make-j2x-url "xls" uuid)
url (make-url "metadata.json")
data {:xform dataset-id
:data_type "external_export"
:data_value (str filename "|" xls-url)}]
(parse-http :post url :http-options {:form-params data})))
(defn download-xls-report
  "Download an XLS report from the j2x service. The optional `data-id`
   restricts the report to a single submission."
  ([dataset-id meta-id filename]
   (download-xls-report dataset-id meta-id filename nil))
  ([dataset-id meta-id filename data-id]
   (let [path (cond-> (str dataset-id ".xls?meta=" meta-id)
                data-id (str "&data_id=" data-id))
         url (make-url "forms" path)]
     (parse-http :get
                 url
                 :http-options {:as :byte-array}
                 :as-map? true
                 :filename filename))))
(def file-ext-query-param-map {:csv "csv_file"
:xls "xls_file"
:xlsx "xls_file"})
(defn get-media-file-extension
  "Return the substring after the final dot in `filename`
   (e.g. \"a.b.csv\" -> \"csv\")."
  [filename]
  (peek (split filename #"\.")))
#?(:clj
(defn file-import
"Import csv or xls file data to existing form"
[dataset-id {:keys [filename] :as media-file} & [overwrite?]]
(let [media-file-extension (get-media-file-extension filename)
url (make-url "forms"
dataset-id
(cond-> "import.json"
overwrite? (str "?overwrite=true")))
multipart (multipart-options media-file
(->> media-file-extension
keyword
(get file-ext-query-param-map)))]
(parse-http :post url :http-options multipart
:suppress-4xx-exceptions? true
:as-map? true))))
(defn edit-history
"Returns a submission's edit history"
[dataset-id instance-id]
(parse-http :get (make-url "data" dataset-id instance-id "history.json")))
#?(:clj
(defn upload-file
"Upload metadata file for a submission"
[submission-id file]
(upload-metadata-file "instance" submission-id file)))
(defn files
[instance-id project-id & {:keys [no-cache? dataset-id]}]
(let [extra-params (apply assoc {:project project-id}
[:xform dataset-id])]
(metadata-files :instance instance-id no-cache?
:extra-params extra-params)))
(defn update-xform-meta-permissions
"Integer Integer String String -> Channel HttpResponse"
[dataset-id metadata-id editor-meta-role dataentry-meta-role]
(parse-http
:put (make-url "metadata" (str metadata-id ".json"))
:http-options
{:form-params
{:data_type "xform_meta_perms"
:xform dataset-id
:data_value (str editor-meta-role "|" dataentry-meta-role)}}))
(defn create-xform-meta-permissions
"Integer String String -> Channel HttpResponse"
[dataset-id editor-meta-role dataentry-meta-role]
(parse-http
:post (make-url "metadata.json")
:http-options
{:form-params
{:data_type "xform_meta_perms"
:xform dataset-id
:data_value (str editor-meta-role "|" dataentry-meta-role)}}))
(defn create-submission-review
"Create a submission review"
[{:keys [status instance note]}]
(parse-http
:post (make-url "submissionreview.json")
:http-options
{:form-params
{:status status
:instance instance
:note note}}))
(defn create-multiple-submission-reviews
"Create a submission review"
[{:keys [status instances note]}]
(let [json-vec (mapv (fn [instance]
{:note note :status status :instance instance})
instances)]
(parse-http
:post (make-url "submissionreview.json")
:http-options
#?(:clj {:body
(generate-string json-vec)
:content-type :json})
#?(:cljs {:json-params json-vec}))))
(defn get-submission-review
"Get a submission review"
[submission-review-id]
(parse-http :get (make-url "submissionreview"
(str submission-review-id ".json"))))
(defn list-submission-reviews
"List a submission review"
[]
(parse-http :get (make-url "submissionreview.json")))
(defn update-submission-review
  "PATCH a submission review's status and/or note. Does nothing
   (returns nil) when both status and note are nil."
  [{:keys [submission-review-id status note]}]
  (let [changes (into {}
                      (filter (comp some? val))
                      {:status status :note note})]
    (when (seq changes)
      (parse-http
       :patch (make-url "submissionreview" (str submission-review-id ".json"))
       :http-options
       {:form-params changes}))))
(defn filter-submission-review-by-instance
  "List submission reviews filtered by instance, status and/or note.
   nil arguments are left out of the query string."
  [{:keys [instance status note]}]
  (let [params (into {}
                     (remove (comp nil? val))
                     {:instance instance :status status :note note})
        query-str (get-query-params-str params)]
    (parse-http
     :get (make-url (str "submissionreview.json" query-str)))))
(defn delete-submission-review
[instance]
(parse-http :delete (make-url "submissionreview" (str instance ".json"))))
| null | https://raw.githubusercontent.com/onaio/milia/718ac4d2952417d1db845bf17ea54663ac7fa6a8/src/milia/api/dataset.cljc | clojure | (ns milia.api.dataset
#? (:clj (:import [java.net URLEncoder]
[java.nio.charset StandardCharsets]))
(:refer-clojure :exclude [clone update])
(:require [chimera.seq :refer [has-keys? in?]]
[chimera.core :refer [not-nil?]]
[chimera.string :refer [get-query-params-str]]
[clojure.string :refer [join split]]
[milia.api.http :refer [parse-http]]
[milia.utils.metadata :refer [metadata-files]]
[milia.utils.remote
:refer [make-j2x-url make-client-url make-url]]
#?@(:clj [[milia.api.io :refer [multipart-options]]
[milia.utils.file :as file-utils]
[milia.utils.metadata :refer [upload-metadata-file]]
[cheshire.core :refer [generate-string]]])))
(defmulti type->endpoint (fn [datatype & _] datatype))
(defmethod type->endpoint :default [_ & {:keys [async] :or {async true}}]
(if async "forms" "data"))
(defmethod type->endpoint :filtered-dataset [_ & _] "dataviews")
(defn all
"Return all the datasets for an account."
[username]
(let [url (make-url (str "forms.json?owner=" username))]
(parse-http :get url)))
(defn public
  "Return all public datasets for a specific user.
  Returns nil when `username` is nil or empty."
  [username]
  ;; Guard first: the original built the URL (called make-url) even when
  ;; the username was blank and the request would never be sent.
  (when (seq username)
    (parse-http :get (make-url "forms" (str username ".json")))))
#?(:clj
(defn- send-file-or-params
"Send request with file or params"
[method
url
{:keys [xls_file xml_file] :as params}
suppress-4xx-exceptions?]
(let [options
(cond
(not-nil? xls_file) (multipart-options xls_file "xls_file")
(not-nil? xml_file) (multipart-options xml_file "xml_file")
:else {:form-params params})]
(parse-http method url :http-options options
:suppress-4xx-exceptions? suppress-4xx-exceptions?))))
#?(:clj
(defn create
"Create a new dataset from a file."
([params]
(create params nil))
([params project-id]
(let [url (apply make-url (if project-id
["projects" project-id "forms.json"]
["forms.json"]))]
(send-file-or-params :post url params false)))))
#?(:clj
(defn patch
"Set the metadata for a dataset using PATCH. Only a subset of the
required parameters are needed."
[dataset-id params & {:keys [suppress-4xx-exceptions?]
:or {suppress-4xx-exceptions? true}}]
(let [url (make-url "forms" (str dataset-id ".json"))]
(when dataset-id
(send-file-or-params :patch url params suppress-4xx-exceptions?)))))
(defn clone
  "Clone the dataset given by ID into the account with the given username.
  Optionally takes :project-id to clone into a specific project.
  Returns nil when dataset-id is missing or username is empty."
  [dataset-id username & {:keys [project-id]}]
  (when (and dataset-id (seq username))
    (let [url (make-url "forms" dataset-id "clone.json")
          ;; project_id is only included when a project was supplied.
          data (cond-> {:form-params {:username username}}
                 project-id (assoc-in [:form-params :project_id] project-id))]
      ;; The original re-checked dataset-id here although the outer guard
      ;; already guarantees it; the redundant `when` has been removed.
      ;; NOTE(review): 4xx responses are suppressed rather than thrown —
      ;; confirm the expected behaviour in parse-http.
      (parse-http :post url
                  :http-options data
                  :suppress-4xx-exceptions? true))))
(defn update
  "Set the metadata for a dataset using PUT. All parameters must be passed."
  [dataset-id params]
  {:pre [(has-keys? params [:created_by
                            :description
                            :downloadable
                            :owner
                            :project
                            :public
                            :public_data
                            :title
                            :uuid])]}
  ;; Single nil-guard; the original checked dataset-id twice in a row.
  (when dataset-id
    (parse-http :put (make-url "forms" (str dataset-id ".json"))
                :http-options {:form-params params})))
(defn update-form-name
"Update the title of a form"
[dataset-id params]
(let [url (make-url "forms" dataset-id)]
(when dataset-id
(parse-http :put url :http-options {:form-params params}))))
(defn ^:export data
"Return the data associated with a dataset."
[dataset-id &
{:keys [format raw? must-revalidate? accept-header query-params
data-id auth-token] #?@(:cljs [:or {format "json"}])}]
(let [dataset-suffix (if format
(str dataset-id (when data-id (str "/" data-id))
"." format)
dataset-id)
url (make-url "data" dataset-suffix)
options {:query-params query-params}]
(parse-http :get url
:http-options options
:raw-response? raw?
:must-revalidate? must-revalidate?
:accept-header accept-header
:auth-token auth-token)))
(defn record
"Retrieve a record from the dataset."
[dataset-id record-id]
(let [url (make-url "data" dataset-id (str record-id ".json"))]
(parse-http :get url)))
(defn tags
"Returns tags for a dataset"
[dataset-id]
(let [url (make-url "forms" dataset-id "labels.json")]
(when dataset-id
(parse-http :get url))))
(defn add-tags
"Add tags to a dataset"
[dataset-id tags]
(let [url (make-url "forms" dataset-id "labels.json")]
(when dataset-id
(parse-http :post url :http-options {:form-params tags}))))
(defn filename-for-format
  "Build \"<dataset-id>.<extension>\": the \"csvzip\" format is served as a
  .zip file, every other format is used as the extension verbatim."
  [dataset-id format]
  (let [extension (case format
                    "csvzip" "zip"
                    format)]
    (str dataset-id "." extension)))
(defn- options-for-format
  "Return the parse-http options needed for `format`: binary export
  formats must be fetched as byte arrays, everything else gets no
  extra options."
  [format]
  (let [binary-formats ["csvzip" "sav" "xls" "xlsx" "zip"]]
    (if (in? binary-formats format)
      {:as :byte-array}
      {})))
(defmulti type->download-path (fn [datatype & _] datatype))
(defmethod type->download-path :default
[_ dataset-id format export-options]
[(str dataset-id "." format
(when export-options
(str "?"
(join "&"
(for [[option val] export-options]
(str (name option) "="
#?(:clj (if (seq? val)
(java.net.URLEncoder/encode
(str val)
(.toString (StandardCharsets/UTF_8)))
val)
:cljs val)))))))])
(defmethod type->download-path :filtered-dataset
[_ dataset-id format export-options]
[dataset-id (str "data." format)])
#?(:clj
(defn download
"Download dataset in specified format."
[dataset-id format & [async data-type export-options]]
(let [options (options-for-format format)
endpoint (type->endpoint data-type :async async)
url (apply make-url
(cons endpoint
(type->download-path data-type
dataset-id
format
export-options)))
filename (filename-for-format dataset-id format)]
(parse-http :get url :http-options options :filename filename))))
(defn download-synchronously
"Download form data in specified format. The synchronicity here refers to the
server side. This will still return a channel, not data, in CLJS.
The options map (last parameter) has the following keys:
:accept-header Defaults to application/json
:submission-id The id of the submission whose data the client requires. The
function returns data for all submissions if this is not provided.
:dataview? Boolean flag indicating whether the data belongs to a filtered
dataview"
[dataset-id format
& {:keys [accept-header submission-id dataview?]}]
(let [url (cond
dataview? (make-url "dataviews" dataset-id (str "data." format))
submission-id (make-url "data"
dataset-id (str submission-id "." format))
:default (make-url "data" (str dataset-id "." format)))]
(parse-http :get url
:accept-header accept-header
:http-options (options-for-format format))))
(defn form
"Download form as JSON string or file in specified format if format passed."
([dataset-id]
(let [url (make-url "forms" dataset-id "form.json")]
(when dataset-id
(parse-http :get url))))
([dataset-id format]
(let [suffix (str "form." format)
options (options-for-format format)
url (make-url "forms" dataset-id suffix)
filename (str dataset-id "_" suffix)]
(when dataset-id
(parse-http :get url :http-options options :filename filename)))))
(defn metadata
  "Show dataset metadata. Returns nil when no dataset-id is given."
  [dataset-id & {:keys [no-cache?]}]
  ;; Single guard; the original nil-checked dataset-id a second time
  ;; after building the URL, which could never fail.
  (when dataset-id
    (parse-http :get (make-url "forms" (str dataset-id ".json"))
                :no-cache? no-cache?)))
(defn online-data-entry-link
"Return link to online data entry."
[dataset-id]
(let [url (make-url "forms" dataset-id "enketo.json")]
#?(:clj
(parse-http :get url :suppress-4xx-exceptions? true)
:cljs
(parse-http :get url))))
(defn edit-link
"Return link to online data entry."
[username project-id dataset-id instance-id]
(let [return-url (make-client-url username
project-id
dataset-id
"submission-editing-complete")
url (make-url "data" dataset-id instance-id
(str "enketo.json?return_url=" return-url))]
(:url (parse-http :get url))))
(defn delete
"Delete a dataset by ID."
[dataset-id]
(let [url (make-url "forms" dataset-id "delete_async.json")]
(parse-http :delete url)))
(defn move-to-project
"Move a dataset to a project use account if no owner passed."
[dataset-id project-id]
(let [url (make-url "projects" project-id "forms.json")]
(parse-http :post url :http-options {:form-params {:formid dataset-id}})))
(defn new-form-owner
  "Set a new form owner. The owner is sent as the user's API URL in the
  PATCH body. Returns nil when dataset-id is missing."
  [dataset-id new-owner]
  (when dataset-id
    (let [url (make-url "forms" (str dataset-id ".json"))
          ;; Distinct name: the original shadowed `new-owner` with its URL,
          ;; and re-checked dataset-id redundantly inside the let.
          owner-url (make-url "users" new-owner)]
      (parse-http :patch url
                  :http-options {:form-params {:owner owner-url}}))))
(defn update-sharing
"Share dataset with specific user"
[dataset-id username role]
(let [url (make-url "forms" dataset-id "share.json")
data {:username username :role role}]
(when dataset-id
(parse-http :post url :http-options {:form-params data}))))
#?(:clj
(defn upload-media
"Upload media for a form"
[datasetd-id media-file]
(let [url (make-url "metadata.json")
data-file (file-utils/uploaded->file media-file)
muiltipart [{:name "data_value"
:content (:filename media-file)}
{:name "data_type"
:content "media"}
{:name "xform"
:content datasetd-id}
{:name "data_file"
:content data-file}]]
(parse-http :post url
:http-options {:multipart muiltipart}
:suppress-4xx-exceptions? true))))
(defn link-xform-or-dataview-as-media
"Link xform or dataview as media"
[object-type object-id media-filename xform-id]
(let [url (make-url "metadata.json")
form-params {:data_type "media"
:data_value
(str (join " " [object-type object-id media-filename]))
:xform xform-id}]
(parse-http :post url
:http-options {:form-params form-params}
:suppress-4xx-exceptions? true)))
(defn add-xls-report
"Add xls report link to dataset"
[dataset-id uuid filename]
(let [xls-url (make-j2x-url "xls" uuid)
url (make-url "metadata.json")
data {:xform dataset-id
:data_type "external_export"
:data_value (str filename "|" xls-url)}]
(parse-http :post url :http-options {:form-params data})))
(defn download-xls-report
"Download xls report from the j2x service"
([dataset-id meta-id filename]
(download-xls-report dataset-id meta-id filename nil))
([dataset-id meta-id filename data-id]
(let [suffix (if data-id
(str dataset-id ".xls?meta=" meta-id "&data_id=" data-id)
(str dataset-id ".xls?meta=" meta-id))
url (make-url "forms" suffix)]
(parse-http :get
url
:http-options {:as :byte-array}
:as-map? true
:filename filename))))
(def file-ext-query-param-map {:csv "csv_file"
:xls "xls_file"
:xlsx "xls_file"})
(defn get-media-file-extension
  "Return the text after the last dot in `filename`,
  e.g. \"data.csv\" -> \"csv\"."
  [filename]
  (peek (split filename #"\.")))
#?(:clj
(defn file-import
"Import csv or xls file data to existing form"
[dataset-id {:keys [filename] :as media-file} & [overwrite?]]
(let [media-file-extension (get-media-file-extension filename)
url (make-url "forms"
dataset-id
(cond-> "import.json"
overwrite? (str "?overwrite=true")))
multipart (multipart-options media-file
(->> media-file-extension
keyword
(get file-ext-query-param-map)))]
(parse-http :post url :http-options multipart
:suppress-4xx-exceptions? true
:as-map? true))))
(defn edit-history
"Returns a submission's edit history"
[dataset-id instance-id]
(parse-http :get (make-url "data" dataset-id instance-id "history.json")))
#?(:clj
(defn upload-file
"Upload metadata file for a submission"
[submission-id file]
(upload-metadata-file "instance" submission-id file)))
(defn files
[instance-id project-id & {:keys [no-cache? dataset-id]}]
(let [extra-params (apply assoc {:project project-id}
[:xform dataset-id])]
(metadata-files :instance instance-id no-cache?
:extra-params extra-params)))
(defn update-xform-meta-permissions
"Integer Integer String String -> Channel HttpResponse"
[dataset-id metadata-id editor-meta-role dataentry-meta-role]
(parse-http
:put (make-url "metadata" (str metadata-id ".json"))
:http-options
{:form-params
{:data_type "xform_meta_perms"
:xform dataset-id
:data_value (str editor-meta-role "|" dataentry-meta-role)}}))
(defn create-xform-meta-permissions
"Integer String String -> Channel HttpResponse"
[dataset-id editor-meta-role dataentry-meta-role]
(parse-http
:post (make-url "metadata.json")
:http-options
{:form-params
{:data_type "xform_meta_perms"
:xform dataset-id
:data_value (str editor-meta-role "|" dataentry-meta-role)}}))
(defn create-submission-review
"Create a submission review"
[{:keys [status instance note]}]
(parse-http
:post (make-url "submissionreview.json")
:http-options
{:form-params
{:status status
:instance instance
:note note}}))
(defn create-multiple-submission-reviews
"Create a submission review"
[{:keys [status instances note]}]
(let [json-vec (mapv (fn [instance]
{:note note :status status :instance instance})
instances)]
(parse-http
:post (make-url "submissionreview.json")
:http-options
#?(:clj {:body
(generate-string json-vec)
:content-type :json})
#?(:cljs {:json-params json-vec}))))
(defn get-submission-review
"Get a submission review"
[submission-review-id]
(parse-http :get (make-url "submissionreview"
(str submission-review-id ".json"))))
(defn list-submission-reviews
"List a submission review"
[]
(parse-http :get (make-url "submissionreview.json")))
(defn update-submission-review
"Update a submission review"
[{:keys [submission-review-id status note]}]
(when (not-every? nil? [status note])
(parse-http
:patch (make-url "submissionreview" (str submission-review-id ".json"))
:http-options
{:form-params
(cond-> {}
(not-nil? status) (assoc :status status)
(not-nil? note) (assoc :note note))})))
(defn filter-submission-review-by-instance
[{:keys [instance status note]}]
(let [query-params-str
(get-query-params-str
(cond-> {}
(not-nil? instance) (assoc :instance instance)
(not-nil? status) (assoc :status status)
(not-nil? note) (assoc :note note)))]
(parse-http
:get (make-url (str "submissionreview.json" query-params-str)))))
(defn delete-submission-review
[instance]
(parse-http :delete (make-url "submissionreview" (str instance ".json"))))
| |
cdb42f58d8f6a61c8e9a59e22ea450e3240159e69062105c1b3116fc85230a52 | wdhowe/clojure-snippets | validators.clj | ; validators - specify which values are allowed for an atom/reference.
; if the validator returns false, an atom's value is not changed.
; create the validator function, should return true or false
(defn battery-level-validator
  "Returns true if battery has a valid power level (0-100 inclusive)."
  [power-level]
  ;; Variadic <= expresses the 0..100 range check in one comparison.
  (<= 0 power-level 100))
; specify a validator to attach to an atom during creation;
; the validator is called on every proposed new value of `battery`.
(def battery (atom 50 :validator battery-level-validator))
(println "Charging battery to 100 percent.")
; 50 + 50 = 100 passes the validator, so the swap succeeds.
(swap! battery + 50)
(println "Charging battery 1 more percent to 101. (Invalid reference state expected)")
; The validator rejects 101, so this swap! throws
; IllegalStateException "Invalid reference state" and the atom keeps 100.
(swap! battery inc)
| null | https://raw.githubusercontent.com/wdhowe/clojure-snippets/0c3247ce99a563312b549d03f080b8cf449b541d/state/validators.clj | clojure | validators - specify which values are allowed for an atom/reference.
if the validator returns false, an atom's value is not changed.
create the validator function, should return true or false
specify a validator to attach to an atom during creation |
(defn battery-level-validator
"Returns true if battery has a valid power level."
[power-level]
(and (>= power-level 0)
(<= power-level 100)))
(def battery (atom 50 :validator battery-level-validator))
(println "Charging battery to 100 percent.")
(swap! battery + 50)
(println "Charging battery 1 more percent to 101. (Invalid reference state expected)")
(swap! battery inc)
|
6a083f1936c2e44277f4762cbcac0b6ad3b0bebf0a42a7953b862b38721c1c71 | d-cent/mooncake | feed.clj | (ns mooncake.test.view.feed
(:require [midje.sweet :refer :all]
[net.cgrand.enlive-html :as html]
[clj-time.core :as c]
[clj-time.format :as f]
[mooncake.routes :as routes]
[mooncake.test.test-helpers.enlive :as eh]
[mooncake.view.feed :as fv]))
;; Rendering the feed view should yield the feed template, identified by
;; the functional-test hook class on <body>.
(fact "feed page should return feed template"
  (let [page (fv/feed :request)]
    page => (eh/has-class? [:body] "func--feed-page")))
(eh/test-translations "feed page" fv/feed)
(eh/test-logo-link fv/feed)
(fact "username is rendered"
(fv/feed {:session {:username "Dave"}}) => (eh/text-is? [:.clj--username] "Dave"))
(fact "sign-out link is rendered and directs to /sign-out when user is signed in"
(let [page (fv/feed {:session {:username ...username...}})]
page => (eh/links-to? [:.clj--sign-out__link] (routes/path :sign-out))
page =not=> (eh/has-class? [:.clj--sign-out__link] "clj--STRIP")))
(fact "sign-out link is not rendered if user is not signed in"
(let [page (fv/feed {})]
page => (eh/has-class? [:.clj--sign-out__link] "clj--STRIP")))
(fact "customise-feed link is rendered and directs to /customise-feed when user is signed in"
(let [page (fv/feed {:session {:username ...username...}})]
page => (eh/links-to? [:.clj--customise-feed__link] (routes/path :show-customise-feed))
page =not=> (eh/has-class? [:.clj--customise-feed__link] "clj--STRIP")))
(fact "customise-feed link is not rendered if user is not signed in"
(let [page (fv/feed {})]
page => (eh/has-class? [:.clj--customise-feed__link] "clj--STRIP")))
;; Table-driven check of the pager links. The ?newer-link-hidden /
;; ?older-link-hidden placeholders are midje arrows: => asserts the link
;; carries the "clj--STRIP" class (i.e. it is hidden), =not=> asserts it
;; does not (i.e. the link is shown).
(tabular
 (fact "newer and older links only display on relevant pages"
   (let [page (fv/feed {:context {:is-last-page ?is-last-page}
                        :params {:page-number ?page-number}})]
     page ?newer-link-hidden (eh/has-class? [:.clj--newer-activities__link] "clj--STRIP")
     page ?older-link-hidden (eh/has-class? [:.clj--older-activities__link] "clj--STRIP")))
 ?page-number ?is-last-page ?newer-link-hidden ?older-link-hidden
 1 false => =not=>
 2 true =not=> =>
 1 true => =>
 3 false =not=> =not=>)
(fact "activities are rendered on the page"
(let [ten-minutes-ago (-> -10 c/minutes c/from-now)
ten-minutes-ago-iso (f/unparse (f/formatters :date-time) ten-minutes-ago)
ten-minutes-ago-custom (f/unparse fv/time-formatter ten-minutes-ago)
page (fv/feed {:context
{:activities
[{:activity-src "an-objective8-activity-src"
(keyword "@context") ""
:type "Create"
:published ten-minutes-ago-iso
:actor {:type "Person"
:name "JDog"}
:object {:type "Objective"
:name "OBJECTIVE 7 TITLE"
:content "We want to establish Activity Types for Objective8"
:url ""}
:relInsertTime "6"}
{:activity-src "a-helsinki-activity-src"
(keyword "@context") ""
:type "Add"
:published "2015-09-06T11:05:53.002Z"
:actor {:type "Group"
:name "Kaupunginjohtaja/J"
:id "/"}
:object {:id "/"
:name "Ymp\u00e4rist\u00f6raportoinnin asiantuntijaty\u00f6ryhm\u00e4n asettaminen toimikaudeksi 2015\u20132020"
:type "Content"
:url "-2015-005343/11010vh1j-2015-25/"
:content "some Finnish HTML"}
:target {:name "Ymp\u00e4rist\u00f6raportoinnin asiantuntijaty\u00f6ryhm\u00e4n asettaminen toimikaudeksi 2015\u20132020"
:content "some Finnish HTML"}
:relInsertTime "9"}
{:activity-src "another-objective8-activity-src"
(keyword "@context") ""
:type "Question"
:published "2015-08-04T14:49:38.407Z"
:actor {:type "Person"
:name "Lala"}
:object {:type "Objective Question"
:name "QUESTION 6 TITLE"
:description "Yes."
:url ""}
:target {:name "OBJECTIVE 6 TITLE"
:type "Objective"
:url ""}
:relInsertTime "3"}]}})
[first-activity-item second-activity-item third-activity-item] (html/select page [:.clj--activity-item])]
(count (html/select page [:.clj--activity-item])) => 3
first-activity-item => (eh/links-to? [:.clj--activity-item__link] "")
first-activity-item => (eh/has-attr? [:.clj--activity-item__time] :datetime ten-minutes-ago-iso)
first-activity-item => (eh/text-is? [:.clj--activity-item__time] ten-minutes-ago-custom)
first-activity-item => (eh/text-is? [:.clj--activity-item__action__author] "JDog")
first-activity-item => (eh/has-attr? [:.clj--activity-item__action] :data-l8n "content:activity-type/action-text-objective")
first-activity-item => (eh/text-is? [:.clj--activity-item__title] "OBJECTIVE 7 TITLE")
first-activity-item => (eh/has-attr? [:.clj--activity-item__id] :hidden "hidden")
first-activity-item => (eh/text-is? [:.clj--activity-item__id] "6")
second-activity-item => (eh/links-to? [:.clj--activity-item__link] "-2015-005343/11010vh1j-2015-25/")
second-activity-item => (eh/has-attr? [:.clj--activity-item__time] :datetime "2015-09-06T11:05:53.002Z")
second-activity-item => (eh/text-is? [:.clj--activity-item__time] "2015-09-06 11:05:53")
second-activity-item => (eh/text-is? [:.clj--activity-item__action__author] "Kaupunginjohtaja/J")
second-activity-item => (eh/text-is? [:.clj--activity-item__action] "- Content - Add")
second-activity-item => (eh/text-is? [:.clj--activity-item__connector] " -")
second-activity-item => (eh/text-is? [:.clj--activity-item__title] "Ymp\u00e4rist\u00f6raportoinnin asiantuntijaty\u00f6ryhm\u00e4n asettaminen toimikaudeksi 2015\u20132020")
second-activity-item => (eh/has-attr? [:.clj--activity-item__id] :hidden "hidden")
second-activity-item => (eh/text-is? [:.clj--activity-item__id] "9")
third-activity-item => (eh/links-to? [:.clj--activity-item__link] "")
third-activity-item => (eh/has-attr? [:.clj--activity-item__time] :datetime "2015-08-04T14:49:38.407Z")
third-activity-item => (eh/text-is? [:.clj--activity-item__time] "2015-08-04 14:49:38")
third-activity-item => (eh/text-is? [:.clj--activity-item__action__author] "Lala")
third-activity-item => (eh/has-attr? [:.clj--activity-item__action] :data-l8n "content:activity-type/action-text-objective-question")
third-activity-item => (eh/text-is? [:.clj--activity-item__title] "QUESTION 6 TITLE")
third-activity-item => (eh/has-attr? [:.clj--activity-item__id] :hidden "hidden")
third-activity-item => (eh/text-is? [:.clj--activity-item__id] "3")))
(fact "activity action text varies depending on existence of target"
(let [page (fv/feed {:context {:activities
[{:type "Question"
:object {:type "Objective Question"
:name "QUESTION 6 TITLE"
:url ""}}
{:type "Question"
:object {:type "Objective Question"
:name "QUESTION 7 TITLE"
:url ""}
:target {:type "Objective"
:name "OBJECTIVE 7 TITLE OBJECTIVE 7 TITLE OBJECTIVE 7 TITLE OBJECTIVE 7 TITLE OBJECTIVE 7 TITLE"
:url ""}}]}})
[activity-without-target activity-with-target] (html/select page [:.clj--activity-item])]
activity-with-target => (eh/text-is? [:.clj--activity-item__target] "OBJECTIVE 7 TITLE OBJECTIVE 7 TITLE…")
activity-with-target => (eh/has-attr? [:.activity-item__action__target html/last-child] :href "")
activity-with-target => (eh/has-attr? [:.clj--activity-item__connector] :data-l8n "content:feed/action-text-connector-about")
activity-without-target => (eh/text-is? [:.clj--activity-item__target] "")
activity-without-target => (eh/does-not-exist? [:.activity-item__action__target [html/last-child (html/attr? :href)]])
activity-without-target => (eh/does-not-exist? [:.clj--activity-item__connector])))
(fact "activity item avatars are given the initial of the actor (the name of the person)"
(let [page (fv/feed {:context {:activities
[{:activity-src "an-activity-src"
:actor {:type "Person"
:name "abby"}}
{:activity-src "an-activity-src"
:actor {:type "Person"
:name "Bobby"}}
{:activity-src "an-activity-src"
:actor {:type "Person"
:name "2k12carlos"}}]}})
initials-elements (-> (html/select page [:.clj--avatar__initials]))]
(html/text (first initials-elements)) => "A"
(html/text (second initials-elements)) => "B"
(html/text (nth initials-elements 2 nil)) => "2"))
(fact "activity item avatars are assigned the correct classes so they can be colour-coded by activity source"
(let [page (fv/feed {:context {:activity-sources
{:an-activity-src {:index 0}
:another-activity-src {:index 1}}
:activities
[{:activity-src "an-activity-src"}
{:activity-src "another-activity-src"}
{:activity-src "an-activity-src"}]}})
first-activity-item-class (-> (html/select page [:.clj--activity-item])
first :attrs :class)
second-activity-item-class (-> (html/select page [:.clj--activity-item])
second :attrs :class)
third-activity-item-class (-> (html/select page [:.clj--activity-item])
(nth 2 nil) :attrs :class)]
first-activity-item-class => (contains "activity-src-0")
first-activity-item-class =not=> (contains "activity-src-1")
(count (re-seq #"activity-src-" first-activity-item-class)) => 1
second-activity-item-class => (contains "activity-src-1")
second-activity-item-class =not=> (contains "activity-src-0")
(count (re-seq #"activity-src-" second-activity-item-class)) => 1
third-activity-item-class => (contains "activity-src-0")
third-activity-item-class =not=> (contains "activity-src-1")
(count (re-seq #"activity-src-" third-activity-item-class)) => 1))
(facts "about activities"
(facts "when empty"
(let [page (fv/feed {:context {:activities []}})]
(fact "message indicating no retrieved activities"
(-> page (html/select [:.clj--empty-activity-item]) first) =not=> nil?)
(fact "message indicating no retrieved activities links to the customise feed page"
page => (eh/links-to? [:.clj--empty-stream__link] (routes/path :show-customise-feed)))
(fact "message indicating no retrieved activities is translated"
(eh/test-translations "feed page - no activity sources message" (constantly page)))))
(facts "when not empty"
(let [page (fv/feed {:context {:activities [...something...]}})]
(fact "message indicating no retrieved activities is not shown"
(-> page (html/select [:.clj--empty-activity-item]) first) => nil?))))
(fact "activity warning messages are rendered on the page"
(let [page (fv/feed {:context {:activities
[{:activity-src "an-objective8-activity-src"
(keyword "@context") ""
:type "Create"
:actor {:type "Person"
:name "JDog"}
:object {:type "Objective"
:name (str "Lorem ipsum dolor sit amet, consectetur "
"adipiscing elit. Morbi nunc tortor, eleifend et egestas sit "
"amet, tincidunt ac augue. Mauris pellentesque sed.")
:url ""}
:signed true}
{:activity-src "an-objective8-activity-src"
(keyword "@context") ""
:type "Create"
:actor {:type "Person"
:name "HCat"}
:object {:type "Objective"
:name (str "Loremxipsumxdolorxsitxametyxconsecteturx"
"adipiscingxelitzxMorbixnuncxtortoryxeleifendxetxegestasxsitx"
"ametyxtinciduntxacxauguezxMaurisxpellentgfdogk")
:url ""}
:signed false}
{:activity-src "an-objective8-activity-src"
(keyword "@context") ""
:type "Create"
:actor {:type "Person"
:name "QRacoon"}
:object {:type "Objective"
:name (str "Loremxipsumxdolorxsitxametyxconsecteturx"
"adipiscingxelitzxMorbixnuncxtortoryxeleifendxetxegestasxsitx"
"ametyxtinciduntxacxauguezxMaurisxpellentgfdogk")
:url ""}
:signed "verification-failed"}]
:active-activity-source-keys [...active-activity-source-key...]}})
[first-activity-item second-activity-item third-activity-item] (html/select page [:.clj--activity-item])]
first-activity-item => (eh/text-is? [:.clj--activity-item__title] (str "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Morbi nunc tortor, "
"eleifend et egestas sit amet, tincidunt ac augue. Mauris\u2026"))
second-activity-item => (eh/text-is? [:.clj--activity-item__title] (str "LoremxipsumxdolorxsitxametyxconsecteturxadipiscingxelitzxMorbixnuncxtortoryxeleifendxe"
"txegestasxsitxametyxtinciduntxacxauguezxMaurisxpellent\u2026"))
(facts "about warning messages"
(fact "no warning sign gets displayed if the activity is signed"
first-activity-item =not=> (eh/has-class? [:.clj--activity-item] "clj--activity-item__suspicious"))
(fact "a corresponding warning sign gets displayed if the activity is unsigned"
second-activity-item =not=> (eh/has-class? [:.clj--activity-item__suspicious] "clj--STRIP")
second-activity-item => (eh/has-class? [:.clj--activity-item__suspicious] "clj--activity-item__suspicious--untrusted-source"))
(fact "a corresponding warning sign gets displayed if verification of the activity failed"
third-activity-item =not=> (eh/has-class? [:.clj--activity-item__suspicious] "clj--STRIP")
third-activity-item => (eh/has-class? [:.clj--activity-item__suspicious] "clj--activity-item__suspicious--unverified-signature"))))) | null | https://raw.githubusercontent.com/d-cent/mooncake/eb16b7239e7580a73b98f7cdacb324ab4e301f9c/test/mooncake/test/view/feed.clj | clojure | (ns mooncake.test.view.feed
(:require [midje.sweet :refer :all]
[net.cgrand.enlive-html :as html]
[clj-time.core :as c]
[clj-time.format :as f]
[mooncake.routes :as routes]
[mooncake.test.test-helpers.enlive :as eh]
[mooncake.view.feed :as fv]))
(fact "feed page should return feed template"
(let [page (fv/feed :request)]
page => (eh/has-class? [:body] "func--feed-page")))
(eh/test-translations "feed page" fv/feed)
(eh/test-logo-link fv/feed)
(fact "username is rendered"
(fv/feed {:session {:username "Dave"}}) => (eh/text-is? [:.clj--username] "Dave"))
(fact "sign-out link is rendered and directs to /sign-out when user is signed in"
(let [page (fv/feed {:session {:username ...username...}})]
page => (eh/links-to? [:.clj--sign-out__link] (routes/path :sign-out))
page =not=> (eh/has-class? [:.clj--sign-out__link] "clj--STRIP")))
(fact "sign-out link is not rendered if user is not signed in"
(let [page (fv/feed {})]
page => (eh/has-class? [:.clj--sign-out__link] "clj--STRIP")))
(fact "customise-feed link is rendered and directs to /customise-feed when user is signed in"
(let [page (fv/feed {:session {:username ...username...}})]
page => (eh/links-to? [:.clj--customise-feed__link] (routes/path :show-customise-feed))
page =not=> (eh/has-class? [:.clj--customise-feed__link] "clj--STRIP")))
(fact "customise-feed link is not rendered if user is not signed in"
(let [page (fv/feed {})]
page => (eh/has-class? [:.clj--customise-feed__link] "clj--STRIP")))
(tabular
(fact "newer and older links only display on relevant pages"
(let [page (fv/feed {:context {:is-last-page ?is-last-page}
:params {:page-number ?page-number}})]
page ?newer-link-hidden (eh/has-class? [:.clj--newer-activities__link] "clj--STRIP")
page ?older-link-hidden (eh/has-class? [:.clj--older-activities__link] "clj--STRIP")))
?page-number ?is-last-page ?newer-link-hidden ?older-link-hidden
1 false => =not=>
2 true =not=> =>
1 true => =>
3 false =not=> =not=>)
(fact "activities are rendered on the page"
(let [ten-minutes-ago (-> -10 c/minutes c/from-now)
ten-minutes-ago-iso (f/unparse (f/formatters :date-time) ten-minutes-ago)
ten-minutes-ago-custom (f/unparse fv/time-formatter ten-minutes-ago)
page (fv/feed {:context
{:activities
[{:activity-src "an-objective8-activity-src"
(keyword "@context") ""
:type "Create"
:published ten-minutes-ago-iso
:actor {:type "Person"
:name "JDog"}
:object {:type "Objective"
:name "OBJECTIVE 7 TITLE"
:content "We want to establish Activity Types for Objective8"
:url ""}
:relInsertTime "6"}
{:activity-src "a-helsinki-activity-src"
(keyword "@context") ""
:type "Add"
:published "2015-09-06T11:05:53.002Z"
:actor {:type "Group"
:name "Kaupunginjohtaja/J"
:id "/"}
:object {:id "/"
:name "Ymp\u00e4rist\u00f6raportoinnin asiantuntijaty\u00f6ryhm\u00e4n asettaminen toimikaudeksi 2015\u20132020"
:type "Content"
:url "-2015-005343/11010vh1j-2015-25/"
:content "some Finnish HTML"}
:target {:name "Ymp\u00e4rist\u00f6raportoinnin asiantuntijaty\u00f6ryhm\u00e4n asettaminen toimikaudeksi 2015\u20132020"
:content "some Finnish HTML"}
:relInsertTime "9"}
{:activity-src "another-objective8-activity-src"
(keyword "@context") ""
:type "Question"
:published "2015-08-04T14:49:38.407Z"
:actor {:type "Person"
:name "Lala"}
:object {:type "Objective Question"
:name "QUESTION 6 TITLE"
:description "Yes."
:url ""}
:target {:name "OBJECTIVE 6 TITLE"
:type "Objective"
:url ""}
:relInsertTime "3"}]}})
[first-activity-item second-activity-item third-activity-item] (html/select page [:.clj--activity-item])]
(count (html/select page [:.clj--activity-item])) => 3
first-activity-item => (eh/links-to? [:.clj--activity-item__link] "")
first-activity-item => (eh/has-attr? [:.clj--activity-item__time] :datetime ten-minutes-ago-iso)
first-activity-item => (eh/text-is? [:.clj--activity-item__time] ten-minutes-ago-custom)
first-activity-item => (eh/text-is? [:.clj--activity-item__action__author] "JDog")
first-activity-item => (eh/has-attr? [:.clj--activity-item__action] :data-l8n "content:activity-type/action-text-objective")
first-activity-item => (eh/text-is? [:.clj--activity-item__title] "OBJECTIVE 7 TITLE")
first-activity-item => (eh/has-attr? [:.clj--activity-item__id] :hidden "hidden")
first-activity-item => (eh/text-is? [:.clj--activity-item__id] "6")
second-activity-item => (eh/links-to? [:.clj--activity-item__link] "-2015-005343/11010vh1j-2015-25/")
second-activity-item => (eh/has-attr? [:.clj--activity-item__time] :datetime "2015-09-06T11:05:53.002Z")
second-activity-item => (eh/text-is? [:.clj--activity-item__time] "2015-09-06 11:05:53")
second-activity-item => (eh/text-is? [:.clj--activity-item__action__author] "Kaupunginjohtaja/J")
second-activity-item => (eh/text-is? [:.clj--activity-item__action] "- Content - Add")
second-activity-item => (eh/text-is? [:.clj--activity-item__connector] " -")
second-activity-item => (eh/text-is? [:.clj--activity-item__title] "Ymp\u00e4rist\u00f6raportoinnin asiantuntijaty\u00f6ryhm\u00e4n asettaminen toimikaudeksi 2015\u20132020")
second-activity-item => (eh/has-attr? [:.clj--activity-item__id] :hidden "hidden")
second-activity-item => (eh/text-is? [:.clj--activity-item__id] "9")
third-activity-item => (eh/links-to? [:.clj--activity-item__link] "")
third-activity-item => (eh/has-attr? [:.clj--activity-item__time] :datetime "2015-08-04T14:49:38.407Z")
third-activity-item => (eh/text-is? [:.clj--activity-item__time] "2015-08-04 14:49:38")
third-activity-item => (eh/text-is? [:.clj--activity-item__action__author] "Lala")
third-activity-item => (eh/has-attr? [:.clj--activity-item__action] :data-l8n "content:activity-type/action-text-objective-question")
third-activity-item => (eh/text-is? [:.clj--activity-item__title] "QUESTION 6 TITLE")
third-activity-item => (eh/has-attr? [:.clj--activity-item__id] :hidden "hidden")
third-activity-item => (eh/text-is? [:.clj--activity-item__id] "3")))
(fact "activity action text varies depending on existence of target"
(let [page (fv/feed {:context {:activities
[{:type "Question"
:object {:type "Objective Question"
:name "QUESTION 6 TITLE"
:url ""}}
{:type "Question"
:object {:type "Objective Question"
:name "QUESTION 7 TITLE"
:url ""}
:target {:type "Objective"
:name "OBJECTIVE 7 TITLE OBJECTIVE 7 TITLE OBJECTIVE 7 TITLE OBJECTIVE 7 TITLE OBJECTIVE 7 TITLE"
:url ""}}]}})
[activity-without-target activity-with-target] (html/select page [:.clj--activity-item])]
activity-with-target => (eh/text-is? [:.clj--activity-item__target] "OBJECTIVE 7 TITLE OBJECTIVE 7 TITLE…")
activity-with-target => (eh/has-attr? [:.activity-item__action__target html/last-child] :href "")
activity-with-target => (eh/has-attr? [:.clj--activity-item__connector] :data-l8n "content:feed/action-text-connector-about")
activity-without-target => (eh/text-is? [:.clj--activity-item__target] "")
activity-without-target => (eh/does-not-exist? [:.activity-item__action__target [html/last-child (html/attr? :href)]])
activity-without-target => (eh/does-not-exist? [:.clj--activity-item__connector])))
(fact "activity item avatars are given the initial of the actor (the name of the person)"
(let [page (fv/feed {:context {:activities
[{:activity-src "an-activity-src"
:actor {:type "Person"
:name "abby"}}
{:activity-src "an-activity-src"
:actor {:type "Person"
:name "Bobby"}}
{:activity-src "an-activity-src"
:actor {:type "Person"
:name "2k12carlos"}}]}})
initials-elements (-> (html/select page [:.clj--avatar__initials]))]
(html/text (first initials-elements)) => "A"
(html/text (second initials-elements)) => "B"
(html/text (nth initials-elements 2 nil)) => "2"))
(fact "activity item avatars are assigned the correct classes so they can be colour-coded by activity source"
(let [page (fv/feed {:context {:activity-sources
{:an-activity-src {:index 0}
:another-activity-src {:index 1}}
:activities
[{:activity-src "an-activity-src"}
{:activity-src "another-activity-src"}
{:activity-src "an-activity-src"}]}})
first-activity-item-class (-> (html/select page [:.clj--activity-item])
first :attrs :class)
second-activity-item-class (-> (html/select page [:.clj--activity-item])
second :attrs :class)
third-activity-item-class (-> (html/select page [:.clj--activity-item])
(nth 2 nil) :attrs :class)]
first-activity-item-class => (contains "activity-src-0")
first-activity-item-class =not=> (contains "activity-src-1")
(count (re-seq #"activity-src-" first-activity-item-class)) => 1
second-activity-item-class => (contains "activity-src-1")
second-activity-item-class =not=> (contains "activity-src-0")
(count (re-seq #"activity-src-" second-activity-item-class)) => 1
third-activity-item-class => (contains "activity-src-0")
third-activity-item-class =not=> (contains "activity-src-1")
(count (re-seq #"activity-src-" third-activity-item-class)) => 1))
(facts "about activities"
(facts "when empty"
(let [page (fv/feed {:context {:activities []}})]
(fact "message indicating no retrieved activities"
(-> page (html/select [:.clj--empty-activity-item]) first) =not=> nil?)
(fact "message indicating no retrieved activities links to the customise feed page"
page => (eh/links-to? [:.clj--empty-stream__link] (routes/path :show-customise-feed)))
(fact "message indicating no retrieved activities is translated"
(eh/test-translations "feed page - no activity sources message" (constantly page)))))
(facts "when not empty"
(let [page (fv/feed {:context {:activities [...something...]}})]
(fact "message indicating no retrieved activities is not shown"
(-> page (html/select [:.clj--empty-activity-item]) first) => nil?))))
(fact "activity warning messages are rendered on the page"
(let [page (fv/feed {:context {:activities
[{:activity-src "an-objective8-activity-src"
(keyword "@context") ""
:type "Create"
:actor {:type "Person"
:name "JDog"}
:object {:type "Objective"
:name (str "Lorem ipsum dolor sit amet, consectetur "
"adipiscing elit. Morbi nunc tortor, eleifend et egestas sit "
"amet, tincidunt ac augue. Mauris pellentesque sed.")
:url ""}
:signed true}
{:activity-src "an-objective8-activity-src"
(keyword "@context") ""
:type "Create"
:actor {:type "Person"
:name "HCat"}
:object {:type "Objective"
:name (str "Loremxipsumxdolorxsitxametyxconsecteturx"
"adipiscingxelitzxMorbixnuncxtortoryxeleifendxetxegestasxsitx"
"ametyxtinciduntxacxauguezxMaurisxpellentgfdogk")
:url ""}
:signed false}
{:activity-src "an-objective8-activity-src"
(keyword "@context") ""
:type "Create"
:actor {:type "Person"
:name "QRacoon"}
:object {:type "Objective"
:name (str "Loremxipsumxdolorxsitxametyxconsecteturx"
"adipiscingxelitzxMorbixnuncxtortoryxeleifendxetxegestasxsitx"
"ametyxtinciduntxacxauguezxMaurisxpellentgfdogk")
:url ""}
:signed "verification-failed"}]
:active-activity-source-keys [...active-activity-source-key...]}})
[first-activity-item second-activity-item third-activity-item] (html/select page [:.clj--activity-item])]
first-activity-item => (eh/text-is? [:.clj--activity-item__title] (str "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Morbi nunc tortor, "
"eleifend et egestas sit amet, tincidunt ac augue. Mauris\u2026"))
second-activity-item => (eh/text-is? [:.clj--activity-item__title] (str "LoremxipsumxdolorxsitxametyxconsecteturxadipiscingxelitzxMorbixnuncxtortoryxeleifendxe"
"txegestasxsitxametyxtinciduntxacxauguezxMaurisxpellent\u2026"))
(facts "about warning messages"
(fact "no warning sign gets displayed if the activity is signed"
first-activity-item =not=> (eh/has-class? [:.clj--activity-item] "clj--activity-item__suspicious"))
(fact "a corresponding warning sign gets displayed if the activity is unsigned"
second-activity-item =not=> (eh/has-class? [:.clj--activity-item__suspicious] "clj--STRIP")
second-activity-item => (eh/has-class? [:.clj--activity-item__suspicious] "clj--activity-item__suspicious--untrusted-source"))
(fact "a corresponding warning sign gets displayed if verification of the activity failed"
third-activity-item =not=> (eh/has-class? [:.clj--activity-item__suspicious] "clj--STRIP")
third-activity-item => (eh/has-class? [:.clj--activity-item__suspicious] "clj--activity-item__suspicious--unverified-signature"))))) | |
38bcde704c4ec5e64bf7c95fac60f053a4c8d1bb15e45a3b164e501fb40cc06d | multimodalrouting/osm-fulcro | layers.cljs | (ns app.ui.leaflet.layers
(:require
[com.fulcrologic.fulcro.components :refer [factory]]
[app.ui.leaflet.layers.vectorGrid :refer [VectorGridOverlay]]
[app.ui.leaflet.layers.hexbin :refer [Hexbin]]
[app.ui.leaflet.layers.d3svg-osm :refer [D3SvgOSM style-topo]]
[app.ui.leaflet.layers.d3svg-points :refer [D3SvgPoints]]
[app.ui.leaflet.layers.d3svg-label-points :refer [D3SvgLabelPoints]]
[app.ui.leaflet.layers.d3svg-lines :refer [D3SvgLines]]
[app.ui.leaflet.layers.d3svg-styled-lines :refer [D3SvgStyledLines]]
[app.ui.leaflet.layers.d3svg-piechart :refer [D3SvgPieChart]]
[app.ui.leaflet.layers.d3svg-piechart-comparison :refer [D3SvgPieChartComparison]]))
(def overlay-class->component {:vectorGrid (factory VectorGridOverlay)
:hexbin (factory Hexbin)
:d3SvgOSM (factory D3SvgOSM)
:d3SvgPoints (factory D3SvgPoints)
:d3SvgLabelPoints (factory D3SvgLabelPoints)
:d3SvgLines (factory D3SvgLines)
:d3SvgStyledLines (factory D3SvgStyledLines)
:d3SvgPieChart (factory D3SvgPieChart)
:d3SvgPieChartComparison (factory D3SvgPieChartComparison)})
(def example-layers {nil {:base {:name "NONE (only overlays)"
:tile {:url ""}}}
:aerial {:base {:name "Esri Aearial"
:tile {:url "/{z}/{y}/{x}.png"
:attribution "© <a href=\"\">Esri</a>, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community"}}}
:osm {:base {:name "OSM Tiles"
:tile {:url "https://{s}.tile.osm.org/{z}/{x}/{y}.png"
:attribution "© <a href=\"\">OpenStreetMap</a> contributors"}}}
:memo {:base {:name "PublicTransport (MeMOMaps)"
:tile {:url "/{z}/{x}/{y}.png"
:attribution "<a href=\"\">MeMOMaps"}}}
:openpt {:base {:name "PublicTransport (openptmap)"
:tile {:url "/{z}/{x}/{y}.png"
:attribution "<a href=\"\">Openptmap"}}}
:topo {:osm {:name "Topography of OsmJson-Dataset"
all in osm - dataset / root
:styles style-topo}}
:hexbin-example {:overlays [{:class :hexbin
:dataset :vvo-small
:filter {[:geometry :type] #{"Point"}
[:properties :public_transport] #{"stop_position"}}}]}
:vectorGrid-loschwitz {:prechecked true
:overlays [{:class :vectorGrid
:dataset :mvt-loschwitz}]}
:vectorGrid-trachenberger {:overlays [{:class :vectorGrid
:dataset :trachenberger
:filter {[:geometry :type] #{"LineString"}}}]}
:lines-vvo-connections {:prechecked true
:overlays [{:class :d3SvgLines
:dataset :vvo
:filter {[:geometry :type] #{"LineString"}}}]}
:points-vvo-stops {:prechecked true
:overlays [{:class :d3SvgPoints #_:d3SvgLabelPoints
:dataset :vvo-small
:filter {[:geometry :type] #{"Point"}
[:properties :public_transport] #{"stop_position"}}}]}
:pieChart-stop-positions {:prechecked true
:overlays [{:class :d3SvgPieChartComparison
TODO
#_#_:dataset :stop-positions}]}
#_#_:routinggraph {:prechecked true
:overlays [{:class :d3SvgStyledLines
:dataset :routinggraph}]}
#_#_:routes {:prechecked true
:overlays [{:class :d3SvgStyledLines
:dataset :routes}]}
:isochrones {:overlays [{:class :d3SvgLines
:dataset :isochrones}]}})
| null | https://raw.githubusercontent.com/multimodalrouting/osm-fulcro/dedbf40686a18238349603021687694e5a4c31b6/src/main/app/ui/leaflet/layers.cljs | clojure | (ns app.ui.leaflet.layers
(:require
[com.fulcrologic.fulcro.components :refer [factory]]
[app.ui.leaflet.layers.vectorGrid :refer [VectorGridOverlay]]
[app.ui.leaflet.layers.hexbin :refer [Hexbin]]
[app.ui.leaflet.layers.d3svg-osm :refer [D3SvgOSM style-topo]]
[app.ui.leaflet.layers.d3svg-points :refer [D3SvgPoints]]
[app.ui.leaflet.layers.d3svg-label-points :refer [D3SvgLabelPoints]]
[app.ui.leaflet.layers.d3svg-lines :refer [D3SvgLines]]
[app.ui.leaflet.layers.d3svg-styled-lines :refer [D3SvgStyledLines]]
[app.ui.leaflet.layers.d3svg-piechart :refer [D3SvgPieChart]]
[app.ui.leaflet.layers.d3svg-piechart-comparison :refer [D3SvgPieChartComparison]]))
(def overlay-class->component {:vectorGrid (factory VectorGridOverlay)
:hexbin (factory Hexbin)
:d3SvgOSM (factory D3SvgOSM)
:d3SvgPoints (factory D3SvgPoints)
:d3SvgLabelPoints (factory D3SvgLabelPoints)
:d3SvgLines (factory D3SvgLines)
:d3SvgStyledLines (factory D3SvgStyledLines)
:d3SvgPieChart (factory D3SvgPieChart)
:d3SvgPieChartComparison (factory D3SvgPieChartComparison)})
(def example-layers {nil {:base {:name "NONE (only overlays)"
:tile {:url ""}}}
:aerial {:base {:name "Esri Aearial"
:tile {:url "/{z}/{y}/{x}.png"
:attribution "© <a href=\"\">Esri</a>, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community"}}}
:osm {:base {:name "OSM Tiles"
:tile {:url "https://{s}.tile.osm.org/{z}/{x}/{y}.png"
:attribution "© <a href=\"\">OpenStreetMap</a> contributors"}}}
:memo {:base {:name "PublicTransport (MeMOMaps)"
:tile {:url "/{z}/{x}/{y}.png"
:attribution "<a href=\"\">MeMOMaps"}}}
:openpt {:base {:name "PublicTransport (openptmap)"
:tile {:url "/{z}/{x}/{y}.png"
:attribution "<a href=\"\">Openptmap"}}}
:topo {:osm {:name "Topography of OsmJson-Dataset"
all in osm - dataset / root
:styles style-topo}}
:hexbin-example {:overlays [{:class :hexbin
:dataset :vvo-small
:filter {[:geometry :type] #{"Point"}
[:properties :public_transport] #{"stop_position"}}}]}
:vectorGrid-loschwitz {:prechecked true
:overlays [{:class :vectorGrid
:dataset :mvt-loschwitz}]}
:vectorGrid-trachenberger {:overlays [{:class :vectorGrid
:dataset :trachenberger
:filter {[:geometry :type] #{"LineString"}}}]}
:lines-vvo-connections {:prechecked true
:overlays [{:class :d3SvgLines
:dataset :vvo
:filter {[:geometry :type] #{"LineString"}}}]}
:points-vvo-stops {:prechecked true
:overlays [{:class :d3SvgPoints #_:d3SvgLabelPoints
:dataset :vvo-small
:filter {[:geometry :type] #{"Point"}
[:properties :public_transport] #{"stop_position"}}}]}
:pieChart-stop-positions {:prechecked true
:overlays [{:class :d3SvgPieChartComparison
TODO
#_#_:dataset :stop-positions}]}
#_#_:routinggraph {:prechecked true
:overlays [{:class :d3SvgStyledLines
:dataset :routinggraph}]}
#_#_:routes {:prechecked true
:overlays [{:class :d3SvgStyledLines
:dataset :routes}]}
:isochrones {:overlays [{:class :d3SvgLines
:dataset :isochrones}]}})
| |
01224cbd716ed6fb454b6a068bea36b88f7df36d3c4c209275bae22842a68c5f | tsloughter/kuberl | kuberl_v1beta1_network_policy_list.erl | -module(kuberl_v1beta1_network_policy_list).
-export([encode/1]).
-export_type([kuberl_v1beta1_network_policy_list/0]).
-type kuberl_v1beta1_network_policy_list() ::
#{ 'apiVersion' => binary(),
'items' := list(),
'kind' => binary(),
'metadata' => kuberl_v1_list_meta:kuberl_v1_list_meta()
}.
encode(#{ 'apiVersion' := ApiVersion,
'items' := Items,
'kind' := Kind,
'metadata' := Metadata
}) ->
#{ 'apiVersion' => ApiVersion,
'items' => Items,
'kind' => Kind,
'metadata' => Metadata
}.
| null | https://raw.githubusercontent.com/tsloughter/kuberl/f02ae6680d6ea5db6e8b6c7acbee8c4f9df482e2/gen/kuberl_v1beta1_network_policy_list.erl | erlang | -module(kuberl_v1beta1_network_policy_list).
-export([encode/1]).
-export_type([kuberl_v1beta1_network_policy_list/0]).
-type kuberl_v1beta1_network_policy_list() ::
#{ 'apiVersion' => binary(),
'items' := list(),
'kind' => binary(),
'metadata' => kuberl_v1_list_meta:kuberl_v1_list_meta()
}.
encode(#{ 'apiVersion' := ApiVersion,
'items' := Items,
'kind' := Kind,
'metadata' := Metadata
}) ->
#{ 'apiVersion' => ApiVersion,
'items' => Items,
'kind' => Kind,
'metadata' => Metadata
}.
| |
e4013c8472ac07d3083c20cc2aff90da013dddda3d5d739ea289b8ac6fee3491 | mhayashi1120/Gauche-dbd-sqlite3 | test.scm | ;;
;; Test dbd.sqlite3 module
;;
(use gauche.test)
(test-start "dbd.sqlite3")
(use dbi)
(use dbd.sqlite3)
(use gauche.collection)
(use gauche.version)
(use gauche.threads)
(test-module 'dbd.sqlite3) ;; This checks the exported symbols are indeed bound.
;; Normal operation test
(define connection #f)
(define (select-rows sql . params)
(let1 rset (apply dbi-do connection sql '() params)
(unwind-protect
(map identity rset)
(dbi-close rset))))
(define (select-rows2 sql . params)
(let1 rset (apply dbi-do connection sql '(:pass-through #t) params)
(unwind-protect
(map identity rset)
(dbi-close rset))))
(define (cleanup-test)
(define (remove-file file)
(when (file-exists? file)
(sys-unlink file)))
(remove-file "test.db")
(remove-file "test.db-journal")
(remove-file "てすと.db")
(remove-file "unacceptable.db"))
(cleanup-test)
(format #t "Testing libsqlite3 version => ~a\n" (sqlite3-libversion))
(test* "dbi-connect"
<sqlite3-connection>
(let1 c (dbi-connect "dbi:sqlite3:test.db")
(set! connection c)
(class-of c)))
(test* "Creating test table"
#f
(dbi-open?
(dbi-execute
;;
(dbi-prepare connection
"CREATE TABLE tbl1(id INTEGER, name TEXT, image NONE, rate REAL);"))))
(test* "Checking insert common fields"
#f
(dbi-open?
(dbi-execute
(dbi-prepare connection
"INSERT INTO tbl1 VALUES(1, 'name 1', x'0101', 0.8);"))))
(test* "Checking insert common fields 2"
#f
(dbi-open?
(dbi-execute
(dbi-prepare connection "INSERT INTO tbl1 VALUES(?, ?, x'0202', ?);")
2 "name 2" 0.7)))
(test* "Checking insert common fields 3"
#f
(dbi-open?
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(3);")))
(test* "Checking field names"
'("id" "name" "image" "rate")
(let1 rset (dbi-do connection "SELECT * FROM tbl1;")
(begin0
(slot-ref rset 'field-names)
;; Must close result if rset is pending query.
;; See the ROLLBACK section.
(dbi-close rset))))
(test* "Checking current inserted values"
'(#(1 "name 1" #u8(1 1) 0.8) #(2 "name 2" #u8(2 2) 0.7) #(3 #f #f #f))
(select-rows "SELECT id, name, image, rate FROM tbl1 ORDER BY id ASC;"))
(let ((rset (dbi-do connection "SELECT id FROM tbl1 ORDER BY id ASC")))
(test* "Checking result when quit on the way"
'(#(1) #(2))
(call-with-iterator rset
(lambda (end? next)
(let loop ((count 0)
(res '()))
(cond
((or (end?)
(> count 1))
(reverse! res))
(else
(loop (+ count 1)
(cons (next) res))))))))
(test* "Checking result 1"
'(#(1) #(2) #(3))
(map identity rset))
(test* "Checking result 2"
'(#(1) #(2) #(3))
(map identity rset)))
(test* "Checking transaction commit"
'(#(101) #(102))
(begin
(call-with-transaction connection
(lambda (tran)
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(101);")
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(102);")))
(select-rows "SELECT id FROM tbl1 WHERE id IN (101, 102)")))
(test* "Checking transaction rollback"
'()
(begin
(guard (e (else (print (condition-ref e 'message))))
(call-with-transaction connection
(lambda (tran)
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(103);")
;; non existent table
(dbi-do connection "INSERT INTO tbl (id) VALUES(104);"))))
(select-rows "SELECT id FROM tbl1 WHERE id IN (103, 104)")))
;; See the ROLLBACK section.
(cond
[(version<? (sqlite3-libversion) "3.7.11")
(test* "Checking transaction unable rollback"
'(#(201))
;; Open pending query
(let1 pending-rset (dbi-do connection "SELECT 1 FROM tbl1;")
(guard (e [else (print (string-join
(map
(cut condition-ref <> 'message)
(slot-ref e '%conditions))
", "))])
(call-with-transaction connection
(lambda (tran)
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(201);")
;; non existent table
(dbi-do connection "INSERT INTO tbl (id) VALUES(202);"))))
(dbi-close pending-rset)
(select-rows "SELECT id FROM tbl1 WHERE id IN (201, 202)")))]
[else
;;
2012 March 20 ( 3.7.11 )
Pending statements no longer block ROLLBACK . Instead , the pending
;; statement will return SQLITE_ABORT upon next access after the
ROLLBACK .
(test* "Checking transaction can rollback (but previous version can not)"
(list () (with-module dbd.sqlite3 <sqlite3-error>))
;; Open pending query
(let1 pending-rset (dbi-do connection "SELECT 1 FROM tbl1;")
(unwind-protect
(begin
(guard (e [else (print (condition-ref e 'message))])
(call-with-transaction connection
(lambda (tran)
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(201);")
;; non existent table
(dbi-do connection "INSERT INTO tbl (id) VALUES(202);"))))
(list
(select-rows "SELECT id FROM tbl1 WHERE id IN (201, 202)")
(guard (e [else (class-of e)])
(map (^x x) pending-rset))))
(dbi-close pending-rset))))])
(let* ([query (dbi-prepare connection "SELECT 1 FROM tbl1;")]
[rset (dbi-execute query)])
(begin
(unwind-protect
(test* "Checking working statements 1"
1
(length (sqlite3-working-statements connection)))
(dbi-close rset))
(test* "Checking working statements 2"
0
(length (sqlite3-working-statements connection)))))
(test* "Checking full bit number insertion"
'(#(-1))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(-1);")
(select-rows "SELECT id FROM tbl1 WHERE id = -1")))
(test* "Checking long number insertion"
'(#(#x7fffffff))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(2147483647);")
(select-rows "SELECT id FROM tbl1 WHERE id = 2147483647")))
(test* "Checking exceed long number insertion"
'(#(#x80000000))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(2147483648);")
(select-rows "SELECT id FROM tbl1 WHERE id = 2147483648")))
(test* "Checking exceed long number insertion 3"
'(#(#xffffffff))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(4294967295);")
(select-rows "SELECT id FROM tbl1 WHERE id = 4294967295")))
(test* "Checking exceed long number insertion 4"
'(#(#x100000000))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(4294967296);")
(select-rows "SELECT id FROM tbl1 WHERE id = 4294967296")))
(test* "Checking exceed long number insertion 5"
'(#(#x100000001))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(4294967297);")
(select-rows "SELECT id FROM tbl1 WHERE id = 4294967297")))
(test* "Checking minus long number insertion"
'(#(#x-80000000))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(-2147483648);")
(select-rows "SELECT id FROM tbl1 WHERE id = -2147483648")))
(test* "Checking exceed minus long number insertion"
'(#(#x-80000001))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(-2147483649);")
(select-rows "SELECT id FROM tbl1 WHERE id = -2147483649")))
(test* "Checking minus max number insertion"
'(#(#x-8000000000000000))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(-9223372036854775808);")
(select-rows "SELECT id FROM tbl1 WHERE id = -9223372036854775808")))
(test* "Checking max number insertion"
'(#(#x7fffffffffffffff))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(9223372036854775807);")
(select-rows "SELECT id FROM tbl1 WHERE id = 9223372036854775807")))
(test* "Checking auto increment id"
'(1 2)
(begin
(dbi-do connection "CREATE TABLE tbl2(id INTEGER PRIMARY KEY);")
(dbi-do connection "INSERT INTO tbl2 (id) VALUES(NULL);")
(let1 res1 (sqlite3-last-id connection)
(dbi-do connection "INSERT INTO tbl2 (id) VALUES(NULL);")
(let1 res2 (sqlite3-last-id connection)
(list res1 res2)))))
(test* "Checking compound INSERT statements"
'(#(301) #(302) #(303))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES (301); INSERT INTO tbl1 (id) VALUES (302);INSERT INTO tbl1 (id) VALUES (303)")
(select-rows "SELECT id FROM tbl1 WHERE id IN (301, 302, 303)")))
(test* "Checking compound statements getting last select"
'(#(401) #(402))
(select-rows "INSERT INTO tbl1 (id) VALUES (401); INSERT INTO tbl1 (id) VALUES (402);SELECT id FROM tbl1 WHERE id IN (401, 402)"))
(test* "Checking compound statements getting 1st select"
'(#(401) #(402))
(select-rows "SELECT id FROM tbl1 WHERE id IN (401, 402); INSERT INTO tbl1 (id) VALUES (403);"))
(test* "Checking previous compound 2nd statements working"
'(#(403))
(select-rows "SELECT id FROM tbl1 WHERE id IN (403);"))
(test* "Checking compound statements getting 1st select and 2nd has syntax error"
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(select-rows "SELECT 1; SELECT;"))
(test* "Checking multiple SELECT statements"
'(#(403) #(301 #f) #(302 #f))
(select-rows "SELECT id FROM tbl1 WHERE id IN (403); SELECT id, name FROM tbl1 WHERE id IN (301, 302)"))
(test* "Checking parameter bindings"
'(#("abcdeあ" #xffff #x7fffffffffffffff 0.99 #f))
(select-rows "SELECT ?, ?, ?, ?, ?;"
"abcdeあ" #xffff #x7fffffffffffffff 0.99 #f))
(test* "Checking named parameter bindings (pass-through)"
'(#("abcdeあ" #xffff #x7fffffffffffffff #x-8000000000000000 #u8(0 1 15 255) 0.99 #f))
(select-rows2
(string-append
"SELECT "
" :string_multibyte1, :small_int, :bigpositive_num, :bignegative_num"
", :u8vector, :float, :null1"
)
:string_multibyte1 "abcdeあ"
:small_int #xffff
:bigpositive_num #x7fffffffffffffff
:bignegative_num #x-8000000000000000
:u8vector #u8(0 1 15 255)
:float 0.99
:null1 #f))
(cond
[(version<=? (gauche-version) "0.9.3.3")
(test* "Checking named parameter overflow number (pass-through)"
'(#(#x-8000000000000000 #x7fffffffffffffff))
(select-rows2
"SELECT :overflow_negative_num, :overflow_positive_num"
:overflow_negative_num #x-8000000000000001
:overflow_positive_num #x8000000000000000))]
[else
(test* "Checking named parameter overflow positive number (pass-through)"
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(select-rows2
"SELECT :overflow_positive_num"
:overflow_positive_num #x8000000000000000))
(test* "Checking named parameter overflow negative number (pass-through)"
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(select-rows2
"SELECT :overflow_negative_num"
:overflow_negative_num #x-8000000000000001))])
(test* "Checking named parameter bindings 2 (pass-through)"
'(#(1 2 3 4 5 6 7))
(select-rows2
(string-append
"SELECT "
;; : prefix
" :a1"
;; @ prefix
", @a2"
;; $ prefix
", $a3"
;; indexed parameter
", ?4"
;; anonymous parameter
", ?"
;; keyword has ? prefix
", ?6"
;; keyword has : prefix
", :a7")
:a1 1 :@a2 2 :$a3 3 :4 4 :? 5 :?6 6 ::a7 7))
(test* "Checking compound statements with named parameter (pass-through)"
'(#(1 2) #(1 3))
(select-rows2
(string-append
"SELECT :a1, :a2;"
"SELECT :a1, :a3;")
:a1 1 :a2 2 :a3 3))
(test* "Checking compound statements with no parameter (pass-through)"
'(#("a1" "a2") #("a3" "a4"))
(select-rows2
(string-append
"SELECT 'a1', 'a2';"
"SELECT 'a3', 'a4';")))
(test* "Checking invalid parameter (pass-through)"
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(select-rows2 "SELECT :a1_a" :a1-a 2))
(cond
[(version>? (sqlite3-libversion) "3.7.12")
(test* "Checking VACUUM is not working when there is pending statement."
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(let1 pending-rset (dbi-do connection "SELECT 1 FROM tbl1;")
(guard (e [else
(print (condition-ref e 'message))
(dbi-close pending-rset)
(raise e)])
(dbi-do connection "VACUUM"))))
(test* "Checking VACUUM is working."
'()
(map (^x x) (dbi-do connection "VACUUM;")))]
[else
(test* "Checking VACUUM is not working."
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(dbi-do connection "VACUUM"))])
(test* "Checking statement which has end of space."
'(#(1))
(select-rows2 "SELECT 1; "))
(test* "Checking no working statements"
'()
(sqlite3-working-statements connection))
(test* "Checking dbi-tables"
'("tbl1" "tbl2")
(dbi-tables connection))
(test* "Checking still open connection"
#t
(dbi-open? connection))
(test* "Checking closing connection"
#t
(dbi-close connection))
(test* "Checking connection was closed"
#f
(dbi-open? connection))
(let1 con1 (dbi-connect "dbi:sqlite3:test.db")
(unwind-protect
(let1 con2 (dbi-connect "dbi:sqlite3:test.db")
(unwind-protect
(let1 r1 (dbi-do con1 "BEGIN; INSERT INTO tbl1 (id) VALUES(501);")
(unwind-protect
(test* "Checking raising error if busy time is not set"
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(dbi-do con2 "BEGIN; INSERT INTO tbl1 (id) VALUES(502);"))
(dbi-close r1)))
(dbi-do con2 "ROLLBACK;")
1 second
(sqlite3-set-timeout con2 1000)
(test* "Checking timed out if another process lock db too long time"
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(dbi-do con2 "BEGIN; INSERT INTO tbl1 (id) VALUES(502);"))
(dbi-do con2 "ROLLBACK;")
3 seconds
(sqlite3-set-timeout con2 3000)
(let1 th (make-thread
(^ ()
(sys-sleep 1)
(dbi-do con1 "COMMIT;")))
(thread-start! th))
(test* "Checking waiting for another process release db"
'(#(501) #(502))
(begin
(dbi-do con2 "BEGIN; INSERT INTO tbl1 (id) VALUES(502);COMMIT;")
(map identity (dbi-do con2 "SELECT id FROM tbl1 WHERE id IN (501, 502)"))))
(dbi-close con2)))
(dbi-close con1)))
(let* ([con (dbi-connect "dbi:sqlite3:test.db")]
[r1 #f]
[r2 #f])
(unwind-protect
(begin
(set! r1 (dbi-do con "BEGIN; INSERT INTO tbl1 (id) VALUES(601);"))
(set! r2 (dbi-do con "SELECT id FROM tbl1 WHERE 600 <= id AND id <= 699")))
(begin
(test* "Checking working procedure"
1
(length (sqlite3-working-statements con)))
(dbi-close con))))
(cond-expand
FIXME : version ca n't file ..
[gauche.os.cygwin]
[else
(test* "Checking failed to open db"
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(begin
(with-output-to-file "unacceptable.db"
(^()))
(sys-chmod "unacceptable.db" #o000)
(dbi-connect "dbi:sqlite3:unacceptable.db")))])
(test* "Checking multibyte filename"
#t
(let1 c (dbi-connect "dbi:sqlite3:てすと.db")
(unwind-protect
(dbi-open? c)
(dbi-close c))))
(test-end)
(cleanup-test)
| null | https://raw.githubusercontent.com/mhayashi1120/Gauche-dbd-sqlite3/f298c4ce2ac181dca4b373057651e5ccd9f63923/test.scm | scheme |
Test dbd.sqlite3 module
This checks the exported symbols are indeed bound.
Normal operation test
Must close result if rset is pending query.
See the ROLLBACK section.
non existent table
See the ROLLBACK section.
Open pending query
non existent table
statement will return SQLITE_ABORT upon next access after the
Open pending query
non existent table
: prefix
@ prefix
$ prefix
indexed parameter
anonymous parameter
keyword has ? prefix
keyword has : prefix |
(use gauche.test)
(test-start "dbd.sqlite3")
(use dbi)
(use dbd.sqlite3)
(use gauche.collection)
(use gauche.version)
(use gauche.threads)
(define connection #f)
(define (select-rows sql . params)
(let1 rset (apply dbi-do connection sql '() params)
(unwind-protect
(map identity rset)
(dbi-close rset))))
(define (select-rows2 sql . params)
(let1 rset (apply dbi-do connection sql '(:pass-through #t) params)
(unwind-protect
(map identity rset)
(dbi-close rset))))
(define (cleanup-test)
(define (remove-file file)
(when (file-exists? file)
(sys-unlink file)))
(remove-file "test.db")
(remove-file "test.db-journal")
(remove-file "てすと.db")
(remove-file "unacceptable.db"))
(cleanup-test)
(format #t "Testing libsqlite3 version => ~a\n" (sqlite3-libversion))
(test* "dbi-connect"
<sqlite3-connection>
(let1 c (dbi-connect "dbi:sqlite3:test.db")
(set! connection c)
(class-of c)))
(test* "Creating test table"
#f
(dbi-open?
(dbi-execute
(dbi-prepare connection
"CREATE TABLE tbl1(id INTEGER, name TEXT, image NONE, rate REAL);"))))
(test* "Checking insert common fields"
#f
(dbi-open?
(dbi-execute
(dbi-prepare connection
"INSERT INTO tbl1 VALUES(1, 'name 1', x'0101', 0.8);"))))
(test* "Checking insert common fields 2"
#f
(dbi-open?
(dbi-execute
(dbi-prepare connection "INSERT INTO tbl1 VALUES(?, ?, x'0202', ?);")
2 "name 2" 0.7)))
(test* "Checking insert common fields 3"
#f
(dbi-open?
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(3);")))
(test* "Checking field names"
'("id" "name" "image" "rate")
(let1 rset (dbi-do connection "SELECT * FROM tbl1;")
(begin0
(slot-ref rset 'field-names)
(dbi-close rset))))
(test* "Checking current inserted values"
'(#(1 "name 1" #u8(1 1) 0.8) #(2 "name 2" #u8(2 2) 0.7) #(3 #f #f #f))
(select-rows "SELECT id, name, image, rate FROM tbl1 ORDER BY id ASC;"))
(let ((rset (dbi-do connection "SELECT id FROM tbl1 ORDER BY id ASC")))
(test* "Checking result when quit on the way"
'(#(1) #(2))
(call-with-iterator rset
(lambda (end? next)
(let loop ((count 0)
(res '()))
(cond
((or (end?)
(> count 1))
(reverse! res))
(else
(loop (+ count 1)
(cons (next) res))))))))
(test* "Checking result 1"
'(#(1) #(2) #(3))
(map identity rset))
(test* "Checking result 2"
'(#(1) #(2) #(3))
(map identity rset)))
(test* "Checking transaction commit"
'(#(101) #(102))
(begin
(call-with-transaction connection
(lambda (tran)
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(101);")
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(102);")))
(select-rows "SELECT id FROM tbl1 WHERE id IN (101, 102)")))
(test* "Checking transaction rollback"
'()
(begin
(guard (e (else (print (condition-ref e 'message))))
(call-with-transaction connection
(lambda (tran)
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(103);")
(dbi-do connection "INSERT INTO tbl (id) VALUES(104);"))))
(select-rows "SELECT id FROM tbl1 WHERE id IN (103, 104)")))
(cond
[(version<? (sqlite3-libversion) "3.7.11")
(test* "Checking transaction unable rollback"
'(#(201))
(let1 pending-rset (dbi-do connection "SELECT 1 FROM tbl1;")
(guard (e [else (print (string-join
(map
(cut condition-ref <> 'message)
(slot-ref e '%conditions))
", "))])
(call-with-transaction connection
(lambda (tran)
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(201);")
(dbi-do connection "INSERT INTO tbl (id) VALUES(202);"))))
(dbi-close pending-rset)
(select-rows "SELECT id FROM tbl1 WHERE id IN (201, 202)")))]
[else
2012 March 20 ( 3.7.11 )
Pending statements no longer block ROLLBACK . Instead , the pending
ROLLBACK .
(test* "Checking transaction can rollback (but previous version can not)"
(list () (with-module dbd.sqlite3 <sqlite3-error>))
(let1 pending-rset (dbi-do connection "SELECT 1 FROM tbl1;")
(unwind-protect
(begin
(guard (e [else (print (condition-ref e 'message))])
(call-with-transaction connection
(lambda (tran)
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(201);")
(dbi-do connection "INSERT INTO tbl (id) VALUES(202);"))))
(list
(select-rows "SELECT id FROM tbl1 WHERE id IN (201, 202)")
(guard (e [else (class-of e)])
(map (^x x) pending-rset))))
(dbi-close pending-rset))))])
(let* ([query (dbi-prepare connection "SELECT 1 FROM tbl1;")]
[rset (dbi-execute query)])
(begin
(unwind-protect
(test* "Checking working statements 1"
1
(length (sqlite3-working-statements connection)))
(dbi-close rset))
(test* "Checking working statements 2"
0
(length (sqlite3-working-statements connection)))))
(test* "Checking full bit number insertion"
'(#(-1))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(-1);")
(select-rows "SELECT id FROM tbl1 WHERE id = -1")))
(test* "Checking long number insertion"
'(#(#x7fffffff))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(2147483647);")
(select-rows "SELECT id FROM tbl1 WHERE id = 2147483647")))
(test* "Checking exceed long number insertion"
'(#(#x80000000))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(2147483648);")
(select-rows "SELECT id FROM tbl1 WHERE id = 2147483648")))
(test* "Checking exceed long number insertion 3"
'(#(#xffffffff))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(4294967295);")
(select-rows "SELECT id FROM tbl1 WHERE id = 4294967295")))
(test* "Checking exceed long number insertion 4"
'(#(#x100000000))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(4294967296);")
(select-rows "SELECT id FROM tbl1 WHERE id = 4294967296")))
(test* "Checking exceed long number insertion 5"
'(#(#x100000001))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(4294967297);")
(select-rows "SELECT id FROM tbl1 WHERE id = 4294967297")))
(test* "Checking minus long number insertion"
'(#(#x-80000000))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(-2147483648);")
(select-rows "SELECT id FROM tbl1 WHERE id = -2147483648")))
(test* "Checking exceed minus long number insertion"
'(#(#x-80000001))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(-2147483649);")
(select-rows "SELECT id FROM tbl1 WHERE id = -2147483649")))
(test* "Checking minus max number insertion"
'(#(#x-8000000000000000))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(-9223372036854775808);")
(select-rows "SELECT id FROM tbl1 WHERE id = -9223372036854775808")))
(test* "Checking max number insertion"
'(#(#x7fffffffffffffff))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES(9223372036854775807);")
(select-rows "SELECT id FROM tbl1 WHERE id = 9223372036854775807")))
(test* "Checking auto increment id"
'(1 2)
(begin
(dbi-do connection "CREATE TABLE tbl2(id INTEGER PRIMARY KEY);")
(dbi-do connection "INSERT INTO tbl2 (id) VALUES(NULL);")
(let1 res1 (sqlite3-last-id connection)
(dbi-do connection "INSERT INTO tbl2 (id) VALUES(NULL);")
(let1 res2 (sqlite3-last-id connection)
(list res1 res2)))))
(test* "Checking compound INSERT statements"
'(#(301) #(302) #(303))
(begin
(dbi-do connection "INSERT INTO tbl1 (id) VALUES (301); INSERT INTO tbl1 (id) VALUES (302);INSERT INTO tbl1 (id) VALUES (303)")
(select-rows "SELECT id FROM tbl1 WHERE id IN (301, 302, 303)")))
(test* "Checking compound statements getting last select"
'(#(401) #(402))
(select-rows "INSERT INTO tbl1 (id) VALUES (401); INSERT INTO tbl1 (id) VALUES (402);SELECT id FROM tbl1 WHERE id IN (401, 402)"))
(test* "Checking compound statements getting 1st select"
'(#(401) #(402))
(select-rows "SELECT id FROM tbl1 WHERE id IN (401, 402); INSERT INTO tbl1 (id) VALUES (403);"))
(test* "Checking previous compound 2nd statements working"
'(#(403))
(select-rows "SELECT id FROM tbl1 WHERE id IN (403);"))
(test* "Checking compound statements getting 1st select and 2nd has syntax error"
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(select-rows "SELECT 1; SELECT;"))
(test* "Checking multiple SELECT statements"
'(#(403) #(301 #f) #(302 #f))
(select-rows "SELECT id FROM tbl1 WHERE id IN (403); SELECT id, name FROM tbl1 WHERE id IN (301, 302)"))
(test* "Checking parameter bindings"
'(#("abcdeあ" #xffff #x7fffffffffffffff 0.99 #f))
(select-rows "SELECT ?, ?, ?, ?, ?;"
"abcdeあ" #xffff #x7fffffffffffffff 0.99 #f))
(test* "Checking named parameter bindings (pass-through)"
'(#("abcdeあ" #xffff #x7fffffffffffffff #x-8000000000000000 #u8(0 1 15 255) 0.99 #f))
(select-rows2
(string-append
"SELECT "
" :string_multibyte1, :small_int, :bigpositive_num, :bignegative_num"
", :u8vector, :float, :null1"
)
:string_multibyte1 "abcdeあ"
:small_int #xffff
:bigpositive_num #x7fffffffffffffff
:bignegative_num #x-8000000000000000
:u8vector #u8(0 1 15 255)
:float 0.99
:null1 #f))
(cond
[(version<=? (gauche-version) "0.9.3.3")
(test* "Checking named parameter overflow number (pass-through)"
'(#(#x-8000000000000000 #x7fffffffffffffff))
(select-rows2
"SELECT :overflow_negative_num, :overflow_positive_num"
:overflow_negative_num #x-8000000000000001
:overflow_positive_num #x8000000000000000))]
[else
(test* "Checking named parameter overflow positive number (pass-through)"
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(select-rows2
"SELECT :overflow_positive_num"
:overflow_positive_num #x8000000000000000))
(test* "Checking named parameter overflow negative number (pass-through)"
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(select-rows2
"SELECT :overflow_negative_num"
:overflow_negative_num #x-8000000000000001))])
(test* "Checking named parameter bindings 2 (pass-through)"
'(#(1 2 3 4 5 6 7))
(select-rows2
(string-append
"SELECT "
" :a1"
", @a2"
", $a3"
", ?4"
", ?"
", ?6"
", :a7")
:a1 1 :@a2 2 :$a3 3 :4 4 :? 5 :?6 6 ::a7 7))
(test* "Checking compound statements with named parameter (pass-through)"
'(#(1 2) #(1 3))
(select-rows2
(string-append
"SELECT :a1, :a2;"
"SELECT :a1, :a3;")
:a1 1 :a2 2 :a3 3))
(test* "Checking compound statements with no parameter (pass-through)"
'(#("a1" "a2") #("a3" "a4"))
(select-rows2
(string-append
"SELECT 'a1', 'a2';"
"SELECT 'a3', 'a4';")))
(test* "Checking invalid parameter (pass-through)"
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(select-rows2 "SELECT :a1_a" :a1-a 2))
(cond
[(version>? (sqlite3-libversion) "3.7.12")
(test* "Checking VACUUM is not working when there is pending statement."
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(let1 pending-rset (dbi-do connection "SELECT 1 FROM tbl1;")
(guard (e [else
(print (condition-ref e 'message))
(dbi-close pending-rset)
(raise e)])
(dbi-do connection "VACUUM"))))
(test* "Checking VACUUM is working."
'()
(map (^x x) (dbi-do connection "VACUUM;")))]
[else
(test* "Checking VACUUM is not working."
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(dbi-do connection "VACUUM"))])
(test* "Checking statement which has end of space."
'(#(1))
(select-rows2 "SELECT 1; "))
(test* "Checking no working statements"
'()
(sqlite3-working-statements connection))
(test* "Checking dbi-tables"
'("tbl1" "tbl2")
(dbi-tables connection))
(test* "Checking still open connection"
#t
(dbi-open? connection))
(test* "Checking closing connection"
#t
(dbi-close connection))
(test* "Checking connection was closed"
#f
(dbi-open? connection))
(let1 con1 (dbi-connect "dbi:sqlite3:test.db")
(unwind-protect
(let1 con2 (dbi-connect "dbi:sqlite3:test.db")
(unwind-protect
(let1 r1 (dbi-do con1 "BEGIN; INSERT INTO tbl1 (id) VALUES(501);")
(unwind-protect
(test* "Checking raising error if busy time is not set"
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(dbi-do con2 "BEGIN; INSERT INTO tbl1 (id) VALUES(502);"))
(dbi-close r1)))
(dbi-do con2 "ROLLBACK;")
1 second
(sqlite3-set-timeout con2 1000)
(test* "Checking timed out if another process lock db too long time"
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(dbi-do con2 "BEGIN; INSERT INTO tbl1 (id) VALUES(502);"))
(dbi-do con2 "ROLLBACK;")
3 seconds
(sqlite3-set-timeout con2 3000)
(let1 th (make-thread
(^ ()
(sys-sleep 1)
(dbi-do con1 "COMMIT;")))
(thread-start! th))
(test* "Checking waiting for another process release db"
'(#(501) #(502))
(begin
(dbi-do con2 "BEGIN; INSERT INTO tbl1 (id) VALUES(502);COMMIT;")
(map identity (dbi-do con2 "SELECT id FROM tbl1 WHERE id IN (501, 502)"))))
(dbi-close con2)))
(dbi-close con1)))
(let* ([con (dbi-connect "dbi:sqlite3:test.db")]
[r1 #f]
[r2 #f])
(unwind-protect
(begin
(set! r1 (dbi-do con "BEGIN; INSERT INTO tbl1 (id) VALUES(601);"))
(set! r2 (dbi-do con "SELECT id FROM tbl1 WHERE 600 <= id AND id <= 699")))
(begin
(test* "Checking working procedure"
1
(length (sqlite3-working-statements con)))
(dbi-close con))))
(cond-expand
FIXME : version ca n't file ..
[gauche.os.cygwin]
[else
(test* "Checking failed to open db"
(test-error (with-module dbd.sqlite3 <sqlite3-error>))
(begin
(with-output-to-file "unacceptable.db"
(^()))
(sys-chmod "unacceptable.db" #o000)
(dbi-connect "dbi:sqlite3:unacceptable.db")))])
(test* "Checking multibyte filename"
#t
(let1 c (dbi-connect "dbi:sqlite3:てすと.db")
(unwind-protect
(dbi-open? c)
(dbi-close c))))
(test-end)
(cleanup-test)
|
64772ec513ee7c8d7a66e3aa5396e88f3c699ee606a83097c84a0213aa219992 | chenyukang/eopl | 18.scm | (load-relative "../libs/init.scm")
(load-relative "./base/test.scm")
(load-relative "./base/data-structures.scm")
(load-relative "./base/type-structures.scm")
(load-relative "./base/type-module.scm")
(load-relative "./base/grammar.scm")
(load-relative "./base/renaming.scm")
(load-relative "./base/subtyping.scm")
(load-relative "./base/expand-type.scm")
(load-relative "./base/type-cases.scm")
;; code refactor
| null | https://raw.githubusercontent.com/chenyukang/eopl/0406ff23b993bfe020294fa70d2597b1ce4f9b78/ch8/18.scm | scheme | code refactor | (load-relative "../libs/init.scm")
(load-relative "./base/test.scm")
(load-relative "./base/data-structures.scm")
(load-relative "./base/type-structures.scm")
(load-relative "./base/type-module.scm")
(load-relative "./base/grammar.scm")
(load-relative "./base/renaming.scm")
(load-relative "./base/subtyping.scm")
(load-relative "./base/expand-type.scm")
(load-relative "./base/type-cases.scm")
|
a943f6325806fa345743cc8900c51499562ea4ed29cd5d36d802618bc3dfd7e3 | msakai/toysolver | Rational.hs | {-# OPTIONS_HADDOCK show-extensions #-}
# LANGUAGE FlexibleInstances #
{-# LANGUAGE TypeSynonymInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : ToySolver.Data.Polynomial.Factorization.Rational
Copyright : ( c ) 2013
-- License : BSD-style
--
-- Maintainer :
-- Stability : provisional
-- Portability : non-portable
--
-----------------------------------------------------------------------------
module ToySolver.Data.Polynomial.Factorization.Rational () where
import Data.List (foldl')
import Data.Ratio
import ToySolver.Data.Polynomial.Base (UPolynomial)
import qualified ToySolver.Data.Polynomial.Base as P
import ToySolver.Data.Polynomial.Factorization.Integer ()
instance P.Factor (UPolynomial Rational) where
factor 0 = [(0,1)]
factor p = [(P.constant c, 1) | c /= 1] ++ qs2
where
qs = P.factor $ P.pp p
qs2 = [(P.mapCoeff fromInteger q, m) | (q,m) <- qs, P.deg q > 0]
c = toRational (product [(P.coeff P.mone q)^m | (q,m) <- qs, P.deg q == 0]) * P.cont p
| null | https://raw.githubusercontent.com/msakai/toysolver/6233d130d3dcea32fa34c26feebd151f546dea85/src/ToySolver/Data/Polynomial/Factorization/Rational.hs | haskell | # OPTIONS_HADDOCK show-extensions #
# LANGUAGE TypeSynonymInstances #
---------------------------------------------------------------------------
|
Module : ToySolver.Data.Polynomial.Factorization.Rational
License : BSD-style
Maintainer :
Stability : provisional
Portability : non-portable
--------------------------------------------------------------------------- | # LANGUAGE FlexibleInstances #
Copyright : ( c ) 2013
module ToySolver.Data.Polynomial.Factorization.Rational () where
import Data.List (foldl')
import Data.Ratio
import ToySolver.Data.Polynomial.Base (UPolynomial)
import qualified ToySolver.Data.Polynomial.Base as P
import ToySolver.Data.Polynomial.Factorization.Integer ()
instance P.Factor (UPolynomial Rational) where
factor 0 = [(0,1)]
factor p = [(P.constant c, 1) | c /= 1] ++ qs2
where
qs = P.factor $ P.pp p
qs2 = [(P.mapCoeff fromInteger q, m) | (q,m) <- qs, P.deg q > 0]
c = toRational (product [(P.coeff P.mone q)^m | (q,m) <- qs, P.deg q == 0]) * P.cont p
|
7fccb8b71b3a7fd1a661826cf288f997f302d2ffcb8863d6da607213e9d9675f | obsidiansystems/beam-automigrate | Postgres.hs | # LANGUAGE LambdaCase #
{-# LANGUAGE MultiWayIf #-}
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE ViewPatterns #
# OPTIONS_GHC -fno - warn - orphans #
module Database.Beam.AutoMigrate.Postgres
( getSchema,
)
where
import Control.Monad.State
import Data.Bits (shiftR, (.&.))
import Data.ByteString (ByteString)
import Data.Foldable (asum, foldlM)
import Data.Map (Map)
import qualified Data.Map.Strict as M
import Data.Maybe (fromMaybe)
import Data.Set (Set)
import qualified Data.Set as S
import Data.String
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Vector as V
import Database.Beam.AutoMigrate.Types
import Database.Beam.Backend.SQL hiding (tableName)
import qualified Database.Beam.Backend.SQL.AST as AST
import qualified Database.PostgreSQL.Simple as Pg
import Database.PostgreSQL.Simple.FromField (FromField (..), fromField, returnError)
import Database.PostgreSQL.Simple.FromRow (FromRow (..), field)
import qualified Database.PostgreSQL.Simple.TypeInfo.Static as Pg
import qualified Database.PostgreSQL.Simple.Types as Pg
--
-- Necessary types to make working with the underlying raw SQL a bit more pleasant
--
data SqlRawOtherConstraintType
= SQL_raw_pk
| SQL_raw_unique
deriving (Show, Eq)
data SqlOtherConstraint = SqlOtherConstraint
{ sqlCon_name :: Text,
sqlCon_constraint_type :: SqlRawOtherConstraintType,
sqlCon_table :: TableName,
sqlCon_fk_colums :: V.Vector ColumnName
}
deriving (Show, Eq)
instance Pg.FromRow SqlOtherConstraint where
fromRow =
SqlOtherConstraint <$> field
<*> field
<*> fmap TableName field
<*> fmap (V.map ColumnName) field
data SqlForeignConstraint = SqlForeignConstraint
{ sqlFk_foreign_table :: TableName,
sqlFk_primary_table :: TableName,
| The columns in the table .
sqlFk_fk_columns :: V.Vector ColumnName,
-- | The columns in the /current/ table.
sqlFk_pk_columns :: V.Vector ColumnName,
sqlFk_name :: Text
}
deriving (Show, Eq)
instance Pg.FromRow SqlForeignConstraint where
fromRow =
SqlForeignConstraint <$> fmap TableName field
<*> fmap TableName field
<*> fmap (V.map ColumnName) field
<*> fmap (V.map ColumnName) field
<*> field
instance FromField TableName where
fromField f dat = TableName <$> fromField f dat
instance FromField ColumnName where
fromField f dat = ColumnName <$> fromField f dat
instance FromField SqlRawOtherConstraintType where
fromField f dat = do
t :: String <- fromField f dat
case t of
"p" -> pure SQL_raw_pk
"u" -> pure SQL_raw_unique
_ -> returnError Pg.ConversionFailed f t
--
-- Postgres queries to extract the schema out of the DB
--
-- | A SQL query to select all user's queries, skipping any beam-related tables (i.e. leftovers from
-- beam-migrate, for example).
userTablesQ :: Pg.Query
userTablesQ =
fromString $
unlines
[ "SELECT cl.oid, relname FROM pg_catalog.pg_class \"cl\" join pg_catalog.pg_namespace \"ns\" ",
"on (ns.oid = relnamespace) where nspname = any (current_schemas(false)) and relkind='r' ",
"and relname NOT LIKE 'beam_%'"
]
-- | Get information about default values for /all/ tables.
defaultsQ :: Pg.Query
defaultsQ =
fromString $
unlines
[ "SELECT col.table_name::text, col.column_name::text, col.column_default::text, col.data_type::text ",
"FROM information_schema.columns col ",
"WHERE col.column_default IS NOT NULL ",
"AND col.table_schema NOT IN('information_schema', 'pg_catalog') ",
"ORDER BY col.table_name"
]
-- | Get information about columns for this table. Due to the fact this is a query executed for /each/
-- table, is important this is as light as possible to keep the performance decent.
tableColumnsQ :: Pg.Query
tableColumnsQ =
fromString $
unlines
[ "SELECT attname, atttypid, atttypmod, attnotnull, pg_catalog.format_type(atttypid, atttypmod) ",
"FROM pg_catalog.pg_attribute att ",
"WHERE att.attrelid=? AND att.attnum>0 AND att.attisdropped='f' "
]
-- | Get the enumeration data for all enum types in the database.
enumerationsQ :: Pg.Query
enumerationsQ =
fromString $
unlines
[ "SELECT t.typname, t.oid, array_agg(e.enumlabel ORDER BY e.enumsortorder)",
"FROM pg_enum e JOIN pg_type t ON t.oid = e.enumtypid",
"GROUP BY t.typname, t.oid"
]
-- | Get the sequence data for all sequence types in the database.
sequencesQ :: Pg.Query
sequencesQ = fromString "SELECT c.relname FROM pg_class c WHERE c.relkind = 'S'"
-- | Return all foreign key constraints for /all/ 'Table's.
foreignKeysQ :: Pg.Query
foreignKeysQ =
fromString $
unlines
[ "SELECT kcu.table_name::text as foreign_table,",
" rel_kcu.table_name::text as primary_table,",
" array_agg(kcu.column_name::text ORDER BY kcu.position_in_unique_constraint)::text[] as fk_columns,",
" array_agg(rel_kcu.column_name::text ORDER BY rel_kcu.ordinal_position)::text[] as pk_columns,",
" kcu.constraint_name as cname",
"FROM information_schema.table_constraints tco",
"JOIN information_schema.key_column_usage kcu",
" on tco.constraint_schema = kcu.constraint_schema",
" and tco.constraint_name = kcu.constraint_name",
"JOIN information_schema.referential_constraints rco",
" on tco.constraint_schema = rco.constraint_schema",
" and tco.constraint_name = rco.constraint_name",
"JOIN information_schema.key_column_usage rel_kcu",
" on rco.unique_constraint_schema = rel_kcu.constraint_schema",
" and rco.unique_constraint_name = rel_kcu.constraint_name",
" and kcu.ordinal_position = rel_kcu.ordinal_position",
"GROUP BY foreign_table, primary_table, cname"
]
-- | Return /all other constraints that are not FKs/ (i.e. 'PRIMARY KEY', 'UNIQUE', etc) for all the tables.
otherConstraintsQ :: Pg.Query
otherConstraintsQ =
fromString $
unlines
[ "SELECT c.conname AS constraint_name,",
" c.contype AS constraint_type,",
" tbl.relname AS \"table\",",
" ARRAY_AGG(col.attname ORDER BY u.attposition) AS columns",
"FROM pg_constraint c",
" JOIN LATERAL UNNEST(c.conkey) WITH ORDINALITY AS u(attnum, attposition) ON TRUE",
" JOIN pg_class tbl ON tbl.oid = c.conrelid",
" JOIN pg_namespace sch ON sch.oid = tbl.relnamespace",
" JOIN pg_attribute col ON (col.attrelid = tbl.oid AND col.attnum = u.attnum)",
"WHERE c.contype = 'u' OR c.contype = 'p'",
"GROUP BY constraint_name, constraint_type, \"table\"",
"ORDER BY c.contype"
]
-- | Return all \"action types\" for /all/ the constraints.
referenceActionsQ :: Pg.Query
referenceActionsQ =
fromString $
unlines
[ "SELECT c.conname, c. confdeltype, c.confupdtype FROM ",
"(SELECT r.conrelid, r.confrelid, unnest(r.conkey) AS conkey, unnest(r.confkey) AS confkey, r.conname, r.confupdtype, r.confdeltype ",
"FROM pg_catalog.pg_constraint r WHERE r.contype = 'f') AS c ",
"INNER JOIN pg_attribute a_parent ON a_parent.attnum = c.confkey AND a_parent.attrelid = c.confrelid ",
"INNER JOIN pg_class cl_parent ON cl_parent.oid = c.confrelid ",
"INNER JOIN pg_namespace sch_parent ON sch_parent.oid = cl_parent.relnamespace ",
"INNER JOIN pg_attribute a_child ON a_child.attnum = c.conkey AND a_child.attrelid = c.conrelid ",
"INNER JOIN pg_class cl_child ON cl_child.oid = c.conrelid ",
"INNER JOIN pg_namespace sch_child ON sch_child.oid = cl_child.relnamespace ",
"WHERE sch_child.nspname = current_schema() ORDER BY c.conname "
]
-- | Connects to a running PostgreSQL database and extract the relevant 'Schema' out of it.
getSchema :: Pg.Connection -> IO Schema
getSchema conn = do
allTableConstraints <- getAllConstraints conn
allDefaults <- getAllDefaults conn
enumerationData <- Pg.fold_ conn enumerationsQ mempty getEnumeration
sequences <- Pg.fold_ conn sequencesQ mempty getSequence
tables <-
Pg.fold_ conn userTablesQ mempty (getTable allDefaults enumerationData allTableConstraints)
pure $ Schema tables (M.fromList $ M.elems enumerationData) sequences
where
getEnumeration ::
Map Pg.Oid (EnumerationName, Enumeration) ->
(Text, Pg.Oid, V.Vector Text) ->
IO (Map Pg.Oid (EnumerationName, Enumeration))
getEnumeration allEnums (enumName, oid, V.toList -> vals) =
pure $ M.insert oid (EnumerationName enumName, Enumeration vals) allEnums
getSequence ::
Sequences ->
Pg.Only Text ->
IO Sequences
getSequence allSeqs (Pg.Only seqName) =
case T.splitOn "___" seqName of
[tName, cName, "seq"] ->
pure $ M.insert (SequenceName seqName) (Sequence (TableName tName) (ColumnName cName)) allSeqs
_ -> pure allSeqs
getTable ::
AllDefaults ->
Map Pg.Oid (EnumerationName, Enumeration) ->
AllTableConstraints ->
Tables ->
(Pg.Oid, Text) ->
IO Tables
getTable allDefaults enumData allTableConstraints allTables (oid, TableName -> tName) = do
pgColumns <- Pg.query conn tableColumnsQ (Pg.Only oid)
newTable <-
Table (fromMaybe noTableConstraints (M.lookup tName allTableConstraints))
<$> foldlM (getColumns tName enumData allDefaults) mempty pgColumns
pure $ M.insert tName newTable allTables
getColumns ::
TableName ->
Map Pg.Oid (EnumerationName, Enumeration) ->
AllDefaults ->
Columns ->
(ByteString, Pg.Oid, Int, Bool, ByteString) ->
IO Columns
getColumns tName enumData defaultData c (attname, atttypid, atttypmod, attnotnull, format_type) = do
/NOTA BENE(adn)/ : The atttypmod - 4 was originally taken from ' beam - migrate '
( see : )
-- but there are cases where this is not correct, for example in the case of bitstrings.
-- See for example: -does-atttypmod-differ-from-character-maximum-length
let mbPrecision =
if
| atttypmod == -1 -> Nothing
| Pg.typoid Pg.bit == atttypid -> Just atttypmod
| Pg.typoid Pg.varbit == atttypid -> Just atttypmod
| otherwise -> Just (atttypmod - 4)
let columnName = ColumnName (TE.decodeUtf8 attname)
let mbDefault = do
x <- M.lookup tName defaultData
M.lookup columnName x
case asum
[ pgSerialTyColumnType atttypid mbDefault,
pgTypeToColumnType atttypid mbPrecision,
pgEnumTypeToColumnType enumData atttypid
] of
Just cType -> do
let nullConstraint = if attnotnull then S.fromList [NotNull] else mempty
let inferredConstraints = nullConstraint <> fromMaybe mempty (S.singleton <$> mbDefault)
let newColumn = Column cType inferredConstraints
pure $ M.insert columnName newColumn c
Nothing ->
fail $
"Couldn't convert pgType "
<> show format_type
<> " of field "
<> show attname
<> " into a valid ColumnType."
--
-- Postgres type mapping
--
pgEnumTypeToColumnType ::
Map Pg.Oid (EnumerationName, Enumeration) ->
Pg.Oid ->
Maybe ColumnType
pgEnumTypeToColumnType enumData oid =
(\(n, _) -> PgSpecificType (PgEnumeration n)) <$> M.lookup oid enumData
pgSerialTyColumnType ::
Pg.Oid ->
Maybe ColumnConstraint ->
Maybe ColumnType
pgSerialTyColumnType oid (Just (Default d)) = do
guard $ (Pg.typoid Pg.int4 == oid && "nextval" `T.isInfixOf` d && "seq" `T.isInfixOf` d)
pure $ SqlStdType intType
pgSerialTyColumnType _ _ = Nothing
| Tries to convert from a Postgres ' ' Oid ' into ' ColumnType ' .
-- Mostly taken from [beam-migrate](Database.Beam.Postgres.Migrate).
pgTypeToColumnType :: Pg.Oid -> Maybe Int -> Maybe ColumnType
pgTypeToColumnType oid width
| Pg.typoid Pg.int2 == oid =
Just (SqlStdType smallIntType)
| Pg.typoid Pg.int4 == oid =
Just (SqlStdType intType)
| Pg.typoid Pg.int8 == oid =
Just (SqlStdType bigIntType)
| Pg.typoid Pg.bpchar == oid =
Just (SqlStdType $ charType (fromIntegral <$> width) Nothing)
| Pg.typoid Pg.varchar == oid =
Just (SqlStdType $ varCharType (fromIntegral <$> width) Nothing)
| Pg.typoid Pg.bit == oid =
Just (SqlStdType $ bitType (fromIntegral <$> width))
| Pg.typoid Pg.varbit == oid =
Just (SqlStdType $ varBitType (fromIntegral <$> width))
| Pg.typoid Pg.numeric == oid =
let decimals = fromMaybe 0 width .&. 0xFFFF
prec = (fromMaybe 0 width `shiftR` 16) .&. 0xFFFF
in case (prec, decimals) of
(0, 0) -> Just (SqlStdType $ numericType Nothing)
(p, 0) -> Just (SqlStdType $ numericType $ Just (fromIntegral p, Nothing))
_ -> Just (SqlStdType $ numericType (Just (fromIntegral prec, Just (fromIntegral decimals))))
| Pg.typoid Pg.float4 == oid =
Just (SqlStdType realType)
| Pg.typoid Pg.float8 == oid =
Just (SqlStdType doubleType)
| Pg.typoid Pg.date == oid =
Just (SqlStdType dateType)
| Pg.typoid Pg.text == oid =
Just (SqlStdType characterLargeObjectType)
-- I am not sure if this is a bug in beam-core, but both 'characterLargeObjectType' and 'binaryLargeObjectType'
-- get mapped into 'AST.DataTypeCharacterLargeObject', which yields TEXT, whereas we want the latter to
yield bytea .
| Pg.typoid Pg.bytea == oid =
Just (SqlStdType AST.DataTypeBinaryLargeObject)
| Pg.typoid Pg.bool == oid =
Just (SqlStdType booleanType)
| Pg.typoid Pg.time == oid =
Just (SqlStdType $ timeType Nothing False)
| Pg.typoid Pg.timestamp == oid =
Just (SqlStdType $timestampType Nothing False)
| Pg.typoid Pg.timestamptz == oid =
Just (SqlStdType $ timestampType Nothing True)
| Pg.typoid Pg.json == oid =
-- json types
Just (PgSpecificType PgJson)
| Pg.typoid Pg.jsonb == oid =
Just (PgSpecificType PgJsonB)
-- range types
| Pg.typoid Pg.int4range == oid =
Just (PgSpecificType PgRangeInt4)
| Pg.typoid Pg.int8range == oid =
Just (PgSpecificType PgRangeInt8)
| Pg.typoid Pg.numrange == oid =
Just (PgSpecificType PgRangeNum)
| Pg.typoid Pg.tsrange == oid =
Just (PgSpecificType PgRangeTs)
| Pg.typoid Pg.tstzrange == oid =
Just (PgSpecificType PgRangeTsTz)
| Pg.typoid Pg.daterange == oid =
Just (PgSpecificType PgRangeDate)
| Pg.typoid Pg.uuid == oid =
Just (PgSpecificType PgUuid)
| Pg.typoid Pg.oid == oid =
Just (PgSpecificType PgOid)
| otherwise =
Nothing
--
-- Constraints discovery
--
type AllTableConstraints = Map TableName (Set TableConstraint)
type AllDefaults = Map TableName Defaults
type Defaults = Map ColumnName ColumnConstraint
-- Get all defaults values for /all/ the columns.
-- FIXME(adn) __IMPORTANT:__ This function currently __always_ attach an explicit type annotation to the
-- default value, by reading its 'date_type' field, to resolve potential ambiguities.
-- The reason for this is that we cannot reliably guarantee a convertion between default values are read
by postgres and values we infer on the side ( using the ' beam - core ' machinery ) . In theory we
-- wouldn't need to explicitly annotate the types before generating a 'Default' constraint on the 'Schema'
-- side, but this doesn't always work. For example, if we **always** specify a \"::numeric\" annotation for
an ' Int ' , Postgres might yield \"-1::integer\ " for non - positive values and simply \"-1\ " for all the rest .
-- To complicate the situation /even if/ we explicitly specify the cast
( i.e. \"SET DEFAULT ' ? : : character varying ' ) , Postgres will ignore this when reading the default back .
-- What we do here is obviously not optimal, but on the other hand it's not clear to me how to solve this
-- in a meaningful and non-invasive way, for a number of reasons:
--
-- * For example \"beam-migrate"\ seems to resort to be using explicit serialisation for the types, although
-- I couldn't find explicit trace if that applies for defaults explicitly.
( cfr . the \"Database . Beam . AutoMigrate . " module in \"beam - migrate\ " ) .
--
-- * Another big problem is __rounding__: For example if we insert as \"double precision\" the following:
Default " ' -0.22030397057804563 ' " , Postgres will round the value and return Default " ' -0.220303970578046 ' " .
-- Again, it's not clear to me how to prevent the users from shooting themselves here.
--
* Another quirk is with dates : \"beam\ " renders a date like - 05 - 10\ ' ( note the single quotes ) but
-- Postgres strip those when reading the default value back.
--
-- * Range types are also tricky to infer. 'beam-core' escapes the range type name when rendering its default
value , whereas Postgres annotates each individual field and yield the unquoted identifier . Compare :
1 . Beam : \""numrange"(0 , 2 , ' [ ) ' ) \ "
2 . Postgres : \"numrange((0)::numeric , ( 2)::numeric , ' [ ) ' : : text)\ "
--
getAllDefaults :: Pg.Connection -> IO AllDefaults
getAllDefaults conn = Pg.fold_ conn defaultsQ mempty (\acc -> pure . addDefault acc)
where
addDefault :: AllDefaults -> (TableName, ColumnName, Text, Text) -> AllDefaults
addDefault m (tName, colName, defValue, dataType) =
let cleanedDefault = case T.breakOn "::" defValue of
(uncasted, defMb)
| T.null defMb ->
"'" <> T.dropAround ((==) '\'') uncasted <> "'::" <> dataType
_ -> defValue
entry = M.singleton colName (Default cleanedDefault)
in M.alter
( \case
Nothing -> Just entry
Just ss -> Just $ ss <> entry
)
tName
m
getAllConstraints :: Pg.Connection -> IO AllTableConstraints
getAllConstraints conn = do
allActions <- mkActions <$> Pg.query_ conn referenceActionsQ
allForeignKeys <- Pg.fold_ conn foreignKeysQ mempty (\acc -> pure . addFkConstraint allActions acc)
Pg.fold_ conn otherConstraintsQ allForeignKeys (\acc -> pure . addOtherConstraint acc)
where
addFkConstraint ::
ReferenceActions ->
AllTableConstraints ->
SqlForeignConstraint ->
AllTableConstraints
addFkConstraint actions st SqlForeignConstraint {..} = flip execState st $ do
let currentTable = sqlFk_foreign_table
let columnSet = S.fromList $ zip (V.toList sqlFk_fk_columns) (V.toList sqlFk_pk_columns)
let (onDelete, onUpdate) =
case M.lookup sqlFk_name (getActions actions) of
Nothing -> (NoAction, NoAction)
Just a -> (actionOnDelete a, actionOnUpdate a)
addTableConstraint currentTable (ForeignKey sqlFk_name sqlFk_primary_table columnSet onDelete onUpdate)
addOtherConstraint ::
AllTableConstraints ->
SqlOtherConstraint ->
AllTableConstraints
addOtherConstraint st SqlOtherConstraint {..} = flip execState st $ do
let currentTable = sqlCon_table
let columnSet = S.fromList . V.toList $ sqlCon_fk_colums
case sqlCon_constraint_type of
SQL_raw_unique -> addTableConstraint currentTable (Unique sqlCon_name columnSet)
SQL_raw_pk -> if S.null columnSet then pure () else
addTableConstraint currentTable (PrimaryKey sqlCon_name columnSet)
newtype ReferenceActions = ReferenceActions {getActions :: Map Text Actions}
newtype RefEntry = RefEntry {unRefEntry :: (Text, ReferenceAction, ReferenceAction)}
mkActions :: [RefEntry] -> ReferenceActions
mkActions = ReferenceActions . M.fromList . map ((\(a, b, c) -> (a, Actions b c)) . unRefEntry)
instance Pg.FromRow RefEntry where
fromRow =
fmap
RefEntry
( (,,) <$> field
<*> fmap mkAction field
<*> fmap mkAction field
)
data Actions = Actions
{ actionOnDelete :: ReferenceAction,
actionOnUpdate :: ReferenceAction
}
mkAction :: Text -> ReferenceAction
mkAction c = case c of
"a" -> NoAction
"r" -> Restrict
"c" -> Cascade
"n" -> SetNull
"d" -> SetDefault
_ -> error . T.unpack $ "unknown reference action type: " <> c
--
-- Useful combinators to add constraints for a column or table if already there.
--
addTableConstraint ::
TableName ->
TableConstraint ->
State AllTableConstraints ()
addTableConstraint tName cns =
modify'
( M.alter
( \case
Nothing -> Just $ S.singleton cns
Just ss -> Just $ S.insert cns ss
)
tName
)
| null | https://raw.githubusercontent.com/obsidiansystems/beam-automigrate/6e1f316576d8ad0b9a91f691059d67952e51c648/src/Database/Beam/AutoMigrate/Postgres.hs | haskell | # LANGUAGE MultiWayIf #
Necessary types to make working with the underlying raw SQL a bit more pleasant
| The columns in the /current/ table.
Postgres queries to extract the schema out of the DB
| A SQL query to select all user's queries, skipping any beam-related tables (i.e. leftovers from
beam-migrate, for example).
| Get information about default values for /all/ tables.
| Get information about columns for this table. Due to the fact this is a query executed for /each/
table, is important this is as light as possible to keep the performance decent.
| Get the enumeration data for all enum types in the database.
| Get the sequence data for all sequence types in the database.
| Return all foreign key constraints for /all/ 'Table's.
| Return /all other constraints that are not FKs/ (i.e. 'PRIMARY KEY', 'UNIQUE', etc) for all the tables.
| Return all \"action types\" for /all/ the constraints.
| Connects to a running PostgreSQL database and extract the relevant 'Schema' out of it.
but there are cases where this is not correct, for example in the case of bitstrings.
See for example: -does-atttypmod-differ-from-character-maximum-length
Postgres type mapping
Mostly taken from [beam-migrate](Database.Beam.Postgres.Migrate).
I am not sure if this is a bug in beam-core, but both 'characterLargeObjectType' and 'binaryLargeObjectType'
get mapped into 'AST.DataTypeCharacterLargeObject', which yields TEXT, whereas we want the latter to
json types
range types
Constraints discovery
Get all defaults values for /all/ the columns.
FIXME(adn) __IMPORTANT:__ This function currently __always_ attach an explicit type annotation to the
default value, by reading its 'date_type' field, to resolve potential ambiguities.
The reason for this is that we cannot reliably guarantee a convertion between default values are read
wouldn't need to explicitly annotate the types before generating a 'Default' constraint on the 'Schema'
side, but this doesn't always work. For example, if we **always** specify a \"::numeric\" annotation for
To complicate the situation /even if/ we explicitly specify the cast
What we do here is obviously not optimal, but on the other hand it's not clear to me how to solve this
in a meaningful and non-invasive way, for a number of reasons:
* For example \"beam-migrate"\ seems to resort to be using explicit serialisation for the types, although
I couldn't find explicit trace if that applies for defaults explicitly.
* Another big problem is __rounding__: For example if we insert as \"double precision\" the following:
Again, it's not clear to me how to prevent the users from shooting themselves here.
Postgres strip those when reading the default value back.
* Range types are also tricky to infer. 'beam-core' escapes the range type name when rendering its default
Useful combinators to add constraints for a column or table if already there.
| # LANGUAGE LambdaCase #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE ViewPatterns #
# OPTIONS_GHC -fno - warn - orphans #
module Database.Beam.AutoMigrate.Postgres
( getSchema,
)
where
import Control.Monad.State
import Data.Bits (shiftR, (.&.))
import Data.ByteString (ByteString)
import Data.Foldable (asum, foldlM)
import Data.Map (Map)
import qualified Data.Map.Strict as M
import Data.Maybe (fromMaybe)
import Data.Set (Set)
import qualified Data.Set as S
import Data.String
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Vector as V
import Database.Beam.AutoMigrate.Types
import Database.Beam.Backend.SQL hiding (tableName)
import qualified Database.Beam.Backend.SQL.AST as AST
import qualified Database.PostgreSQL.Simple as Pg
import Database.PostgreSQL.Simple.FromField (FromField (..), fromField, returnError)
import Database.PostgreSQL.Simple.FromRow (FromRow (..), field)
import qualified Database.PostgreSQL.Simple.TypeInfo.Static as Pg
import qualified Database.PostgreSQL.Simple.Types as Pg
data SqlRawOtherConstraintType
= SQL_raw_pk
| SQL_raw_unique
deriving (Show, Eq)
data SqlOtherConstraint = SqlOtherConstraint
{ sqlCon_name :: Text,
sqlCon_constraint_type :: SqlRawOtherConstraintType,
sqlCon_table :: TableName,
sqlCon_fk_colums :: V.Vector ColumnName
}
deriving (Show, Eq)
instance Pg.FromRow SqlOtherConstraint where
fromRow =
SqlOtherConstraint <$> field
<*> field
<*> fmap TableName field
<*> fmap (V.map ColumnName) field
data SqlForeignConstraint = SqlForeignConstraint
{ sqlFk_foreign_table :: TableName,
sqlFk_primary_table :: TableName,
| The columns in the table .
sqlFk_fk_columns :: V.Vector ColumnName,
sqlFk_pk_columns :: V.Vector ColumnName,
sqlFk_name :: Text
}
deriving (Show, Eq)
instance Pg.FromRow SqlForeignConstraint where
fromRow =
SqlForeignConstraint <$> fmap TableName field
<*> fmap TableName field
<*> fmap (V.map ColumnName) field
<*> fmap (V.map ColumnName) field
<*> field
instance FromField TableName where
fromField f dat = TableName <$> fromField f dat
instance FromField ColumnName where
fromField f dat = ColumnName <$> fromField f dat
instance FromField SqlRawOtherConstraintType where
fromField f dat = do
t :: String <- fromField f dat
case t of
"p" -> pure SQL_raw_pk
"u" -> pure SQL_raw_unique
_ -> returnError Pg.ConversionFailed f t
userTablesQ :: Pg.Query
userTablesQ =
fromString $
unlines
[ "SELECT cl.oid, relname FROM pg_catalog.pg_class \"cl\" join pg_catalog.pg_namespace \"ns\" ",
"on (ns.oid = relnamespace) where nspname = any (current_schemas(false)) and relkind='r' ",
"and relname NOT LIKE 'beam_%'"
]
defaultsQ :: Pg.Query
defaultsQ =
fromString $
unlines
[ "SELECT col.table_name::text, col.column_name::text, col.column_default::text, col.data_type::text ",
"FROM information_schema.columns col ",
"WHERE col.column_default IS NOT NULL ",
"AND col.table_schema NOT IN('information_schema', 'pg_catalog') ",
"ORDER BY col.table_name"
]
tableColumnsQ :: Pg.Query
tableColumnsQ =
fromString $
unlines
[ "SELECT attname, atttypid, atttypmod, attnotnull, pg_catalog.format_type(atttypid, atttypmod) ",
"FROM pg_catalog.pg_attribute att ",
"WHERE att.attrelid=? AND att.attnum>0 AND att.attisdropped='f' "
]
enumerationsQ :: Pg.Query
enumerationsQ =
fromString $
unlines
[ "SELECT t.typname, t.oid, array_agg(e.enumlabel ORDER BY e.enumsortorder)",
"FROM pg_enum e JOIN pg_type t ON t.oid = e.enumtypid",
"GROUP BY t.typname, t.oid"
]
sequencesQ :: Pg.Query
sequencesQ = fromString "SELECT c.relname FROM pg_class c WHERE c.relkind = 'S'"
foreignKeysQ :: Pg.Query
foreignKeysQ =
fromString $
unlines
[ "SELECT kcu.table_name::text as foreign_table,",
" rel_kcu.table_name::text as primary_table,",
" array_agg(kcu.column_name::text ORDER BY kcu.position_in_unique_constraint)::text[] as fk_columns,",
" array_agg(rel_kcu.column_name::text ORDER BY rel_kcu.ordinal_position)::text[] as pk_columns,",
" kcu.constraint_name as cname",
"FROM information_schema.table_constraints tco",
"JOIN information_schema.key_column_usage kcu",
" on tco.constraint_schema = kcu.constraint_schema",
" and tco.constraint_name = kcu.constraint_name",
"JOIN information_schema.referential_constraints rco",
" on tco.constraint_schema = rco.constraint_schema",
" and tco.constraint_name = rco.constraint_name",
"JOIN information_schema.key_column_usage rel_kcu",
" on rco.unique_constraint_schema = rel_kcu.constraint_schema",
" and rco.unique_constraint_name = rel_kcu.constraint_name",
" and kcu.ordinal_position = rel_kcu.ordinal_position",
"GROUP BY foreign_table, primary_table, cname"
]
otherConstraintsQ :: Pg.Query
otherConstraintsQ =
fromString $
unlines
[ "SELECT c.conname AS constraint_name,",
" c.contype AS constraint_type,",
" tbl.relname AS \"table\",",
" ARRAY_AGG(col.attname ORDER BY u.attposition) AS columns",
"FROM pg_constraint c",
" JOIN LATERAL UNNEST(c.conkey) WITH ORDINALITY AS u(attnum, attposition) ON TRUE",
" JOIN pg_class tbl ON tbl.oid = c.conrelid",
" JOIN pg_namespace sch ON sch.oid = tbl.relnamespace",
" JOIN pg_attribute col ON (col.attrelid = tbl.oid AND col.attnum = u.attnum)",
"WHERE c.contype = 'u' OR c.contype = 'p'",
"GROUP BY constraint_name, constraint_type, \"table\"",
"ORDER BY c.contype"
]
referenceActionsQ :: Pg.Query
referenceActionsQ =
fromString $
unlines
[ "SELECT c.conname, c. confdeltype, c.confupdtype FROM ",
"(SELECT r.conrelid, r.confrelid, unnest(r.conkey) AS conkey, unnest(r.confkey) AS confkey, r.conname, r.confupdtype, r.confdeltype ",
"FROM pg_catalog.pg_constraint r WHERE r.contype = 'f') AS c ",
"INNER JOIN pg_attribute a_parent ON a_parent.attnum = c.confkey AND a_parent.attrelid = c.confrelid ",
"INNER JOIN pg_class cl_parent ON cl_parent.oid = c.confrelid ",
"INNER JOIN pg_namespace sch_parent ON sch_parent.oid = cl_parent.relnamespace ",
"INNER JOIN pg_attribute a_child ON a_child.attnum = c.conkey AND a_child.attrelid = c.conrelid ",
"INNER JOIN pg_class cl_child ON cl_child.oid = c.conrelid ",
"INNER JOIN pg_namespace sch_child ON sch_child.oid = cl_child.relnamespace ",
"WHERE sch_child.nspname = current_schema() ORDER BY c.conname "
]
getSchema :: Pg.Connection -> IO Schema
getSchema conn = do
allTableConstraints <- getAllConstraints conn
allDefaults <- getAllDefaults conn
enumerationData <- Pg.fold_ conn enumerationsQ mempty getEnumeration
sequences <- Pg.fold_ conn sequencesQ mempty getSequence
tables <-
Pg.fold_ conn userTablesQ mempty (getTable allDefaults enumerationData allTableConstraints)
pure $ Schema tables (M.fromList $ M.elems enumerationData) sequences
where
getEnumeration ::
Map Pg.Oid (EnumerationName, Enumeration) ->
(Text, Pg.Oid, V.Vector Text) ->
IO (Map Pg.Oid (EnumerationName, Enumeration))
getEnumeration allEnums (enumName, oid, V.toList -> vals) =
pure $ M.insert oid (EnumerationName enumName, Enumeration vals) allEnums
getSequence ::
Sequences ->
Pg.Only Text ->
IO Sequences
getSequence allSeqs (Pg.Only seqName) =
case T.splitOn "___" seqName of
[tName, cName, "seq"] ->
pure $ M.insert (SequenceName seqName) (Sequence (TableName tName) (ColumnName cName)) allSeqs
_ -> pure allSeqs
getTable ::
AllDefaults ->
Map Pg.Oid (EnumerationName, Enumeration) ->
AllTableConstraints ->
Tables ->
(Pg.Oid, Text) ->
IO Tables
getTable allDefaults enumData allTableConstraints allTables (oid, TableName -> tName) = do
pgColumns <- Pg.query conn tableColumnsQ (Pg.Only oid)
newTable <-
Table (fromMaybe noTableConstraints (M.lookup tName allTableConstraints))
<$> foldlM (getColumns tName enumData allDefaults) mempty pgColumns
pure $ M.insert tName newTable allTables
getColumns ::
TableName ->
Map Pg.Oid (EnumerationName, Enumeration) ->
AllDefaults ->
Columns ->
(ByteString, Pg.Oid, Int, Bool, ByteString) ->
IO Columns
getColumns tName enumData defaultData c (attname, atttypid, atttypmod, attnotnull, format_type) = do
/NOTA BENE(adn)/ : The atttypmod - 4 was originally taken from ' beam - migrate '
( see : )
let mbPrecision =
if
| atttypmod == -1 -> Nothing
| Pg.typoid Pg.bit == atttypid -> Just atttypmod
| Pg.typoid Pg.varbit == atttypid -> Just atttypmod
| otherwise -> Just (atttypmod - 4)
let columnName = ColumnName (TE.decodeUtf8 attname)
let mbDefault = do
x <- M.lookup tName defaultData
M.lookup columnName x
case asum
[ pgSerialTyColumnType atttypid mbDefault,
pgTypeToColumnType atttypid mbPrecision,
pgEnumTypeToColumnType enumData atttypid
] of
Just cType -> do
let nullConstraint = if attnotnull then S.fromList [NotNull] else mempty
let inferredConstraints = nullConstraint <> fromMaybe mempty (S.singleton <$> mbDefault)
let newColumn = Column cType inferredConstraints
pure $ M.insert columnName newColumn c
Nothing ->
fail $
"Couldn't convert pgType "
<> show format_type
<> " of field "
<> show attname
<> " into a valid ColumnType."
pgEnumTypeToColumnType ::
Map Pg.Oid (EnumerationName, Enumeration) ->
Pg.Oid ->
Maybe ColumnType
pgEnumTypeToColumnType enumData oid =
(\(n, _) -> PgSpecificType (PgEnumeration n)) <$> M.lookup oid enumData
pgSerialTyColumnType ::
Pg.Oid ->
Maybe ColumnConstraint ->
Maybe ColumnType
pgSerialTyColumnType oid (Just (Default d)) = do
guard $ (Pg.typoid Pg.int4 == oid && "nextval" `T.isInfixOf` d && "seq" `T.isInfixOf` d)
pure $ SqlStdType intType
pgSerialTyColumnType _ _ = Nothing
| Tries to convert from a Postgres ' ' Oid ' into ' ColumnType ' .
pgTypeToColumnType :: Pg.Oid -> Maybe Int -> Maybe ColumnType
pgTypeToColumnType oid width
| Pg.typoid Pg.int2 == oid =
Just (SqlStdType smallIntType)
| Pg.typoid Pg.int4 == oid =
Just (SqlStdType intType)
| Pg.typoid Pg.int8 == oid =
Just (SqlStdType bigIntType)
| Pg.typoid Pg.bpchar == oid =
Just (SqlStdType $ charType (fromIntegral <$> width) Nothing)
| Pg.typoid Pg.varchar == oid =
Just (SqlStdType $ varCharType (fromIntegral <$> width) Nothing)
| Pg.typoid Pg.bit == oid =
Just (SqlStdType $ bitType (fromIntegral <$> width))
| Pg.typoid Pg.varbit == oid =
Just (SqlStdType $ varBitType (fromIntegral <$> width))
| Pg.typoid Pg.numeric == oid =
let decimals = fromMaybe 0 width .&. 0xFFFF
prec = (fromMaybe 0 width `shiftR` 16) .&. 0xFFFF
in case (prec, decimals) of
(0, 0) -> Just (SqlStdType $ numericType Nothing)
(p, 0) -> Just (SqlStdType $ numericType $ Just (fromIntegral p, Nothing))
_ -> Just (SqlStdType $ numericType (Just (fromIntegral prec, Just (fromIntegral decimals))))
| Pg.typoid Pg.float4 == oid =
Just (SqlStdType realType)
| Pg.typoid Pg.float8 == oid =
Just (SqlStdType doubleType)
| Pg.typoid Pg.date == oid =
Just (SqlStdType dateType)
| Pg.typoid Pg.text == oid =
Just (SqlStdType characterLargeObjectType)
yield bytea .
| Pg.typoid Pg.bytea == oid =
Just (SqlStdType AST.DataTypeBinaryLargeObject)
| Pg.typoid Pg.bool == oid =
Just (SqlStdType booleanType)
| Pg.typoid Pg.time == oid =
Just (SqlStdType $ timeType Nothing False)
| Pg.typoid Pg.timestamp == oid =
Just (SqlStdType $timestampType Nothing False)
| Pg.typoid Pg.timestamptz == oid =
Just (SqlStdType $ timestampType Nothing True)
| Pg.typoid Pg.json == oid =
Just (PgSpecificType PgJson)
| Pg.typoid Pg.jsonb == oid =
Just (PgSpecificType PgJsonB)
| Pg.typoid Pg.int4range == oid =
Just (PgSpecificType PgRangeInt4)
| Pg.typoid Pg.int8range == oid =
Just (PgSpecificType PgRangeInt8)
| Pg.typoid Pg.numrange == oid =
Just (PgSpecificType PgRangeNum)
| Pg.typoid Pg.tsrange == oid =
Just (PgSpecificType PgRangeTs)
| Pg.typoid Pg.tstzrange == oid =
Just (PgSpecificType PgRangeTsTz)
| Pg.typoid Pg.daterange == oid =
Just (PgSpecificType PgRangeDate)
| Pg.typoid Pg.uuid == oid =
Just (PgSpecificType PgUuid)
| Pg.typoid Pg.oid == oid =
Just (PgSpecificType PgOid)
| otherwise =
Nothing
type AllTableConstraints = Map TableName (Set TableConstraint)
type AllDefaults = Map TableName Defaults
type Defaults = Map ColumnName ColumnConstraint
by postgres and values we infer on the side ( using the ' beam - core ' machinery ) . In theory we
an ' Int ' , Postgres might yield \"-1::integer\ " for non - positive values and simply \"-1\ " for all the rest .
( i.e. \"SET DEFAULT ' ? : : character varying ' ) , Postgres will ignore this when reading the default back .
( cfr . the \"Database . Beam . AutoMigrate . " module in \"beam - migrate\ " ) .
Default " ' -0.22030397057804563 ' " , Postgres will round the value and return Default " ' -0.220303970578046 ' " .
* Another quirk is with dates : \"beam\ " renders a date like - 05 - 10\ ' ( note the single quotes ) but
value , whereas Postgres annotates each individual field and yield the unquoted identifier . Compare :
1 . Beam : \""numrange"(0 , 2 , ' [ ) ' ) \ "
2 . Postgres : \"numrange((0)::numeric , ( 2)::numeric , ' [ ) ' : : text)\ "
getAllDefaults :: Pg.Connection -> IO AllDefaults
getAllDefaults conn = Pg.fold_ conn defaultsQ mempty (\acc -> pure . addDefault acc)
where
addDefault :: AllDefaults -> (TableName, ColumnName, Text, Text) -> AllDefaults
addDefault m (tName, colName, defValue, dataType) =
let cleanedDefault = case T.breakOn "::" defValue of
(uncasted, defMb)
| T.null defMb ->
"'" <> T.dropAround ((==) '\'') uncasted <> "'::" <> dataType
_ -> defValue
entry = M.singleton colName (Default cleanedDefault)
in M.alter
( \case
Nothing -> Just entry
Just ss -> Just $ ss <> entry
)
tName
m
getAllConstraints :: Pg.Connection -> IO AllTableConstraints
getAllConstraints conn = do
allActions <- mkActions <$> Pg.query_ conn referenceActionsQ
allForeignKeys <- Pg.fold_ conn foreignKeysQ mempty (\acc -> pure . addFkConstraint allActions acc)
Pg.fold_ conn otherConstraintsQ allForeignKeys (\acc -> pure . addOtherConstraint acc)
where
addFkConstraint ::
ReferenceActions ->
AllTableConstraints ->
SqlForeignConstraint ->
AllTableConstraints
addFkConstraint actions st SqlForeignConstraint {..} = flip execState st $ do
let currentTable = sqlFk_foreign_table
let columnSet = S.fromList $ zip (V.toList sqlFk_fk_columns) (V.toList sqlFk_pk_columns)
let (onDelete, onUpdate) =
case M.lookup sqlFk_name (getActions actions) of
Nothing -> (NoAction, NoAction)
Just a -> (actionOnDelete a, actionOnUpdate a)
addTableConstraint currentTable (ForeignKey sqlFk_name sqlFk_primary_table columnSet onDelete onUpdate)
addOtherConstraint ::
AllTableConstraints ->
SqlOtherConstraint ->
AllTableConstraints
addOtherConstraint st SqlOtherConstraint {..} = flip execState st $ do
let currentTable = sqlCon_table
let columnSet = S.fromList . V.toList $ sqlCon_fk_colums
case sqlCon_constraint_type of
SQL_raw_unique -> addTableConstraint currentTable (Unique sqlCon_name columnSet)
SQL_raw_pk -> if S.null columnSet then pure () else
addTableConstraint currentTable (PrimaryKey sqlCon_name columnSet)
newtype ReferenceActions = ReferenceActions {getActions :: Map Text Actions}
newtype RefEntry = RefEntry {unRefEntry :: (Text, ReferenceAction, ReferenceAction)}
mkActions :: [RefEntry] -> ReferenceActions
mkActions = ReferenceActions . M.fromList . map ((\(a, b, c) -> (a, Actions b c)) . unRefEntry)
instance Pg.FromRow RefEntry where
fromRow =
fmap
RefEntry
( (,,) <$> field
<*> fmap mkAction field
<*> fmap mkAction field
)
data Actions = Actions
{ actionOnDelete :: ReferenceAction,
actionOnUpdate :: ReferenceAction
}
mkAction :: Text -> ReferenceAction
mkAction c = case c of
"a" -> NoAction
"r" -> Restrict
"c" -> Cascade
"n" -> SetNull
"d" -> SetDefault
_ -> error . T.unpack $ "unknown reference action type: " <> c
addTableConstraint ::
TableName ->
TableConstraint ->
State AllTableConstraints ()
addTableConstraint tName cns =
modify'
( M.alter
( \case
Nothing -> Just $ S.singleton cns
Just ss -> Just $ S.insert cns ss
)
tName
)
|
36fc5ba1581715f60a4b77ab4842601349c340d4ae5fd52dcb29e66dfc73f6a4 | melange-re/melange | jsoo_485_test.ml | let f () =
(ref None) := Some 3
Uncaught ReferenceError : Invalid left - hand side in assignment
(*
function f() {
/* None */0 = /* Some */[3];
return /* () */0;
}*)
let () = f ()
| null | https://raw.githubusercontent.com/melange-re/melange/246e6df78fe3b6cc124cb48e5a37fdffd99379ed/jscomp/test/jsoo_485_test.ml | ocaml |
function f() {
/* None */0 = /* Some */[3];
return /* () */0;
} | let f () =
(ref None) := Some 3
Uncaught ReferenceError : Invalid left - hand side in assignment
let () = f ()
|
855ed3e6bec88f86e0c4023bd45a3921dcfff3b7eb5dc9e3a13405e9e1d42ba3 | fpco/ghc-prof-flamegraph | ProfFile.hs | # LANGUAGE TupleSections #
| Parser for .prof files generated by GHC .
module ProfFile
( Time(..)
, Line(..)
, lIndividualTime
, lInheritedTime
, lIndividualAlloc
, lInheritedAlloc
, parse
, processLines
, findStart
) where
import Control.Arrow (second, left)
import Data.Char (isSpace)
import Data.List (isPrefixOf)
import Text.Read (readEither)
import Control.Monad (unless)
import Control.Applicative
related warnings in GHC>=7.10
data Time = Time
{ tIndividual :: Double
, tInherited :: Double
} deriving (Show, Eq)
data Line = Line
{ lCostCentre :: String
, lModule :: String
, lNumber :: Int
, lEntries :: Int
, lTime :: Time
, lAlloc :: Time
, lTicks :: Int
, lBytes :: Int
, lChildren :: [Line]
} deriving (Show, Eq)
lIndividualTime :: Line -> Double
lIndividualTime = tIndividual . lTime
lInheritedTime :: Line -> Double
lInheritedTime = tInherited . lTime
lIndividualAlloc :: Line -> Double
lIndividualAlloc = tIndividual . lAlloc
lInheritedAlloc :: Line -> Double
lInheritedAlloc = tInherited . lAlloc
data ProfFormat = NoSources | IncludesSources
-- | Returns a function accepting the children and returning a fully
-- formed 'Line'.
parseLine :: ProfFormat -> String -> Either String ([Line] -> Line)
parseLine format s =
case format of
NoSources ->
case words s of
(costCentre:module_:no:entries:indTime:indAlloc:inhTime:inhAlloc:other) ->
parse' costCentre module_ no entries indTime indAlloc inhTime inhAlloc other
_ -> Left $ "Malformed .prof file line:\n" ++ s
IncludesSources ->
case words s of
(costCentre:module_:rest) | (no:entries:indTime:indAlloc:inhTime:inhAlloc:other) <- dropSRC rest ->
parse' costCentre module_ no entries indTime indAlloc inhTime inhAlloc other
_ -> Left $ "Malformed .prof file line:\n" ++ s
where
XXX : The SRC field can contain arbitrary characters ( from the
-- subdirectory name)!
--
As a heuristic , assume SRC spans until the last word which :
--
-- * Ends with '>'
( for special values emitted by GHC like " < no location info > " )
--
-- or
--
-- * Contains a colon eventually followed by another colon or a minus
( to identify the source span , e.g. " : 69:55 - 64 " or " :( 36,1)-(38,30 ) " ,
or maybe for a single character " : 30:3 " )
--
If there is no such word , assume SRC is just one word .
--
-- This heuristic will break if:
--
* In the future , columns to the right of SRC can match the above
-- condition (currently, they're all numeric)
--
-- or
--
* GHC does n't add a source span formatted as assumed above , and the
SRC contains spaces
--
-- The implementation is not very efficient, but I suppose this is not
-- performance-critical.
dropSRC (_:rest) = reverse . takeWhile (not . isPossibleEndOfSRC) . reverse $ rest
dropSRC [] = []
isPossibleEndOfSRC w = last w == '>'
|| case break (==':') w of
(_, _:rest) -> any (`elem` ":-") rest
_ -> False
parse' costCentre module_ no entries indTime indAlloc inhTime inhAlloc other = do
pNo <- readEither' no
pEntries <- readEither' entries
pTime <- Time <$> readEither' indTime <*> readEither' inhTime
pAlloc <- Time <$> readEither' indAlloc <*> readEither' inhAlloc
(pTicks, pBytes) <-
case other of
(ticks:bytes:_) -> (,) <$> readEither' ticks <*> readEither' bytes
_ -> pure (0, 0)
return $ Line costCentre module_ pNo pEntries pTime pAlloc pTicks pBytes
readEither' str = left (("Could not parse value "++show str++": ")++)
(readEither str)
type LineNumber = Int
processLines :: ProfFormat -> [String] -> LineNumber -> Either String [Line]
processLines format lines0 lineNumber0 = do
((ss,_), lines') <- go 0 lines0 lineNumber0
unless (null ss) $
error "processLines: the impossible happened, not all strings were consumed."
return lines'
where
go :: Int -> [String] -> LineNumber -> Either String (([String], LineNumber), [Line])
go _depth [] lineNumber = do
return (([], lineNumber), [])
go depth0 (line : lines') lineNumber = do
let (spaces, rest) = break (not . isSpace) line
let depth = length spaces
if depth < depth0
then return ((line : lines', lineNumber), [])
else do
parsedLine <- left (("Parse error in line "++show lineNumber++": ")++) $
parseLine format rest
((lines'', lineNumber''), children) <- go (depth + 1) lines' (lineNumber + 1)
second (parsedLine children :) <$> go depth lines'' lineNumber''
firstLineNoSources :: [String]
firstLineNoSources = ["COST", "CENTRE", "MODULE", "no.", "entries", "%time", "%alloc", "%time", "%alloc"]
Since GHC 8.0.2 the cost centres include the src location
firstLineIncludesSources :: [String]
firstLineIncludesSources = ["COST", "CENTRE", "MODULE", "SRC", "no.", "entries", "%time", "%alloc", "%time", "%alloc"]
findStart :: [String] -> LineNumber -> Either String (ProfFormat, [String], [String], LineNumber)
findStart [] _ = Left "Malformed .prof file: couldn't find start line"
findStart (line : _empty : lines') lineNumber | (firstLineNoSources `isPrefixOf` words line) = return (NoSources, words line, lines', lineNumber + 2)
| (firstLineIncludesSources `isPrefixOf` words line) = return (IncludesSources, words line, lines', lineNumber + 2)
findStart (_line : lines') lineNumber = findStart lines' (lineNumber + 1)
parse :: String -> Either String ([String], [Line])
parse s = do
(format, names, ss, lineNumber) <- findStart (lines s) 1
return . (names,) =<< processLines format ss lineNumber
| null | https://raw.githubusercontent.com/fpco/ghc-prof-flamegraph/8edd3b4806adeb25a4d55bed51c3afcc8e7a8e14/ProfFile.hs | haskell | | Returns a function accepting the children and returning a fully
formed 'Line'.
subdirectory name)!
* Ends with '>'
or
* Contains a colon eventually followed by another colon or a minus
This heuristic will break if:
condition (currently, they're all numeric)
or
The implementation is not very efficient, but I suppose this is not
performance-critical. | # LANGUAGE TupleSections #
| Parser for .prof files generated by GHC .
module ProfFile
( Time(..)
, Line(..)
, lIndividualTime
, lInheritedTime
, lIndividualAlloc
, lInheritedAlloc
, parse
, processLines
, findStart
) where
import Control.Arrow (second, left)
import Data.Char (isSpace)
import Data.List (isPrefixOf)
import Text.Read (readEither)
import Control.Monad (unless)
import Control.Applicative
related warnings in GHC>=7.10
data Time = Time
{ tIndividual :: Double
, tInherited :: Double
} deriving (Show, Eq)
data Line = Line
{ lCostCentre :: String
, lModule :: String
, lNumber :: Int
, lEntries :: Int
, lTime :: Time
, lAlloc :: Time
, lTicks :: Int
, lBytes :: Int
, lChildren :: [Line]
} deriving (Show, Eq)
lIndividualTime :: Line -> Double
lIndividualTime = tIndividual . lTime
lInheritedTime :: Line -> Double
lInheritedTime = tInherited . lTime
lIndividualAlloc :: Line -> Double
lIndividualAlloc = tIndividual . lAlloc
lInheritedAlloc :: Line -> Double
lInheritedAlloc = tInherited . lAlloc
data ProfFormat = NoSources | IncludesSources
parseLine :: ProfFormat -> String -> Either String ([Line] -> Line)
parseLine format s =
case format of
NoSources ->
case words s of
(costCentre:module_:no:entries:indTime:indAlloc:inhTime:inhAlloc:other) ->
parse' costCentre module_ no entries indTime indAlloc inhTime inhAlloc other
_ -> Left $ "Malformed .prof file line:\n" ++ s
IncludesSources ->
case words s of
(costCentre:module_:rest) | (no:entries:indTime:indAlloc:inhTime:inhAlloc:other) <- dropSRC rest ->
parse' costCentre module_ no entries indTime indAlloc inhTime inhAlloc other
_ -> Left $ "Malformed .prof file line:\n" ++ s
where
XXX : The SRC field can contain arbitrary characters ( from the
As a heuristic , assume SRC spans until the last word which :
( for special values emitted by GHC like " < no location info > " )
( to identify the source span , e.g. " : 69:55 - 64 " or " :( 36,1)-(38,30 ) " ,
or maybe for a single character " : 30:3 " )
If there is no such word , assume SRC is just one word .
* In the future , columns to the right of SRC can match the above
* GHC does n't add a source span formatted as assumed above , and the
SRC contains spaces
dropSRC (_:rest) = reverse . takeWhile (not . isPossibleEndOfSRC) . reverse $ rest
dropSRC [] = []
isPossibleEndOfSRC w = last w == '>'
|| case break (==':') w of
(_, _:rest) -> any (`elem` ":-") rest
_ -> False
parse' costCentre module_ no entries indTime indAlloc inhTime inhAlloc other = do
pNo <- readEither' no
pEntries <- readEither' entries
pTime <- Time <$> readEither' indTime <*> readEither' inhTime
pAlloc <- Time <$> readEither' indAlloc <*> readEither' inhAlloc
(pTicks, pBytes) <-
case other of
(ticks:bytes:_) -> (,) <$> readEither' ticks <*> readEither' bytes
_ -> pure (0, 0)
return $ Line costCentre module_ pNo pEntries pTime pAlloc pTicks pBytes
readEither' str = left (("Could not parse value "++show str++": ")++)
(readEither str)
type LineNumber = Int
processLines :: ProfFormat -> [String] -> LineNumber -> Either String [Line]
processLines format lines0 lineNumber0 = do
((ss,_), lines') <- go 0 lines0 lineNumber0
unless (null ss) $
error "processLines: the impossible happened, not all strings were consumed."
return lines'
where
go :: Int -> [String] -> LineNumber -> Either String (([String], LineNumber), [Line])
go _depth [] lineNumber = do
return (([], lineNumber), [])
go depth0 (line : lines') lineNumber = do
let (spaces, rest) = break (not . isSpace) line
let depth = length spaces
if depth < depth0
then return ((line : lines', lineNumber), [])
else do
parsedLine <- left (("Parse error in line "++show lineNumber++": ")++) $
parseLine format rest
((lines'', lineNumber''), children) <- go (depth + 1) lines' (lineNumber + 1)
second (parsedLine children :) <$> go depth lines'' lineNumber''
firstLineNoSources :: [String]
firstLineNoSources = ["COST", "CENTRE", "MODULE", "no.", "entries", "%time", "%alloc", "%time", "%alloc"]
Since GHC 8.0.2 the cost centres include the src location
firstLineIncludesSources :: [String]
firstLineIncludesSources = ["COST", "CENTRE", "MODULE", "SRC", "no.", "entries", "%time", "%alloc", "%time", "%alloc"]
findStart :: [String] -> LineNumber -> Either String (ProfFormat, [String], [String], LineNumber)
findStart [] _ = Left "Malformed .prof file: couldn't find start line"
findStart (line : _empty : lines') lineNumber | (firstLineNoSources `isPrefixOf` words line) = return (NoSources, words line, lines', lineNumber + 2)
| (firstLineIncludesSources `isPrefixOf` words line) = return (IncludesSources, words line, lines', lineNumber + 2)
findStart (_line : lines') lineNumber = findStart lines' (lineNumber + 1)
parse :: String -> Either String ([String], [Line])
parse s = do
(format, names, ss, lineNumber) <- findStart (lines s) 1
return . (names,) =<< processLines format ss lineNumber
|
598fd0f5a3479729908fd9889e6c53c52adc239eb955c2e8a01984d51ab8a12f | heitor-lassarote/iolp | AST.hs | module Language.CSS.AST where
import Universum
import Data.Aeson
type Attribute = (Text, Text)
type ClassName = Text
data AST
= CSS ![Class]
deriving (Eq, Generic, Show, FromJSON, ToJSON)
data Class
= Class !ClassName ![Attribute]
deriving (Eq, Show)
instance FromJSON Class where
parseJSON = withObject "Language.LowCode.CSS.AST.AST" \o ->
Class <$> o .: "className"
<*> o .: "attributes"
instance ToJSON Class where
toJSON (Class name attributes) = object
[ "className" .= String name
, "attributes" .= toJSON attributes
]
| null | https://raw.githubusercontent.com/heitor-lassarote/iolp/6284ff7127969b95a841a5d7b2af07aa77a38b2d/code-generator/src/Language/CSS/AST.hs | haskell | module Language.CSS.AST where
import Universum
import Data.Aeson
type Attribute = (Text, Text)
type ClassName = Text
data AST
= CSS ![Class]
deriving (Eq, Generic, Show, FromJSON, ToJSON)
data Class
= Class !ClassName ![Attribute]
deriving (Eq, Show)
instance FromJSON Class where
parseJSON = withObject "Language.LowCode.CSS.AST.AST" \o ->
Class <$> o .: "className"
<*> o .: "attributes"
instance ToJSON Class where
toJSON (Class name attributes) = object
[ "className" .= String name
, "attributes" .= toJSON attributes
]
| |
de901111283181ab6efe024e18f52a1aefb91f3934989b70bf2db6150c743ad7 | ekmett/ekmett.github.com | Char.hs | # LANGUAGE CPP #
# OPTIONS_GHC -fno - warn - incomplete - patterns #
{-# OPTIONS_HADDOCK prune #-}
-- |
Module : Data . Buffer . Rope .
Copyright : ( c ) 2006
( c ) 2006
( c ) 2010
-- License : BSD-style
--
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
-- Some operations, such as concat, append, reverse and cons, have
-- better complexity than their "Data.Buffer" equivalents, due to
-- optimisations resulting from the fingertree spine.
--
-- This module is intended to be imported @qualified@, to avoid name
clashes with " Prelude " functions . eg .
--
> import qualified Data . Buffer . Rope . as R
--
Original GHC implementation by O\'Sullivan .
Rewritten to use ' Data . Array . . UArray ' by .
-- Rewritten to support slices and use 'Foreign.ForeignPtr.ForeignPtr'
by .
Polished and extended by .
Lazy variant by and .
A number of FingerTree algorithms are by and
Converted to use a fingertree by
module Data.Buffer.Rope.Char (
-- * The @Buffer@ type
instances : , Ord , Show , Read , Data , Typeable
* Introducing and eliminating ' 's
empty, -- :: Rope
: :
pack, -- :: String -> Rope
unpack, -- :: Rope -> String
fromChunks, -- :: [Buffer] -> Rope
toChunks, -- :: Rope -> [Buffer]
-- * Basic interface
: :
snoc, -- :: Rope -> Char -> Rope
append, -- :: Rope -> Rope -> Rope
: : Rope - >
: : Rope - > Maybe ( , Rope )
: : Rope - >
tail, -- :: Rope -> Rope
init, -- :: Rope -> Rope
null, -- :: Rope -> Bool
length, -- :: Rope -> Int
-- * Transforming 'Rope's
: : ( ) - > Rope - > Rope
reverse, -- :: Rope -> Rope
: :
intercalate, -- :: Rope -> [Rope] -> Rope
transpose, -- :: [Rope] -> [Rope]
-- * Reducing 'Rope's (folds)
foldl, -- :: (a -> Char -> a) -> a -> Rope -> a
foldl', -- :: (a -> Char -> a) -> a -> Rope -> a
: : ( Char - > ) - > Rope - >
: : ( Char - > ) - > Rope - >
foldr, -- :: (Char -> a -> a) -> a -> Rope -> a
: : ( Char - > ) - > Rope - >
-- ** Special folds
concat, -- :: [Rope] -> Rope
concatMap, -- :: (Char -> Rope) -> Rope -> Rope
: : ( Bool ) - > Rope - > Bool
: : ( Bool ) - > Rope - > Bool
: : Rope - >
: : Rope - >
-- * Building 'Rope's
-- ** Scans
: : ( Char - > ) - > Char - > Rope - > Rope
: : ( Char - > ) - > Rope - > Rope
: : ( Char - > ) - > Char - > Rope - > Rope
: : ( Char - > ) - > Rope - > Rope
-- ** Accumulating maps
: : ( acc - > Char - > ( acc , ) ) - > acc - > Rope - > ( acc , Rope )
: : ( acc - > Char - > ( acc , ) ) - > acc - > Rope - > ( acc , Rope )
-- ** Replicated 'Rope's
replicate, -- :: Int -> Char -> Rope
replicateBuffer, -- :: Int -> Buffer -> Rope
-- ** Unfolding Buffers
: : ( a - > Maybe ( , a ) ) - > a - > Rope
* Substrings
-- ** Breaking strings
take, -- :: Int -> Rope -> Rope
drop, -- :: Int -> Rope -> Rope
splitAt, -- :: Int -> Rope -> (Rope, Rope)
: : ( Bool ) - > Rope - > Rope
: : ( Bool ) - > Rope - > Rope
: : ( Bool ) - > Rope - > ( Rope , Rope )
: : ( Bool ) - > Rope - > ( Rope , Rope )
group, -- :: Rope -> [Rope]
: : ( Char - > Bool ) - > Rope - > [ Rope ]
inits, -- :: Rope -> [Rope]
tails, -- :: Rope -> [Rope]
-- ** Breaking into many substrings
: : [ Rope ]
: : ( Bool ) - > Rope - > [ Rope ]
-- * Predicates
isPrefixOf, -- :: Rope -> Rope -> Bool
isSuffixOf, -- :: Rope -> Rope -> Bool
-- * Searching Ropes
-- ** Searching by equality
: : Bool
: : Bool
-- ** Searching with a predicate
: : ( Bool ) - > Rope - > Maybe
: : ( Bool ) - > Rope - > Rope
: : ( Bool ) - > Rope - > ( Rope , Rope )
-- * Indexing Ropes
: : Rope - > Int - >
: : - > Maybe Int
: : [ Int ]
: : ( Bool ) - > Rope - > Maybe Int
: : ( Bool ) - > Rope - > [ Int ]
: :
-- * Zipping and unzipping Ropes
: : Rope - > Rope - > [ ( , ) ]
: : ( Char - > c ) - > Rope - > Rope - > [ c ]
: : [ ( , ) ] - > ( Rope , Rope )
-- * Ordered Ropes
-- sort, -- :: Rope -> Rope
-- * Low level conversions
-- ** Copying Ropes
copy, -- :: Rope -> Rope
-- defrag, -- :: Rope -> Rope
* I\/O with ' 's
-- ** Standard input and output
: : IO Rope
putStr, -- :: Rope -> IO ()
putStrLn, -- :: Rope -> IO ()
interact, -- :: (Rope -> Rope) -> IO ()
-- ** Files
readFile, -- :: FilePath -> IO Rope
writeFile, -- :: FilePath -> Rope -> IO ()
appendFile, -- :: FilePath -> Rope -> IO ()
-- ** I\/O with Handles
hGetContents, -- :: Handle -> IO Rope
hGet, -- :: Handle -> Int -> IO Rope
hGetNonBlocking, -- :: Handle -> Int -> IO Rope
hPut, -- :: Handle -> Rope -> IO ()
hPutStr, -- :: Handle -> Rope -> IO ()
) where
import Prelude hiding
(reverse,head,tail,last,init,null,length,map,lines,foldl,foldr,unlines
,concat,any,take,drop,splitAt,takeWhile,dropWhile,span,break,elem,filter,maximum
,minimum,all,concatMap,foldl1,foldr1,scanl, scanl1, scanr, scanr1
,repeat, cycle, interact, iterate,readFile,writeFile,appendFile,replicate
,getContents,getLine,putStr,putStrLn ,zip,zipWith,unzip,notElem)
import qualified Data.List as L -- L for list/lazy
import qualified Data.Buffer.Word8 as P (Buffer) -- type name only
import qualified Data.Buffer.Word8 as S -- S for strict (hmm...)
import qualified Data.Buffer.Char as C
import qualified Data.Buffer.Internal as S
import qualified Data.Buffer.Unsafe as S
import Data.Buffer.Lazy.Internal
import Data.Monoid (Monoid(..))
import Data.Word (Word8)
import Data.Int (Int64)
import System.IO (Handle,stdin,stdout,openBinaryFile,IOMode(..)
,hClose,hWaitForInput,hIsEOF)
import System.IO.Error (mkIOError, illegalOperationErrorType)
import System.IO.Unsafe
#ifndef __NHC__
import Control.Exception (bracket)
#else
import IO (bracket)
#endif
import Foreign.ForeignPtr (withForeignPtr)
import Foreign.Ptr
import Foreign.Storable
-- -----------------------------------------------------------------------------
--
-- Useful macros, until we have bang patterns
--
#define STRICT1(f) f a | a `seq` False = undefined
#define STRICT2(f) f a b | a `seq` b `seq` False = undefined
#define STRICT3(f) f a b c | a `seq` b `seq` c `seq` False = undefined
#define STRICT4(f) f a b c d | a `seq` b `seq` c `seq` d `seq` False = undefined
#define STRICT5(f) f a b c d e | a `seq` b `seq` c `seq` d `seq` e `seq` False = undefined
-- -----------------------------------------------------------------------------
-- Equality and ordering delegate to the chunk-boundary-aware workers
-- 'eq' and 'cmp' below; the Monoid instance reuses this module's own
-- empty/append/concat.
instance Eq Buffer
    where (==) = eq
instance Ord Buffer
    where compare = cmp
instance Monoid Buffer where
    mempty  = empty
    mappend = append
    mconcat = concat
-- | Chunk-structure-independent equality: two Buffers are equal when
-- their byte streams agree, regardless of how they are split into
-- chunks.  When the leading chunks differ in length, the longer one is
-- split at the shorter one's boundary and comparison continues.
eq :: Buffer -> Buffer -> Bool
eq Empty Empty = True
eq Empty _     = False
eq _     Empty = False
eq (Chunk a as) (Chunk b bs) =
  case compare (S.length a) (S.length b) of
    LT -> a == (S.take (S.length a) b) && eq as (Chunk (S.drop (S.length a) b) bs)
    EQ -> a == b && eq as bs
    GT -> (S.take (S.length b) a) == b && eq (Chunk (S.drop (S.length b) a) as) bs
-- | Chunk-structure-independent lexicographic comparison; mirrors 'eq',
-- realigning at chunk boundaries and short-circuiting on the first
-- chunk comparison that is not EQ.
cmp :: Buffer -> Buffer -> Ordering
cmp Empty Empty = EQ
cmp Empty _     = LT
cmp _     Empty = GT
cmp (Chunk a as) (Chunk b bs) =
  case compare (S.length a) (S.length b) of
    LT -> case compare a (S.take (S.length a) b) of
            EQ     -> cmp as (Chunk (S.drop (S.length a) b) bs)
            result -> result
    EQ -> case compare a b of
            EQ     -> cmp as bs
            result -> result
    GT -> case compare (S.take (S.length b) a) b of
            EQ     -> cmp (Chunk (S.drop (S.length b) a) as) bs
            result -> result
-- -----------------------------------------------------------------------------
Introducing and eliminating ' 's
-- | /O(1)/ The empty 'Buffer'
empty :: Buffer
empty = Empty
{-# INLINE empty #-}
-- | /O(1)/ Convert a 'Word8' into a 'Buffer'
singleton :: Word8 -> Buffer
singleton w = Chunk (S.singleton w) Empty
# INLINE singleton #
-- | /O(n)/ Convert a '[Word8]' into a 'Buffer'.
pack :: [Word8] -> Buffer
pack ws = L.foldr (Chunk . S.pack) Empty (chunks defaultChunkSize ws)
where
chunks :: Int -> [a] -> [[a]]
chunks _ [] = []
chunks size xs = case L.splitAt size xs of
(xs', xs'') -> xs' : chunks size xs''
-- | /O(n)/ Converts a 'Buffer' to a '[Word8]'.
unpack :: Buffer -> [Word8]
unpack cs = L.concatMap S.unpack (toChunks cs)
--TODO: we can do better here by integrating the concat with the unpack
-- | /O(c)/ Convert a list of strict 'Buffer' into a lazy 'Buffer'
fromChunks :: [P.Buffer] -> Buffer
fromChunks cs = L.foldr chunk Empty cs
-- | /O(n)/ Convert a lazy 'Buffer' into a list of strict 'Buffer'
toChunks :: Buffer -> [P.Buffer]
toChunks cs = foldrChunks (:) [] cs
------------------------------------------------------------------------
{-
-- | /O(n)/ Convert a '[a]' into a 'Buffer' using some
-- conversion function
packWith :: (a -> Word8) -> [a] -> Buffer
packWith k str = LPS $ L.map (P.packWith k) (chunk defaultChunkSize str)
{-# INLINE packWith #-}
# SPECIALIZE packWith : : ( ) - > [ Buffer #
-- | /O(n)/ Converts a 'Buffer' to a '[a]', using a conversion function.
unpackWith :: (Word8 -> a) -> Buffer -> [a]
unpackWith k (LPS ss) = L.concatMap (S.unpackWith k) ss
# INLINE unpackWith #
# SPECIALIZE unpackWith : : ( Word8 - > ) - > Buffer - > [ ] #
-}
-- ---------------------------------------------------------------------
-- Basic interface
-- | /O(1)/ Test whether a Buffer is empty.
null :: Buffer -> Bool
null Empty = True
null _     = False
{-# INLINE null #-}
-- | /O(n\/c)/ 'length' returns the length of a Buffer as an 'Int64';
-- it walks the chunk spine summing the strict chunks' lengths.
length :: Buffer -> Int64
length cs = foldlChunks (\n c -> n + fromIntegral (S.length c)) 0 cs
{-# INLINE length #-}
-- | /O(1)/ 'cons' is analogous to '(:)' for lists.
--
cons :: Word8 -> Buffer -> Buffer
cons c cs = Chunk (S.singleton c) cs
# INLINE cons #
| /O(1)/ Unlike ' cons ' , ' '' is
-- strict in the Buffer that we are consing onto. More precisely, it forces
the head and the first chunk . It does this because , for space efficiency , it
may coalesce the new byte onto the first \'chunk\ ' rather than starting a
-- new \'chunk\'.
--
-- So that means you can't use a lazy recursive contruction like this:
--
-- > let xs = cons\' c xs in xs
--
-- You can however use 'cons', as well as 'repeat' and 'cycle', to build
-- infinite lazy Buffers.
--
cons' :: Word8 -> Buffer -> Buffer
cons' w (Chunk c cs) | S.length c < 16 = Chunk (S.cons w c) cs
cons' w cs = Chunk (S.singleton w) cs
{-# INLINE cons' #-}
-- | /O(n\/c)/ Append a byte to the end of a 'Buffer'
snoc :: Buffer -> Word8 -> Buffer
snoc cs w = foldrChunks Chunk (singleton w) cs
# INLINE snoc #
| /O(1)/ Extract the first element of a Buffer , which must be non - empty .
head :: Buffer -> Word8
head Empty = errorEmptyList "head"
head (Chunk c _) = S.unsafeHead c
# INLINE head #
-- | /O(1)/ Extract the head and tail of a Buffer, returning Nothing
-- if it is empty.
uncons :: Buffer -> Maybe (Word8, Buffer)
uncons Empty = Nothing
uncons (Chunk c cs)
= Just (S.unsafeHead c,
if S.length c == 1 then cs else Chunk (S.unsafeTail c) cs)
# INLINE uncons #
-- | /O(1)/ Extract the elements after the head of a Buffer, which must be
-- non-empty.
tail :: Buffer -> Buffer
tail Empty = errorEmptyList "tail"
tail (Chunk c cs)
| S.length c == 1 = cs
| otherwise = Chunk (S.unsafeTail c) cs
# INLINE tail #
-- | /O(n\/c)/ Extract the last element of a Buffer, which must be finite
-- and non-empty.
last :: Buffer -> Word8
last Empty = errorEmptyList "last"
last (Chunk c0 cs0) = go c0 cs0
where go c Empty = S.last c
go _ (Chunk c cs) = go c cs
-- XXX Don't inline this. Something breaks with 6.8.2 (haven't investigated yet)
-- | /O(n\/c)/ Return all the elements of a 'Buffer' except the last one.
init :: Buffer -> Buffer
init Empty = errorEmptyList "init"
init (Chunk c0 cs0) = go c0 cs0
where go c Empty | S.length c == 1 = Empty
| otherwise = Chunk (S.init c) Empty
go c (Chunk c' cs) = Chunk c (go c' cs)
| /O(n\/c)/ Append two Buffers
append :: Buffer -> Buffer -> Buffer
append xs ys = foldrChunks Chunk ys xs
# INLINE append #
-- ---------------------------------------------------------------------
-- Transformations
| /O(n)/ ' map ' is the Buffer obtained by applying @f@ to each
element of @xs@.
map :: (Word8 -> Word8) -> Buffer -> Buffer
map f s = go s
where
go Empty = Empty
go (Chunk x xs) = Chunk y ys
where
y = S.map f x
ys = go xs
# INLINE map #
-- | /O(n)/ 'reverse' @xs@ returns the elements of @xs@ in reverse order.
reverse :: Buffer -> Buffer
reverse cs0 = rev Empty cs0
where rev a Empty = a
rev a (Chunk c cs) = rev (Chunk (S.reverse c) a) cs
# INLINE reverse #
-- | The 'intersperse' function takes a 'Word8' and a 'Buffer' and
' that byte between the elements of the ' Buffer ' .
-- It is analogous to the intersperse function on Lists.
intersperse :: Word8 -> Buffer -> Buffer
intersperse _ Empty = Empty
-- The first chunk is interspersed normally; every later chunk gets the
-- separator *prepended* as well (the poke below), so the byte also
-- appears across chunk boundaries.
intersperse w (Chunk c cs) = Chunk (S.intersperse w c)
                                   (foldrChunks (Chunk . intersperse') Empty cs)
  where intersperse' :: P.Buffer -> P.Buffer
        -- NOTE(review): S.PS here carries (foreign ptr, offset, length, e);
        -- 'e' appears to be an extra-width count scaled by S.extra w —
        -- confirm against Data.Buffer.Internal before relying on it.
        intersperse' (S.PS fp o l e) =
          S.unsafeCreate' (2*l) $ \p' -> withForeignPtr fp $ \p -> do
            poke p' w  -- leading separator for this (non-first) chunk
            S.c_intersperse (p' `plusPtr` 1) (p `plusPtr` o) (fromIntegral l) w
            return $! e + l * S.extra w
-- | The 'transpose' function transposes the rows and columns of its
-- 'Buffer' argument.
transpose :: [Buffer] -> [Buffer]
transpose css = L.map (\ss -> Chunk (S.pack ss) Empty)
(L.transpose (L.map unpack css))
--TODO: make this fast
-- ---------------------------------------------------------------------
-- Reducing 'Buffer's
-- | 'foldl', applied to a binary operator, a starting value (typically
-- the left-identity of the operator), and a Buffer, reduces the
-- Buffer using the binary operator, from left to right.
foldl :: (a -> Word8 -> a) -> a -> Buffer -> a
foldl f z = go z
where go a Empty = a
go a (Chunk c cs) = go (S.foldl f a c) cs
{-# INLINE foldl #-}
-- | 'foldl\'' is like 'foldl', but strict in the accumulator.
foldl' :: (a -> Word8 -> a) -> a -> Buffer -> a
foldl' f z = go z
        -- Pre-BangPatterns idiom: force the accumulator between chunks.
  where go a _ | a `seq` False = undefined
        go a Empty        = a
        -- Fix: fold each strict chunk with the strict S.foldl'.  The
        -- previous code used the lazy S.foldl here, so thunks still
        -- piled up *within* a chunk despite the seq above.
        go a (Chunk c cs) = go (S.foldl' f a c) cs
{-# INLINE foldl' #-}
-- | 'foldr', applied to a binary operator, a starting value
-- (typically the right-identity of the operator), and a Buffer,
-- reduces the Buffer using the binary operator, from right to left.
foldr :: (Word8 -> a -> a) -> a -> Buffer -> a
foldr k z cs = foldrChunks (flip (S.foldr k)) z cs
# INLINE foldr #
| ' foldl1 ' is a variant of ' foldl ' that has no starting value
-- argument, and thus must be applied to non-empty 'Buffers'.
-- This function is subject to array fusion.
foldl1 :: (Word8 -> Word8 -> Word8) -> Buffer -> Word8
foldl1 _ Empty = errorEmptyList "foldl1"
foldl1 f (Chunk c cs) = foldl f (S.unsafeHead c) (Chunk (S.unsafeTail c) cs)
| ' foldl1\ '' is like ' foldl1 ' , but strict in the accumulator .
foldl1' :: (Word8 -> Word8 -> Word8) -> Buffer -> Word8
foldl1' _ Empty = errorEmptyList "foldl1'"
foldl1' f (Chunk c cs) = foldl' f (S.unsafeHead c) (Chunk (S.unsafeTail c) cs)
| ' ' is a variant of ' foldr ' that has no starting value argument ,
-- and thus must be applied to non-empty 'Buffer's
foldr1 :: (Word8 -> Word8 -> Word8) -> Buffer -> Word8
foldr1 _ Empty = errorEmptyList "foldr1"
foldr1 f (Chunk c0 cs0) = go c0 cs0
where go c Empty = S.foldr1 f c
go c (Chunk c' cs) = S.foldr f (go c' cs) c
-- ---------------------------------------------------------------------
-- Special folds
| /O(n)/ a list of Buffers .
concat :: [Buffer] -> Buffer
concat css0 = to css0
where
go Empty css = to css
go (Chunk c cs) css = Chunk c (go cs css)
to [] = Empty
to (cs:css) = go cs css
-- | Map a function over a 'Buffer' and concatenate the results
concatMap :: (Word8 -> Buffer) -> Buffer -> Buffer
concatMap _ Empty          = Empty
concatMap f (Chunk c0 cs0) = to c0 cs0
  where
    -- Emit the Buffer produced for one byte, then resume with the rest
    -- of the current strict chunk (c') and the later lazy chunks (cs').
    go :: Buffer -> P.Buffer -> Buffer -> Buffer
    go Empty        c' cs' = to c' cs'
    go (Chunk c cs) c' cs' = Chunk c (go cs c' cs')
    -- Advance over the input: skip exhausted chunks, otherwise apply f
    -- to the head byte and splice its result in front of what remains.
    to :: P.Buffer -> Buffer -> Buffer
    to c cs | S.null c  = case cs of
                            Empty          -> Empty
                            (Chunk c' cs') -> to c' cs'
            | otherwise = go (f (S.unsafeHead c)) (S.unsafeTail c) cs
| /O(n)/ Applied to a predicate and a Buffer , ' any ' determines if
-- any element of the 'Buffer' satisfies the predicate.
any :: (Word8 -> Bool) -> Buffer -> Bool
any f cs = foldrChunks (\c rest -> S.any f c || rest) False cs
# INLINE any #
-- todo fuse
| /O(n)/ Applied to a predicate and a ' Buffer ' , ' all ' determines
-- if all elements of the 'Buffer' satisfy the predicate.
all :: (Word8 -> Bool) -> Buffer -> Bool
all f cs = foldrChunks (\c rest -> S.all f c && rest) True cs
# INLINE all #
-- todo fuse
-- | /O(n)/ 'maximum' returns the maximum value from a 'Buffer'
maximum :: Buffer -> Word8
maximum Empty = errorEmptyList "maximum"
maximum (Chunk c cs) = foldlChunks (\n c' -> n `max` S.maximum c')
(S.maximum c) cs
# INLINE maximum #
-- | /O(n)/ 'minimum' returns the minimum value from a 'Buffer'
minimum :: Buffer -> Word8
minimum Empty = errorEmptyList "minimum"
minimum (Chunk c cs) = foldlChunks (\n c' -> n `min` S.minimum c')
(S.minimum c) cs
# INLINE minimum #
-- | The 'mapAccumL' function behaves like a combination of 'map' and
-- 'foldl'; it applies a function to each element of a Buffer,
-- passing an accumulating parameter from left to right, and returning a
final value of this accumulator together with the new Buffer .
mapAccumL :: (acc -> Word8 -> (acc, Word8)) -> acc -> Buffer -> (acc, Buffer)
mapAccumL f s0 cs0 = go s0 cs0
where
go s Empty = (s, Empty)
go s (Chunk c cs) = (s'', Chunk c' cs')
where (s', c') = S.mapAccumL f s c
(s'', cs') = go s' cs
-- | The 'mapAccumR' function behaves like a combination of 'map' and
-- 'foldr'; it applies a function to each element of a Buffer,
-- passing an accumulating parameter from right to left, and returning a
final value of this accumulator together with the new Buffer .
mapAccumR :: (acc -> Word8 -> (acc, Word8)) -> acc -> Buffer -> (acc, Buffer)
mapAccumR f s0 cs0 = go s0 cs0
  where
    go s Empty        = (s, Empty)
    -- Right-to-left: conceptually the recursive call on the tail runs
    -- first, producing s'; the current chunk is then folded with s'.
    -- The two where-bindings form a lazy knot — this relies on laziness.
    go s (Chunk c cs) = (s'', Chunk c' cs')
      where (s'', c') = S.mapAccumR f s' c
            (s', cs') = go s cs
-- ---------------------------------------------------------------------
-- Building Buffers
| ' ' is similar to ' foldl ' , but returns a list of successive
-- reduced values from the left. This function will fuse.
--
> f z [ x1 , x2 , ... ] = = [ z , z ` f ` x1 , ( z ` f ` x1 ) ` f ` x2 , ... ]
--
-- Note that
--
> last ( f z xs ) = = foldl f z xs .
-- Rewritten via mapAccumL: the previous foldl/snoc implementation forced
-- the entire input before producing any output and rebuilt the whole
-- chunk spine on every snoc (quadratic in the number of elements; it
-- also diverged on infinite Buffers).  This version streams lazily in
-- O(n) and produces the same bytes on every finite input: the result is
-- z followed by each successive accumulator value.
scanl :: (Word8 -> Word8 -> Word8) -> Word8 -> Buffer -> Buffer
scanl f z = cons z . snd . mapAccumL (\acc b -> let n = f acc b in (n, n)) z
{-# INLINE scanl #-}
-- ---------------------------------------------------------------------
-- Unfolds and replicates
| @'iterate ' f returns an infinite Buffer of repeated applications
of to @x@ :
--
-- > iterate f x == [x, f x, f (f x), ...]
--
iterate :: (Word8 -> Word8) -> Word8 -> Buffer
iterate f = unfoldr (\x -> case f x of x' -> x' `seq` Just (x', x'))
| @'repeat ' is an infinite Buffer , with @x@ the value of every
-- element.
--
repeat :: Word8 -> Buffer
repeat w = cs where cs = Chunk (S.replicate smallChunkSize w) cs
| /O(n)/ @'replicate ' n is a Buffer of length @n@ with
-- the value of every element.
--
replicate :: Int64 -> Word8 -> Buffer
replicate n w
    | n <= 0                          = Empty
    -- Small requests fit in a single strict chunk.
    | n < fromIntegral smallChunkSize = Chunk (S.replicate (fromIntegral n) w) Empty
    | r == 0                          = cs -- preserve invariant
    -- Put the odd-sized remainder first so every full-size link below
    -- shares the single chunk c.
    | otherwise                       = Chunk (S.unsafeTake (fromIntegral r) c) cs
 where
    c      = S.replicate smallChunkSize w  -- one full chunk, shared by all links
    cs     = nChunks q
    (q, r) = quotRem n (fromIntegral smallChunkSize)
    nChunks 0 = Empty
    nChunks m = Chunk c (nChunks (m-1))
| ' cycle ' ties a finite into a circular one , or equivalently ,
the infinite repetition of the original .
--
cycle :: Buffer -> Buffer
cycle Empty = errorEmptyList "cycle"
cycle cs = cs' where cs' = foldrChunks Chunk cs' cs
-- | /O(n)/ The 'unfoldr' function is analogous to the List \'unfoldr\'.
-- 'unfoldr' builds a Buffer from a seed value. The function takes
-- the element and returns 'Nothing' if it is done producing the
-- Buffer or returns 'Just' @(a,b)@, in which case, @a@ is a
prepending to the and @b@ is used as the next element in a
-- recursive call.
unfoldr :: (a -> Maybe (Word8, a)) -> a -> Buffer
unfoldr f s0 = unfoldChunk 32 s0
where unfoldChunk n s =
case S.unfoldrN n f s of
(c, Nothing)
| S.null c -> Empty
| otherwise -> Chunk c Empty
(c, Just s') -> Chunk c (unfoldChunk (n*2) s')
-- ---------------------------------------------------------------------
Substrings
| /O(n\/c)/ ' take ' @n@ , applied to a @xs@ , returns the prefix
of @xs@ of length @n@ , or @xs@ itself if @n > ' length ' xs@.
take :: Int64 -> Buffer -> Buffer
take i _ | i <= 0 = Empty
take i cs0 = take' i cs0
where take' 0 _ = Empty
take' _ Empty = Empty
take' n (Chunk c cs) =
if n < fromIntegral (S.length c)
then Chunk (S.take (fromIntegral n) c) Empty
else Chunk c (take' (n - fromIntegral (S.length c)) cs)
| /O(n\/c)/ ' drop ' @n returns the suffix of @xs@ after the first @n@
elements , or @[]@ if @n > ' length ' xs@.
drop :: Int64 -> Buffer -> Buffer
drop i p | i <= 0 = p
drop i cs0 = drop' i cs0
where drop' 0 cs = cs
drop' _ Empty = Empty
drop' n (Chunk c cs) =
if n < fromIntegral (S.length c)
then Chunk (S.drop (fromIntegral n) c) cs
else drop' (n - fromIntegral (S.length c)) cs
| /O(n\/c)/ ' splitAt ' @n is equivalent to @('take ' n xs , ' drop ' n xs)@.
splitAt :: Int64 -> Buffer -> (Buffer, Buffer)
splitAt i cs0 | i <= 0 = (Empty, cs0)
splitAt i cs0 = splitAt' i cs0
where splitAt' 0 cs = (Empty, cs)
splitAt' _ Empty = (Empty, Empty)
splitAt' n (Chunk c cs) =
if n < fromIntegral (S.length c)
then (Chunk (S.take (fromIntegral n) c) Empty
,Chunk (S.drop (fromIntegral n) c) cs)
else let (cs', cs'') = splitAt' (n - fromIntegral (S.length c)) cs
in (Chunk c cs', cs'')
-- | 'takeWhile', applied to a predicate @p@ and a Buffer @xs@,
-- returns the longest prefix (possibly empty) of @xs@ of elements that
satisfy @p@.
takeWhile :: (Word8 -> Bool) -> Buffer -> Buffer
takeWhile f cs0 = takeWhile' cs0
where takeWhile' Empty = Empty
takeWhile' (Chunk c cs) =
case findIndexOrEnd (not . f) c of
0 -> Empty
n | n < S.length c -> Chunk (S.take n c) Empty
| otherwise -> Chunk c (takeWhile' cs)
| ' dropWhile ' @p xs@ returns the suffix remaining after ' takeWhile ' @p xs@.
dropWhile :: (Word8 -> Bool) -> Buffer -> Buffer
dropWhile f cs0 = dropWhile' cs0
where dropWhile' Empty = Empty
dropWhile' (Chunk c cs) =
case findIndexOrEnd (not . f) c of
n | n < S.length c -> Chunk (S.drop n c) cs
| otherwise -> dropWhile' cs
| ' break ' @p@ is equivalent to @'span ' ( ' not ' . p)@.
break :: (Word8 -> Bool) -> Buffer -> (Buffer, Buffer)
break f cs0 = break' cs0
where break' Empty = (Empty, Empty)
break' (Chunk c cs) =
case findIndexOrEnd f c of
0 -> (Empty, Chunk c cs)
n | n < S.length c -> (Chunk (S.take n c) Empty
,Chunk (S.drop n c) cs)
| otherwise -> let (cs', cs'') = break' cs
in (Chunk c cs', cs'')
--
TODO
--
-- Add rules
--
-- | ' breakByte ' breaks its Buffer argument at the first occurence
-- of the specified byte . It is more efficient than ' break ' as it is
-- implemented with @memchr(3)@. I.e.
--
-- > break (= = ' c ' ) " abcd " = = breakByte ' c ' " abcd "
--
breakByte : : Word8 - > Buffer - > ( Buffer , Buffer )
breakByte c ( LPS ps ) = case ( breakByte ' ps ) of ( a , b ) - > ( LPS a , LPS b )
where breakByte ' [ ] = ( [ ] , [ ] )
breakByte ' ( x : xs ) =
case P.elemIndex c x of
Just 0 - > ( [ ] , x : xs )
Just n - > ( x : [ ] , P.drop n x : xs )
Nothing - > let ( xs ' , xs '' ) = breakByte ' xs
in ( x : xs ' , xs '' )
-- | ' spanByte ' breaks its Buffer argument at the first
-- occurence of a byte other than its argument . It is more efficient
-- than ' span (= =) '
--
-- > span (= = ' c ' ) " abcd " = = spanByte ' c ' " abcd "
--
spanByte : : Word8 - > Buffer - > ( Buffer , Buffer )
spanByte c ( LPS ps ) = case ( spanByte ' ps ) of ( a , b ) - > ( LPS a , LPS b )
where spanByte ' [ ] = ( [ ] , [ ] )
spanByte ' ( x : xs ) =
case P.spanByte c x of
( x ' , x '' ) | P.null x ' - > ( [ ] , x : xs )
| P.null x '' - > let ( xs ' , xs '' ) = spanByte ' xs
in ( x : xs ' , xs '' )
| otherwise - > ( x ' : [ ] , x '' : xs )
-- | 'breakByte' breaks its Buffer argument at the first occurence
-- of the specified byte. It is more efficient than 'break' as it is
-- implemented with @memchr(3)@. I.e.
--
-- > break (=='c') "abcd" == breakByte 'c' "abcd"
--
breakByte :: Word8 -> Buffer -> (Buffer, Buffer)
breakByte c (LPS ps) = case (breakByte' ps) of (a,b) -> (LPS a, LPS b)
where breakByte' [] = ([], [])
breakByte' (x:xs) =
case P.elemIndex c x of
Just 0 -> ([], x : xs)
Just n -> (P.take n x : [], P.drop n x : xs)
Nothing -> let (xs', xs'') = breakByte' xs
in (x : xs', xs'')
-- | 'spanByte' breaks its Buffer argument at the first
-- occurence of a byte other than its argument. It is more efficient
-- than 'span (==)'
--
-- > span (=='c') "abcd" == spanByte 'c' "abcd"
--
spanByte :: Word8 -> Buffer -> (Buffer, Buffer)
spanByte c (LPS ps) = case (spanByte' ps) of (a,b) -> (LPS a, LPS b)
where spanByte' [] = ([], [])
spanByte' (x:xs) =
case P.spanByte c x of
(x', x'') | P.null x' -> ([], x : xs)
| P.null x'' -> let (xs', xs'') = spanByte' xs
in (x : xs', xs'')
| otherwise -> (x' : [], x'' : xs)
-}
| ' span ' @p xs@ breaks the Buffer into two segments . It is
equivalent to @('takeWhile ' p xs , ' dropWhile ' p xs)@
span :: (Word8 -> Bool) -> Buffer -> (Buffer, Buffer)
span p = break (not . p)
-- | /O(n)/ Splits a 'Buffer' into components delimited by
-- separators, where the predicate returns True for a separator element.
The resulting components do not contain the separators . Two adjacent
-- separators result in an empty component in the output. eg.
--
-- > splitWith (=='a') "aabbaca" == ["","","bb","c",""]
-- > splitWith (=='a') [] == []
--
splitWith :: (Word8 -> Bool) -> Buffer -> [Buffer]
splitWith _ Empty          = []
splitWith p (Chunk c0 cs0) = comb [] (S.splitWith p c0) cs0
        -- comb acc ss cs: acc holds the reversed strict pieces of the
        -- field being assembled, ss the splits of the current chunk,
        -- cs the remaining lazy chunks.
  where comb :: [P.Buffer] -> [P.Buffer] -> Buffer -> [Buffer]
        -- Last piece of the last chunk terminates the final field.
        comb acc (s:[]) Empty        = revChunks (s:acc) : []
        -- Last piece of this chunk may continue into the next chunk.
        comb acc (s:[]) (Chunk c cs) = comb (s:acc) (S.splitWith p c) cs
        -- A delimiter fell inside this chunk: emit the completed field.
        comb acc (s:ss) cs           = revChunks (s:acc) : comb [] ss cs
{-# INLINE splitWith #-}
-- | /O(n)/ Break a 'Buffer' into pieces separated by the byte
-- argument, consuming the delimiter. I.e.
--
-- > split '\n' "a\nb\nd\ne" == ["a","b","d","e"]
> split ' a ' " aXaXaXa " = = [ " " , " X","X","X " , " " ]
-- > split 'x' "x" == ["",""]
--
-- and
--
-- > intercalate [c] . split c == id
> split = = splitWith . (=
--
-- As for all splitting functions in this library, this function does
-- not copy the substrings, it just constructs new 'Buffers' that
-- are slices of the original.
--
split :: Word8 -> Buffer -> [Buffer]
split _ Empty = []
split w (Chunk c0 cs0) = comb [] (S.split w c0) cs0
where comb :: [P.Buffer] -> [P.Buffer] -> Buffer -> [Buffer]
comb acc (s:[]) Empty = revChunks (s:acc) : []
comb acc (s:[]) (Chunk c cs) = comb (s:acc) (S.split w c) cs
comb acc (s:ss) cs = revChunks (s:acc) : comb [] ss cs
# INLINE split #
-- | Like ' splitWith ' , except that sequences of adjacent separators are
-- treated as a single separator . eg .
--
-- > tokens (= = ' a ' ) " aabbaca " = = [ " bb","c " ]
--
tokens : : ( Word8 - > Bool ) - > Buffer - > [ Buffer ]
tokens f = ( not.null ) . splitWith f
-- | Like 'splitWith', except that sequences of adjacent separators are
-- treated as a single separator. eg.
--
-- > tokens (=='a') "aabbaca" == ["bb","c"]
--
tokens :: (Word8 -> Bool) -> Buffer -> [Buffer]
tokens f = L.filter (not.null) . splitWith f
-}
-- | The 'group' function takes a Buffer and returns a list of
-- Buffers such that the concatenation of the result is equal to the
-- argument. Moreover, each sublist in the result contains only equal
-- elements. For example,
--
> group " Mississippi " = [ " M","i","ss","i","ss","i","pp","i " ]
--
-- It is a special case of 'groupBy', which allows the programmer to
-- supply their own equality test.
group :: Buffer -> [Buffer]
group Empty          = []
group (Chunk c0 cs0) = group' [] (S.group c0) cs0
  where
    -- group' acc ss cs: acc holds reversed strict pieces of the group
    -- being built, ss the groups of the current chunk, cs the rest.
    group' :: [P.Buffer] -> [P.Buffer] -> Buffer -> [Buffer]
    -- The byte changed across a chunk boundary: close the open group.
    group' acc@(s':_) ss@(s:_) cs
      | S.unsafeHead s'
     /= S.unsafeHead s             = revNonEmptyChunks acc : group' [] ss cs
    -- Last group of the last chunk: flush everything.
    group' acc (s:[]) Empty        = revNonEmptyChunks (s:acc) : []
    -- Last group of this chunk may continue into the next chunk.
    group' acc (s:[]) (Chunk c cs) = group' (s:acc) (S.group c) cs
    -- Interior group: complete as-is.
    group' acc (s:ss) cs           = revNonEmptyChunks (s:acc) : group' [] ss cs
{-
TODO: check if something like this might be faster
group :: Buffer -> [Buffer]
group xs
| null xs = []
| otherwise = ys : group zs
where
(ys, zs) = spanByte (unsafeHead xs) xs
-}
-- | The 'groupBy' function is the non-overloaded version of 'group'.
--
groupBy :: (Word8 -> Word8 -> Bool) -> Buffer -> [Buffer]
groupBy _ Empty          = []
groupBy k (Chunk c0 cs0) = groupBy' [] 0 (S.groupBy k c0) cs0
  where
    -- Like group's worker, but carries w, the first byte of the group
    -- currently being accumulated, since membership is decided by k
    -- rather than (==).
    -- NOTE(review): the literal 0 seeds for w are placeholders, only
    -- meaningful once acc is non-empty — confirm k is never applied to
    -- the seed value.
    groupBy' :: [P.Buffer] -> Word8 -> [P.Buffer] -> Buffer -> [Buffer]
    groupBy' acc@(_:_) c ss@(s:_) cs
      | not (c `k` S.unsafeHead s)     = revNonEmptyChunks acc : groupBy' [] 0 ss cs
    groupBy' acc _ (s:[]) Empty        = revNonEmptyChunks (s : acc) : []
    groupBy' acc w (s:[]) (Chunk c cs) = groupBy' (s:acc) w' (S.groupBy k c) cs
        -- Starting a new carried group: remember its first byte.
        where w' | L.null acc = S.unsafeHead s
                 | otherwise  = w
    groupBy' acc _ (s:ss) cs           = revNonEmptyChunks (s : acc) : groupBy' [] 0 ss cs
TODO : check if something like this might be faster
groupBy : : ( Word8 - > Word8 - > Bool ) - > Buffer - > [ Buffer ]
groupBy k xs
| null xs = [ ]
| otherwise = take n xs : groupBy k ( drop n xs )
where
n = 1 + findIndexOrEnd ( not . k ( head xs ) ) ( tail xs )
TODO: check if something like this might be faster
groupBy :: (Word8 -> Word8 -> Bool) -> Buffer -> [Buffer]
groupBy k xs
| null xs = []
| otherwise = take n xs : groupBy k (drop n xs)
where
n = 1 + findIndexOrEnd (not . k (head xs)) (tail xs)
-}
-- | /O(n)/ The 'intercalate' function takes a 'Buffer' and a list of
' 's and concatenates the list after interspersing the first
-- argument between each element of the list.
intercalate :: Buffer -> [Buffer] -> Buffer
intercalate s = concat . (L.intersperse s)
-- ---------------------------------------------------------------------
-- Indexing Buffers
-- | /O(c)/ 'Buffer' index (subscript) operator, starting from 0.
index :: Buffer -> Int64 -> Word8
index _   i | i < 0 = moduleError "index" ("negative index: " ++ show i)
index cs0 i         = index' cs0 i
  where index' Empty n = moduleError "index" ("index too large: " ++ show n)
        index' (Chunk c cs) n
          -- Not in this chunk: skip it and shrink the index accordingly.
          | n >= fromIntegral (S.length c) =
              index' cs (n - fromIntegral (S.length c))
          | otherwise = S.unsafeIndex c (fromIntegral n)
| /O(n)/ The ' elemIndex ' function returns the index of the first
-- element in the given 'Buffer' which is equal to the query
-- element, or 'Nothing' if there is no such element.
-- This implementation uses memchr(3).
elemIndex :: Word8 -> Buffer -> Maybe Int64
elemIndex w cs0 = elemIndex' 0 cs0
        -- n counts the bytes already skipped in earlier chunks, so a
        -- per-chunk hit is offset into the whole Buffer.
  where elemIndex' _ Empty = Nothing
        elemIndex' n (Chunk c cs) =
          case S.elemIndex w c of
            Nothing -> elemIndex' (n + fromIntegral (S.length c)) cs
            Just i  -> Just (n + fromIntegral i)
-- | /O(n)/ The ' elemIndexEnd ' function returns the last index of the
-- element in the given ' Buffer ' which is equal to the query
-- element , or ' Nothing ' if there is no such element . The following
-- holds :
--
-- > elemIndexEnd c xs = =
-- > ( - ) ( length xs - 1 ) ` fmap ` elemIndex c ( reverse xs )
--
elemIndexEnd : : Word8 - > Buffer - > Maybe Int
elemIndexEnd ch ( PS x s l ) = inlinePerformIO $ withForeignPtr x $ \p - >
go ( p ` plusPtr ` s ) ( l-1 )
where
STRICT2(go )
go p i | i < 0 = return Nothing
| otherwise = do ch ' < - peekByteOff p i
if ch = = ch '
then return $ Just i
else go p ( i-1 )
-- | /O(n)/ The 'elemIndexEnd' function returns the last index of the
-- element in the given 'Buffer' which is equal to the query
-- element, or 'Nothing' if there is no such element. The following
-- holds:
--
-- > elemIndexEnd c xs ==
-- > (-) (length xs - 1) `fmap` elemIndex c (reverse xs)
--
elemIndexEnd :: Word8 -> Buffer -> Maybe Int
elemIndexEnd ch (PS x s l) = inlinePerformIO $ withForeignPtr x $ \p ->
go (p `plusPtr` s) (l-1)
where
STRICT2(go)
go p i | i < 0 = return Nothing
| otherwise = do ch' <- peekByteOff p i
if ch == ch'
then return $ Just i
else go p (i-1)
-}
-- | /O(n)/ The 'elemIndices' function extends 'elemIndex', by returning
-- the indices of all elements equal to the query element, in ascending order.
-- This implementation uses memchr(3).
elemIndices :: Word8 -> Buffer -> [Int64]
elemIndices w cs0 = elemIndices' 0 cs0
  where elemIndices' _ Empty = []
        -- Indices found inside this chunk are shifted by n, the number of
        -- bytes in all preceding chunks; results stay in ascending order.
        elemIndices' n (Chunk c cs) = L.map ((+n).fromIntegral) (S.elemIndices w c)
                             ++ elemIndices' (n + fromIntegral (S.length c)) cs
-- | count returns the number of times its argument appears in the 'Buffer'
--
-- > count = length . elemIndices
--
-- But more efficiently than using length on the intermediate list.
count :: Word8 -> Buffer -> Int64
-- Strict left fold over the chunks, summing the per-chunk counts.
count w cs = foldlChunks (\n c -> n + fromIntegral (S.count w c)) 0 cs
-- | The 'findIndex' function takes a predicate and a 'Buffer' and
-- returns the index of the first element in the 'Buffer'
-- satisfying the predicate.
findIndex :: (Word8 -> Bool) -> Buffer -> Maybe Int64
findIndex k cs0 = findIndex' 0 cs0
  -- Same running-offset scheme as 'elemIndex', but driven by a predicate.
  where findIndex' _ Empty = Nothing
        findIndex' n (Chunk c cs) =
          case S.findIndex k c of
            Nothing -> findIndex' (n + fromIntegral (S.length c)) cs
            Just i -> Just (n + fromIntegral i)
{-# INLINE findIndex #-}
-- | /O(n)/ The 'find' function takes a predicate and a Buffer,
-- and returns the first element in matching the predicate, or 'Nothing'
-- if there is no such element.
--
-- > find f p = case findIndex f p of Just n -> Just (p ! n) ; _ -> Nothing
--
find :: (Word8 -> Bool) -> Buffer -> Maybe Word8
find f cs0 = find' cs0
  where find' Empty = Nothing
        -- Search chunk by chunk; the first matching byte wins.
        find' (Chunk c cs) = case S.find f c of
            Nothing -> find' cs
            Just w -> Just w
{-# INLINE find #-}
-- | The 'findIndices' function extends 'findIndex', by returning the
-- indices of all elements satisfying the predicate, in ascending order.
findIndices :: (Word8 -> Bool) -> Buffer -> [Int64]
findIndices k cs0 = findIndices' 0 cs0
  where findIndices' _ Empty = []
        -- Per-chunk indices are shifted by the running byte offset n.
        findIndices' n (Chunk c cs) = L.map ((+n).fromIntegral) (S.findIndices k c)
                             ++ findIndices' (n + fromIntegral (S.length c)) cs
-- ---------------------------------------------------------------------
-- Searching Buffers
-- | /O(n)/ 'elem' is the 'Buffer' membership predicate.
elem :: Word8 -> Buffer -> Bool
-- Defined via 'elemIndex'; we only care whether an index exists.
elem w cs = case elemIndex w cs of Nothing -> False ; _ -> True
-- | /O(n)/ 'notElem' is the inverse of 'elem'
notElem :: Word8 -> Buffer -> Bool
-- Simple negation of 'elem'.
notElem w cs = not (elem w cs)
-- | /O(n)/ 'filter', applied to a predicate and a Buffer,
-- returns a Buffer containing those characters that satisfy the
-- predicate.
filter :: (Word8 -> Bool) -> Buffer -> Buffer
filter p s = go s
  where
    go Empty = Empty
    -- 'chunk' is the smart constructor: it drops a chunk that the
    -- filter has emptied, preserving the no-empty-chunks invariant.
    go (Chunk x xs) = chunk (S.filter p x) (go xs)
{-# INLINE filter #-}
{-
-- | /O(n)/ and /O(n\/c) space/ A first order equivalent of /filter .
-- (==)/, for the common case of filtering a single byte. It is more
-- efficient to use /filterByte/ in this case.
--
-- > filterByte == filter . (==)
--
-- filterByte is around 10x faster, and uses much less space, than its
-- filter equivalent
filterByte :: Word8 -> Buffer -> Buffer
filterByte w ps = replicate (count w ps) w
{-# INLINE filterByte #-}
{-# RULES
"Buffer specialise filter (== x)" forall x.
filter ((==) x) = filterByte x
"Buffer specialise filter (== x)" forall x.
filter (== x) = filterByte x
#-}
-}
{-
-- | /O(n)/ A first order equivalent of /filter . (\/=)/, for the common
-- case of filtering a single byte out of a list. It is more efficient
-- to use /filterNotByte/ in this case.
--
-- > filterNotByte == filter . (/=)
--
-- filterNotByte is around 2x faster than its filter equivalent.
filterNotByte :: Word8 -> Buffer -> Buffer
filterNotByte w (LPS xs) = LPS (filterMap (P.filterNotByte w) xs)
-}
-- | /O(n)/ The 'partition' function takes a predicate a and returns
-- the pair of Buffers with elements which do and do not satisfy the
-- predicate, respectively; i.e.,
--
-- > partition p bs == (filter p xs, filter (not . p) xs)
--
partition :: (Word8 -> Bool) -> Buffer -> (Buffer, Buffer)
-- Two independent passes over the input; see the TODO below.
partition f p = (filter f p, filter (not . f) p)
--TODO: use a better implementation
-- ---------------------------------------------------------------------
-- Searching for substrings
-- | /O(n)/ The 'isPrefixOf' function takes two Buffers and returns 'True'
-- iff the first is a prefix of the second.
isPrefixOf :: Buffer -> Buffer -> Bool
isPrefixOf Empty _ = True
isPrefixOf _ Empty = False
isPrefixOf (Chunk x xs) (Chunk y ys)
| S.length x == S.length y = x == y && isPrefixOf xs ys
| S.length x < S.length y = x == yh && isPrefixOf xs (Chunk yt ys)
| otherwise = xh == y && isPrefixOf (Chunk xt xs) ys
where (xh,xt) = S.splitAt (S.length y) x
(yh,yt) = S.splitAt (S.length x) y
-- | /O(n)/ The 'isSuffixOf' function takes two Buffers and returns 'True'
-- iff the first is a suffix of the second.
--
-- The following holds:
--
-- > isSuffixOf x y == reverse x `isPrefixOf` reverse y
--
isSuffixOf :: Buffer -> Buffer -> Bool
-- Implemented directly from the documented law; reverses both inputs.
isSuffixOf x y = reverse x `isPrefixOf` reverse y
--TODO: a better implementation
-- ---------------------------------------------------------------------
-- Zipping
-- | /O(n)/ 'zip' takes two Buffers and returns a list of
-- corresponding pairs of bytes. If one input is short,
-- excess elements of the longer Buffer are discarded. This is
-- equivalent to a pair of 'unpack' operations.
zip :: Buffer -> Buffer -> [(Word8,Word8)]
-- Special case of 'zipWith' with the pairing constructor.
zip = zipWith (,)
-- | 'zipWith' generalises 'zip' by zipping with the function given as
-- the first argument, instead of a tupling function. For example,
-- @'zipWith' (+)@ is applied to two Buffers to produce the list of
-- corresponding sums.
zipWith :: (Word8 -> Word8 -> a) -> Buffer -> Buffer -> [a]
zipWith _ Empty _ = []
zipWith _ _ Empty = []
zipWith f (Chunk a as) (Chunk b bs) = go a as b bs
where
go x xs y ys = f (S.unsafeHead x) (S.unsafeHead y)
: to (S.unsafeTail x) xs (S.unsafeTail y) ys
to x Empty _ _ | S.null x = []
to _ _ y Empty | S.null y = []
to x xs y ys | not (S.null x)
&& not (S.null y) = go x xs y ys
to x xs _ (Chunk y' ys) | not (S.null x) = go x xs y' ys
to _ (Chunk x' xs) y ys | not (S.null y) = go x' xs y ys
to _ (Chunk x' xs) _ (Chunk y' ys) = go x' xs y' ys
-- | /O(n)/ 'unzip' transforms a list of pairs of bytes into a pair of
-- Buffers. Note that this performs two 'pack' operations.
unzip :: [(Word8,Word8)] -> (Buffer,Buffer)
-- Two traversals of the pair list, one per component.
unzip ls = (pack (L.map fst ls), pack (L.map snd ls))
{-# INLINE unzip #-}
-- ---------------------------------------------------------------------
-- Special lists
-- | /O(n)/ Return all initial segments of the given 'Buffer', shortest first.
inits :: Buffer -> [Buffer]
-- The empty buffer is always the first initial segment.
inits = (Empty :) . inits'
  where inits' Empty = []
        -- Prefixes ending inside chunk c (tail drops S.inits' own leading
        -- empty), followed by c prepended to every prefix of the rest.
        inits' (Chunk c cs) = L.map (\c' -> Chunk c' Empty) (L.tail (S.inits c))
                           ++ L.map (Chunk c) (inits' cs)
-- | /O(n)/ Return all final segments of the given 'Buffer', longest first.
tails :: Buffer -> [Buffer]
tails Empty = Empty : []
tails cs@(Chunk c cs')
| S.length c == 1 = cs : tails cs'
| otherwise = cs : tails (Chunk (S.unsafeTail c) cs')
-- ---------------------------------------------------------------------
-- Low level constructors
-- | /O(n)/ Make a copy of the 'Buffer' with its own storage.
-- This is mainly useful to allow the rest of the data pointed
-- to by the 'Buffer' to be garbage collected, for example
-- if a large string has been read in, and only a small part of it
-- is needed in the rest of the program.
copy :: Buffer -> Buffer
-- Copy every chunk into fresh storage so the result no longer aliases
-- (and so retains) the original's underlying memory.
copy cs = foldrChunks (Chunk . S.copy) Empty cs
--TODO: we could coalesce small blocks here
--FIXME: probably not strict enough, if we're doing this to avoid retaining
-- the parent blocks then we'd better copy strictly.
-- ---------------------------------------------------------------------
--TODO: defrag func that concatenates blocks together that are below a threshold
-- defrag :: Buffer -> Buffer
-- ---------------------------------------------------------------------
-- Lazy Buffer IO
--
-- Rule for when to close: is it expected to read the whole file?
-- If so, close when done.
--
-- | Read entire handle contents /lazily/ into a 'Buffer'. Chunks
-- are read on demand, in at most @k@-sized chunks. It does not block
-- waiting for a whole @k@-sized chunk, so if less than @k@ bytes are
-- available then they will be returned immediately as a smaller chunk.
--
-- The handle is closed on EOF.
--
hGetContentsN :: Int -> Handle -> IO Buffer
-- NOTE(review): the defining equation was missing here (the signature was
-- followed directly by 'where'); restored per the surrounding code.
hGetContentsN k h = lazyRead -- TODO: close h on exceptions, not only on EOF
  where
    -- Defer each read until the consumer actually demands the next chunk.
    lazyRead = unsafeInterleaveIO loop
    loop = do
        c <- S.hGetNonBlocking h k
        -- TODO: this should distinguish EOF from no data available; the
        -- underlying POSIX call makes this distinction, returning either
        -- 0 or EAGAIN.
        if S.null c
          then do eof <- hIsEOF h
                  if eof then hClose h >> return Empty
                         -- not EOF: block until at least one byte arrives
                         else hWaitForInput h (-1)
                           >> loop
          else do cs <- lazyRead
                  return (Chunk c cs)
-- | Read @n@ bytes into a 'Buffer', directly from the
-- specified 'Handle', in chunks of size @k@.
--
hGetN :: Int -> Handle -> Int -> IO Buffer
hGetN k h n | n > 0 = readChunks n
where
STRICT1(readChunks)
readChunks i = do
c <- S.hGet h (min k i)
case S.length c of
0 -> return Empty
m -> do cs <- readChunks (i - m)
return (Chunk c cs)
hGetN _ _ 0 = return Empty
hGetN _ h n = illegalBufferSize h "hGet" n
-- | hGetNonBlockingN is similar to 'hGetContentsN', except that it will never block
-- waiting for data to become available, instead it returns only whatever data
-- is available. Chunks are read on demand, in @k@-sized chunks.
--
hGetNonBlockingN :: Int -> Handle -> Int -> IO Buffer
#if defined(__GLASGOW_HASKELL__)
hGetNonBlockingN k h n | n > 0= readChunks n
  where
    STRICT1(readChunks)
    -- Same shape as 'hGetN', but each read returns only what is
    -- immediately available instead of blocking for a full chunk.
    readChunks i = do
        c <- S.hGetNonBlocking h (min k i)
        case S.length c of
            0 -> return Empty
            m -> do cs <- readChunks (i - m)
                    return (Chunk c cs)
hGetNonBlockingN _ _ 0 = return Empty
hGetNonBlockingN _ h n = illegalBufferSize h "hGetNonBlocking" n
#else
-- Non-GHC compilers: no non-blocking primitive, fall back to blocking reads.
hGetNonBlockingN = hGetN
#endif
-- | Raise an 'IOError' reporting an illegal (negative) byte count for the
-- named operation on the given handle.
illegalBufferSize :: Handle -> String -> Int -> IO a
illegalBufferSize handle fn sz =
    ioError (mkIOError illegalOperationErrorType msg (Just handle) Nothing)
    -- TODO: System.IO uses InvalidArgument here, but it's not exported :-(
  where
    -- showsPrec 9 parenthesises negative sizes, e.g. "(-1)".
    msg = fn ++ ": illegal Buffer size " ++ showsPrec 9 sz []
-- | Read entire handle contents /lazily/ into a 'Buffer'. Chunks
-- are read on demand, using the default chunk size.
--
-- Once EOF is encountered, the Handle is closed.
--
hGetContents :: Handle -> IO Buffer
-- 'hGetContentsN' specialised to the library's default chunk size.
hGetContents = hGetContentsN defaultChunkSize
-- | Read @n@ bytes into a 'Buffer', directly from the specified 'Handle'.
--
hGet :: Handle -> Int -> IO Buffer
-- 'hGetN' specialised to the library's default chunk size.
hGet = hGetN defaultChunkSize
-- | hGetNonBlocking is similar to 'hGet', except that it will never block
-- waiting for data to become available, instead it returns only whatever data
-- is available.
#if defined(__GLASGOW_HASKELL__)
hGetNonBlocking :: Handle -> Int -> IO Buffer
-- 'hGetNonBlockingN' specialised to the default chunk size.
hGetNonBlocking = hGetNonBlockingN defaultChunkSize
#else
-- Non-GHC compilers: fall back to the blocking read.
hGetNonBlocking = hGet
#endif
-- | Read an entire file /lazily/ into a 'Buffer'.
-- The Handle will be held open until EOF is encountered.
--
readFile :: FilePath -> IO Buffer
-- Opens in binary mode; the handle stays open until lazy reading hits EOF.
readFile f = openBinaryFile f ReadMode >>= hGetContents
-- | Write a 'Buffer' to a file.
--
writeFile :: FilePath -> Buffer -> IO ()
-- 'bracket' guarantees the handle is closed even if the write fails.
writeFile f txt = bracket (openBinaryFile f WriteMode) hClose
    (\hdl -> hPut hdl txt)
-- | Append a 'Buffer' to a file.
--
appendFile :: FilePath -> Buffer -> IO ()
-- Same as 'writeFile' but the handle is opened in AppendMode.
appendFile f txt = bracket (openBinaryFile f AppendMode) hClose
    (\hdl -> hPut hdl txt)
-- | getContents. Equivalent to hGetContents stdin. Will read /lazily/
--
getContents :: IO Buffer
-- Lazy read of all of standard input.
getContents = hGetContents stdin
-- | Outputs a 'Buffer' to the specified 'Handle'.
--
hPut :: Handle -> Buffer -> IO ()
-- Write the chunks left to right, one strict write per chunk.
hPut h cs = foldrChunks (\c rest -> S.hPut h c >> rest) (return ()) cs
-- | A synonym for @hPut@, for compatibility
--
hPutStr :: Handle -> Buffer -> IO ()
-- Compatibility alias for 'hPut'.
hPutStr = hPut
-- | Write a Buffer to stdout
putStr :: Buffer -> IO ()
-- 'hPut' on stdout.
putStr = hPut stdout
-- | Write a Buffer to stdout, appending a newline byte
--
putStrLn :: Buffer -> IO ()
-- Writes the buffer, then a single LF byte (0x0a).
putStrLn ps = hPut stdout ps >> hPut stdout (singleton 0x0a)
-- | The interact function takes a function of type @Buffer -> Buffer@
-- as its argument. The entire input from the standard input device is passed
-- to this function as its argument, and the resulting string is output on the
-- standard output device.
--
interact :: (Buffer -> Buffer) -> IO ()
-- Lazily reads stdin, transforms it, and writes the result to stdout.
interact transformer = putStr . transformer =<< getContents
-- ---------------------------------------------------------------------
-- Internal utilities
-- Common up near identical calls to `error' to reduce the number
-- constant strings created when compiled:
errorEmptyList :: String -> a
-- Shared "empty Buffer" error, parameterised by the calling function's name.
errorEmptyList fun = moduleError fun "empty Buffer"
moduleError :: String -> String -> a
-- Prefix every error with the module's qualified name and the function.
moduleError fun msg = error ("Data.Buffer.Lazy." ++ fun ++ ':':' ':msg)
-- reverse a list of non-empty chunks into a lazy Buffer
revNonEmptyChunks :: [P.Buffer] -> Buffer
-- Uses the raw 'Chunk' constructor: the caller guarantees no chunk is empty.
revNonEmptyChunks cs = L.foldl' (flip Chunk) Empty cs
-- reverse a list of possibly-empty chunks into a lazy Buffer
revChunks :: [P.Buffer] -> Buffer
-- Uses the 'chunk' smart constructor, which silently drops empty chunks.
revChunks cs = L.foldl' (flip chunk) Empty cs
-- | 'findIndexOrEnd' is a variant of findIndex, that returns the length
-- of the string if no element is found, rather than Nothing.
findIndexOrEnd :: (Word8 -> Bool) -> P.Buffer -> Int
-- Scans the strict chunk's raw bytes via 'peek'; presumably x/s/l are the
-- foreign pointer, start offset and length of the PS payload -- TODO confirm
-- against the 'S.PS' constructor's definition.
findIndexOrEnd k (S.PS x s l _) = S.inlinePerformIO $ withForeignPtr x $ \f -> go (f `plusPtr` s) 0
  where
    STRICT2(go)
    -- Returns the full length l when no byte satisfies the predicate.
    go ptr n | n >= l    = return l
             | otherwise = do w <- peek ptr
                              if k w
                                then return n
                                else go (ptr `plusPtr` 1) (n+1)
{-# INLINE findIndexOrEnd #-}
| null | https://raw.githubusercontent.com/ekmett/ekmett.github.com/8d3abab5b66db631e148e1d046d18909bece5893/haskell/buffer/Data/Buffer/Rope/Char.hs | haskell | # OPTIONS_HADDOCK prune #
|
License : BSD-style
Maintainer :
Stability : experimental
Portability : portable
Some operations, such as concat, append, reverse and cons, have
better complexity than their "Data.Buffer" equivalents, due to
optimisations resulting from the fingertree spine.
This module is intended to be imported @qualified@, to avoid name
Rewritten to support slices and use 'Foreign.ForeignPtr.ForeignPtr'
* The @Buffer@ type
:: Rope
:: String -> Rope
:: Rope -> String
:: [Buffer] -> Rope
:: Rope -> [Buffer]
* Basic interface
:: Rope -> Char -> Rope
:: Rope -> Rope -> Rope
:: Rope -> Rope
:: Rope -> Rope
:: Rope -> Bool
:: Rope -> Int
* Transforming 'Rope's
:: Rope -> Rope
:: Rope -> [Rope] -> Rope
:: [Rope] -> [Rope]
* Reducing 'Rope's (folds)
:: (a -> Char -> a) -> a -> Rope -> a
:: (a -> Char -> a) -> a -> Rope -> a
:: (Char -> a -> a) -> a -> Rope -> a
** Special folds
:: [Rope] -> Rope
:: (Char -> Rope) -> Rope -> Rope
* Building 'Rope's
** Scans
** Accumulating maps
** Replicated 'Rope's
:: Int -> Char -> Rope
:: Int -> Buffer -> Rope
** Unfolding Buffers
** Breaking strings
:: Int -> Rope -> Rope
:: Int -> Rope -> Rope
:: Int -> Rope -> (Rope, Rope)
:: Rope -> [Rope]
:: Rope -> [Rope]
:: Rope -> [Rope]
** Breaking into many substrings
* Predicates
:: Rope -> Rope -> Bool
:: Rope -> Rope -> Bool
* Searching Ropes
** Searching by equality
** Searching with a predicate
* Indexing Ropes
* Zipping and unzipping Ropes
* Ordered Ropes
sort, -- :: Rope -> Rope
* Low level conversions
** Copying Ropes
:: Rope -> Rope
defrag, -- :: Rope -> Rope
** Standard input and output
:: Rope -> IO ()
:: Rope -> IO ()
:: (Rope -> Rope) -> IO ()
** Files
:: FilePath -> IO Rope
:: FilePath -> Rope -> IO ()
:: FilePath -> Rope -> IO ()
** I\/O with Handles
:: Handle -> IO Rope
:: Handle -> Int -> IO Rope
:: Handle -> Int -> IO Rope
:: Handle -> Rope -> IO ()
:: Handle -> Rope -> IO ()
L for list/lazy
type name only
S for strict (hmm...)
-----------------------------------------------------------------------------
Useful macros, until we have bang patterns
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
| /O(1)/ The empty 'Buffer'
# INLINE empty #
| /O(1)/ Convert a 'Word8' into a 'Buffer'
| /O(n)/ Convert a '[Word8]' into a 'Buffer'.
| /O(n)/ Converts a 'Buffer' to a '[Word8]'.
TODO: we can do better here by integrating the concat with the unpack
| /O(c)/ Convert a list of strict 'Buffer' into a lazy 'Buffer'
| /O(n)/ Convert a lazy 'Buffer' into a list of strict 'Buffer'
----------------------------------------------------------------------
-- | /O(n)/ Convert a '[a]' into a 'Buffer' using some
-- conversion function
packWith :: (a -> Word8) -> [a] -> Buffer
packWith k str = LPS $ L.map (P.packWith k) (chunk defaultChunkSize str)
{-# INLINE packWith #
| /O(n)/ Converts a 'Buffer' to a '[a]', using a conversion function.
---------------------------------------------------------------------
Basic interface
| /O(1)/ Test whether a Buffer is empty.
| /O(1)/ 'cons' is analogous to '(:)' for lists.
strict in the Buffer that we are consing onto. More precisely, it forces
new \'chunk\'.
So that means you can't use a lazy recursive contruction like this:
> let xs = cons\' c xs in xs
You can however use 'cons', as well as 'repeat' and 'cycle', to build
infinite lazy Buffers.
# INLINE cons' #
| /O(n\/c)/ Append a byte to the end of a 'Buffer'
| /O(1)/ Extract the head and tail of a Buffer, returning Nothing
if it is empty.
| /O(1)/ Extract the elements after the head of a Buffer, which must be
non-empty.
| /O(n\/c)/ Extract the last element of a Buffer, which must be finite
and non-empty.
XXX Don't inline this. Something breaks with 6.8.2 (haven't investigated yet)
| /O(n\/c)/ Return all the elements of a 'Buffer' except the last one.
---------------------------------------------------------------------
Transformations
| /O(n)/ 'reverse' @xs@ returns the elements of @xs@ in reverse order.
| The 'intersperse' function takes a 'Word8' and a 'Buffer' and
It is analogous to the intersperse function on Lists.
| The 'transpose' function transposes the rows and columns of its
'Buffer' argument.
TODO: make this fast
---------------------------------------------------------------------
Reducing 'Buffer's
| 'foldl', applied to a binary operator, a starting value (typically
the left-identity of the operator), and a Buffer, reduces the
Buffer using the binary operator, from left to right.
# INLINE foldl #
| 'foldl\'' is like 'foldl', but strict in the accumulator.
# INLINE foldl' #
| 'foldr', applied to a binary operator, a starting value
(typically the right-identity of the operator), and a Buffer,
reduces the Buffer using the binary operator, from right to left.
argument, and thus must be applied to non-empty 'Buffers'.
This function is subject to array fusion.
and thus must be applied to non-empty 'Buffer's
---------------------------------------------------------------------
Special folds
| Map a function over a 'Buffer' and concatenate the results
any element of the 'Buffer' satisfies the predicate.
todo fuse
if all elements of the 'Buffer' satisfy the predicate.
todo fuse
| /O(n)/ 'maximum' returns the maximum value from a 'Buffer'
| /O(n)/ 'minimum' returns the minimum value from a 'Buffer'
| The 'mapAccumL' function behaves like a combination of 'map' and
'foldl'; it applies a function to each element of a Buffer,
passing an accumulating parameter from left to right, and returning a
| The 'mapAccumR' function behaves like a combination of 'map' and
'foldr'; it applies a function to each element of a Buffer,
passing an accumulating parameter from right to left, and returning a
---------------------------------------------------------------------
Building Buffers
reduced values from the left. This function will fuse.
Note that
---------------------------------------------------------------------
Unfolds and replicates
> iterate f x == [x, f x, f (f x), ...]
element.
the value of every element.
preserve invariant
| /O(n)/ The 'unfoldr' function is analogous to the List \'unfoldr\'.
'unfoldr' builds a Buffer from a seed value. The function takes
the element and returns 'Nothing' if it is done producing the
Buffer or returns 'Just' @(a,b)@, in which case, @a@ is a
recursive call.
---------------------------------------------------------------------
| 'takeWhile', applied to a predicate @p@ and a Buffer @xs@,
returns the longest prefix (possibly empty) of @xs@ of elements that
Add rules
| ' breakByte ' breaks its Buffer argument at the first occurence
of the specified byte . It is more efficient than ' break ' as it is
implemented with @memchr(3)@. I.e.
> break (= = ' c ' ) " abcd " = = breakByte ' c ' " abcd "
| ' spanByte ' breaks its Buffer argument at the first
occurence of a byte other than its argument . It is more efficient
than ' span (= =) '
> span (= = ' c ' ) " abcd " = = spanByte ' c ' " abcd "
| 'breakByte' breaks its Buffer argument at the first occurence
of the specified byte. It is more efficient than 'break' as it is
implemented with @memchr(3)@. I.e.
> break (=='c') "abcd" == breakByte 'c' "abcd"
| 'spanByte' breaks its Buffer argument at the first
occurence of a byte other than its argument. It is more efficient
than 'span (==)'
> span (=='c') "abcd" == spanByte 'c' "abcd"
| /O(n)/ Splits a 'Buffer' into components delimited by
separators, where the predicate returns True for a separator element.
separators result in an empty component in the output. eg.
> splitWith (=='a') "aabbaca" == ["","","bb","c",""]
> splitWith (=='a') [] == []
| /O(n)/ Break a 'Buffer' into pieces separated by the byte
argument, consuming the delimiter. I.e.
> split '\n' "a\nb\nd\ne" == ["a","b","d","e"]
> split 'x' "x" == ["",""]
and
> intercalate [c] . split c == id
As for all splitting functions in this library, this function does
not copy the substrings, it just constructs new 'Buffers' that
are slices of the original.
| Like ' splitWith ' , except that sequences of adjacent separators are
treated as a single separator . eg .
> tokens (= = ' a ' ) " aabbaca " = = [ " bb","c " ]
| Like 'splitWith', except that sequences of adjacent separators are
treated as a single separator. eg.
> tokens (=='a') "aabbaca" == ["bb","c"]
| The 'group' function takes a Buffer and returns a list of
Buffers such that the concatenation of the result is equal to the
argument. Moreover, each sublist in the result contains only equal
elements. For example,
It is a special case of 'groupBy', which allows the programmer to
supply their own equality test.
TODO: check if something like this might be faster
group :: Buffer -> [Buffer]
group xs
| null xs = []
| otherwise = ys : group zs
where
(ys, zs) = spanByte (unsafeHead xs) xs
| The 'groupBy' function is the non-overloaded version of 'group'.
| /O(n)/ The 'intercalate' function takes a 'Buffer' and a list of
argument between each element of the list.
---------------------------------------------------------------------
Indexing Buffers
| /O(c)/ 'Buffer' index (subscript) operator, starting from 0.
element in the given 'Buffer' which is equal to the query
element, or 'Nothing' if there is no such element.
This implementation uses memchr(3).
| /O(n)/ The ' elemIndexEnd ' function returns the last index of the
element in the given ' Buffer ' which is equal to the query
element , or ' Nothing ' if there is no such element . The following
holds :
> elemIndexEnd c xs = =
> ( - ) ( length xs - 1 ) ` fmap ` elemIndex c ( reverse xs )
| /O(n)/ The 'elemIndexEnd' function returns the last index of the
element in the given 'Buffer' which is equal to the query
element, or 'Nothing' if there is no such element. The following
holds:
> elemIndexEnd c xs ==
> (-) (length xs - 1) `fmap` elemIndex c (reverse xs)
| /O(n)/ The 'elemIndices' function extends 'elemIndex', by returning
the indices of all elements equal to the query element, in ascending order.
This implementation uses memchr(3).
> count = length . elemIndices
But more efficiently than using length on the intermediate list.
| The 'findIndex' function takes a predicate and a 'Buffer' and
satisfying the predicate.
| /O(n)/ The 'find' function takes a predicate and a Buffer,
if there is no such element.
> find f p = case findIndex f p of Just n -> Just (p ! n) ; _ -> Nothing
# INLINE find #
| The 'findIndices' function extends 'findIndex', by returning the
indices of all elements satisfying the predicate, in ascending order.
---------------------------------------------------------------------
| /O(n)/ 'elem' is the 'Buffer' membership predicate.
| /O(n)/ 'notElem' is the inverse of 'elem'
| /O(n)/ 'filter', applied to a predicate and a Buffer,
returns a Buffer containing those characters that satisfy the
predicate.
| /O(n)/ and /O(n\/c ) space/ A first order equivalent of /filter .
(= =) / , for the common case of filtering a single byte . It is more
efficient to use /filterByte/ in this case .
> filterByte = = filter . (= -- filterByte is around 10x faster , and uses much less space , than its
filter equivalent
| /O(n)/ and /O(n\/c) space/ A first order equivalent of /filter .
(==)/, for the common case of filtering a single byte. It is more
efficient to use /filterByte/ in this case.
> filterByte == filter . (==)
filterByte is around 10x faster, and uses much less space, than its
filter equivalent
# INLINE filterByte #
| /O(n)/ A first order equivalent of /filter . ( \/=)/ , for the common
case of filtering a single byte out of a list . It is more efficient
to use /filterNotByte/ in this case .
> filterNotByte = = filter . ( /= )
filterNotByte is around 2x faster than its filter equivalent .
| /O(n)/ A first order equivalent of /filter . (\/=)/, for the common
case of filtering a single byte out of a list. It is more efficient
to use /filterNotByte/ in this case.
> filterNotByte == filter . (/=)
filterNotByte is around 2x faster than its filter equivalent.
the pair of Buffers with elements which do and do not satisfy the
predicate, respectively; i.e.,
> partition p bs == (filter p xs, filter (not . p) xs)
TODO: use a better implementation
---------------------------------------------------------------------
Searching for substrings
The following holds:
> isSuffixOf x y == reverse x `isPrefixOf` reverse y
TODO: a better implementation
---------------------------------------------------------------------
Zipping
excess elements of the longer Buffer are discarded. This is
equivalent to a pair of 'unpack' operations.
| 'zipWith' generalises 'zip' by zipping with the function given as
corresponding sums.
| /O(n)/ 'unzip' transforms a list of pairs of bytes into a pair of
---------------------------------------------------------------------
Special lists
---------------------------------------------------------------------
Low level constructors
| /O(n)/ Make a copy of the 'Buffer' with its own storage.
This is mainly useful to allow the rest of the data pointed
to by the 'Buffer' to be garbage collected, for example
if a large string has been read in, and only a small part of it
is needed in the rest of the program.
FIXME: probably not strict enough, if we're doing this to avoid retaining
the parent blocks then we'd better copy strictly.
---------------------------------------------------------------------
defrag :: Buffer -> Buffer
---------------------------------------------------------------------
Rule for when to close: is it expected to read the whole file?
If so, close when done.
| Read entire handle contents /lazily/ into a 'Buffer'. Chunks
are read on demand, in at most @k@-sized chunks. It does not block
waiting for a whole @k@-sized chunk, so if less than @k@ bytes are
available then they will be returned immediately as a smaller chunk.
the underlying POSIX call makes this distincion, returning either
| Read @n@ bytes into a 'Buffer', directly from the
specified 'Handle', in chunks of size @k@.
| hGetNonBlockingN is similar to 'hGetContentsN', except that it will never block
waiting for data to become available, instead it returns only whatever data
is available. Chunks are read on demand, in @k@-sized chunks.
| Read entire handle contents /lazily/ into a 'Buffer'. Chunks
are read on demand, using the default chunk size.
| Read @n@ bytes into a 'Buffer', directly from the specified 'Handle'.
waiting for data to become available, instead it returns only whatever data
is available.
| Read an entire file /lazily/ into a 'Buffer'.
| Write a 'Buffer' to a file.
| Append a 'Buffer' to a file.
| Outputs a 'Buffer' to the specified 'Handle'.
| A synonym for @hPut@, for compatibility
| Write a Buffer to stdout
| Write a Buffer to stdout, appending a newline byte
| The interact function takes a function of type @Buffer -> Buffer@
as its argument. The entire input from the standard input device is passed
to this function as its argument, and the resulting string is output on the
standard output device.
---------------------------------------------------------------------
Internal utilities
Common up near identical calls to `error' to reduce the number
constant strings created when compiled:
reverse a list of non-empty chunks into a lazy Buffer
reverse a list of possibly-empty chunks into a lazy Buffer
| 'findIndexOrEnd' is a variant of findIndex, that returns the length
of the string if no element is found, rather than Nothing.
| # LANGUAGE CPP #
# OPTIONS_GHC -fno - warn - incomplete - patterns #
Module : Data . Buffer . Rope .
Copyright : ( c ) 2006
( c ) 2006
( c ) 2010
clashes with " Prelude " functions . eg .
> import qualified Data . Buffer . Rope . as R
Original GHC implementation by O\'Sullivan .
Rewritten to use ' Data . Array . . UArray ' by .
by .
Polished and extended by .
Lazy variant by and .
A number of FingerTree algorithms are by and
Converted to use a fingertree by
module Data.Buffer.Rope.Char (
instances : , Ord , Show , Read , Data , Typeable
* Introducing and eliminating ' 's
: :
: :
: : Rope - >
: : Rope - > Maybe ( , Rope )
: : Rope - >
: : ( ) - > Rope - > Rope
: :
: : ( Char - > ) - > Rope - >
: : ( Char - > ) - > Rope - >
: : ( Char - > ) - > Rope - >
: : ( Bool ) - > Rope - > Bool
: : ( Bool ) - > Rope - > Bool
: : Rope - >
: : Rope - >
: : ( Char - > ) - > Char - > Rope - > Rope
: : ( Char - > ) - > Rope - > Rope
: : ( Char - > ) - > Char - > Rope - > Rope
: : ( Char - > ) - > Rope - > Rope
: : ( acc - > Char - > ( acc , ) ) - > acc - > Rope - > ( acc , Rope )
: : ( acc - > Char - > ( acc , ) ) - > acc - > Rope - > ( acc , Rope )
: : ( a - > Maybe ( , a ) ) - > a - > Rope
* Substrings
: : ( Bool ) - > Rope - > Rope
: : ( Bool ) - > Rope - > Rope
: : ( Bool ) - > Rope - > ( Rope , Rope )
: : ( Bool ) - > Rope - > ( Rope , Rope )
: : ( Char - > Bool ) - > Rope - > [ Rope ]
: : [ Rope ]
: : ( Bool ) - > Rope - > [ Rope ]
: : Bool
: : Bool
: : ( Bool ) - > Rope - > Maybe
: : ( Bool ) - > Rope - > Rope
: : ( Bool ) - > Rope - > ( Rope , Rope )
: : Rope - > Int - >
: : - > Maybe Int
: : [ Int ]
: : ( Bool ) - > Rope - > Maybe Int
: : ( Bool ) - > Rope - > [ Int ]
: :
: : Rope - > Rope - > [ ( , ) ]
: : ( Char - > c ) - > Rope - > Rope - > [ c ]
: : [ ( , ) ] - > ( Rope , Rope )
* I\/O with ' 's
: : IO Rope
) where
import Prelude hiding
(reverse,head,tail,last,init,null,length,map,lines,foldl,foldr,unlines
,concat,any,take,drop,splitAt,takeWhile,dropWhile,span,break,elem,filter,maximum
,minimum,all,concatMap,foldl1,foldr1,scanl, scanl1, scanr, scanr1
,repeat, cycle, interact, iterate,readFile,writeFile,appendFile,replicate
,getContents,getLine,putStr,putStrLn ,zip,zipWith,unzip,notElem)
import qualified Data.Buffer.Char as C
import qualified Data.Buffer.Internal as S
import qualified Data.Buffer.Unsafe as S
import Data.Buffer.Lazy.Internal
import Data.Monoid (Monoid(..))
import Data.Word (Word8)
import Data.Int (Int64)
import System.IO (Handle,stdin,stdout,openBinaryFile,IOMode(..)
,hClose,hWaitForInput,hIsEOF)
import System.IO.Error (mkIOError, illegalOperationErrorType)
import System.IO.Unsafe
#ifndef __NHC__
import Control.Exception (bracket)
#else
import IO (bracket)
#endif
import Foreign.ForeignPtr (withForeignPtr)
import Foreign.Ptr
import Foreign.Storable
#define STRICT1(f) f a | a `seq` False = undefined
#define STRICT2(f) f a b | a `seq` b `seq` False = undefined
#define STRICT3(f) f a b c | a `seq` b `seq` c `seq` False = undefined
#define STRICT4(f) f a b c d | a `seq` b `seq` c `seq` d `seq` False = undefined
#define STRICT5(f) f a b c d e | a `seq` b `seq` c `seq` d `seq` e `seq` False = undefined
instance Eq Buffer
where (==) = eq
instance Ord Buffer
where compare = cmp
instance Monoid Buffer where
mempty = empty
mappend = append
mconcat = concat
-- | Chunk-boundary-insensitive equality: two lazy Buffers are equal
-- when their byte streams agree, however each happens to be chunked.
eq :: Buffer -> Buffer -> Bool
eq Empty Empty = True
eq Empty _ = False
eq _ Empty = False
eq (Chunk a as) (Chunk b bs) =
  case compare (S.length a) (S.length b) of
    -- Head chunks differ in length: compare only the overlap, and push
    -- the leftover of the longer chunk back into the recursion.
    LT -> a == (S.take (S.length a) b) && eq as (Chunk (S.drop (S.length a) b) bs)
    EQ -> a == b && eq as bs
    GT -> (S.take (S.length b) a) == b && eq (Chunk (S.drop (S.length b) a) as) bs
-- | Chunk-boundary-insensitive lexicographic comparison of two lazy
-- Buffers.  Empty sorts before everything.
cmp :: Buffer -> Buffer -> Ordering
cmp Empty Empty = EQ
cmp Empty _ = LT
cmp _ Empty = GT
cmp (Chunk a as) (Chunk b bs) =
  case compare (S.length a) (S.length b) of
    -- Compare the overlapping prefix first; only on a tie does the
    -- leftover of the longer chunk carry into the recursion.
    LT -> case compare a (S.take (S.length a) b) of
            EQ -> cmp as (Chunk (S.drop (S.length a) b) bs)
            result -> result
    EQ -> case compare a b of
            EQ -> cmp as bs
            result -> result
    GT -> case compare (S.take (S.length b) a) b of
            EQ -> cmp (Chunk (S.drop (S.length b) a) as) bs
            result -> result
Introducing and eliminating ' 's
empty :: Buffer
empty = Empty
singleton :: Word8 -> Buffer
singleton w = Chunk (S.singleton w) Empty
# INLINE singleton #
-- | /O(n)/ Convert a list of bytes into a Buffer, packing the input
-- into chunks of at most 'defaultChunkSize' bytes.
pack :: [Word8] -> Buffer
pack ws = L.foldr (Chunk . S.pack) Empty (chunks defaultChunkSize ws)
  where
    -- Split a list into pieces of at most 'size' elements.
    chunks :: Int -> [a] -> [[a]]
    chunks _ [] = []
    chunks size xs = case L.splitAt size xs of
      (xs', xs'') -> xs' : chunks size xs''
unpack :: Buffer -> [Word8]
unpack cs = L.concatMap S.unpack (toChunks cs)
fromChunks :: [P.Buffer] -> Buffer
fromChunks cs = L.foldr chunk Empty cs
toChunks :: Buffer -> [P.Buffer]
toChunks cs = foldrChunks (:) [] cs
# SPECIALIZE packWith : : ( ) - > [ Buffer #
unpackWith :: (Word8 -> a) -> Buffer -> [a]
unpackWith k (LPS ss) = L.concatMap (S.unpackWith k) ss
# INLINE unpackWith #
# SPECIALIZE unpackWith : : ( Word8 - > ) - > Buffer - > [ ] #
-}
null :: Buffer -> Bool
null Empty = True
null _ = False
# INLINE null #
| /O(n\/c)/ ' length ' returns the length of a as an ' Int64 '
length :: Buffer -> Int64
length cs = foldlChunks (\n c -> n + fromIntegral (S.length c)) 0 cs
# INLINE length #
cons :: Word8 -> Buffer -> Buffer
cons c cs = Chunk (S.singleton c) cs
# INLINE cons #
| /O(1)/ Unlike ' cons ' , ' '' is
the head and the first chunk . It does this because , for space efficiency , it
may coalesce the new byte onto the first \'chunk\ ' rather than starting a
-- | /O(1)/ Like 'cons', but may coalesce the new byte onto the first
-- chunk (when that chunk is short) rather than starting a new one,
-- trading strictness in the head chunk for space efficiency.
cons' :: Word8 -> Buffer -> Buffer
cons' w (Chunk c cs) | S.length c < 16 = Chunk (S.cons w c) cs
cons' w cs = Chunk (S.singleton w) cs
snoc :: Buffer -> Word8 -> Buffer
snoc cs w = foldrChunks Chunk (singleton w) cs
# INLINE snoc #
| /O(1)/ Extract the first element of a Buffer , which must be non - empty .
head :: Buffer -> Word8
head Empty = errorEmptyList "head"
head (Chunk c _) = S.unsafeHead c
# INLINE head #
-- | /O(1)/ Split a Buffer into its first byte and the remainder, or
-- 'Nothing' on the empty Buffer.  Keeps the no-empty-chunks invariant
-- by dropping a head chunk that becomes empty.
uncons :: Buffer -> Maybe (Word8, Buffer)
uncons Empty = Nothing
uncons (Chunk c cs)
    = Just (S.unsafeHead c,
            if S.length c == 1 then cs else Chunk (S.unsafeTail c) cs)
# INLINE uncons #
tail :: Buffer -> Buffer
tail Empty = errorEmptyList "tail"
tail (Chunk c cs)
| S.length c == 1 = cs
| otherwise = Chunk (S.unsafeTail c) cs
# INLINE tail #
last :: Buffer -> Word8
last Empty = errorEmptyList "last"
last (Chunk c0 cs0) = go c0 cs0
where go c Empty = S.last c
go _ (Chunk c cs) = go c cs
-- | /O(n\/c)/ All bytes of a Buffer except the last one.  Errors on
-- the empty Buffer.
init :: Buffer -> Buffer
init Empty = errorEmptyList "init"
init (Chunk c0 cs0) = go c0 cs0
  where -- Walk to the final chunk; shrink it (or drop it entirely when
        -- it holds a single byte, preserving the no-empty-chunks rule).
        go c Empty | S.length c == 1 = Empty
                   | otherwise = Chunk (S.init c) Empty
        go c (Chunk c' cs) = Chunk c (go c' cs)
| /O(n\/c)/ Append two Buffers
append :: Buffer -> Buffer -> Buffer
append xs ys = foldrChunks Chunk ys xs
# INLINE append #
| /O(n)/ ' map ' is the Buffer obtained by applying @f@ to each
element of @xs@.
-- | /O(n)/ Transform every byte of the Buffer with @f@, one chunk at a
-- time, preserving the chunk structure.
map :: (Word8 -> Word8) -> Buffer -> Buffer
map f = walk
  where
    walk Empty          = Empty
    walk (Chunk c rest) = Chunk (S.map f c) (walk rest)
# INLINE map #
-- | /O(n)/ Reverse a Buffer: the chunk order is reversed via an
-- accumulator, and each chunk's bytes are reversed as it is moved.
reverse :: Buffer -> Buffer
reverse = go Empty
  where
    go acc Empty        = acc
    go acc (Chunk c cs) = go (Chunk (S.reverse c) acc) cs
# INLINE reverse #
' that byte between the elements of the ' Buffer ' .
intersperse :: Word8 -> Buffer -> Buffer
intersperse _ Empty = Empty
intersperse w (Chunk c cs) = Chunk (S.intersperse w c)
(foldrChunks (Chunk . intersperse') Empty cs)
where intersperse' :: P.Buffer -> P.Buffer
intersperse' (S.PS fp o l e) =
S.unsafeCreate' (2*l) $ \p' -> withForeignPtr fp $ \p -> do
poke p' w
S.c_intersperse (p' `plusPtr` 1) (p `plusPtr` o) (fromIntegral l) w
return $! e + l * S.extra w
transpose :: [Buffer] -> [Buffer]
transpose css = L.map (\ss -> Chunk (S.pack ss) Empty)
(L.transpose (L.map unpack css))
foldl :: (a -> Word8 -> a) -> a -> Buffer -> a
foldl f z = go z
where go a Empty = a
go a (Chunk c cs) = go (S.foldl f a c) cs
-- | 'foldl'' is like 'foldl', but strict in the accumulator.
foldl' :: (a -> Word8 -> a) -> a -> Buffer -> a
foldl' f z = go z
  where -- This always-False first clause exists solely to force the
        -- accumulator before each recursive step (the seq idiom the
        -- STRICT macros above expand to).
        go a _ | a `seq` False = undefined
        go a Empty = a
        go a (Chunk c cs) = go (S.foldl f a c) cs
foldr :: (Word8 -> a -> a) -> a -> Buffer -> a
foldr k z cs = foldrChunks (flip (S.foldr k)) z cs
# INLINE foldr #
| ' foldl1 ' is a variant of ' foldl ' that has no starting value
foldl1 :: (Word8 -> Word8 -> Word8) -> Buffer -> Word8
foldl1 _ Empty = errorEmptyList "foldl1"
foldl1 f (Chunk c cs) = foldl f (S.unsafeHead c) (Chunk (S.unsafeTail c) cs)
| ' foldl1\ '' is like ' foldl1 ' , but strict in the accumulator .
foldl1' :: (Word8 -> Word8 -> Word8) -> Buffer -> Word8
foldl1' _ Empty = errorEmptyList "foldl1'"
foldl1' f (Chunk c cs) = foldl' f (S.unsafeHead c) (Chunk (S.unsafeTail c) cs)
| ' ' is a variant of ' foldr ' that has no starting value argument ,
foldr1 :: (Word8 -> Word8 -> Word8) -> Buffer -> Word8
foldr1 _ Empty = errorEmptyList "foldr1"
foldr1 f (Chunk c0 cs0) = go c0 cs0
where go c Empty = S.foldr1 f c
go c (Chunk c' cs) = S.foldr f (go c' cs) c
| /O(n)/ a list of Buffers .
concat :: [Buffer] -> Buffer
concat css0 = to css0
where
go Empty css = to css
go (Chunk c cs) css = Chunk c (go cs css)
to [] = Empty
to (cs:css) = go cs css
-- | Map a Buffer-producing function over every byte and concatenate
-- the results lazily.
concatMap :: (Word8 -> Buffer) -> Buffer -> Buffer
concatMap _ Empty = Empty
concatMap f (Chunk c0 cs0) = to c0 cs0
  where
    -- Emit the Buffer produced for one input byte, then resume the
    -- input at strict chunk c' with lazy remainder cs'.
    go :: Buffer -> P.Buffer -> Buffer -> Buffer
    go Empty c' cs' = to c' cs'
    go (Chunk c cs) c' cs' = Chunk c (go cs c' cs')
    -- Consume the next input byte from chunk c (remainder cs),
    -- skipping over exhausted chunks.
    to :: P.Buffer -> Buffer -> Buffer
    to c cs | S.null c = case cs of
                Empty -> Empty
                (Chunk c' cs') -> to c' cs'
            | otherwise = go (f (S.unsafeHead c)) (S.unsafeTail c) cs
| /O(n)/ Applied to a predicate and a Buffer , ' any ' determines if
any :: (Word8 -> Bool) -> Buffer -> Bool
any f cs = foldrChunks (\c rest -> S.any f c || rest) False cs
# INLINE any #
| /O(n)/ Applied to a predicate and a ' Buffer ' , ' all ' determines
all :: (Word8 -> Bool) -> Buffer -> Bool
all f cs = foldrChunks (\c rest -> S.all f c && rest) True cs
# INLINE all #
maximum :: Buffer -> Word8
maximum Empty = errorEmptyList "maximum"
maximum (Chunk c cs) = foldlChunks (\n c' -> n `max` S.maximum c')
(S.maximum c) cs
# INLINE maximum #
minimum :: Buffer -> Word8
minimum Empty = errorEmptyList "minimum"
minimum (Chunk c cs) = foldlChunks (\n c' -> n `min` S.minimum c')
(S.minimum c) cs
# INLINE minimum #
final value of this accumulator together with the new Buffer .
mapAccumL :: (acc -> Word8 -> (acc, Word8)) -> acc -> Buffer -> (acc, Buffer)
mapAccumL f s0 cs0 = go s0 cs0
where
go s Empty = (s, Empty)
go s (Chunk c cs) = (s'', Chunk c' cs')
where (s', c') = S.mapAccumL f s c
(s'', cs') = go s' cs
final value of this accumulator together with the new Buffer .
mapAccumR :: (acc -> Word8 -> (acc, Word8)) -> acc -> Buffer -> (acc, Buffer)
mapAccumR f s0 cs0 = go s0 cs0
where
go s Empty = (s, Empty)
go s (Chunk c cs) = (s'', Chunk c' cs')
where (s'', c') = S.mapAccumR f s' c
(s', cs') = go s cs
| ' ' is similar to ' foldl ' , but returns a list of successive
> f z [ x1 , x2 , ... ] = = [ z , z ` f ` x1 , ( z ` f ` x1 ) ` f ` x2 , ... ]
> last ( f z xs ) = = foldl f z xs .
scanl :: (Word8 -> Word8 -> Word8) -> Word8 -> Buffer -> Buffer
scanl f z = snd . foldl k (z,singleton z)
where
k (c,acc) a = let n = f c a in (n, acc `snoc` n)
# INLINE scanl #
| @'iterate ' f returns an infinite Buffer of repeated applications
of to @x@ :
iterate :: (Word8 -> Word8) -> Word8 -> Buffer
iterate f = unfoldr (\x -> case f x of x' -> x' `seq` Just (x', x'))
| @'repeat ' is an infinite Buffer , with @x@ the value of every
repeat :: Word8 -> Buffer
repeat w = cs where cs = Chunk (S.replicate smallChunkSize w) cs
| /O(n)/ @'replicate ' n is a Buffer of length @n@ with
-- | /O(n)/ @'replicate' n w@ is a Buffer of length @n@ with every byte
-- set to @w@.  Large results are built from @q@ references to a single
-- shared full-size chunk, plus an optional leading partial chunk of
-- @r@ bytes.
replicate :: Int64 -> Word8 -> Buffer
replicate n w
    | n <= 0 = Empty
    | n < fromIntegral smallChunkSize = Chunk (S.replicate (fromIntegral n) w) Empty
      -- When n is an exact multiple of the chunk size there is no
      -- partial chunk: prepending (S.unsafeTake 0 c) would create an
      -- empty chunk and break the no-empty-chunks invariant.
    | r == 0 = cs
    | otherwise = Chunk (S.unsafeTake (fromIntegral r) c) cs
  where
    c = S.replicate smallChunkSize w    -- the one shared full chunk
    cs = nChunks q                      -- q references to that chunk
    (q, r) = quotRem n (fromIntegral smallChunkSize)
    nChunks 0 = Empty
    nChunks m = Chunk c (nChunks (m-1))
| ' cycle ' ties a finite into a circular one , or equivalently ,
the infinite repetition of the original .
cycle :: Buffer -> Buffer
cycle Empty = errorEmptyList "cycle"
cycle cs = cs' where cs' = foldrChunks Chunk cs' cs
prepending to the and @b@ is used as the next element in a
-- | /O(n)/ Build a Buffer from a seed: @f@ yields the next byte and
-- the next seed, or 'Nothing' to stop.  Chunk sizes start at 32 bytes
-- and double on each continuation.
unfoldr :: (a -> Maybe (Word8, a)) -> a -> Buffer
unfoldr f s0 = unfoldChunk 32 s0
  where unfoldChunk n s =
          case S.unfoldrN n f s of
            (c, Nothing)
              | S.null c  -> Empty
              | otherwise -> Chunk c Empty
            (c, Just s')  -> Chunk c (unfoldChunk (n*2) s')
Substrings
| /O(n\/c)/ ' take ' @n@ , applied to a @xs@ , returns the prefix
of @xs@ of length @n@ , or @xs@ itself if @n > ' length ' xs@.
-- | /O(n\/c)/ 'take' @n xs@ returns the prefix of @xs@ of length @n@,
-- or @xs@ itself if @n > 'length' xs@.
take :: Int64 -> Buffer -> Buffer
take i _ | i <= 0 = Empty
take i cs0 = take' i cs0
  where take' 0 _ = Empty
        take' _ Empty = Empty
        take' n (Chunk c cs) =
          -- The split point is inside this chunk: truncate it and stop;
          -- otherwise keep the chunk whole and recurse with what's left
          -- of the count.
          if n < fromIntegral (S.length c)
            then Chunk (S.take (fromIntegral n) c) Empty
            else Chunk c (take' (n - fromIntegral (S.length c)) cs)
| /O(n\/c)/ ' drop ' @n returns the suffix of @xs@ after the first @n@
elements , or @[]@ if @n > ' length ' xs@.
drop :: Int64 -> Buffer -> Buffer
drop i p | i <= 0 = p
drop i cs0 = drop' i cs0
where drop' 0 cs = cs
drop' _ Empty = Empty
drop' n (Chunk c cs) =
if n < fromIntegral (S.length c)
then Chunk (S.drop (fromIntegral n) c) cs
else drop' (n - fromIntegral (S.length c)) cs
| /O(n\/c)/ ' splitAt ' @n is equivalent to @('take ' n xs , ' drop ' n xs)@.
-- | /O(n\/c)/ 'splitAt' @n xs@ is equivalent to
-- @('take' n xs, 'drop' n xs)@ but traverses the spine only once.
splitAt :: Int64 -> Buffer -> (Buffer, Buffer)
splitAt i cs0 | i <= 0 = (Empty, cs0)
splitAt i cs0 = splitAt' i cs0
  where splitAt' 0 cs = (Empty, cs)
        splitAt' _ Empty = (Empty, Empty)
        splitAt' n (Chunk c cs) =
          -- Either the cut falls inside this chunk (split it in two) or
          -- the whole chunk goes to the left side and we recurse.
          if n < fromIntegral (S.length c)
            then (Chunk (S.take (fromIntegral n) c) Empty
                 ,Chunk (S.drop (fromIntegral n) c) cs)
            else let (cs', cs'') = splitAt' (n - fromIntegral (S.length c)) cs
                 in (Chunk c cs', cs'')
satisfy @p@.
-- | 'takeWhile' @p xs@ returns the longest prefix of @xs@ whose bytes
-- all satisfy @p@.
takeWhile :: (Word8 -> Bool) -> Buffer -> Buffer
takeWhile f cs0 = takeWhile' cs0
  where takeWhile' Empty = Empty
        takeWhile' (Chunk c cs) =
          -- Index of the first byte FAILING the predicate, or the chunk
          -- length if every byte passes.
          case findIndexOrEnd (not . f) c of
            0                  -> Empty
            n | n < S.length c -> Chunk (S.take n c) Empty
              | otherwise      -> Chunk c (takeWhile' cs)
| ' dropWhile ' @p xs@ returns the suffix remaining after ' takeWhile ' @p xs@.
dropWhile :: (Word8 -> Bool) -> Buffer -> Buffer
dropWhile f cs0 = dropWhile' cs0
where dropWhile' Empty = Empty
dropWhile' (Chunk c cs) =
case findIndexOrEnd (not . f) c of
n | n < S.length c -> Chunk (S.drop n c) cs
| otherwise -> dropWhile' cs
| ' break ' @p@ is equivalent to @'span ' ( ' not ' . p)@.
-- | 'break' @p@ splits the Buffer just before the first byte
-- satisfying @p@; equivalent to @'span' ('not' . p)@.
break :: (Word8 -> Bool) -> Buffer -> (Buffer, Buffer)
break f cs0 = break' cs0
  where break' Empty = (Empty, Empty)
        break' (Chunk c cs) =
          case findIndexOrEnd f c of
            -- Match at the chunk head: everything goes right.
            0                  -> (Empty, Chunk c cs)
            -- Match inside the chunk: split it between the two halves.
            n | n < S.length c -> (Chunk (S.take n c) Empty
                                  ,Chunk (S.drop n c) cs)
            -- No match in this chunk: it belongs to the left side.
              | otherwise      -> let (cs', cs'') = break' cs
                                  in (Chunk c cs', cs'')
TODO
breakByte : : Word8 - > Buffer - > ( Buffer , Buffer )
breakByte c ( LPS ps ) = case ( breakByte ' ps ) of ( a , b ) - > ( LPS a , LPS b )
where breakByte ' [ ] = ( [ ] , [ ] )
breakByte ' ( x : xs ) =
case P.elemIndex c x of
Just 0 - > ( [ ] , x : xs )
Just n - > ( x : [ ] , P.drop n x : xs )
Nothing - > let ( xs ' , xs '' ) = breakByte ' xs
in ( x : xs ' , xs '' )
spanByte : : Word8 - > Buffer - > ( Buffer , Buffer )
spanByte c ( LPS ps ) = case ( spanByte ' ps ) of ( a , b ) - > ( LPS a , LPS b )
where spanByte ' [ ] = ( [ ] , [ ] )
spanByte ' ( x : xs ) =
case P.spanByte c x of
( x ' , x '' ) | P.null x ' - > ( [ ] , x : xs )
| P.null x '' - > let ( xs ' , xs '' ) = spanByte ' xs
in ( x : xs ' , xs '' )
| otherwise - > ( x ' : [ ] , x '' : xs )
breakByte :: Word8 -> Buffer -> (Buffer, Buffer)
breakByte c (LPS ps) = case (breakByte' ps) of (a,b) -> (LPS a, LPS b)
where breakByte' [] = ([], [])
breakByte' (x:xs) =
case P.elemIndex c x of
Just 0 -> ([], x : xs)
Just n -> (P.take n x : [], P.drop n x : xs)
Nothing -> let (xs', xs'') = breakByte' xs
in (x : xs', xs'')
spanByte :: Word8 -> Buffer -> (Buffer, Buffer)
spanByte c (LPS ps) = case (spanByte' ps) of (a,b) -> (LPS a, LPS b)
where spanByte' [] = ([], [])
spanByte' (x:xs) =
case P.spanByte c x of
(x', x'') | P.null x' -> ([], x : xs)
| P.null x'' -> let (xs', xs'') = spanByte' xs
in (x : xs', xs'')
| otherwise -> (x' : [], x'' : xs)
-}
| ' span ' @p xs@ breaks the Buffer into two segments . It is
equivalent to @('takeWhile ' p xs , ' dropWhile ' p xs)@
span :: (Word8 -> Bool) -> Buffer -> (Buffer, Buffer)
span p = break (not . p)
The resulting components do not contain the separators . Two adjacent
splitWith :: (Word8 -> Bool) -> Buffer -> [Buffer]
splitWith _ Empty = []
splitWith p (Chunk c0 cs0) = comb [] (S.splitWith p c0) cs0
where comb :: [P.Buffer] -> [P.Buffer] -> Buffer -> [Buffer]
comb acc (s:[]) Empty = revChunks (s:acc) : []
comb acc (s:[]) (Chunk c cs) = comb (s:acc) (S.splitWith p c) cs
comb acc (s:ss) cs = revChunks (s:acc) : comb [] ss cs
# INLINE splitWith #
> split ' a ' " aXaXaXa " = = [ " " , " X","X","X " , " " ]
> split = = splitWith . (=
split :: Word8 -> Buffer -> [Buffer]
split _ Empty = []
split w (Chunk c0 cs0) = comb [] (S.split w c0) cs0
where comb :: [P.Buffer] -> [P.Buffer] -> Buffer -> [Buffer]
comb acc (s:[]) Empty = revChunks (s:acc) : []
comb acc (s:[]) (Chunk c cs) = comb (s:acc) (S.split w c) cs
comb acc (s:ss) cs = revChunks (s:acc) : comb [] ss cs
# INLINE split #
tokens : : ( Word8 - > Bool ) - > Buffer - > [ Buffer ]
tokens f = ( not.null ) . splitWith f
tokens :: (Word8 -> Bool) -> Buffer -> [Buffer]
tokens f = L.filter (not.null) . splitWith f
-}
> group " Mississippi " = [ " M","i","ss","i","ss","i","pp","i " ]
-- | Group equal adjacent bytes into sub-Buffers, e.g.
-- group "Mississippi" = ["M","i","ss","i","ss","i","pp","i"].
-- Runs may span chunk boundaries.
group :: Buffer -> [Buffer]
group Empty = []
group (Chunk c0 cs0) = group' [] (S.group c0) cs0
  where
    -- acc: reversed pieces of the run currently being assembled.
    -- Second arg: strict groups of the chunk under inspection.
    group' :: [P.Buffer] -> [P.Buffer] -> Buffer -> [Buffer]
    -- Next strict group starts a different byte: flush the current run.
    group' acc@(s':_) ss@(s:_) cs
      | S.unsafeHead s'
     /= S.unsafeHead s = revNonEmptyChunks acc : group' [] ss cs
    -- Final group of the final chunk: emit everything.
    group' acc (s:[]) Empty = revNonEmptyChunks (s:acc) : []
    -- Final group of this chunk may continue into the next chunk.
    group' acc (s:[]) (Chunk c cs) = group' (s:acc) (S.group c) cs
    -- An interior group is necessarily complete: emit it.
    group' acc (s:ss) cs = revNonEmptyChunks (s:acc) : group' [] ss cs
-- | Like 'group', but with a user-supplied equality relation @k@.
groupBy :: (Word8 -> Word8 -> Bool) -> Buffer -> [Buffer]
groupBy _ Empty = []
groupBy k (Chunk c0 cs0) = groupBy' [] 0 (S.groupBy k c0) cs0
  where
    -- acc: reversed pieces of the current run; second arg: the byte
    -- that the current run is keyed on (only meaningful when acc is
    -- non-empty, hence the initial dummy 0).
    groupBy' :: [P.Buffer] -> Word8 -> [P.Buffer] -> Buffer -> [Buffer]
    -- Next strict group is unrelated to the run's key byte: flush.
    groupBy' acc@(_:_) c ss@(s:_) cs
      | not (c `k` S.unsafeHead s) = revNonEmptyChunks acc : groupBy' [] 0 ss cs
    groupBy' acc _ (s:[]) Empty = revNonEmptyChunks (s : acc) : []
    -- Final group of this chunk may continue into the next one; record
    -- the key byte the first time the run gains content.
    groupBy' acc w (s:[]) (Chunk c cs) = groupBy' (s:acc) w' (S.groupBy k c) cs
      where w' | L.null acc = S.unsafeHead s
               | otherwise  = w
    groupBy' acc _ (s:ss) cs = revNonEmptyChunks (s : acc) : groupBy' [] 0 ss cs
TODO : check if something like this might be faster
groupBy : : ( Word8 - > Word8 - > Bool ) - > Buffer - > [ Buffer ]
groupBy k xs
| null xs = [ ]
| otherwise = take n xs : groupBy k ( drop n xs )
where
n = 1 + findIndexOrEnd ( not . k ( head xs ) ) ( tail xs )
TODO: check if something like this might be faster
groupBy :: (Word8 -> Word8 -> Bool) -> Buffer -> [Buffer]
groupBy k xs
| null xs = []
| otherwise = take n xs : groupBy k (drop n xs)
where
n = 1 + findIndexOrEnd (not . k (head xs)) (tail xs)
-}
' 's and concatenates the list after interspersing the first
intercalate :: Buffer -> [Buffer] -> Buffer
intercalate s = concat . (L.intersperse s)
-- | /O(n\/c)/ Buffer index (subscript) operator, starting from 0.
-- Errors on a negative or out-of-range index.
index :: Buffer -> Int64 -> Word8
index _ i | i < 0 = moduleError "index" ("negative index: " ++ show i)
index cs0 i = index' cs0 i
  where index' Empty n = moduleError "index" ("index too large: " ++ show n)
        index' (Chunk c cs) n
          -- Skip whole chunks until the index lands inside one.
          | n >= fromIntegral (S.length c) =
              index' cs (n - fromIntegral (S.length c))
          | otherwise = S.unsafeIndex c (fromIntegral n)
| /O(n)/ The ' elemIndex ' function returns the index of the first
elemIndex :: Word8 -> Buffer -> Maybe Int64
elemIndex w cs0 = elemIndex' 0 cs0
where elemIndex' _ Empty = Nothing
elemIndex' n (Chunk c cs) =
case S.elemIndex w c of
Nothing -> elemIndex' (n + fromIntegral (S.length c)) cs
Just i -> Just (n + fromIntegral i)
elemIndexEnd : : Word8 - > Buffer - > Maybe Int
elemIndexEnd ch ( PS x s l ) = inlinePerformIO $ withForeignPtr x $ \p - >
go ( p ` plusPtr ` s ) ( l-1 )
where
STRICT2(go )
go p i | i < 0 = return Nothing
| otherwise = do ch ' < - peekByteOff p i
if ch = = ch '
then return $ Just i
else go p ( i-1 )
elemIndexEnd :: Word8 -> Buffer -> Maybe Int
elemIndexEnd ch (PS x s l) = inlinePerformIO $ withForeignPtr x $ \p ->
go (p `plusPtr` s) (l-1)
where
STRICT2(go)
go p i | i < 0 = return Nothing
| otherwise = do ch' <- peekByteOff p i
if ch == ch'
then return $ Just i
else go p (i-1)
-}
elemIndices :: Word8 -> Buffer -> [Int64]
elemIndices w cs0 = elemIndices' 0 cs0
where elemIndices' _ Empty = []
elemIndices' n (Chunk c cs) = L.map ((+n).fromIntegral) (S.elemIndices w c)
++ elemIndices' (n + fromIntegral (S.length c)) cs
| count returns the number of times its argument appears in the
count :: Word8 -> Buffer -> Int64
count w cs = foldlChunks (\n c -> n + fromIntegral (S.count w c)) 0 cs
returns the index of the first element in the
findIndex :: (Word8 -> Bool) -> Buffer -> Maybe Int64
findIndex k cs0 = findIndex' 0 cs0
where findIndex' _ Empty = Nothing
findIndex' n (Chunk c cs) =
case S.findIndex k c of
Nothing -> findIndex' (n + fromIntegral (S.length c)) cs
Just i -> Just (n + fromIntegral i)
# INLINE findIndex #
and returns the first element in matching the predicate , or ' Nothing '
find :: (Word8 -> Bool) -> Buffer -> Maybe Word8
find f cs0 = find' cs0
where find' Empty = Nothing
find' (Chunk c cs) = case S.find f c of
Nothing -> find' cs
Just w -> Just w
findIndices :: (Word8 -> Bool) -> Buffer -> [Int64]
findIndices k cs0 = findIndices' 0 cs0
where findIndices' _ Empty = []
findIndices' n (Chunk c cs) = L.map ((+n).fromIntegral) (S.findIndices k c)
++ findIndices' (n + fromIntegral (S.length c)) cs
Searching Buffers
-- | /O(n)/ Does the Buffer contain the given byte?
elem :: Word8 -> Buffer -> Bool
elem w cs = maybe False (const True) (elemIndex w cs)
notElem :: Word8 -> Buffer -> Bool
notElem w cs = not (elem w cs)
filter :: (Word8 -> Bool) -> Buffer -> Buffer
filter p s = go s
where
go Empty = Empty
go (Chunk x xs) = chunk (S.filter p x) (go xs)
# INLINE filter #
filterByte : : Word8 - > Buffer - > Buffer
filterByte w ps = replicate ( count w ps ) w
{ - # INLINE filterByte #
filterByte :: Word8 -> Buffer -> Buffer
filterByte w ps = replicate (count w ps) w
# RULES
" Buffer specialise filter (= = x ) " forall filter ( (= ) = filterByte x
" Buffer specialise filter (= = x ) " forall filter (= = x ) = filterByte x
#
"Buffer specialise filter (== x)" forall x.
filter ((==) x) = filterByte x
"Buffer specialise filter (== x)" forall x.
filter (== x) = filterByte x
#-}
-}
: : Word8 - > Buffer - > Buffer
filterNotByte w ( LPS xs ) = LPS ( filterMap ( P.filterNotByte w ) xs )
filterNotByte :: Word8 -> Buffer -> Buffer
filterNotByte w (LPS xs) = LPS (filterMap (P.filterNotByte w) xs)
-}
| /O(n)/ The ' partition ' function takes a predicate a and returns
partition :: (Word8 -> Bool) -> Buffer -> (Buffer, Buffer)
partition f p = (filter f p, filter (not . f) p)
| /O(n)/ The ' isPrefixOf ' function takes two Buffers and returns ' True '
iff the first is a prefix of the second .
-- | /O(n)/ 'isPrefixOf' returns 'True' iff the first Buffer is a
-- prefix of the second, regardless of how either is chunked.
isPrefixOf :: Buffer -> Buffer -> Bool
isPrefixOf Empty _ = True
isPrefixOf _ Empty = False
isPrefixOf (Chunk x xs) (Chunk y ys)
    | S.length x == S.length y = x == y && isPrefixOf xs ys
      -- Unequal head chunks: compare the shorter one against the
      -- matching prefix of the longer, and carry the leftover forward.
    | S.length x <  S.length y = x == yh && isPrefixOf xs (Chunk yt ys)
    | otherwise = xh == y && isPrefixOf (Chunk xt xs) ys
  where (xh,xt) = S.splitAt (S.length y) x
        (yh,yt) = S.splitAt (S.length x) y
| /O(n)/ The ' isSuffixOf ' function takes two Buffers and returns ' True '
iff the first is a suffix of the second .
isSuffixOf :: Buffer -> Buffer -> Bool
isSuffixOf x y = reverse x `isPrefixOf` reverse y
| /O(n)/ ' zip ' takes two Buffers and returns a list of
corresponding pairs of bytes . If one input is short ,
zip :: Buffer -> Buffer -> [(Word8,Word8)]
zip = zipWith (,)
the first argument , instead of a tupling function . For example ,
@'zipWith ' ( + ) @ is applied to two Buffers to produce the list of
-- | Generalises 'zip' by combining corresponding bytes with @f@
-- instead of tupling them.  Stops at the shorter input.
zipWith :: (Word8 -> Word8 -> a) -> Buffer -> Buffer -> [a]
zipWith _ Empty _ = []
zipWith _ _ Empty = []
zipWith f (Chunk a as) (Chunk b bs) = go a as b bs
  where
    -- Invariant: both current chunks x and y are non-empty here, so
    -- the unsafe head/tail accesses are in bounds.
    go x xs y ys = f (S.unsafeHead x) (S.unsafeHead y)
                 : to (S.unsafeTail x) xs (S.unsafeTail y) ys
    -- Step past exhausted chunks on either side; terminate when one
    -- input is fully consumed.
    to x Empty _ _ | S.null x = []
    to _ _ y Empty | S.null y = []
    to x xs y ys | not (S.null x)
                && not (S.null y) = go x xs y ys
    to x xs _ (Chunk y' ys) | not (S.null x) = go x xs y' ys
    to _ (Chunk x' xs) y ys | not (S.null y) = go x' xs y ys
    to _ (Chunk x' xs) _ (Chunk y' ys) = go x' xs y' ys
Buffers . Note that this performs two ' pack ' operations .
unzip :: [(Word8,Word8)] -> (Buffer,Buffer)
unzip ls = (pack (L.map fst ls), pack (L.map snd ls))
# INLINE unzip #
| /O(n)/ Return all initial segments of the given ' Buffer ' , shortest first .
inits :: Buffer -> [Buffer]
inits = (Empty :) . inits'
where inits' Empty = []
inits' (Chunk c cs) = L.map (\c' -> Chunk c' Empty) (L.tail (S.inits c))
++ L.map (Chunk c) (inits' cs)
| /O(n)/ Return all final segments of the given ' Buffer ' , longest first .
-- | /O(n)/ Return all final segments of the given Buffer, longest
-- first; the last element is always the empty Buffer.
tails :: Buffer -> [Buffer]
tails Empty = Empty : []
tails cs@(Chunk c cs')
      -- Dropping the only byte of the head chunk moves to the next
      -- chunk; otherwise just shrink the head chunk by one.
    | S.length c == 1 = cs : tails cs'
    | otherwise       = cs : tails (Chunk (S.unsafeTail c) cs')
copy :: Buffer -> Buffer
copy cs = foldrChunks (Chunk . S.copy) Empty cs
TODO , we could coalese small blocks here
TODO defrag func that concatenates block together that are below a threshold
Lazy Buffer IO
The handle is closed on EOF .
-- | Read a Handle lazily, in chunks of at most @k@ bytes.  Chunks are
-- read on demand, via 'unsafeInterleaveIO'; the Handle is closed once
-- EOF is reached.
hGetContentsN :: Int -> Handle -> IO Buffer
hGetContentsN k h = lazyRead -- TODO close on exceptions
  where
    lazyRead = unsafeInterleaveIO loop
    loop = do
        c <- S.hGetNonBlocking h k
        -- TODO: this should distinguish EOF from "no data available
        -- yet" (0 bytes read / EAGAIN).
        if S.null c
          then do eof <- hIsEOF h
                  if eof then hClose h >> return Empty
                         else hWaitForInput h (-1)
                              >> loop
          else do cs <- lazyRead
                  return (Chunk c cs)
hGetN :: Int -> Handle -> Int -> IO Buffer
hGetN k h n | n > 0 = readChunks n
where
STRICT1(readChunks)
readChunks i = do
c <- S.hGet h (min k i)
case S.length c of
0 -> return Empty
m -> do cs <- readChunks (i - m)
return (Chunk c cs)
hGetN _ _ 0 = return Empty
hGetN _ h n = illegalBufferSize h "hGet" n
hGetNonBlockingN :: Int -> Handle -> Int -> IO Buffer
#if defined(__GLASGOW_HASKELL__)
hGetNonBlockingN k h n | n > 0= readChunks n
where
STRICT1(readChunks)
readChunks i = do
c <- S.hGetNonBlocking h (min k i)
case S.length c of
0 -> return Empty
m -> do cs <- readChunks (i - m)
return (Chunk c cs)
hGetNonBlockingN _ _ 0 = return Empty
hGetNonBlockingN _ h n = illegalBufferSize h "hGetNonBlocking" n
#else
hGetNonBlockingN = hGetN
#endif
-- | Raise an 'IOError' for an invalid (negative) byte count passed to
-- one of the hGet variants.
illegalBufferSize :: Handle -> String -> Int -> IO a
illegalBufferSize handle fn sz =
    ioError (mkIOError illegalOperationErrorType msg (Just handle) Nothing)
    -- TODO: System.IO uses InvalidArgument here, but it's not exported :-(
  where
    msg = fn ++ ": illegal Buffer size " ++ showsPrec 9 sz []
Once EOF is encountered , the Handle is closed .
hGetContents :: Handle -> IO Buffer
hGetContents = hGetContentsN defaultChunkSize
hGet :: Handle -> Int -> IO Buffer
hGet = hGetN defaultChunkSize
| hGetNonBlocking is similar to ' hGet ' , except that it will never block
#if defined(__GLASGOW_HASKELL__)
hGetNonBlocking :: Handle -> Int -> IO Buffer
hGetNonBlocking = hGetNonBlockingN defaultChunkSize
#else
hGetNonBlocking = hGet
#endif
The Handle will be held open until EOF is encountered .
readFile :: FilePath -> IO Buffer
readFile f = openBinaryFile f ReadMode >>= hGetContents
writeFile :: FilePath -> Buffer -> IO ()
writeFile f txt = bracket (openBinaryFile f WriteMode) hClose
(\hdl -> hPut hdl txt)
appendFile :: FilePath -> Buffer -> IO ()
appendFile f txt = bracket (openBinaryFile f AppendMode) hClose
(\hdl -> hPut hdl txt)
| getContents . Equivalent to . Will read /lazily/
getContents :: IO Buffer
getContents = hGetContents stdin
hPut :: Handle -> Buffer -> IO ()
hPut h cs = foldrChunks (\c rest -> S.hPut h c >> rest) (return ()) cs
hPutStr :: Handle -> Buffer -> IO ()
hPutStr = hPut
putStr :: Buffer -> IO ()
putStr = hPut stdout
putStrLn :: Buffer -> IO ()
putStrLn ps = hPut stdout ps >> hPut stdout (singleton 0x0a)
interact :: (Buffer -> Buffer) -> IO ()
interact transformer = putStr . transformer =<< getContents
errorEmptyList :: String -> a
errorEmptyList fun = moduleError fun "empty Buffer"
moduleError :: String -> String -> a
moduleError fun msg = error ("Data.Buffer.Lazy." ++ fun ++ ':':' ':msg)
revNonEmptyChunks :: [P.Buffer] -> Buffer
revNonEmptyChunks cs = L.foldl' (flip Chunk) Empty cs
revChunks :: [P.Buffer] -> Buffer
revChunks cs = L.foldl' (flip chunk) Empty cs
-- | 'findIndexOrEnd' is a variant of findIndex that returns the chunk
-- length when no byte satisfies the predicate, rather than 'Nothing'.
-- Operates on a single strict chunk via direct pointer reads.
findIndexOrEnd :: (Word8 -> Bool) -> P.Buffer -> Int
findIndexOrEnd k (S.PS x s l _) = S.inlinePerformIO $ withForeignPtr x $ \f -> go (f `plusPtr` s) 0
  where
    STRICT2(go)
    go ptr n | n >= l    = return l
             | otherwise = do w <- peek ptr
                              if k w
                                then return n
                                else go (ptr `plusPtr` 1) (n+1)
# INLINE findIndexOrEnd #
|
db4b6bf01a5e092ecdc96831c0473808f91b5d6c29a00014b4cdea5785c7b35a | martijnbastiaan/doctest-parallel | Spec.hs | module Main where
import Test.Hspec
import qualified ExtractSpec
import qualified GhciWrapperSpec
import qualified InterpreterSpec
import qualified LocationSpec
import qualified MainSpec
import qualified OptionsSpec
import qualified ParseSpec
import qualified PropertySpec
import qualified RunnerSpec
import qualified RunSpec
import qualified UtilSpec
-- | Entry point: run the whole test suite through Hspec.
main :: IO ()
main = hspec spec
-- | The full test tree: one Hspec 'describe' group per spec module,
-- in the order the modules are imported.
spec :: Spec
spec =
  mapM_
    (uncurry describe)
    [ ("ExtractSpec", ExtractSpec.spec)
    , ("GhciWrapperSpec", GhciWrapperSpec.spec)
    , ("InterpreterSpec", InterpreterSpec.spec)
    , ("LocationSpec", LocationSpec.spec)
    , ("MainSpec", MainSpec.spec)
    , ("OptionsSpec", OptionsSpec.spec)
    , ("ParseSpec", ParseSpec.spec)
    , ("PropertySpec", PropertySpec.spec)
    , ("RunnerSpec", RunnerSpec.spec)
    , ("RunSpec", RunSpec.spec)
    , ("UtilSpec", UtilSpec.spec)
    ]
| null | https://raw.githubusercontent.com/martijnbastiaan/doctest-parallel/80d7024bbf2f2c751bb497b94276749c7dc6bf99/test/Spec.hs | haskell | module Main where
import Test.Hspec
import qualified ExtractSpec
import qualified GhciWrapperSpec
import qualified InterpreterSpec
import qualified LocationSpec
import qualified MainSpec
import qualified OptionsSpec
import qualified ParseSpec
import qualified PropertySpec
import qualified RunnerSpec
import qualified RunSpec
import qualified UtilSpec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "ExtractSpec" ExtractSpec.spec
describe "GhciWrapperSpec" GhciWrapperSpec.spec
describe "InterpreterSpec" InterpreterSpec.spec
describe "LocationSpec" LocationSpec.spec
describe "MainSpec" MainSpec.spec
describe "OptionsSpec" OptionsSpec.spec
describe "ParseSpec" ParseSpec.spec
describe "PropertySpec" PropertySpec.spec
describe "RunnerSpec" RunnerSpec.spec
describe "RunSpec" RunSpec.spec
describe "UtilSpec" UtilSpec.spec
| |
bcaa964325ed2f19c5e6bbfaf5f808722e021d563288c458a6f365581018efc6 | mattmundell/nightshade | nlx.lisp | ;;; The definitions of VOPs used for non-local exit (throw, lexical exit,
;;; etc.)
(in-package "ALPHA")
MAKE - NLX - SP - TN -- Interface
;;;
Make an environment - live stack TN for saving the SP for NLX entry .
;;;
;;; Make an environment-live stack TN (fixnum representation, in the
;;; immediate-argument SC) for saving the stack pointer at a non-local
;;; exit entry point.
(def-vm-support-routine make-nlx-sp-tn (env)
  (environment-live-tn
   (make-representation-tn *fixnum-primitive-type* immediate-arg-scn)
   env))
Make - NLX - Entry - Argument - Start - Location -- Interface
;;;
Make a TN for the argument count passing location for a non - local
;;; entry.
;;;
;;; Make a TN wired to the OCFP register -- the argument-count passing
;;; location used for a non-local entry.
(def-vm-support-routine make-nlx-entry-argument-start-location ()
  (make-wired-tn *fixnum-primitive-type* immediate-arg-scn ocfp-offset))
;;; Save and restore dynamic environment.
;;;
;;; These VOPs are used in the reentered function to restore the
;;; appropriate dynamic environment. Currently we only save the
;;; Current-Catch and binding stack pointer. We don't need to save/restore
;;; the current unwind-protect, since unwind-protects are implicitly
;;; processed during unwinding. If there were any additional stacks, then
;;; this would be the place to restore the top pointers.
;;; MAKE-DYNAMIC-STATE-TNS  --  Interface
;;;
;;; Return a list of TNs that can be used to snapshot the dynamic state
;;; for use with the Save/Restore-Dynamic-Environment VOPs.  Four TNs,
;;; matching the four results/args of save/restore-dynamic-state:
;;; catch block, NFP, NSP and eval-stack top.
(def-vm-support-routine make-dynamic-state-tns ()
  (list (make-normal-tn *any-primitive-type*)
        (make-normal-tn *any-primitive-type*)
        (make-normal-tn *any-primitive-type*)
        (make-normal-tn *any-primitive-type*)))
;;; Snapshot the dynamic state: the current catch block, the two
;;; number-stack pointers (NFP and NSP), and the eval stack top.
(define-vop (save-dynamic-state)
  (:results (catch :scs (descriptor-reg))
            (nfp :scs (descriptor-reg))
            (nsp :scs (descriptor-reg))
            (eval :scs (descriptor-reg)))
  (:vop-var vop)
  (:generator 13
    (load-symbol-value catch lisp::*current-catch-block*)
    (let ((cur-nfp (current-nfp-tn vop)))
      (when cur-nfp
        ;; NOTE(review): MSKLL appears to keep only part of the 64-bit
        ;; stack pointer so the saved value is descriptor-safe; the
        ;; dropped bits are re-supplied with BIS on restore.  Confirm
        ;; against the Alpha MSKLL instruction definition.
        (inst mskll cur-nfp 4 nfp)))
    (inst mskll nsp-tn 4 nsp)
    (load-symbol-value eval lisp::*eval-stack-top*)))

;;; Reinstall a previously saved dynamic state.  The number-stack
;;; pointers are recombined (BIS) from the saved part and bits taken
;;; from the current NSP -- inverse of the masking done on save.
(define-vop (restore-dynamic-state)
  (:args (catch :scs (descriptor-reg))
         (nfp :scs (descriptor-reg))
         (nsp :scs (descriptor-reg))
         (eval :scs (descriptor-reg)))
  (:vop-var vop)
  (:temporary (:sc any-reg) temp)
  (:generator 10
    (store-symbol-value catch lisp::*current-catch-block*)
    (store-symbol-value eval lisp::*eval-stack-top*)
    (inst mskll nsp-tn 0 temp)
    (let ((cur-nfp (current-nfp-tn vop)))
      (when cur-nfp
        (inst bis nfp temp cur-nfp)))
    (inst bis nsp temp nsp-tn)))
;;; Return the current control stack pointer.
(define-vop (current-stack-pointer)
  (:results (res :scs (any-reg descriptor-reg)))
  (:generator 1
    (move csp-tn res)))

;;; Return the current binding stack pointer.
(define-vop (current-binding-pointer)
  (:results (res :scs (any-reg descriptor-reg)))
  (:generator 1
    (move bsp-tn res)))
;;;; Unwind block hackery.

;;; Compute the address of the unwind block from its stack TN, then
;;; store into the block the current unwind-protect, the current frame
;;; pointer, the code object, and the entry PC (an LRA computed from
;;; ENTRY-LABEL).
;;;
;;; NOTE(review): the entry PC is stored via
;;; vm:catch-block-entry-pc-slot -- presumably unwind blocks and catch
;;; blocks share that slot layout; confirm against the vm slot
;;; definitions.
(define-vop (make-unwind-block)
  (:args (tn))
  (:info entry-label)
  (:results (block :scs (any-reg)))
  (:temporary (:scs (descriptor-reg)) temp)
  (:temporary (:scs (non-descriptor-reg)) ndescr)
  (:generator 22
    ;; BLOCK := address of the stack slot group holding the block.
    (inst lda block (* (tn-offset tn) vm:word-bytes) cfp-tn)
    (load-symbol-value temp lisp::*current-unwind-protect-block*)
    (storew temp block vm:unwind-block-current-uwp-slot)
    (storew cfp-tn block vm:unwind-block-current-cont-slot)
    (storew code-tn block vm:unwind-block-current-code-slot)
    (inst compute-lra-from-code temp code-tn entry-label ndescr)
    (storew temp block vm:catch-block-entry-pc-slot)))

;;; Like Make-Unwind-Block, except that we also store in the specified
;;; tag, and link the block into the Current-Catch list.
(define-vop (make-catch-block)
  (:args (tn)
         (tag :scs (descriptor-reg)))
  (:info entry-label)
  (:results (block :scs (any-reg)))
  (:temporary (:scs (descriptor-reg)) temp)
  (:temporary (:scs (descriptor-reg) :target block :to (:result 0)) result)
  (:temporary (:scs (non-descriptor-reg)) ndescr)
  (:generator 44
    (inst lda result (* (tn-offset tn) vm:word-bytes) cfp-tn)
    (load-symbol-value temp lisp::*current-unwind-protect-block*)
    (storew temp result vm:catch-block-current-uwp-slot)
    (storew cfp-tn result vm:catch-block-current-cont-slot)
    (storew code-tn result vm:catch-block-current-code-slot)
    (inst compute-lra-from-code temp code-tn entry-label ndescr)
    (storew temp result vm:catch-block-entry-pc-slot)
    (storew tag result vm:catch-block-tag-slot)
    ;; Push this block onto the catch chain.
    (load-symbol-value temp lisp::*current-catch-block*)
    (storew temp result vm:catch-block-previous-catch-slot)
    (store-symbol-value result lisp::*current-catch-block*)
    (move result block)))

;;; Just set the current unwind-protect to TN's address.  This
;;; instantiates an unwind block as an unwind-protect.
(define-vop (set-unwind-protect)
  (:args (tn))
  (:temporary (:scs (descriptor-reg)) new-uwp)
  (:generator 7
    (inst lda new-uwp (* (tn-offset tn) vm:word-bytes) cfp-tn)
    (store-symbol-value new-uwp lisp::*current-unwind-protect-block*)))
;;; Pop the innermost catch block off the catch chain by following its
;;; previous-catch link.
(define-vop (unlink-catch-block)
  (:temporary (:scs (any-reg)) block)
  (:policy :fast-safe)
  (:translate %catch-breakup)
  (:generator 17
    (load-symbol-value block lisp::*current-catch-block*)
    (loadw block block vm:catch-block-previous-catch-slot)
    (store-symbol-value block lisp::*current-catch-block*)))

;;; Reinstate the unwind-protect that was current when the innermost
;;; unwind block was created (saved in its current-uwp slot).
(define-vop (unlink-unwind-protect)
  (:temporary (:scs (any-reg)) block)
  (:policy :fast-safe)
  (:translate %unwind-protect-breakup)
  (:generator 17
    (load-symbol-value block lisp::*current-unwind-protect-block*)
    (loadw block block vm:unwind-block-current-uwp-slot)
    (store-symbol-value block lisp::*current-unwind-protect-block*)))
;;;; NLX entry VOPs.

;;; Re-entry point for a known number of values (NVALS).  Emits the
;;; return-pc for LABEL, moves up to NVALS thrown values into the
;;; result TNs (defaulting the rest to NIL), then resets CSP from the
;;; stack pointer saved when the entry was created.
(define-vop (nlx-entry)
  (:args (sp) ; Note: we can't list an sc-restriction, 'cause any load vops
              ; would be inserted before the LRA.
         (start)
         (count))
  (:results (values :more t))
  (:temporary (:scs (descriptor-reg)) move-temp)
  (:temporary (:sc non-descriptor-reg) temp)
  (:info label nvals)
  (:save-p :force-to-stack)
  (:vop-var vop)
  (:generator 30
    (emit-return-pc label)
    (note-this-location vop :non-local-entry)
    (cond ((zerop nvals))               ; no values wanted: nothing to do
          ((= nvals 1)
           ;; Single value: preload NIL, then overwrite with the first
           ;; thrown value unless COUNT is zero.
           (let ((no-values (gen-label)))
             (move null-tn (tn-ref-tn values))
             (inst beq count no-values)
             (loadw (tn-ref-tn values) start)
             (emit-label no-values)))
          (t
           ;; Multiple values: for each target, test the remaining
           ;; COUNT before loading.  When it hits zero we jump to that
           ;; target's out-of-line stub (in *elsewhere*); the stubs
           ;; fall through each other, so all remaining targets get
           ;; defaulted to NIL before branching back.
           (collect ((defaults))
             (do ((i 0 (1+ i))
                  (tn-ref values (tn-ref-across tn-ref)))
                 ((null tn-ref))
               (let ((default-lab (gen-label))
                     (tn (tn-ref-tn tn-ref)))
                 (defaults (cons default-lab tn))
                 ;; TEMP holds the pre-decrement count; zero means this
                 ;; and all later targets must be defaulted.
                 (inst move count temp)
                 (inst lda count (fixnum -1) count)
                 (inst beq temp default-lab)
                 (sc-case tn
                   ((descriptor-reg any-reg)
                    (loadw tn start i))
                   (control-stack
                    (loadw move-temp start i)
                    (store-stack-tn tn move-temp)))))
             (let ((defaulting-done (gen-label)))
               (emit-label defaulting-done)
               (assemble (*elsewhere*)
                 (dolist (def (defaults))
                   (emit-label (car def))
                   (let ((tn (cdr def)))
                     (sc-case tn
                       ((descriptor-reg any-reg)
                        (move null-tn tn))
                       (control-stack
                        (store-stack-tn tn null-tn)))))
                 (inst br zero-tn defaulting-done))))))
    ;; Discard anything pushed since the entry was made.
    (load-stack-tn csp-tn sp)))
;;; Like NLX-ENTRY, but receive an arbitrary number of values: copy
;;; COUNT words from START down to the save area at TOP, publish the
;;; new start/count results, and leave CSP just past the copied block.
(define-vop (nlx-entry-multiple)
  (:args (top :target dst) (start :target src) (count :target num))
  ;; Again, no SC restrictions for the args, 'cause the loading would
  ;; happen before the entry label.
  (:info label)
  (:temporary (:scs (any-reg) :from (:argument 0)) dst)
  (:temporary (:scs (any-reg) :from (:argument 1)) src)
  (:temporary (:scs (any-reg) :from (:argument 2)) num)
  (:temporary (:scs (descriptor-reg)) temp)
  (:results (new-start) (new-count))
  (:save-p :force-to-stack)
  (:vop-var vop)
  (:generator 30
    (emit-return-pc label)
    (note-this-location vop :non-local-entry)
    (let ((loop (gen-label))
          (done (gen-label)))
      ;; Copy args into temporaries we are free to clobber.
      (load-stack-tn dst top)
      (move start src)
      (move count num)
      ;; Establish results before NUM/DST are destroyed by the loop.
      (sc-case new-start
        (any-reg (move dst new-start))
        (control-stack (store-stack-tn new-start dst)))
      (sc-case new-count
        (any-reg (inst move num new-count))
        (control-stack (store-stack-tn new-count num)))
      (inst beq num done)
      ;; Copy NUM words from SRC to DST, one word per iteration.
      (emit-label loop)
      (loadw temp src)
      (inst lda src vm:word-bytes src)
      (storew temp dst)
      (inst lda num (fixnum -1) num)
      (inst lda dst vm:word-bytes dst)
      (inst bne num loop)
      (emit-label done)
      ;; CSP ends just past the copied values.
      (inst move dst csp-tn))))
;;; This VOP is just to force the TNs used in the cleanup onto the
;;; stack; it generates no code beyond the return-pc for LABEL.
(define-vop (uwp-entry)
  (:info label)
  (:save-p :force-to-stack)
  (:results (block) (start) (count))
  (:ignore block start count)
  (:vop-var vop)
  (:generator 0
    (emit-return-pc label)
    (note-this-location vop :non-local-entry)))
| null | https://raw.githubusercontent.com/mattmundell/nightshade/d8abd7bd3424b95b70bed599e0cfe033e15299e0/src/compiler/alpha/nlx.lisp | lisp | The definitions of VOPs used for non-local exit (throw, lexical exit,
etc.)
entry.
Save and restore dynamic environment.
These VOPs are used in the reentered function to restore the
appropriate dynamic environment. Currently we only save the
Current-Catch and binding stack pointer. We don't need to save/restore
the current unwind-protect, since unwind-protects are implicitly
processed during unwinding. If there were any additional stacks, then
this would be the place to restore the top pointers.
Unwind block hackery.
Like Make-Unwind-Block, except that we also store in the specified tag, and
link the block into the Current-Catch list.
unwind block as an unwind-protect.
Note: we can't list an sc-restriction, 'cause any load vops
happen before the entry label.
Copy args.
Establish results.
Copy stuff on stack.
|
(in-package "ALPHA")
MAKE - NLX - SP - TN -- Interface
Make an environment - live stack TN for saving the SP for NLX entry .
(def-vm-support-routine make-nlx-sp-tn (env)
(environment-live-tn
(make-representation-tn *fixnum-primitive-type* immediate-arg-scn)
env))
Make - NLX - Entry - Argument - Start - Location -- Interface
Make a TN for the argument count passing location for a non - local
(def-vm-support-routine make-nlx-entry-argument-start-location ()
(make-wired-tn *fixnum-primitive-type* immediate-arg-scn ocfp-offset))
Make - Dynamic - State - TNs -- Interface
Return a list of TNs that can be used to snapshot the dynamic state for
use with the Save / Restore - Dynamic - Environment VOPs .
(def-vm-support-routine make-dynamic-state-tns ()
(list (make-normal-tn *any-primitive-type*)
(make-normal-tn *any-primitive-type*)
(make-normal-tn *any-primitive-type*)
(make-normal-tn *any-primitive-type*)))
(define-vop (save-dynamic-state)
(:results (catch :scs (descriptor-reg))
(nfp :scs (descriptor-reg))
(nsp :scs (descriptor-reg))
(eval :scs (descriptor-reg)))
(:vop-var vop)
(:generator 13
(load-symbol-value catch lisp::*current-catch-block*)
(let ((cur-nfp (current-nfp-tn vop)))
(when cur-nfp
(inst mskll cur-nfp 4 nfp)))
(inst mskll nsp-tn 4 nsp)
(load-symbol-value eval lisp::*eval-stack-top*)))
(define-vop (restore-dynamic-state)
(:args (catch :scs (descriptor-reg))
(nfp :scs (descriptor-reg))
(nsp :scs (descriptor-reg))
(eval :scs (descriptor-reg)))
(:vop-var vop)
(:temporary (:sc any-reg) temp)
(:generator 10
(store-symbol-value catch lisp::*current-catch-block*)
(store-symbol-value eval lisp::*eval-stack-top*)
(inst mskll nsp-tn 0 temp)
(let ((cur-nfp (current-nfp-tn vop)))
(when cur-nfp
(inst bis nfp temp cur-nfp)))
(inst bis nsp temp nsp-tn)))
(define-vop (current-stack-pointer)
(:results (res :scs (any-reg descriptor-reg)))
(:generator 1
(move csp-tn res)))
(define-vop (current-binding-pointer)
(:results (res :scs (any-reg descriptor-reg)))
(:generator 1
(move bsp-tn res)))
Compute the address of the catch block from its TN , then store into the
block the current Fp , Env , Unwind - Protect , and the entry PC .
(define-vop (make-unwind-block)
(:args (tn))
(:info entry-label)
(:results (block :scs (any-reg)))
(:temporary (:scs (descriptor-reg)) temp)
(:temporary (:scs (non-descriptor-reg)) ndescr)
(:generator 22
(inst lda block (* (tn-offset tn) vm:word-bytes) cfp-tn)
(load-symbol-value temp lisp::*current-unwind-protect-block*)
(storew temp block vm:unwind-block-current-uwp-slot)
(storew cfp-tn block vm:unwind-block-current-cont-slot)
(storew code-tn block vm:unwind-block-current-code-slot)
(inst compute-lra-from-code temp code-tn entry-label ndescr)
(storew temp block vm:catch-block-entry-pc-slot)))
(define-vop (make-catch-block)
(:args (tn)
(tag :scs (descriptor-reg)))
(:info entry-label)
(:results (block :scs (any-reg)))
(:temporary (:scs (descriptor-reg)) temp)
(:temporary (:scs (descriptor-reg) :target block :to (:result 0)) result)
(:temporary (:scs (non-descriptor-reg)) ndescr)
(:generator 44
(inst lda result (* (tn-offset tn) vm:word-bytes) cfp-tn)
(load-symbol-value temp lisp::*current-unwind-protect-block*)
(storew temp result vm:catch-block-current-uwp-slot)
(storew cfp-tn result vm:catch-block-current-cont-slot)
(storew code-tn result vm:catch-block-current-code-slot)
(inst compute-lra-from-code temp code-tn entry-label ndescr)
(storew temp result vm:catch-block-entry-pc-slot)
(storew tag result vm:catch-block-tag-slot)
(load-symbol-value temp lisp::*current-catch-block*)
(storew temp result vm:catch-block-previous-catch-slot)
(store-symbol-value result lisp::*current-catch-block*)
(move result block)))
Just set the current unwind - protect to TN 's address . This instantiates an
(define-vop (set-unwind-protect)
(:args (tn))
(:temporary (:scs (descriptor-reg)) new-uwp)
(:generator 7
(inst lda new-uwp (* (tn-offset tn) vm:word-bytes) cfp-tn)
(store-symbol-value new-uwp lisp::*current-unwind-protect-block*)))
(define-vop (unlink-catch-block)
(:temporary (:scs (any-reg)) block)
(:policy :fast-safe)
(:translate %catch-breakup)
(:generator 17
(load-symbol-value block lisp::*current-catch-block*)
(loadw block block vm:catch-block-previous-catch-slot)
(store-symbol-value block lisp::*current-catch-block*)))
(define-vop (unlink-unwind-protect)
(:temporary (:scs (any-reg)) block)
(:policy :fast-safe)
(:translate %unwind-protect-breakup)
(:generator 17
(load-symbol-value block lisp::*current-unwind-protect-block*)
(loadw block block vm:unwind-block-current-uwp-slot)
(store-symbol-value block lisp::*current-unwind-protect-block*)))
NLX entry VOPs .
(define-vop (nlx-entry)
would be inserted before the LRA .
(start)
(count))
(:results (values :more t))
(:temporary (:scs (descriptor-reg)) move-temp)
(:temporary (:sc non-descriptor-reg) temp)
(:info label nvals)
(:save-p :force-to-stack)
(:vop-var vop)
(:generator 30
(emit-return-pc label)
(note-this-location vop :non-local-entry)
(cond ((zerop nvals))
((= nvals 1)
(let ((no-values (gen-label)))
(move null-tn (tn-ref-tn values))
(inst beq count no-values)
(loadw (tn-ref-tn values) start)
(emit-label no-values)))
(t
(collect ((defaults))
(do ((i 0 (1+ i))
(tn-ref values (tn-ref-across tn-ref)))
((null tn-ref))
(let ((default-lab (gen-label))
(tn (tn-ref-tn tn-ref)))
(defaults (cons default-lab tn))
(inst move count temp)
(inst lda count (fixnum -1) count)
(inst beq temp default-lab)
(sc-case tn
((descriptor-reg any-reg)
(loadw tn start i))
(control-stack
(loadw move-temp start i)
(store-stack-tn tn move-temp)))))
(let ((defaulting-done (gen-label)))
(emit-label defaulting-done)
(assemble (*elsewhere*)
(dolist (def (defaults))
(emit-label (car def))
(let ((tn (cdr def)))
(sc-case tn
((descriptor-reg any-reg)
(move null-tn tn))
(control-stack
(store-stack-tn tn null-tn)))))
(inst br zero-tn defaulting-done))))))
(load-stack-tn csp-tn sp)))
(define-vop (nlx-entry-multiple)
(:args (top :target dst) (start :target src) (count :target num))
Again , no SC restrictions for the args , 'cause the loading would
(:info label)
(:temporary (:scs (any-reg) :from (:argument 0)) dst)
(:temporary (:scs (any-reg) :from (:argument 1)) src)
(:temporary (:scs (any-reg) :from (:argument 2)) num)
(:temporary (:scs (descriptor-reg)) temp)
(:results (new-start) (new-count))
(:save-p :force-to-stack)
(:vop-var vop)
(:generator 30
(emit-return-pc label)
(note-this-location vop :non-local-entry)
(let ((loop (gen-label))
(done (gen-label)))
(load-stack-tn dst top)
(move start src)
(move count num)
(sc-case new-start
(any-reg (move dst new-start))
(control-stack (store-stack-tn new-start dst)))
(sc-case new-count
(any-reg (inst move num new-count))
(control-stack (store-stack-tn new-count num)))
(inst beq num done)
(emit-label loop)
(loadw temp src)
(inst lda src vm:word-bytes src)
(storew temp dst)
(inst lda num (fixnum -1) num)
(inst lda dst vm:word-bytes dst)
(inst bne num loop)
(emit-label done)
(inst move dst csp-tn))))
This VOP is just to force the TNs used in the cleanup onto the stack .
(define-vop (uwp-entry)
(:info label)
(:save-p :force-to-stack)
(:results (block) (start) (count))
(:ignore block start count)
(:vop-var vop)
(:generator 0
(emit-return-pc label)
(note-this-location vop :non-local-entry)))
|
9bce828a5942a313f6b4919e41aaa571f8f19936986efb4f35dedb67f2f00113 | TheInnerLight/dormouse | RequestBuilding.hs | # LANGUAGE QuasiQuotes #
# LANGUAGE DataKinds #
module RequestBuilding where
import Control.Exception.Safe (MonadThrow)
import Dormouse.Client
import Dormouse.Url.QQ
import Dormouse.Url.Builder
-- NOTE(review): every URL quasiquote in this copy appears to have had
-- its "http(s)://<host>" portion stripped by the text sanitiser that
-- produced it (e.g. [https||], [http|-echo.com/...|]).  The literals
-- are preserved verbatim below; restore the hosts (github.com,
-- postman-echo.com, ...) from the upstream repository before use.

-- | A compile-time checked https URL (host text lost in transit).
githubHttpsUrl :: Url "https"
githubHttpsUrl = [https||]

-- | The same URL, restricted to plain http.
githubHttpUrl :: Url "http"
githubHttpUrl = [http||]

-- | A URL whose scheme is only known at runtime.
githubAnyUrl :: AnyUrl
githubAnyUrl = [url||]

-- | Extend a typed URL with path segments via '</>'.
dormouseHttpsUrl :: Url "https"
dormouseHttpsUrl = githubHttpsUrl </> "TheInnerLight" </> "dormouse"

-- | Add a path segment plus a query parameter ('?' then '=:').
searchUrl :: Url "https"
searchUrl = [https||] </> "search" ? "q" =: ("haskell" :: String)

postmanEchoGetUrl :: Url "http"
postmanEchoGetUrl = [http|-echo.com/get?foo1=bar1&foo2=bar2/|]

-- | A body-less GET request; the accept tag is left polymorphic (note
-- the lower-case 'acceptTag' type variable).
postmanEchoGetReq :: HttpRequest (Url "http") "GET" Empty EmptyPayload acceptTag
postmanEchoGetReq = get postmanEchoGetUrl

-- | The same request with the accept type fixed to JSON.
postmanEchoGetReq' :: HttpRequest (Url "http") "GET" Empty EmptyPayload JsonPayload
postmanEchoGetReq' = accept json $ get postmanEchoGetUrl

postmanEchoPostUrl :: Url "https"
postmanEchoPostUrl = [https|-echo.com/post|]

-- | A body-less POST expecting a JSON response.
postmanEchoPostReq :: HttpRequest (Url "https") "POST" Empty EmptyPayload JsonPayload
postmanEchoPostReq = accept json $ post postmanEchoPostUrl

-- | Send the GET request and discard the (unit-decoded) response.
sendPostmanEchoGetReq :: (MonadDormouseClient m, MonadThrow m) => m ()
sendPostmanEchoGetReq = do
  (_ :: HttpResponse ()) <- expect postmanEchoGetReq'
  return()
| null | https://raw.githubusercontent.com/TheInnerLight/dormouse/c790c32d2ec234a2c2c4e54c604df15f9a2e25a7/examples/src/RequestBuilding.hs | haskell | # LANGUAGE QuasiQuotes #
# LANGUAGE DataKinds #
module RequestBuilding where
import Control.Exception.Safe (MonadThrow)
import Dormouse.Client
import Dormouse.Url.QQ
import Dormouse.Url.Builder
githubHttpsUrl :: Url "https"
githubHttpsUrl = [https||]
githubHttpUrl :: Url "http"
githubHttpUrl = [http||]
githubAnyUrl :: AnyUrl
githubAnyUrl = [url||]
dormouseHttpsUrl :: Url "https"
dormouseHttpsUrl = githubHttpsUrl </> "TheInnerLight" </> "dormouse"
searchUrl :: Url "https"
searchUrl = [https||] </> "search" ? "q" =: ("haskell" :: String)
postmanEchoGetUrl :: Url "http"
postmanEchoGetUrl = [http|-echo.com/get?foo1=bar1&foo2=bar2/|]
postmanEchoGetReq :: HttpRequest (Url "http") "GET" Empty EmptyPayload acceptTag
postmanEchoGetReq = get postmanEchoGetUrl
postmanEchoGetReq' :: HttpRequest (Url "http") "GET" Empty EmptyPayload JsonPayload
postmanEchoGetReq' = accept json $ get postmanEchoGetUrl
postmanEchoPostUrl :: Url "https"
postmanEchoPostUrl = [https|-echo.com/post|]
postmanEchoPostReq :: HttpRequest (Url "https") "POST" Empty EmptyPayload JsonPayload
postmanEchoPostReq = accept json $ post postmanEchoPostUrl
sendPostmanEchoGetReq :: (MonadDormouseClient m, MonadThrow m) => m ()
sendPostmanEchoGetReq = do
(_ :: HttpResponse ()) <- expect postmanEchoGetReq'
return()
| |
9a9826bd24da86ec33fc05e3205bed5f046890f5f60036b7e04626511767c1d8 | minad/henk | HenkParser.hs | module Henk.HenkParser where
import Text.Megaparsec
import Text.Megaparsec.Text.Lazy
import qualified Text.Megaparsec.Lexer as L
import Henk.HenkAS
import Henk.HenkPP()
-- | Lex a decimal integer, consuming trailing white space.
integer :: Parser Integer
integer = lexeme L.integer

-- | Parse a fixed symbol, consuming trailing white space.
symbol :: String -> Parser String
symbol = L.symbol whiteSpace

semi :: Parser String
semi = symbol ";"

comma :: Parser String
comma = symbol ","

-- | Skipped between tokens: spaces, "--" line comments and
-- "{-" .. "-}" block comments.
whiteSpace :: Parser ()
whiteSpace = L.space (() <$ spaceChar) (L.skipLineComment "--") (L.skipBlockComment "{-" "-}")

braces, parens, lexeme :: Parser a -> Parser a
lexeme = L.lexeme whiteSpace
braces = between (symbol "{") (symbol "}")
parens = between (symbol "(") (symbol ")")

-- | An identifier: a letter or '_' followed by alphanumerics or "'_?";
-- reserved words are rejected with 'fail'.
-- NOTE(review): the 'try' covers only the character-level parse, not
-- the reserved-word check, so matching a keyword fails *after*
-- consuming input -- callers that need backtracking must wrap
-- 'identifier' in 'try' themselves.  Confirm this is intended.
identifier :: Parser String
identifier = do
  n <- lexeme $ try $ (:) <$> (letterChar <|> char '_') <*> many (alphaNumChar <|> oneOf "'_?")
  if elem n [ "case", "data", "letrec", "type", "import", "in", "let", "of", "at", "Int"]
    then fail $ "reserved keyword " ++ n
    else return n
-- | Parse the given reserved word or symbol as a lexeme.
--
-- Fix: keyword-shaped tokens ("case", "let", "Int", ...) must not
-- match a mere prefix of a longer identifier -- previously
-- @reserved "Int"@ consumed the first three characters of "Integer"
-- and @reserved "data"@ the prefix of "database".  We now demand that
-- such keywords are not immediately followed by another identifier
-- character.  Purely symbolic tokens (e.g. ":") keep the old plain
-- prefix behaviour, so input like ":Nat" still parses.
reserved :: String -> Parser String
reserved s = lexeme $ try $
    if any wordChar s
      then string s <* notFollowedBy identChar
      else string s
  where
    -- does the token contain identifier-start material?
    wordChar c = c `elem` ['a' .. 'z'] ++ ['A' .. 'Z'] ++ "_"
    -- characters that may continue an identifier (see 'identifier')
    identChar = alphaNumChar <|> oneOf "'_?"
----------------------------------------------------------------
-- The Program
----------------------------------------------------------------

-- | A whole Henk program: interleaved type and value declarations,
-- gathered into two lists, parsed up to end of input.
program :: Parser Program
program = do{whiteSpace
            ;(tds,vds) <- manyAlternate tDecl vDecl
            ;eof
            ;return $ Program tds vds
            }

-- | Collect alternating non-empty runs of @pa@ and @pb@ results,
-- preserving the relative order within each kind of result.
manyAlternate :: Parser a -> Parser b -> Parser ([a],[b])
manyAlternate pa pb = do{as<-some pa; (as',bs') <- manyAlternate pa pb; return (as++as',bs')}
                      <|>
                      do{bs<-some pb; (as',bs') <- manyAlternate pa pb; return (as',bs++bs')}
                      <|>
                      return ([],[])
----------------------------------------------------------------
-- Type Declaration
----------------------------------------------------------------

-- | @data tv = { c1 ; c2 ; ... }@: a type head and its constructors,
-- each a binding variable (name with optional ":" type annotation).
tDecl :: Parser TDecl
tDecl = do{reserved "data"
          ;t <- bindVar
          ;symbol "="
          ;ts <- braces $ sepBy1 bindVar semi
          ;return $ TDecl t ts
          }
        <?> "type declaration"

----------------------------------------------------------------
-- Value Declaration
----------------------------------------------------------------

vDecl :: Parser VDecl
vDecl = letnonrec <?> "value Declaration"

-- | @let tv = expr@ -- a single non-recursive binding; the binder's
-- missing type defaults to Unknown (see 'bindVar'').
letnonrec :: Parser VDecl
letnonrec = do{reserved "let"
              ;tv <- bindVar'
              ;symbol "="
              ;ex <- expr
              ;return $ VDecl tv ex
              }
----------------------------------------------------------------
-- Expression
----------------------------------------------------------------

-- | Any expression.  Pi (\/) is tried before lambda (\) because the
-- two share a leading backslash.
--
-- NOTE(review): the first alternative of this 'choice' was destroyed
-- in transit (only its trailing comment survived); it is
-- reconstructed here as @try piExpr@, matching the surviving comment
-- "pi (\/) before lambda (\) to improve parser efficiency".
expr :: Parser Expr
expr = choice
       [try piExpr      -- pi (\/) before lambda (\) to improve parser efficiency.
       ,lamExpr
       ,caseExpr
       ,funExpr
       ]
       <?> "expression"
-- | An expression that needs no surrounding parentheses.
atomExpr :: Parser Expr
atomExpr = choice
           [try varExpr
           ,litExpr
           ,sort
           ,unknown
           ,parens expr
           ]
           <?> "atomic expression"

-- | A stand-alone expression: leading white space, then 'expr'
-- (entry point for parsing an expression string by itself).
single_expr :: Parser Expr
single_expr = do { whiteSpace
                 ; ex <- expr
                 ; return ex
                 }

-----------------------------------------------------------------
-- Application
-----------------------------------------------------------------

-- | One or more atoms folded into left-nested application.
appExpr :: Parser Expr
appExpr = do{atoms <- some atomExpr;
             return $ foldl1 AppExpr atoms}
          <?> "application"
----------------------------------------------------------------
-- (Capital) Lambda Expression
----------------------------------------------------------------

-- | @\x,y. e@ or @/\x,y. e@: one 'LamExpr' per binder, right-nested.
lamExpr :: Parser Expr
lamExpr = do{symbol "\\" <|> symbol "/\\"
            ;tvs <- sepBy1 bindVar comma
            ;symbol "."
            ;e <- expr
            ;return $ foldr LamExpr e tvs}
          <?> "lambda expression"

----------------------------------------------------------------
-- Pi Expression / ForAll Expression
----------------------------------------------------------------

-- | @|~| x . e@ or @\/ x . e@.  The "\\/" case is wrapped in 'try' so
-- a lone "\\" can still back off to 'lamExpr'.
piExpr :: Parser Expr
piExpr = do{ (symbol "|~|") <|> try (symbol ("\\/"))
           ;tvs <- sepBy1 bindVar comma
           ;symbol "."
           ;e <- expr
           ;return $ foldr PiExpr e tvs}
         <?> "pi expression"

----------------------------------------------------------------
-- Function Expression
----------------------------------------------------------------

-- | Right-associative @->@ sugar: @a -> b@ is a Pi with an anonymous
-- binder.
funExpr :: Parser Expr
funExpr = chainr1 appExpr arrow
  where
    arrow = do{symbol "->"; return $ \ex1 ex2 -> PiExpr (TVar Anonymous ex1) ex2}
----------------------------------------------------------------
-- Case Expression
----------------------------------------------------------------

-- | @case e of { alt ; ... } [: type]@; the result-type annotation
-- defaults to 'Unknown' when absent.
caseExpr :: Parser Expr
caseExpr = do{reserved "case"
             ;ex <- expr
             ;reserved "of"
             ;as <- braces $ sepBy1 alt semi
             ;case_type <- option Unknown (do{reserved ":"; case_type <- expr ; return case_type})
             ;return $ CaseExpr ex as case_type
             }
           <?> "Case Expression"
-- | One branch of a case expression: a constructor (with optional
-- type), its argument variables, "=>", then the branch body.  The
-- argument variables carry 'Unknown' types at parse time.
--
-- Fixes over the previous version: the argument list is built in one
-- step instead of rebinding (shadowing) the same name via
-- @tcas <- return $ map ...@, and the parser now carries an explicit
-- type signature like its neighbours.  Behaviour is unchanged.
alt :: Parser Alt
alt = do{tc <- boundVar
        ;tcas <- map (\v -> TVar v Unknown) <$> many var
        ;symbol "=>"
        ;res <- expr
        ;return $ Alt tc tcas [] res
        }
      <?> "case alternative"
----------------------------------------------------------------
-- Variable Expression
----------------------------------------------------------------

-- | A (possibly typed) bound variable used as an expression.
-- Fix: added the top-level type signature that every sibling parser
-- in this module already carries (silences the missing-signature
-- warning; the inferred type is unchanged).
varExpr :: Parser Expr
varExpr = do{tv <- boundVar
            ;return $ VarExpr tv
            }
          <?> "variable expression"
----------------------------------------------------------------
-- Variable
----------------------------------------------------------------

var :: Parser Var
var = do{v <- identifier
        ;return $ Var v
        }

-- | An anonymous/wildcard variable: '_' optionally followed by a
-- name; the stored name keeps the leading underscore.
anonymousvar :: Parser Var
anonymousvar =
  do{symbol "_"
    ;v <- option "" identifier
    ;return $ Var ('_':v)
    }

----------------------------------------------------------------
-- Binding Variable
----------------------------------------------------------------

-- | A binder in lambda/pi/data position; an omitted type defaults to
-- sort @*@.
bindVar :: Parser TVar
bindVar = do{v <- (anonymousvar <|> var)
            ;(do {e <- isOfType
                 ; return $ TVar v e
                 }
              <|>
              (return $ TVar v (SortExpr Star))) -- convention for binding variables
            }
          <?> "variable"

-- | Like 'bindVar', but an omitted type defaults to 'Unknown' (the
-- convention for let bindings, whose type is filled in later).
bindVar' :: Parser TVar
bindVar' = do{v <- (anonymousvar <|> var)
             ;(do {e <- isOfType
                  ; return $ TVar v e
                  }
               <|>
               (return $ TVar v Unknown)) -- convention for lets
             }
           <?> "variable"

-- | The @: type@ suffix of a binder.
isOfType :: Parser Expr
isOfType = do{symbol ":"
             ;aex <- expr
             ;return aex}

----------------------------------------------------------------
-- Bound Variable
----------------------------------------------------------------

-- | A use-site variable; an omitted type defaults to 'Unknown'.
boundVar :: Parser TVar
boundVar = do{v <- var
             ;(do {e <- isOfType
                  ;return $ TVar v e
                  }
               <|>
               (return $ TVar v Unknown)) -- convention for bound variables
             }
           <?> "variable"
----------------------------------------------------------------
-- Literal Expression
----------------------------------------------------------------

litExpr :: Parser Expr
litExpr = do {l <- lit
             ;return $ LitExpr l
             }
          <?> "literal expression"

----------------------------------------------------------------
-- Literal
----------------------------------------------------------------

-- | Either an integer literal or the literal type @Int@.
lit :: Parser Lit
lit = do{i <- integer
        ;return $ LitInt i
        }
      <|>
      do{reserved "Int"
        ;return $ IntType
        }
----------------------------------------------------------------
-- Sort
----------------------------------------------------------------

-- | A sort: @*n@ (numbered sort, tried first so plain '*' does not
-- orphan the digit), @*@ ('Star') or @||@ ('Box').
sort :: Parser Expr
sort = do{s <- try (sortNum)
          <|> star
          <|> box
         ;return $ SortExpr s}

sortNum :: Parser Sort
sortNum = do{ symbol "*"
            ; n <- integer
            ; return $ SortNum n
            }

star :: Parser Sort
star = do{ symbol "*"
         ; return Star
         }

box :: Parser Sort
box = do{ symbol "||"
        ; return Box
        }

----------------------------------------------------------------
-- Unknown
----------------------------------------------------------------

-- | @?@: a placeholder for a type/expression to be inferred.
unknown :: Parser Expr
unknown = do{ symbol "?"
            ; return Unknown
            }
| null | https://raw.githubusercontent.com/minad/henk/73135b5fd2ce2b8f03fe0082bc7be621eb432e94/Henk/HenkParser.hs | haskell | --------------------------------------------------------------
--------------------------------------------------------------
--------------------------------------------------------------
Type Declaration
--------------------------------------------------------------
--------------------------------------------------------------
Value Declaration
--------------------------------------------------------------
--------------------------------------------------------------
--------------------------------------------------------------
single expression
---------------------------------------------------------------
Application
---------------------------------------------------------------
--------------------------------------------------------------
(Capital) Lambda Expression
--------------------------------------------------------------
--------------------------------------------------------------
Pi Expression / ForAll Expression
--------------------------------------------------------------
--------------------------------------------------------------
Function Expression
--------------------------------------------------------------
--------------------------------------------------------------
Case Expression
--------------------------------------------------------------
--------------------------------------------------------------
Variable Expression
--------------------------------------------------------------
--------------------------------------------------------------
Variable
--------------------------------------------------------------
--------------------------------------------------------------
Binding Variable
--------------------------------------------------------------
convention for binding variables
convention for lets
--------------------------------------------------------------
Bound Variable
--------------------------------------------------------------
convention for bound variables
--------------------------------------------------------------
--------------------------------------------------------------
--------------------------------------------------------------
Literal
--------------------------------------------------------------
--------------------------------------------------------------
Sort
--------------------------------------------------------------
--------------------------------------------------------------
Unknown
-------------------------------------------------------------- | module Henk.HenkParser where
import Text.Megaparsec
import Text.Megaparsec.Text.Lazy
import qualified Text.Megaparsec.Lexer as L
import Henk.HenkAS
import Henk.HenkPP()
integer :: Parser Integer
integer = lexeme L.integer
symbol :: String -> Parser String
symbol = L.symbol whiteSpace
semi :: Parser String
semi = symbol ";"
comma :: Parser String
comma = symbol ","
whiteSpace :: Parser ()
whiteSpace = L.space (() <$ spaceChar) (L.skipLineComment "--") (L.skipBlockComment "{-" "-}")
braces, parens, lexeme :: Parser a -> Parser a
lexeme = L.lexeme whiteSpace
braces = between (symbol "{") (symbol "}")
parens = between (symbol "(") (symbol ")")
identifier :: Parser String
identifier = do
n <- lexeme $ try $ (:) <$> (letterChar <|> char '_') <*> many (alphaNumChar <|> oneOf "'_?")
if elem n [ "case", "data", "letrec", "type", "import", "in", "let", "of", "at", "Int"]
then fail $ "reserved keyword " ++ n
else return n
reserved :: String -> Parser String
reserved s = lexeme $ try $ string s
The Program
program :: Parser Program
program = do{whiteSpace
;(tds,vds) <- manyAlternate tDecl vDecl
;eof
;return $ Program tds vds
}
manyAlternate :: Parser a -> Parser b -> Parser ([a],[b])
manyAlternate pa pb = do{as<-some pa; (as',bs') <- manyAlternate pa pb; return (as++as',bs')}
<|>
do{bs<-some pb; (as',bs') <- manyAlternate pa pb; return (as',bs++bs')}
<|>
return ([],[])
tDecl :: Parser TDecl
tDecl = do{reserved "data"
;t <- bindVar
;symbol "="
;ts <- braces $ sepBy1 bindVar semi
;return $ TDecl t ts
}
<?> "type declaration"
vDecl :: Parser VDecl
vDecl = letnonrec <?> "value Declaration"
letnonrec :: Parser VDecl
letnonrec = do{reserved "let"
;tv <- bindVar'
;symbol "="
;ex <- expr
;return $ VDecl tv ex
}
Expression
expr :: Parser Expr
expr = choice
pi ( \/ ) before lambda ( \ ) to improve parser efficiency .
,lamExpr
,caseExpr
,funExpr
]
<?> "expression"
atomExpr :: Parser Expr
atomExpr = choice
[try varExpr
,litExpr
,sort
,unknown
,parens expr
]
<?> "atomic expression"
single_expr :: Parser Expr
single_expr =do { whiteSpace
; ex <- expr
; return ex
}
appExpr :: Parser Expr
appExpr = do{atoms <- some atomExpr;
return $ foldl1 AppExpr atoms}
<?> "application"
lamExpr :: Parser Expr
lamExpr = do{symbol "\\" <|> symbol "/\\"
;tvs <- sepBy1 bindVar comma
;symbol "."
;e <- expr
;return $ foldr LamExpr e tvs}
<?> "lambda expression"
piExpr :: Parser Expr
piExpr = do{ (symbol "|~|") <|> try (symbol ("\\/"))
;tvs <- sepBy1 bindVar comma
;symbol "."
;e <- expr
;return $ foldr PiExpr e tvs}
<?> "pi expression"
funExpr :: Parser Expr
funExpr = chainr1 appExpr arrow
where
arrow = do{symbol "->"; return $ \ex1 ex2 -> PiExpr (TVar Anonymous ex1) ex2}
caseExpr :: Parser Expr
caseExpr = do{reserved "case"
;ex <- expr
;reserved "of"
;as <- braces $ sepBy1 alt semi
;case_type <- option Unknown (do{reserved ":"; case_type <- expr ; return case_type})
;return $ CaseExpr ex as case_type
}
<?> "Case Expression"
alt :: Parser Alt
alt = do{tc <- boundVar
;tcas <- many var
;tcas <- return $ map (\v -> TVar v Unknown) tcas
;symbol "=>"
;res <- expr
;return $ Alt tc tcas [] res
}
<?> "case alternative"
varExpr = do{tv <- boundVar
;return $ VarExpr tv
}
<?> "variable expression"
var :: Parser Var
var = do{v <- identifier
;return $ Var v
}
anonymousvar :: Parser Var
anonymousvar =
do{symbol "_"
;v <- option "" identifier
;return $ Var ('_':v)
}
bindVar :: Parser TVar
bindVar = do{v <- (anonymousvar <|> var)
;(do {e <- isOfType
; return $ TVar v e
}
<|>
}
<?> "variable"
bindVar' :: Parser TVar
bindVar' = do{v <- (anonymousvar <|> var)
;(do {e <- isOfType
; return $ TVar v e
}
<|>
}
<?> "variable"
isOfType :: Parser Expr
isOfType = do{symbol ":"
;aex <- expr
;return aex}
boundVar :: Parser TVar
boundVar = do{v <- var
;(do {e <- isOfType
;return $ TVar v e
}
<|>
}
<?> "variable"
Literal Expression
litExpr :: Parser Expr
litExpr = do {l <- lit
;return $ LitExpr l
}
<?> "literal expression"
lit :: Parser Lit
lit = do{i <- integer
;return $ LitInt i
}
<|>
do{reserved "Int"
;return $ IntType
}
sort :: Parser Expr
sort = do{s <- try (sortNum)
<|> star
<|> box
;return $ SortExpr s}
sortNum :: Parser Sort
sortNum = do{ symbol "*"
; n <- integer
; return $ SortNum n
}
star :: Parser Sort
star = do{ symbol "*"
; return Star
}
box :: Parser Sort
box = do{ symbol "||"
; return Box
}
unknown :: Parser Expr
unknown = do{ symbol "?"
; return Unknown
}
|
9913c33abc98b03b669cce5bbedc7eab73c22d8c89184bf8100aa847ec5549c9 | tpapp/cl-random | design-matrix.lisp | -*- Mode : Lisp ; Syntax : ANSI - Common - Lisp ; Coding : utf-8 -*-
(in-package #:cl-random)
;;; building blocks for a DSL for design matrices
( list ... )
;;;
;;; term := constant | covariate | interaction
;;;
;;; covariate := symbol | (^ symbol exponent)
;;;
;;; interaction := (* covariate1 covariate2 ...)
(defun interaction-matrix (&rest matrices)
"Return the interaction matrix. Last indexes change the fastest."
(let+ ((matrices (mapcar (lambda (matrix)
(typecase matrix
(dense-matrix-like
(set-restricted matrix)
matrix)
(vector (as-column matrix))
(otherwise (as-matrix matrix))))
matrices))
(nrow (nrow (first matrices)))
(ncols (mapcar #'ncol matrices))
(n (length ncols))
(elements (map 'vector #'elements matrices))
(lla-type (reduce #'lla::common-lla-type elements :key #'array-lla-type))
(zero (zero* lla-type))
(interaction (make-matrix nrow (reduce #'* ncols) lla-type)))
(assert (every (lambda (matrix) (= nrow (nrow matrix))) matrices))
(with-indexing ((make-array n :initial-element t)
ncols next-index :end? end? :counters counters)
(iter
(let ((indexes (map 'vector (lambda (counter)
(cm-index2 nrow 0 counter))
counters))
(col (next-index)))
(dotimes (row nrow)
(setf (mref interaction row col)
(iter interaction
(for index :in-vector indexes)
(for elements% :in-vector elements)
(let ((element (aref elements% (+ index row))))
(when (zerop element)
(return-from interaction zero))
(multiplying element))))))
(until end?)))
interaction))
(defun process-factor (vector &key (key #'identity)
(predicate #'<) (test #'=))
"Return (VALUES INDEXES LEVELS), where KEYS is a vector that contains the
levels of the factor (formed using KEY, tested for uniqueness using TEST), and
INDEXES is a vector of FIXNUMS, containing the index of the level
corresponding to the elements of VECTOR. If PREDICATE is given, the levels
are sorted."
(let+ ((keys (coerce (delete-duplicates (map 'list key vector) :test test)
'vector)))
(when predicate
(setf keys (sort keys predicate)))
(values (map '(simple-array fixnum (*))
(lambda (element)
(position (funcall key element) keys :test test))
vector)
keys)))
(defun factor-matrix (indexes levels)
"Return a design matrix for a factor. First column is dropped, otherwise
the matrix would be full rank."
(let* ((nrow (length indexes))
(matrix (make-matrix nrow (1- (length levels)) :integer)))
(iter
(for row :from 0)
(for index :in-vector indexes)
(unless (zerop index)
(setf (mref matrix row (1- index)) 1)))
matrix))
(defun polynomial-matrix (vector power)
"Matrix for a polynomial."
(check-type power (integer 1))
(let* ((length (length vector))
(matrix (make-matrix length power (array-lla-type vector))))
(iter
(for row :from 0)
(for v :in-vector vector)
(dotimes (col power)
(setf (mref matrix row col) (expt v (1+ col)))))
matrix))
(defun interaction-name (names)
"Names for interactions. Return either a symbol (for size 1), or (list
symbol size)."
(iter
(for name :in names)
(unless (first-iteration-p)
(collecting '#:* :into interaction-names))
(if (atom name)
(collecting name :into interaction-names)
(progn
(collecting (first name) :into interaction-names)
(multiplying (second name) :into size)))
(finally
(let ((interaction-name (apply #'make-symbol* interaction-names)))
(return
(if (= 1 size)
interaction-name
(list interaction-name size)))))))
(defun design-matrix (matrix ix specifications &key factors
(constant :constant) rescale?)
"Build design matrix from columns of MATRIX, using SPECIFICATIONS, which
refers to columns via the index IX. FACTORS should be a list, of either a name
in IX or (IX &rest OPTIONS), where OPTIONS are passed directly to
PROCESS-FACTOR. When CONSTANT is non-nil, a constant column with this name will
be added. Return IX specification, the matrix, and a list of factors and levels
as values.
Example:
(design-matrix (clo :integer
1 2 3 :/
4 5 6)
(make-ix '(a b c))
'(a b (:poly c 2) (* b c))
:factors '(a))(:CONSTANT (A 1) B (C-POLY 2) B*C)
=>
(:CONSTANT (A 1) B (C-POLY 2) B*C)
#<DENSE-MATRIX :INTEGER with 2 x 6 elements
1 0 2 3 9 6
1 1 5 6 36 30>,
((A #(1 4)))
"
(let+ (((&flet column (name)
(sub matrix t (ix ix name))))
(factors (mapcar (lambda (factor)
(let+ (((name &rest options) (ensure-list factor))
((&values indexes levels)
(apply #'process-factor
(column name) options)))
(list name (sub levels '(1 . 0))
(factor-matrix indexes levels))))
factors)))
(labels ((find-factor (name)
(find name factors :key #'car))
(traverse-list (specifications &optional (top-level? t))
(iter
(for spec :in specifications)
(let+ (((&values ix matrix) (traverse spec top-level?)))
(collecting ix :into ixs)
(collecting matrix :into matrices))
(finally
(return (values ixs matrices)))))
(traverse (spec top-level?)
Return two values : an ix spec , and matrices ; or lists of
;; these, for interactions.
(cond
((atom spec) (aif (find-factor spec)
(let ((matrix (third it)))
(values (list spec (ncol matrix))
matrix))
(values spec (as-column (column spec)))))
((eq (car spec) :poly)
(let+ (((name power) (cdr spec)))
(assert (not (find-factor name)) ()
"Can't use factors of polynomials.")
(values (list (make-symbol* name '#:-poly) power)
(polynomial-matrix (column name) power))))
((eq (car spec) '*)
(assert top-level? ()
"Interactions are only allowed at the top level.")
(let+ (((&values names matrices) (traverse-list (cdr spec) nil)))
(values (interaction-name names)
(apply #'interaction-matrix matrices))))
(t (error "Invalid spec ~A" spec)))))
(let+ (((&values ixs matrices) (traverse-list specifications)))
(when constant
(setf ixs (cons constant ixs)
matrices (cons (lla-array (nrow (first matrices)) :integer 1)
matrices)))
(values (make-ix ixs)
(apply #'stack :matrix :h (flatten matrices))
(mapcar (lambda (factor)
(list (first factor) (second factor)))
factors))))))
| null | https://raw.githubusercontent.com/tpapp/cl-random/5bb65911037f95a4260bd29a594a09df3849f4ea/src/design-matrix.lisp | lisp | Syntax : ANSI - Common - Lisp ; Coding : utf-8 -*-
building blocks for a DSL for design matrices
term := constant | covariate | interaction
covariate := symbol | (^ symbol exponent)
interaction := (* covariate1 covariate2 ...)
or lists of
these, for interactions. |
(in-package #:cl-random)
( list ... )
(defun interaction-matrix (&rest matrices)
"Return the interaction matrix. Last indexes change the fastest."
(let+ ((matrices (mapcar (lambda (matrix)
(typecase matrix
(dense-matrix-like
(set-restricted matrix)
matrix)
(vector (as-column matrix))
(otherwise (as-matrix matrix))))
matrices))
(nrow (nrow (first matrices)))
(ncols (mapcar #'ncol matrices))
(n (length ncols))
(elements (map 'vector #'elements matrices))
(lla-type (reduce #'lla::common-lla-type elements :key #'array-lla-type))
(zero (zero* lla-type))
(interaction (make-matrix nrow (reduce #'* ncols) lla-type)))
(assert (every (lambda (matrix) (= nrow (nrow matrix))) matrices))
(with-indexing ((make-array n :initial-element t)
ncols next-index :end? end? :counters counters)
(iter
(let ((indexes (map 'vector (lambda (counter)
(cm-index2 nrow 0 counter))
counters))
(col (next-index)))
(dotimes (row nrow)
(setf (mref interaction row col)
(iter interaction
(for index :in-vector indexes)
(for elements% :in-vector elements)
(let ((element (aref elements% (+ index row))))
(when (zerop element)
(return-from interaction zero))
(multiplying element))))))
(until end?)))
interaction))
(defun process-factor (vector &key (key #'identity)
(predicate #'<) (test #'=))
"Return (VALUES INDEXES LEVELS), where KEYS is a vector that contains the
levels of the factor (formed using KEY, tested for uniqueness using TEST), and
INDEXES is a vector of FIXNUMS, containing the index of the level
corresponding to the elements of VECTOR. If PREDICATE is given, the levels
are sorted."
(let+ ((keys (coerce (delete-duplicates (map 'list key vector) :test test)
'vector)))
(when predicate
(setf keys (sort keys predicate)))
(values (map '(simple-array fixnum (*))
(lambda (element)
(position (funcall key element) keys :test test))
vector)
keys)))
(defun factor-matrix (indexes levels)
"Return a design matrix for a factor. First column is dropped, otherwise
the matrix would be full rank."
(let* ((nrow (length indexes))
(matrix (make-matrix nrow (1- (length levels)) :integer)))
(iter
(for row :from 0)
(for index :in-vector indexes)
(unless (zerop index)
(setf (mref matrix row (1- index)) 1)))
matrix))
(defun polynomial-matrix (vector power)
"Matrix for a polynomial."
(check-type power (integer 1))
(let* ((length (length vector))
(matrix (make-matrix length power (array-lla-type vector))))
(iter
(for row :from 0)
(for v :in-vector vector)
(dotimes (col power)
(setf (mref matrix row col) (expt v (1+ col)))))
matrix))
(defun interaction-name (names)
"Names for interactions. Return either a symbol (for size 1), or (list
symbol size)."
(iter
(for name :in names)
(unless (first-iteration-p)
(collecting '#:* :into interaction-names))
(if (atom name)
(collecting name :into interaction-names)
(progn
(collecting (first name) :into interaction-names)
(multiplying (second name) :into size)))
(finally
(let ((interaction-name (apply #'make-symbol* interaction-names)))
(return
(if (= 1 size)
interaction-name
(list interaction-name size)))))))
(defun design-matrix (matrix ix specifications &key factors
(constant :constant) rescale?)
"Build design matrix from columns of MATRIX, using SPECIFICATIONS, which
refers to columns via the index IX. FACTORS should be a list, of either a name
in IX or (IX &rest OPTIONS), where OPTIONS are passed directly to
PROCESS-FACTOR. When CONSTANT is non-nil, a constant column with this name will
be added. Return IX specification, the matrix, and a list of factors and levels
as values.
Example:
(design-matrix (clo :integer
1 2 3 :/
4 5 6)
(make-ix '(a b c))
'(a b (:poly c 2) (* b c))
:factors '(a))(:CONSTANT (A 1) B (C-POLY 2) B*C)
=>
(:CONSTANT (A 1) B (C-POLY 2) B*C)
#<DENSE-MATRIX :INTEGER with 2 x 6 elements
1 0 2 3 9 6
1 1 5 6 36 30>,
((A #(1 4)))
"
(let+ (((&flet column (name)
(sub matrix t (ix ix name))))
(factors (mapcar (lambda (factor)
(let+ (((name &rest options) (ensure-list factor))
((&values indexes levels)
(apply #'process-factor
(column name) options)))
(list name (sub levels '(1 . 0))
(factor-matrix indexes levels))))
factors)))
(labels ((find-factor (name)
(find name factors :key #'car))
(traverse-list (specifications &optional (top-level? t))
(iter
(for spec :in specifications)
(let+ (((&values ix matrix) (traverse spec top-level?)))
(collecting ix :into ixs)
(collecting matrix :into matrices))
(finally
(return (values ixs matrices)))))
(traverse (spec top-level?)
(cond
((atom spec) (aif (find-factor spec)
(let ((matrix (third it)))
(values (list spec (ncol matrix))
matrix))
(values spec (as-column (column spec)))))
((eq (car spec) :poly)
(let+ (((name power) (cdr spec)))
(assert (not (find-factor name)) ()
"Can't use factors of polynomials.")
(values (list (make-symbol* name '#:-poly) power)
(polynomial-matrix (column name) power))))
((eq (car spec) '*)
(assert top-level? ()
"Interactions are only allowed at the top level.")
(let+ (((&values names matrices) (traverse-list (cdr spec) nil)))
(values (interaction-name names)
(apply #'interaction-matrix matrices))))
(t (error "Invalid spec ~A" spec)))))
(let+ (((&values ixs matrices) (traverse-list specifications)))
(when constant
(setf ixs (cons constant ixs)
matrices (cons (lla-array (nrow (first matrices)) :integer 1)
matrices)))
(values (make-ix ixs)
(apply #'stack :matrix :h (flatten matrices))
(mapcar (lambda (factor)
(list (first factor) (second factor)))
factors))))))
|
e151a429a2e7d53d09ea8e854dcd92071a274cb19fa7e5af87894fbbf50eac19 | Archenoth/clj-anki | hash_test.clj | (ns clj-anki.hash-test
(:require [clojure.test :refer :all]
[clj-anki.hash :refer :all]))
(deftest sha1-test
(testing "We can get an SHA1 hex string of the passed in value"
(is (= (sha1-hex "embarrasing") "f20ee772b10c517107620d3be38697967a7a2e96"))
(is (= (sha1-hex "漢字") "50008262c76205f015248f124c87b9fe463ead9f"))))
(deftest anki-checksum-test
(testing "We mangle strings into integers the right way for Anki"
(is (= (anki-checksum "embarrasing") 4061063026))
(is (= (anki-checksum "漢字") 1342210658))))
| null | https://raw.githubusercontent.com/Archenoth/clj-anki/c9ae899862038de21eeca7ca1b7a48a1ce24d03e/test/clj_anki/hash_test.clj | clojure | (ns clj-anki.hash-test
(:require [clojure.test :refer :all]
[clj-anki.hash :refer :all]))
(deftest sha1-test
(testing "We can get an SHA1 hex string of the passed in value"
(is (= (sha1-hex "embarrasing") "f20ee772b10c517107620d3be38697967a7a2e96"))
(is (= (sha1-hex "漢字") "50008262c76205f015248f124c87b9fe463ead9f"))))
(deftest anki-checksum-test
(testing "We mangle strings into integers the right way for Anki"
(is (= (anki-checksum "embarrasing") 4061063026))
(is (= (anki-checksum "漢字") 1342210658))))
| |
4d9afb0ba2a393ea430a39ae362da3860e830fc37667f28a924ba46eda53a933 | ghc/packages-Cabal | QuickCheck.hs | # LANGUAGE CPP #
# LANGUAGE DeriveGeneric #
# LANGUAGE StandaloneDeriving #
# OPTIONS_GHC -fno - warn - orphans #
module UnitTests.Distribution.Solver.Modular.QuickCheck (tests) where
import Prelude ()
import Distribution.Client.Compat.Prelude
import Control.Arrow ((&&&))
import Data.Either (lefts)
import Data.Hashable (Hashable(..))
import Data.List (groupBy, isInfixOf)
import Text.Show.Pretty (parseValue, valToStr)
import Test.Tasty (TestTree)
import Test.QuickCheck (Arbitrary (..), Gen, Positive (..), frequency, oneof, shrinkList, shuffle, listOf, shrinkNothing, vectorOf, elements, sublistOf, counterexample, (===), (==>), Blind (..))
import Test.QuickCheck.Instances.Cabal ()
import Distribution.Types.Flag (FlagName)
import Distribution.Utils.ShortText (ShortText)
import Distribution.Client.Setup (defaultMaxBackjumps)
import Distribution.Types.LibraryVisibility
import Distribution.Types.PackageName
import Distribution.Types.UnqualComponentName
import qualified Distribution.Solver.Types.ComponentDeps as CD
import Distribution.Solver.Types.ComponentDeps
( Component(..), ComponentDep, ComponentDeps )
import Distribution.Solver.Types.OptionalStanza
import Distribution.Solver.Types.PackageConstraint
import qualified Distribution.Solver.Types.PackagePath as P
import Distribution.Solver.Types.PkgConfigDb
(pkgConfigDbFromList)
import Distribution.Solver.Types.Settings
import Distribution.Solver.Types.Variable
import Distribution.Verbosity
import Distribution.Version
import UnitTests.Distribution.Solver.Modular.DSL
import UnitTests.Distribution.Solver.Modular.QuickCheck.Utils
( testPropertyWithSeed )
tests :: [TestTree]
tests = [
-- This test checks that certain solver parameters do not affect the
-- existence of a solution. It runs the solver twice, and only sets those
parameters on the second run . The test also applies parameters that
-- can affect the existence of a solution to both runs.
testPropertyWithSeed "target and goal order do not affect solvability" $
\test targetOrder mGoalOrder1 mGoalOrder2 indepGoals ->
let r1 = solve' mGoalOrder1 test
r2 = solve' mGoalOrder2 test { testTargets = targets2 }
solve' goalOrder =
solve (EnableBackjumping True) (FineGrainedConflicts True)
(ReorderGoals False) (CountConflicts True) indepGoals
(getBlind <$> goalOrder)
targets = testTargets test
targets2 = case targetOrder of
SameOrder -> targets
ReverseOrder -> reverse targets
in counterexample (showResults r1 r2) $
noneReachedBackjumpLimit [r1, r2] ==>
isRight (resultPlan r1) === isRight (resultPlan r2)
, testPropertyWithSeed
"solvable without --independent-goals => solvable with --independent-goals" $
\test reorderGoals ->
let r1 = solve' (IndependentGoals False) test
r2 = solve' (IndependentGoals True) test
solve' indep =
solve (EnableBackjumping True) (FineGrainedConflicts True)
reorderGoals (CountConflicts True) indep Nothing
in counterexample (showResults r1 r2) $
noneReachedBackjumpLimit [r1, r2] ==>
isRight (resultPlan r1) `implies` isRight (resultPlan r2)
, testPropertyWithSeed "backjumping does not affect solvability" $
\test reorderGoals indepGoals ->
let r1 = solve' (EnableBackjumping True) test
r2 = solve' (EnableBackjumping False) test
solve' enableBj =
solve enableBj (FineGrainedConflicts False) reorderGoals
(CountConflicts True) indepGoals Nothing
in counterexample (showResults r1 r2) $
noneReachedBackjumpLimit [r1, r2] ==>
isRight (resultPlan r1) === isRight (resultPlan r2)
, testPropertyWithSeed "fine-grained conflicts does not affect solvability" $
\test reorderGoals indepGoals ->
let r1 = solve' (FineGrainedConflicts True) test
r2 = solve' (FineGrainedConflicts False) test
solve' fineGrainedConflicts =
solve (EnableBackjumping True) fineGrainedConflicts
reorderGoals (CountConflicts True) indepGoals Nothing
in counterexample (showResults r1 r2) $
noneReachedBackjumpLimit [r1, r2] ==>
isRight (resultPlan r1) === isRight (resultPlan r2)
The next two tests use --no - count - conflicts , because the goal order used
-- with --count-conflicts depends on the total set of conflicts seen by the
-- solver. The solver explores more of the tree and encounters more
-- conflicts when it doesn't backjump. The different goal orders can lead to
-- different solutions and cause the test to fail.
-- TODO: Find a faster way to randomly sort goals, and then use a random
-- goal order in these tests.
, testPropertyWithSeed
"backjumping does not affect the result (with static goal order)" $
\test reorderGoals indepGoals ->
let r1 = solve' (EnableBackjumping True) test
r2 = solve' (EnableBackjumping False) test
solve' enableBj =
solve enableBj (FineGrainedConflicts False) reorderGoals
(CountConflicts False) indepGoals Nothing
in counterexample (showResults r1 r2) $
noneReachedBackjumpLimit [r1, r2] ==>
resultPlan r1 === resultPlan r2
, testPropertyWithSeed
"fine-grained conflicts does not affect the result (with static goal order)" $
\test reorderGoals indepGoals ->
let r1 = solve' (FineGrainedConflicts True) test
r2 = solve' (FineGrainedConflicts False) test
solve' fineGrainedConflicts =
solve (EnableBackjumping True) fineGrainedConflicts
reorderGoals (CountConflicts False) indepGoals Nothing
in counterexample (showResults r1 r2) $
noneReachedBackjumpLimit [r1, r2] ==>
resultPlan r1 === resultPlan r2
]
where
noneReachedBackjumpLimit :: [Result] -> Bool
noneReachedBackjumpLimit =
not . any (\r -> resultPlan r == Left BackjumpLimitReached)
showResults :: Result -> Result -> String
showResults r1 r2 = showResult 1 r1 ++ showResult 2 r2
showResult :: Int -> Result -> String
showResult n result =
unlines $ ["", "Run " ++ show n ++ ":"]
++ resultLog result
++ ["result: " ++ show (resultPlan result)]
implies :: Bool -> Bool -> Bool
implies x y = not x || y
isRight :: Either a b -> Bool
isRight (Right _) = True
isRight _ = False
newtype VarOrdering = VarOrdering {
unVarOrdering :: Variable P.QPN -> Variable P.QPN -> Ordering
}
solve :: EnableBackjumping
-> FineGrainedConflicts
-> ReorderGoals
-> CountConflicts
-> IndependentGoals
-> Maybe VarOrdering
-> SolverTest
-> Result
solve enableBj fineGrainedConflicts reorder countConflicts indep goalOrder test =
let (lg, result) =
runProgress $ exResolve (unTestDb (testDb test)) Nothing Nothing
(pkgConfigDbFromList [])
(map unPN (testTargets test))
-- The backjump limit prevents individual tests from using
-- too much time and memory.
(Just defaultMaxBackjumps)
countConflicts fineGrainedConflicts
(MinimizeConflictSet False) indep reorder
(AllowBootLibInstalls False) OnlyConstrainedNone enableBj
(SolveExecutables True) (unVarOrdering <$> goalOrder)
(testConstraints test) (testPreferences test) normal
(EnableAllTests False)
failure :: String -> Failure
failure msg
| "Backjump limit reached" `isInfixOf` msg = BackjumpLimitReached
| otherwise = OtherFailure
in Result {
resultLog = lg
, resultPlan =
-- Force the result so that we check for internal errors when we check
for success or failure . See .
force $ either (Left . failure) (Right . extractInstallPlan) result
}
-- | How to modify the order of the input targets.
data TargetOrder = SameOrder | ReverseOrder
deriving Show
instance Arbitrary TargetOrder where
arbitrary = elements [SameOrder, ReverseOrder]
shrink SameOrder = []
shrink ReverseOrder = [SameOrder]
data Result = Result {
resultLog :: [String]
, resultPlan :: Either Failure [(ExamplePkgName, ExamplePkgVersion)]
}
data Failure = BackjumpLimitReached | OtherFailure
deriving (Eq, Generic, Show)
instance NFData Failure
-- | Package name.
newtype PN = PN { unPN :: String }
deriving (Eq, Ord, Show)
instance Arbitrary PN where
arbitrary = PN <$> elements ("base" : [[pn] | pn <- ['A'..'G']])
-- | Package version.
newtype PV = PV { unPV :: Int }
deriving (Eq, Ord, Show)
instance Arbitrary PV where
arbitrary = PV <$> elements [1..10]
type TestPackage = Either ExampleInstalled ExampleAvailable
getName :: TestPackage -> PN
getName = PN . either exInstName exAvName
getVersion :: TestPackage -> PV
getVersion = PV . either exInstVersion exAvVersion
data SolverTest = SolverTest {
testDb :: TestDb
, testTargets :: [PN]
, testConstraints :: [ExConstraint]
, testPreferences :: [ExPreference]
}
| Pretty - print the test when quickcheck calls ' show ' .
instance Show SolverTest where
show test =
let str = "SolverTest {testDb = " ++ show (testDb test)
++ ", testTargets = " ++ show (testTargets test)
++ ", testConstraints = " ++ show (testConstraints test)
++ ", testPreferences = " ++ show (testPreferences test)
++ "}"
in maybe str valToStr $ parseValue str
instance Arbitrary SolverTest where
arbitrary = do
db <- arbitrary
let pkgVersions = nub $ map (getName &&& getVersion) (unTestDb db)
pkgs = nub $ map fst pkgVersions
Positive n <- arbitrary
targets <- randomSubset n pkgs
constraints <- case pkgVersions of
[] -> return []
_ -> boundedListOf 1 $ arbitraryConstraint pkgVersions
prefs <- case pkgVersions of
[] -> return []
_ -> boundedListOf 3 $ arbitraryPreference pkgVersions
return (SolverTest db targets constraints prefs)
shrink test =
[test { testDb = db } | db <- shrink (testDb test)]
++ [test { testTargets = targets } | targets <- shrink (testTargets test)]
++ [test { testConstraints = cs } | cs <- shrink (testConstraints test)]
++ [test { testPreferences = prefs } | prefs <- shrink (testPreferences test)]
-- | Collection of source and installed packages.
newtype TestDb = TestDb { unTestDb :: ExampleDb }
deriving Show
instance Arbitrary TestDb where
arbitrary = do
-- Avoid cyclic dependencies by grouping packages by name and only
-- allowing each package to depend on packages in the groups before it.
groupedPkgs <- shuffle . groupBy ((==) `on` fst) . nub . sort =<<
boundedListOf 10 arbitrary
db <- foldM nextPkgs (TestDb []) groupedPkgs
TestDb <$> shuffle (unTestDb db)
where
nextPkgs :: TestDb -> [(PN, PV)] -> Gen TestDb
nextPkgs db pkgs = TestDb . (++ unTestDb db) <$> traverse (nextPkg db) pkgs
nextPkg :: TestDb -> (PN, PV) -> Gen TestPackage
nextPkg db (pn, v) = do
installed <- arbitrary
if installed
then Left <$> arbitraryExInst pn v (lefts $ unTestDb db)
else Right <$> arbitraryExAv pn v db
shrink (TestDb pkgs) = map TestDb $ shrink pkgs
arbitraryExAv :: PN -> PV -> TestDb -> Gen ExampleAvailable
arbitraryExAv pn v db =
(\cds -> ExAv (unPN pn) (unPV v) cds []) <$> arbitraryComponentDeps pn db
arbitraryExInst :: PN -> PV -> [ExampleInstalled] -> Gen ExampleInstalled
arbitraryExInst pn v pkgs = do
pkgHash <- vectorOf 10 $ elements $ ['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9']
numDeps <- min 3 <$> arbitrary
deps <- randomSubset numDeps pkgs
return $ ExInst (unPN pn) (unPV v) pkgHash (map exInstHash deps)
arbitraryComponentDeps :: PN -> TestDb -> Gen (ComponentDeps Dependencies)
arbitraryComponentDeps _ (TestDb []) = return $ CD.fromLibraryDeps (dependencies [])
arbitraryComponentDeps pn db = do
-- dedupComponentNames removes components with duplicate names, for example,
' ComponentExe x ' and ' ComponentTest x ' , and then CD.fromList combines
-- duplicate unnamed components.
cds <- CD.fromList . dedupComponentNames . filter (isValid . fst)
<$> boundedListOf 5 (arbitraryComponentDep db)
return $ if isCompleteComponentDeps cds
then cds
Add a library if the ComponentDeps is n't complete .
CD.fromLibraryDeps (dependencies []) <> cds
where
isValid :: Component -> Bool
isValid (ComponentSubLib name) = name /= mkUnqualComponentName (unPN pn)
isValid _ = True
dedupComponentNames =
nubBy ((\x y -> isJust x && isJust y && x == y) `on` componentName . fst)
componentName :: Component -> Maybe UnqualComponentName
componentName ComponentLib = Nothing
componentName ComponentSetup = Nothing
componentName (ComponentSubLib n) = Just n
componentName (ComponentFLib n) = Just n
componentName (ComponentExe n) = Just n
componentName (ComponentTest n) = Just n
componentName (ComponentBench n) = Just n
| Returns true if the ComponentDeps forms a complete package , i.e. , it
-- contains a library, exe, test, or benchmark.
isCompleteComponentDeps :: ComponentDeps a -> Bool
isCompleteComponentDeps = any (completesPkg . fst) . CD.toList
where
completesPkg ComponentLib = True
completesPkg (ComponentExe _) = True
completesPkg (ComponentTest _) = True
completesPkg (ComponentBench _) = True
completesPkg (ComponentSubLib _) = False
completesPkg (ComponentFLib _) = False
completesPkg ComponentSetup = False
arbitraryComponentDep :: TestDb -> Gen (ComponentDep Dependencies)
arbitraryComponentDep db = do
comp <- arbitrary
deps <- case comp of
ComponentSetup -> smallListOf (arbitraryExDep db SetupDep)
_ -> boundedListOf 5 (arbitraryExDep db NonSetupDep)
return ( comp
, Dependencies {
depsExampleDependencies = deps
-- TODO: Test different values for visibility and buildability.
, depsVisibility = LibraryVisibilityPublic
, depsIsBuildable = True
} )
-- | Location of an 'ExampleDependency'. It determines which values are valid.
data ExDepLocation = SetupDep | NonSetupDep
arbitraryExDep :: TestDb -> ExDepLocation -> Gen ExampleDependency
arbitraryExDep db@(TestDb pkgs) level =
let flag = ExFlagged <$> arbitraryFlagName
<*> arbitraryDeps db
<*> arbitraryDeps db
other =
-- Package checks require dependencies on "base" to have bounds.
let notBase = filter ((/= PN "base") . getName) pkgs
in [ExAny . unPN <$> elements (map getName notBase) | not (null notBase)]
++ [
-- existing version
let fixed pkg = ExFix (unPN $ getName pkg) (unPV $ getVersion pkg)
in fixed <$> elements pkgs
-- random version of an existing package
, ExFix . unPN . getName <$> elements pkgs <*> (unPV <$> arbitrary)
]
in oneof $
case level of
NonSetupDep -> flag : other
SetupDep -> other
arbitraryDeps :: TestDb -> Gen Dependencies
arbitraryDeps db = frequency
[ (1, return unbuildableDependencies)
, (20, dependencies <$> smallListOf (arbitraryExDep db NonSetupDep))
]
arbitraryFlagName :: Gen String
arbitraryFlagName = (:[]) <$> elements ['A'..'E']
arbitraryConstraint :: [(PN, PV)] -> Gen ExConstraint
arbitraryConstraint pkgs = do
(PN pn, v) <- elements pkgs
let anyQualifier = ScopeAnyQualifier (mkPackageName pn)
oneof [
ExVersionConstraint anyQualifier <$> arbitraryVersionRange v
, ExStanzaConstraint anyQualifier <$> sublistOf [TestStanzas, BenchStanzas]
]
arbitraryPreference :: [(PN, PV)] -> Gen ExPreference
arbitraryPreference pkgs = do
(PN pn, v) <- elements pkgs
oneof [
ExStanzaPref pn <$> sublistOf [TestStanzas, BenchStanzas]
, ExPkgPref pn <$> arbitraryVersionRange v
]
arbitraryVersionRange :: PV -> Gen VersionRange
arbitraryVersionRange (PV v) =
let version = mkSimpleVersion v
in elements [
thisVersion version
, notThisVersion version
, earlierVersion version
, orLaterVersion version
, noVersion
]
instance Arbitrary ReorderGoals where
arbitrary = ReorderGoals <$> arbitrary
shrink (ReorderGoals reorder) = [ReorderGoals False | reorder]
instance Arbitrary IndependentGoals where
arbitrary = IndependentGoals <$> arbitrary
shrink (IndependentGoals indep) = [IndependentGoals False | indep]
instance Arbitrary Component where
arbitrary = oneof [ return ComponentLib
, ComponentSubLib <$> arbitraryUQN
, ComponentExe <$> arbitraryUQN
, ComponentFLib <$> arbitraryUQN
, ComponentTest <$> arbitraryUQN
, ComponentBench <$> arbitraryUQN
, return ComponentSetup
]
shrink ComponentLib = []
shrink _ = [ComponentLib]
-- The "component-" prefix prevents component names and build-depends
-- dependency names from overlapping.
TODO : Remove the prefix once the QuickCheck tests support dependencies on
-- internal libraries.
arbitraryUQN :: Gen UnqualComponentName
arbitraryUQN =
mkUnqualComponentName <$> (\c -> "component-" ++ [c]) <$> elements "ABC"
instance Arbitrary ExampleInstalled where
arbitrary = error "arbitrary not implemented: ExampleInstalled"
shrink ei = [ ei { exInstBuildAgainst = deps }
| deps <- shrinkList shrinkNothing (exInstBuildAgainst ei)]
instance Arbitrary ExampleAvailable where
arbitrary = error "arbitrary not implemented: ExampleAvailable"
shrink ea = [ea { exAvDeps = deps } | deps <- shrink (exAvDeps ea)]
instance (Arbitrary a, Monoid a) => Arbitrary (ComponentDeps a) where
arbitrary = error "arbitrary not implemented: ComponentDeps"
shrink = filter isCompleteComponentDeps . map CD.fromList . shrink . CD.toList
instance Arbitrary ExampleDependency where
arbitrary = error "arbitrary not implemented: ExampleDependency"
shrink (ExAny _) = []
shrink (ExFix "base" _) = [] -- preserve bounds on base
shrink (ExFix pn _) = [ExAny pn]
shrink (ExFlagged flag th el) =
depsExampleDependencies th ++ depsExampleDependencies el
++ [ExFlagged flag th' el | th' <- shrink th]
++ [ExFlagged flag th el' | el' <- shrink el]
shrink dep = error $ "Dependency not handled: " ++ show dep
instance Arbitrary Dependencies where
arbitrary = error "arbitrary not implemented: Dependencies"
shrink deps =
[ deps { depsVisibility = v } | v <- shrink $ depsVisibility deps ]
++ [ deps { depsIsBuildable = b } | b <- shrink $ depsIsBuildable deps ]
++ [ deps { depsExampleDependencies = ds } | ds <- shrink $ depsExampleDependencies deps ]
instance Arbitrary ExConstraint where
arbitrary = error "arbitrary not implemented: ExConstraint"
shrink (ExStanzaConstraint scope stanzas) =
[ExStanzaConstraint scope stanzas' | stanzas' <- shrink stanzas]
shrink (ExVersionConstraint scope vr) =
[ExVersionConstraint scope vr' | vr' <- shrink vr]
shrink _ = []
instance Arbitrary ExPreference where
arbitrary = error "arbitrary not implemented: ExPreference"
shrink (ExStanzaPref pn stanzas) =
[ExStanzaPref pn stanzas' | stanzas' <- shrink stanzas]
shrink (ExPkgPref pn vr) = [ExPkgPref pn vr' | vr' <- shrink vr]
instance Arbitrary OptionalStanza where
arbitrary = error "arbitrary not implemented: OptionalStanza"
shrink BenchStanzas = [TestStanzas]
shrink TestStanzas = []
-- Randomly sorts solver variables using 'hash'.
-- TODO: Sorting goals with this function is very slow.
instance Arbitrary VarOrdering where
arbitrary = do
f <- arbitrary :: Gen (Int -> Int)
return $ VarOrdering (comparing (f . hash))
instance Hashable pn => Hashable (Variable pn)
instance Hashable a => Hashable (P.Qualified a)
instance Hashable P.PackagePath
instance Hashable P.Qualifier
instance Hashable P.Namespace
instance Hashable OptionalStanza
instance Hashable FlagName
instance Hashable PackageName
instance Hashable ShortText
deriving instance Generic (Variable pn)
deriving instance Generic (P.Qualified a)
deriving instance Generic P.PackagePath
deriving instance Generic P.Namespace
deriving instance Generic P.Qualifier
randomSubset :: Int -> [a] -> Gen [a]
randomSubset n xs = take n <$> shuffle xs
boundedListOf :: Int -> Gen a -> Gen [a]
boundedListOf n gen = take n <$> listOf gen
| Generates lists with average length less than 1 .
smallListOf :: Gen a -> Gen [a]
smallListOf gen =
frequency [ (fr, vectorOf n gen)
| (fr, n) <- [(3, 0), (5, 1), (2, 2)]]
| null | https://raw.githubusercontent.com/ghc/packages-Cabal/6f22f2a789fa23edb210a2591d74ea6a5f767872/cabal-install/tests/UnitTests/Distribution/Solver/Modular/QuickCheck.hs | haskell | This test checks that certain solver parameters do not affect the
existence of a solution. It runs the solver twice, and only sets those
can affect the existence of a solution to both runs.
no - count - conflicts , because the goal order used
with --count-conflicts depends on the total set of conflicts seen by the
solver. The solver explores more of the tree and encounters more
conflicts when it doesn't backjump. The different goal orders can lead to
different solutions and cause the test to fail.
TODO: Find a faster way to randomly sort goals, and then use a random
goal order in these tests.
The backjump limit prevents individual tests from using
too much time and memory.
Force the result so that we check for internal errors when we check
| How to modify the order of the input targets.
| Package name.
| Package version.
| Collection of source and installed packages.
Avoid cyclic dependencies by grouping packages by name and only
allowing each package to depend on packages in the groups before it.
dedupComponentNames removes components with duplicate names, for example,
duplicate unnamed components.
contains a library, exe, test, or benchmark.
TODO: Test different values for visibility and buildability.
| Location of an 'ExampleDependency'. It determines which values are valid.
Package checks require dependencies on "base" to have bounds.
existing version
random version of an existing package
The "component-" prefix prevents component names and build-depends
dependency names from overlapping.
internal libraries.
preserve bounds on base
Randomly sorts solver variables using 'hash'.
TODO: Sorting goals with this function is very slow. | # LANGUAGE CPP #
# LANGUAGE DeriveGeneric #
# LANGUAGE StandaloneDeriving #
# OPTIONS_GHC -fno - warn - orphans #
module UnitTests.Distribution.Solver.Modular.QuickCheck (tests) where
import Prelude ()
import Distribution.Client.Compat.Prelude
import Control.Arrow ((&&&))
import Data.Either (lefts)
import Data.Hashable (Hashable(..))
import Data.List (groupBy, isInfixOf)
import Text.Show.Pretty (parseValue, valToStr)
import Test.Tasty (TestTree)
import Test.QuickCheck (Arbitrary (..), Gen, Positive (..), frequency, oneof, shrinkList, shuffle, listOf, shrinkNothing, vectorOf, elements, sublistOf, counterexample, (===), (==>), Blind (..))
import Test.QuickCheck.Instances.Cabal ()
import Distribution.Types.Flag (FlagName)
import Distribution.Utils.ShortText (ShortText)
import Distribution.Client.Setup (defaultMaxBackjumps)
import Distribution.Types.LibraryVisibility
import Distribution.Types.PackageName
import Distribution.Types.UnqualComponentName
import qualified Distribution.Solver.Types.ComponentDeps as CD
import Distribution.Solver.Types.ComponentDeps
( Component(..), ComponentDep, ComponentDeps )
import Distribution.Solver.Types.OptionalStanza
import Distribution.Solver.Types.PackageConstraint
import qualified Distribution.Solver.Types.PackagePath as P
import Distribution.Solver.Types.PkgConfigDb
(pkgConfigDbFromList)
import Distribution.Solver.Types.Settings
import Distribution.Solver.Types.Variable
import Distribution.Verbosity
import Distribution.Version
import UnitTests.Distribution.Solver.Modular.DSL
import UnitTests.Distribution.Solver.Modular.QuickCheck.Utils
( testPropertyWithSeed )
tests :: [TestTree]
tests = [
parameters on the second run . The test also applies parameters that
testPropertyWithSeed "target and goal order do not affect solvability" $
\test targetOrder mGoalOrder1 mGoalOrder2 indepGoals ->
let r1 = solve' mGoalOrder1 test
r2 = solve' mGoalOrder2 test { testTargets = targets2 }
solve' goalOrder =
solve (EnableBackjumping True) (FineGrainedConflicts True)
(ReorderGoals False) (CountConflicts True) indepGoals
(getBlind <$> goalOrder)
targets = testTargets test
targets2 = case targetOrder of
SameOrder -> targets
ReverseOrder -> reverse targets
in counterexample (showResults r1 r2) $
noneReachedBackjumpLimit [r1, r2] ==>
isRight (resultPlan r1) === isRight (resultPlan r2)
, testPropertyWithSeed
"solvable without --independent-goals => solvable with --independent-goals" $
\test reorderGoals ->
let r1 = solve' (IndependentGoals False) test
r2 = solve' (IndependentGoals True) test
solve' indep =
solve (EnableBackjumping True) (FineGrainedConflicts True)
reorderGoals (CountConflicts True) indep Nothing
in counterexample (showResults r1 r2) $
noneReachedBackjumpLimit [r1, r2] ==>
isRight (resultPlan r1) `implies` isRight (resultPlan r2)
, testPropertyWithSeed "backjumping does not affect solvability" $
\test reorderGoals indepGoals ->
let r1 = solve' (EnableBackjumping True) test
r2 = solve' (EnableBackjumping False) test
solve' enableBj =
solve enableBj (FineGrainedConflicts False) reorderGoals
(CountConflicts True) indepGoals Nothing
in counterexample (showResults r1 r2) $
noneReachedBackjumpLimit [r1, r2] ==>
isRight (resultPlan r1) === isRight (resultPlan r2)
, testPropertyWithSeed "fine-grained conflicts does not affect solvability" $
\test reorderGoals indepGoals ->
let r1 = solve' (FineGrainedConflicts True) test
r2 = solve' (FineGrainedConflicts False) test
solve' fineGrainedConflicts =
solve (EnableBackjumping True) fineGrainedConflicts
reorderGoals (CountConflicts True) indepGoals Nothing
in counterexample (showResults r1 r2) $
noneReachedBackjumpLimit [r1, r2] ==>
isRight (resultPlan r1) === isRight (resultPlan r2)
, testPropertyWithSeed
"backjumping does not affect the result (with static goal order)" $
\test reorderGoals indepGoals ->
let r1 = solve' (EnableBackjumping True) test
r2 = solve' (EnableBackjumping False) test
solve' enableBj =
solve enableBj (FineGrainedConflicts False) reorderGoals
(CountConflicts False) indepGoals Nothing
in counterexample (showResults r1 r2) $
noneReachedBackjumpLimit [r1, r2] ==>
resultPlan r1 === resultPlan r2
, testPropertyWithSeed
"fine-grained conflicts does not affect the result (with static goal order)" $
\test reorderGoals indepGoals ->
let r1 = solve' (FineGrainedConflicts True) test
r2 = solve' (FineGrainedConflicts False) test
solve' fineGrainedConflicts =
solve (EnableBackjumping True) fineGrainedConflicts
reorderGoals (CountConflicts False) indepGoals Nothing
in counterexample (showResults r1 r2) $
noneReachedBackjumpLimit [r1, r2] ==>
resultPlan r1 === resultPlan r2
]
where
noneReachedBackjumpLimit :: [Result] -> Bool
noneReachedBackjumpLimit =
not . any (\r -> resultPlan r == Left BackjumpLimitReached)
showResults :: Result -> Result -> String
showResults r1 r2 = showResult 1 r1 ++ showResult 2 r2
showResult :: Int -> Result -> String
showResult n result =
unlines $ ["", "Run " ++ show n ++ ":"]
++ resultLog result
++ ["result: " ++ show (resultPlan result)]
implies :: Bool -> Bool -> Bool
implies x y = not x || y
isRight :: Either a b -> Bool
isRight (Right _) = True
isRight _ = False
newtype VarOrdering = VarOrdering {
unVarOrdering :: Variable P.QPN -> Variable P.QPN -> Ordering
}
solve :: EnableBackjumping
-> FineGrainedConflicts
-> ReorderGoals
-> CountConflicts
-> IndependentGoals
-> Maybe VarOrdering
-> SolverTest
-> Result
solve enableBj fineGrainedConflicts reorder countConflicts indep goalOrder test =
let (lg, result) =
runProgress $ exResolve (unTestDb (testDb test)) Nothing Nothing
(pkgConfigDbFromList [])
(map unPN (testTargets test))
(Just defaultMaxBackjumps)
countConflicts fineGrainedConflicts
(MinimizeConflictSet False) indep reorder
(AllowBootLibInstalls False) OnlyConstrainedNone enableBj
(SolveExecutables True) (unVarOrdering <$> goalOrder)
(testConstraints test) (testPreferences test) normal
(EnableAllTests False)
failure :: String -> Failure
failure msg
| "Backjump limit reached" `isInfixOf` msg = BackjumpLimitReached
| otherwise = OtherFailure
in Result {
resultLog = lg
, resultPlan =
for success or failure . See .
force $ either (Left . failure) (Right . extractInstallPlan) result
}
data TargetOrder = SameOrder | ReverseOrder
deriving Show
instance Arbitrary TargetOrder where
arbitrary = elements [SameOrder, ReverseOrder]
shrink SameOrder = []
shrink ReverseOrder = [SameOrder]
data Result = Result {
resultLog :: [String]
, resultPlan :: Either Failure [(ExamplePkgName, ExamplePkgVersion)]
}
data Failure = BackjumpLimitReached | OtherFailure
deriving (Eq, Generic, Show)
instance NFData Failure
newtype PN = PN { unPN :: String }
deriving (Eq, Ord, Show)
instance Arbitrary PN where
arbitrary = PN <$> elements ("base" : [[pn] | pn <- ['A'..'G']])
newtype PV = PV { unPV :: Int }
deriving (Eq, Ord, Show)
instance Arbitrary PV where
arbitrary = PV <$> elements [1..10]
type TestPackage = Either ExampleInstalled ExampleAvailable
getName :: TestPackage -> PN
getName = PN . either exInstName exAvName
getVersion :: TestPackage -> PV
getVersion = PV . either exInstVersion exAvVersion
data SolverTest = SolverTest {
testDb :: TestDb
, testTargets :: [PN]
, testConstraints :: [ExConstraint]
, testPreferences :: [ExPreference]
}
| Pretty - print the test when quickcheck calls ' show ' .
instance Show SolverTest where
show test =
let str = "SolverTest {testDb = " ++ show (testDb test)
++ ", testTargets = " ++ show (testTargets test)
++ ", testConstraints = " ++ show (testConstraints test)
++ ", testPreferences = " ++ show (testPreferences test)
++ "}"
in maybe str valToStr $ parseValue str
instance Arbitrary SolverTest where
arbitrary = do
db <- arbitrary
let pkgVersions = nub $ map (getName &&& getVersion) (unTestDb db)
pkgs = nub $ map fst pkgVersions
Positive n <- arbitrary
targets <- randomSubset n pkgs
constraints <- case pkgVersions of
[] -> return []
_ -> boundedListOf 1 $ arbitraryConstraint pkgVersions
prefs <- case pkgVersions of
[] -> return []
_ -> boundedListOf 3 $ arbitraryPreference pkgVersions
return (SolverTest db targets constraints prefs)
shrink test =
[test { testDb = db } | db <- shrink (testDb test)]
++ [test { testTargets = targets } | targets <- shrink (testTargets test)]
++ [test { testConstraints = cs } | cs <- shrink (testConstraints test)]
++ [test { testPreferences = prefs } | prefs <- shrink (testPreferences test)]
newtype TestDb = TestDb { unTestDb :: ExampleDb }
deriving Show
instance Arbitrary TestDb where
arbitrary = do
groupedPkgs <- shuffle . groupBy ((==) `on` fst) . nub . sort =<<
boundedListOf 10 arbitrary
db <- foldM nextPkgs (TestDb []) groupedPkgs
TestDb <$> shuffle (unTestDb db)
where
nextPkgs :: TestDb -> [(PN, PV)] -> Gen TestDb
nextPkgs db pkgs = TestDb . (++ unTestDb db) <$> traverse (nextPkg db) pkgs
nextPkg :: TestDb -> (PN, PV) -> Gen TestPackage
nextPkg db (pn, v) = do
installed <- arbitrary
if installed
then Left <$> arbitraryExInst pn v (lefts $ unTestDb db)
else Right <$> arbitraryExAv pn v db
shrink (TestDb pkgs) = map TestDb $ shrink pkgs
arbitraryExAv :: PN -> PV -> TestDb -> Gen ExampleAvailable
arbitraryExAv pn v db =
(\cds -> ExAv (unPN pn) (unPV v) cds []) <$> arbitraryComponentDeps pn db
arbitraryExInst :: PN -> PV -> [ExampleInstalled] -> Gen ExampleInstalled
arbitraryExInst pn v pkgs = do
pkgHash <- vectorOf 10 $ elements $ ['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9']
numDeps <- min 3 <$> arbitrary
deps <- randomSubset numDeps pkgs
return $ ExInst (unPN pn) (unPV v) pkgHash (map exInstHash deps)
arbitraryComponentDeps :: PN -> TestDb -> Gen (ComponentDeps Dependencies)
arbitraryComponentDeps _ (TestDb []) = return $ CD.fromLibraryDeps (dependencies [])
arbitraryComponentDeps pn db = do
' ComponentExe x ' and ' ComponentTest x ' , and then CD.fromList combines
cds <- CD.fromList . dedupComponentNames . filter (isValid . fst)
<$> boundedListOf 5 (arbitraryComponentDep db)
return $ if isCompleteComponentDeps cds
then cds
Add a library if the ComponentDeps is n't complete .
CD.fromLibraryDeps (dependencies []) <> cds
where
isValid :: Component -> Bool
isValid (ComponentSubLib name) = name /= mkUnqualComponentName (unPN pn)
isValid _ = True
dedupComponentNames =
nubBy ((\x y -> isJust x && isJust y && x == y) `on` componentName . fst)
componentName :: Component -> Maybe UnqualComponentName
componentName ComponentLib = Nothing
componentName ComponentSetup = Nothing
componentName (ComponentSubLib n) = Just n
componentName (ComponentFLib n) = Just n
componentName (ComponentExe n) = Just n
componentName (ComponentTest n) = Just n
componentName (ComponentBench n) = Just n
| Returns true if the ComponentDeps forms a complete package , i.e. , it
isCompleteComponentDeps :: ComponentDeps a -> Bool
isCompleteComponentDeps = any (completesPkg . fst) . CD.toList
where
completesPkg ComponentLib = True
completesPkg (ComponentExe _) = True
completesPkg (ComponentTest _) = True
completesPkg (ComponentBench _) = True
completesPkg (ComponentSubLib _) = False
completesPkg (ComponentFLib _) = False
completesPkg ComponentSetup = False
arbitraryComponentDep :: TestDb -> Gen (ComponentDep Dependencies)
arbitraryComponentDep db = do
comp <- arbitrary
deps <- case comp of
ComponentSetup -> smallListOf (arbitraryExDep db SetupDep)
_ -> boundedListOf 5 (arbitraryExDep db NonSetupDep)
return ( comp
, Dependencies {
depsExampleDependencies = deps
, depsVisibility = LibraryVisibilityPublic
, depsIsBuildable = True
} )
data ExDepLocation = SetupDep | NonSetupDep
arbitraryExDep :: TestDb -> ExDepLocation -> Gen ExampleDependency
arbitraryExDep db@(TestDb pkgs) level =
let flag = ExFlagged <$> arbitraryFlagName
<*> arbitraryDeps db
<*> arbitraryDeps db
other =
let notBase = filter ((/= PN "base") . getName) pkgs
in [ExAny . unPN <$> elements (map getName notBase) | not (null notBase)]
++ [
let fixed pkg = ExFix (unPN $ getName pkg) (unPV $ getVersion pkg)
in fixed <$> elements pkgs
, ExFix . unPN . getName <$> elements pkgs <*> (unPV <$> arbitrary)
]
in oneof $
case level of
NonSetupDep -> flag : other
SetupDep -> other
arbitraryDeps :: TestDb -> Gen Dependencies
arbitraryDeps db = frequency
[ (1, return unbuildableDependencies)
, (20, dependencies <$> smallListOf (arbitraryExDep db NonSetupDep))
]
arbitraryFlagName :: Gen String
arbitraryFlagName = (:[]) <$> elements ['A'..'E']
arbitraryConstraint :: [(PN, PV)] -> Gen ExConstraint
arbitraryConstraint pkgs = do
(PN pn, v) <- elements pkgs
let anyQualifier = ScopeAnyQualifier (mkPackageName pn)
oneof [
ExVersionConstraint anyQualifier <$> arbitraryVersionRange v
, ExStanzaConstraint anyQualifier <$> sublistOf [TestStanzas, BenchStanzas]
]
arbitraryPreference :: [(PN, PV)] -> Gen ExPreference
arbitraryPreference pkgs = do
(PN pn, v) <- elements pkgs
oneof [
ExStanzaPref pn <$> sublistOf [TestStanzas, BenchStanzas]
, ExPkgPref pn <$> arbitraryVersionRange v
]
arbitraryVersionRange :: PV -> Gen VersionRange
arbitraryVersionRange (PV v) =
let version = mkSimpleVersion v
in elements [
thisVersion version
, notThisVersion version
, earlierVersion version
, orLaterVersion version
, noVersion
]
instance Arbitrary ReorderGoals where
arbitrary = ReorderGoals <$> arbitrary
shrink (ReorderGoals reorder) = [ReorderGoals False | reorder]
instance Arbitrary IndependentGoals where
arbitrary = IndependentGoals <$> arbitrary
shrink (IndependentGoals indep) = [IndependentGoals False | indep]
instance Arbitrary Component where
arbitrary = oneof [ return ComponentLib
, ComponentSubLib <$> arbitraryUQN
, ComponentExe <$> arbitraryUQN
, ComponentFLib <$> arbitraryUQN
, ComponentTest <$> arbitraryUQN
, ComponentBench <$> arbitraryUQN
, return ComponentSetup
]
shrink ComponentLib = []
shrink _ = [ComponentLib]
TODO : Remove the prefix once the QuickCheck tests support dependencies on
arbitraryUQN :: Gen UnqualComponentName
arbitraryUQN =
mkUnqualComponentName <$> (\c -> "component-" ++ [c]) <$> elements "ABC"
instance Arbitrary ExampleInstalled where
arbitrary = error "arbitrary not implemented: ExampleInstalled"
shrink ei = [ ei { exInstBuildAgainst = deps }
| deps <- shrinkList shrinkNothing (exInstBuildAgainst ei)]
instance Arbitrary ExampleAvailable where
arbitrary = error "arbitrary not implemented: ExampleAvailable"
shrink ea = [ea { exAvDeps = deps } | deps <- shrink (exAvDeps ea)]
instance (Arbitrary a, Monoid a) => Arbitrary (ComponentDeps a) where
arbitrary = error "arbitrary not implemented: ComponentDeps"
shrink = filter isCompleteComponentDeps . map CD.fromList . shrink . CD.toList
instance Arbitrary ExampleDependency where
arbitrary = error "arbitrary not implemented: ExampleDependency"
shrink (ExAny _) = []
shrink (ExFix pn _) = [ExAny pn]
shrink (ExFlagged flag th el) =
depsExampleDependencies th ++ depsExampleDependencies el
++ [ExFlagged flag th' el | th' <- shrink th]
++ [ExFlagged flag th el' | el' <- shrink el]
shrink dep = error $ "Dependency not handled: " ++ show dep
instance Arbitrary Dependencies where
arbitrary = error "arbitrary not implemented: Dependencies"
shrink deps =
[ deps { depsVisibility = v } | v <- shrink $ depsVisibility deps ]
++ [ deps { depsIsBuildable = b } | b <- shrink $ depsIsBuildable deps ]
++ [ deps { depsExampleDependencies = ds } | ds <- shrink $ depsExampleDependencies deps ]
instance Arbitrary ExConstraint where
arbitrary = error "arbitrary not implemented: ExConstraint"
shrink (ExStanzaConstraint scope stanzas) =
[ExStanzaConstraint scope stanzas' | stanzas' <- shrink stanzas]
shrink (ExVersionConstraint scope vr) =
[ExVersionConstraint scope vr' | vr' <- shrink vr]
shrink _ = []
instance Arbitrary ExPreference where
arbitrary = error "arbitrary not implemented: ExPreference"
shrink (ExStanzaPref pn stanzas) =
[ExStanzaPref pn stanzas' | stanzas' <- shrink stanzas]
shrink (ExPkgPref pn vr) = [ExPkgPref pn vr' | vr' <- shrink vr]
instance Arbitrary OptionalStanza where
arbitrary = error "arbitrary not implemented: OptionalStanza"
shrink BenchStanzas = [TestStanzas]
shrink TestStanzas = []
instance Arbitrary VarOrdering where
arbitrary = do
f <- arbitrary :: Gen (Int -> Int)
return $ VarOrdering (comparing (f . hash))
instance Hashable pn => Hashable (Variable pn)
instance Hashable a => Hashable (P.Qualified a)
instance Hashable P.PackagePath
instance Hashable P.Qualifier
instance Hashable P.Namespace
instance Hashable OptionalStanza
instance Hashable FlagName
instance Hashable PackageName
instance Hashable ShortText
deriving instance Generic (Variable pn)
deriving instance Generic (P.Qualified a)
deriving instance Generic P.PackagePath
deriving instance Generic P.Namespace
deriving instance Generic P.Qualifier
randomSubset :: Int -> [a] -> Gen [a]
randomSubset n xs = take n <$> shuffle xs
boundedListOf :: Int -> Gen a -> Gen [a]
boundedListOf n gen = take n <$> listOf gen
| Generates lists with average length less than 1 .
smallListOf :: Gen a -> Gen [a]
smallListOf gen =
frequency [ (fr, vectorOf n gen)
| (fr, n) <- [(3, 0), (5, 1), (2, 2)]]
|
d7fd1f2477f03bb2dfb344c425ac1cda5e783fd7598a7fd49a8c2e7ed7982ade | abdulapopoola/SICPBook | 5.35.scm | ;; Compiled expression
;; (define (f x)
( + x ( g ( + x 2 ) ) ) )
;; DERIVATION
(assign val (op make-compiled-procedure)
(label entry16)
(reg env))
(goto (label after-lambda15))
entry16
(assign env (op compiled-procedure-env)
(reg proc))
;; x is defined and environment extended
(assign env (op extend-environment)
(const (x))
(reg argl)
(reg env))
;; operation + is added -> (+
(assign proc (op lookup-variable-value)
(const +)
(reg env))
(save continue) (save proc) (save env)
;; operation g (a procedure) is added -> (g
(assign proc (op lookup-variable-value)
(const g)
(reg env))
(save proc)
operation + is added again - > ( g ( + ( x 2 ) )
(assign proc (op lookup-variable-value)
(const +)
(reg env))
(assign val (const 2))
(assign argl (op list) (reg val))
(assign val (op lookup-variable-value)
(const x)
(reg env))
(assign argl (op cons)
(reg val)
(reg argl))
argl now contains x and 2 - > ( x 2 )
(test (op primitive-procedure?)
(reg proc))
(branch (label primitive-branch19))
compiled-branch18
(assign continue (label after-call17))
(assign val
(op compiled-procedure-entry)
(reg proc))
(goto (reg val))
primitive-branch19
(assign val
(op apply-primitive-procedure)
(reg proc)
(reg argl))
after-call17
(assign argl (op list) (reg val))
(restore proc)
(test (op primitive-procedure?)
(reg proc))
(branch (label primitive-branch22))
compiled-branch21
(assign continue (label after-call20))
(assign val
(op compiled-procedure-entry)
(reg proc))
(goto (reg val))
primitive-branch22
(assign val
(op apply-primitive-procedure)
(reg proc)
(reg argl))
after-call20
(assign argl (op list) (reg val))
(restore env)
(assign val
(op lookup-variable-value)
(const x)
(reg env))
(assign argl
(op cons)
(reg val)
(reg argl))
(restore proc)
(restore continue)
(test (op primitive-procedure?)
(reg proc))
(branch (label primitive-branch25))
compiled-branch24
(assign val (op compiled-procedure-entry)
(reg proc))
(goto (reg val))
primitive-branch25
(assign val
(op apply-primitive-procedure)
(reg proc)
(reg argl))
(goto (reg continue))
after-call23
after-lambda15
(perform (op define-variable!)
(const f)
(reg val)
(reg env))
(assign val (const ok)) | null | https://raw.githubusercontent.com/abdulapopoola/SICPBook/c8a0228ebf66d9c1ddc5ef1fcc1d05d8684f090a/Chapter%205/5.5/5.35.scm | scheme | Compiled expression
(define (f x)
DERIVATION
x is defined and environment extended
operation + is added -> (+
operation g (a procedure) is added -> (g | ( + x ( g ( + x 2 ) ) ) )
(assign val (op make-compiled-procedure)
(label entry16)
(reg env))
(goto (label after-lambda15))
entry16
(assign env (op compiled-procedure-env)
(reg proc))
(assign env (op extend-environment)
(const (x))
(reg argl)
(reg env))
(assign proc (op lookup-variable-value)
(const +)
(reg env))
(save continue) (save proc) (save env)
(assign proc (op lookup-variable-value)
(const g)
(reg env))
(save proc)
operation + is added again - > ( g ( + ( x 2 ) )
(assign proc (op lookup-variable-value)
(const +)
(reg env))
(assign val (const 2))
(assign argl (op list) (reg val))
(assign val (op lookup-variable-value)
(const x)
(reg env))
(assign argl (op cons)
(reg val)
(reg argl))
argl now contains x and 2 - > ( x 2 )
(test (op primitive-procedure?)
(reg proc))
(branch (label primitive-branch19))
compiled-branch18
(assign continue (label after-call17))
(assign val
(op compiled-procedure-entry)
(reg proc))
(goto (reg val))
primitive-branch19
(assign val
(op apply-primitive-procedure)
(reg proc)
(reg argl))
after-call17
(assign argl (op list) (reg val))
(restore proc)
(test (op primitive-procedure?)
(reg proc))
(branch (label primitive-branch22))
compiled-branch21
(assign continue (label after-call20))
(assign val
(op compiled-procedure-entry)
(reg proc))
(goto (reg val))
primitive-branch22
(assign val
(op apply-primitive-procedure)
(reg proc)
(reg argl))
after-call20
(assign argl (op list) (reg val))
(restore env)
(assign val
(op lookup-variable-value)
(const x)
(reg env))
(assign argl
(op cons)
(reg val)
(reg argl))
(restore proc)
(restore continue)
(test (op primitive-procedure?)
(reg proc))
(branch (label primitive-branch25))
compiled-branch24
(assign val (op compiled-procedure-entry)
(reg proc))
(goto (reg val))
primitive-branch25
(assign val
(op apply-primitive-procedure)
(reg proc)
(reg argl))
(goto (reg continue))
after-call23
after-lambda15
(perform (op define-variable!)
(const f)
(reg val)
(reg env))
(assign val (const ok)) |
3daa5da5afbbaac1bafdae2ffcce89d17635c314fbe75368a6a929aa7d3777c0 | nadeemabdulhamid/racketui | web-launch.rkt | #lang racket
(require "tfield.rkt" "syntax.rkt" "web.rkt")
(require racket/runtime-path
web-server/safety-limits
web-server/servlet
web-server/servlet-env
web-server/managers/lru)
(require (for-syntax syntax/parse))
(define-runtime-path htdocs "./htdocs")
launch - web : / function - > ...
(define (launch-web func/tf)
(define crunched (match (crunch (tfield-label func/tf)) ["" "index"] [c c]))
(serve/servlet (start func/tf)
#:extra-files-paths (list htdocs)
#:safety-limits (make-unlimited-safety-limits)
#:manager (make-threshold-LRU-manager
expiration-handler (* 128 1024 1024))
#:servlet-path (format "/~a.rkt" crunched)))
;; user syntax
( define - syntax - rule ( web - launch title / func )
( launch - web ( ( parse / web - spec ( web - spec / func ) ) title ) ) )
(define-syntax (web-launch stx)
(syntax-parse stx
[(web-launch title tfield/func)
#`(launch-web ((parse/web-spec (web-spec tfield/func)) title))]
[(web-launch lab+spec)
#`(launch-web ((parse/web-spec (second lab+spec)) (first lab+spec)))]))
;; ============================================================================
(provide web-launch
define/web)
| null | https://raw.githubusercontent.com/nadeemabdulhamid/racketui/045e0e647439623397cdf67e8e045ec7aa5e2def/web-launch.rkt | racket | user syntax
============================================================================ | #lang racket
(require "tfield.rkt" "syntax.rkt" "web.rkt")
(require racket/runtime-path
web-server/safety-limits
web-server/servlet
web-server/servlet-env
web-server/managers/lru)
(require (for-syntax syntax/parse))
(define-runtime-path htdocs "./htdocs")
launch - web : / function - > ...
(define (launch-web func/tf)
(define crunched (match (crunch (tfield-label func/tf)) ["" "index"] [c c]))
(serve/servlet (start func/tf)
#:extra-files-paths (list htdocs)
#:safety-limits (make-unlimited-safety-limits)
#:manager (make-threshold-LRU-manager
expiration-handler (* 128 1024 1024))
#:servlet-path (format "/~a.rkt" crunched)))
( define - syntax - rule ( web - launch title / func )
( launch - web ( ( parse / web - spec ( web - spec / func ) ) title ) ) )
(define-syntax (web-launch stx)
(syntax-parse stx
[(web-launch title tfield/func)
#`(launch-web ((parse/web-spec (web-spec tfield/func)) title))]
[(web-launch lab+spec)
#`(launch-web ((parse/web-spec (second lab+spec)) (first lab+spec)))]))
(provide web-launch
define/web)
|
322586796d08f8c431c211f226fcca73d3ec4e9c24d64a5aff9b51a25c572064 | jtza8/interact | package.lisp | ; Use of this source code is governed by a BSD-style
license that can be found in the license.txt file
; in the root directory of this project.
(in-package :interact)
(use-package :xlunit)
(defparameter *test-sprite-path*
(asdf:system-relative-pathname :interact "tests/test-sprites"))
(defparameter *test-image-path*
(asdf:system-relative-pathname :interact-tests "test-images/"))
(defparameter *test-fonts-path*
(asdf:system-relative-pathname :interact "tests/test-fonts/")) | null | https://raw.githubusercontent.com/jtza8/interact/ea2121d7e900dac4fe2a085bd5f2783a640e71f8/src/tests/package.lisp | lisp | Use of this source code is governed by a BSD-style
in the root directory of this project. | license that can be found in the license.txt file
(in-package :interact)
(use-package :xlunit)
(defparameter *test-sprite-path*
(asdf:system-relative-pathname :interact "tests/test-sprites"))
(defparameter *test-image-path*
(asdf:system-relative-pathname :interact-tests "test-images/"))
(defparameter *test-fonts-path*
(asdf:system-relative-pathname :interact "tests/test-fonts/")) |
a7062ed7f7824c735ad6df7017d8fbad3afe94ae8d8b8358a390c6394b7f8ad3 | generateme/metadoc | codox.clj | (ns metadoc.writers.codox
"Documentation writer that outputs HTML."
(:use [hiccup core page element util])
(:import [java.net URLEncoder]
[com.vladsch.flexmark.ast Link LinkRef]
[com.vladsch.flexmark.ext.wikilink WikiLink WikiLinkExtension]
[com.vladsch.flexmark.html HtmlRenderer
HtmlRenderer$HtmlRendererExtension LinkResolver LinkResolverFactory]
[com.vladsch.flexmark.html.renderer LinkResolverBasicContext
LinkStatus ResolvedLink]
[com.vladsch.flexmark.parser Parser]
[com.vladsch.flexmark.profile.pegdown Extensions
PegdownOptionsAdapter]
[com.vladsch.flexmark.util.misc Extension])
(:require [clojure.edn :as edn]
[clojure.java.io :as io]
[clojure.pprint :as pp]
[clojure.string :as str]
[clojure.walk :as walk]
[net.cgrand.enlive-html :as enlive-html]
[net.cgrand.jsoup :as jsoup]
[metadoc.examples :as ex]
[metadoc.reader :as er]))
(def ^:private escape-map {(char 0xffff) "\\0xffff"
(char 13) "\\r"
(char 10) "\\n"})
(def enlive-operations
{:append enlive-html/append
:prepend enlive-html/prepend
:after enlive-html/after
:before enlive-html/before
:substitute enlive-html/substitute})
(defn- enlive-transformer [[op & args]]
(apply (enlive-operations op) (map enlive-html/html args)))
(defn- enlive-transform [nodes transforms]
(reduce
(fn [out [s t]]
(enlive-html/transform out s (enlive-transformer t)))
nodes
(partition 2 transforms)))
(defn- enlive-emit [nodes]
(apply str (enlive-html/emit* nodes)))
(defn- enlive-parse [^String s]
(let [stream (io/input-stream (.getBytes s "UTF-8"))]
(enlive-html/html-resource stream {:parser jsoup/parser})))
(defn- transform-html [project s]
(-> (enlive-parse s)
(enlive-transform (-> project :html :transforms))
(enlive-emit)))
(defn- var-id [var]
(str "var-" (-> var name URLEncoder/encode (str/replace "%" "."))))
(def ^:private url-regex
#"((https?|ftp|file)://[-A-Za-z0-9+()&@#/%?=~_|!:,.;]+[-A-Za-z0-9+()&@#/%=~_|])")
(defn- add-anchors [text]
(when text
(str/replace text url-regex "<a href=\"$1\">$1</a>")))
(defmulti format-docstring
"Format the docstring of a var or namespace into HTML."
(fn [_ _ var] (:doc/format var))
:default :plaintext)
(defmethod format-docstring :plaintext [_ _ metadata]
[:pre.plaintext (add-anchors (h (:doc metadata)))])
#_(def ^:private pegdown
(PegDownProcessor.
(bit-or Extensions/AUTOLINKS
Extensions/QUOTES
Extensions/SMARTS
Extensions/STRIKETHROUGH
Extensions/TABLES
Extensions/FENCED_CODE_BLOCKS
Extensions/WIKILINKS
Extensions/DEFINITIONS
Extensions/ABBREVIATIONS
Extensions/ATXHEADERSPACE
Extensions/RELAXEDHRULES
Extensions/EXTANCHORLINKS)
2000))
(defn- public-vars
"Return a list of all public var names in a collection of namespaces from one
of the reader functions."
[namespaces]
(for [ns namespaces
var (:publics ns)
v (concat [var] (:members var))]
(symbol (str (:name ns)) (str (:name v)))))
(def ^:private re-chars (set "\\.*+|?()[]{}$^"))
(defn re-escape
"Escape a string so it can be safely placed in a regex."
[s]
(str/escape s #(when (re-chars %) (str \\ %))))
(defn- search-vars
"Find the best-matching var given a partial var string, a list of namespaces,
and an optional starting namespace."
[namespaces partial-var & [starting-ns]]
(let [regex (if (.contains ^String partial-var "/")
(re-pattern (str (re-escape partial-var) "$"))
(re-pattern (str "/" (re-escape partial-var) "$")))
matches (filter
#(re-find regex (str %))
(public-vars namespaces))]
(or (first (filter #(= (str starting-ns) (namespace %)) matches))
(first matches))))
(defn- find-wiki-link [project ns text]
(let [ns-strs (map (comp str :name) (:namespaces project))]
(if (contains? (set ns-strs) text)
(str text ".html")
(when-let [var (search-vars (:namespaces project) text (:name ns))]
(str (namespace var) ".html#" (var-id var))))))
#_(defn- parse-wikilink [text]
(let [pos (.indexOf text "|")]
(if (>= pos 0)
[(subs text 0 pos) (subs text (inc pos))]
[text text])))
(defn- absolute-url? [url]
(re-find #"^([a-z]+:)?//" url))
(defn- fix-markdown-url [url]
(if-not (absolute-url? url)
(str/replace url #"\.(md|markdown)$" ".html")
url))
#_(defn- encode-title [rendering title]
(if (str/blank? title)
rendering
(.withAttribute rendering "title" (FastEncoder/encode title))))
#_(defn- link-renderer [project & [ns]]
(proxy [LinkRenderer] []
(render
([node]
(if (instance? WikiLinkNode node)
(let [[page text] (parse-wikilink (.getText node))]
(LinkRenderer$Rendering. (find-wikilink project ns page) text))
(proxy-super render node)))
([node text]
(if (instance? ExpLinkNode node)
(-> (LinkRenderer$Rendering. (fix-markdown-url (.url node)) text)
(encode-title (.title node)))
(proxy-super render node text)))
([node url title text]
(if (instance? RefLinkNode node)
(-> (LinkRenderer$Rendering. (fix-markdown-url url) text)
(encode-title title))
(proxy-super render node url title text))))))
(defn- update-link-url [^ResolvedLink link f]
(-> link
(.withStatus LinkStatus/VALID)
(.withUrl (f (.getUrl link)))))
(defn- correct-internal-links [node link project ns]
(condp instance? node
WikiLink (update-link-url link #(find-wiki-link project ns %))
LinkRef (update-link-url link fix-markdown-url)
Link (update-link-url link fix-markdown-url)
link))
(defn- make-renderer-extension
[project ns]
(reify HtmlRenderer$HtmlRendererExtension
(rendererOptions [_ _])
(extend [_ htmlRendererBuilder _]
(.linkResolverFactory
htmlRendererBuilder
(reify LinkResolverFactory
(getAfterDependents [_] nil)
(getBeforeDependents [_] nil)
(affectsGlobalScope [_] false)
(^LinkResolver apply [_ ^LinkResolverBasicContext _]
(reify LinkResolver
(resolveLink [_ node _ link]
(correct-internal-links node link project ns)))))))))
(defn- make-flexmark-options
[project ns]
(-> (PegdownOptionsAdapter/flexmarkOptions
(bit-or Extensions/AUTOLINKS
Extensions/QUOTES
Extensions/SMARTS
Extensions/STRIKETHROUGH
Extensions/TABLES
Extensions/FENCED_CODE_BLOCKS
Extensions/WIKILINKS
Extensions/DEFINITIONS
Extensions/ABBREVIATIONS
Extensions/ATXHEADERSPACE
Extensions/RELAXEDHRULES
Extensions/EXTANCHORLINKS)
(into-array Extension [(make-renderer-extension project ns)]))
(.toMutable)
(.set WikiLinkExtension/LINK_FIRST_SYNTAX true)
(.toImmutable)))
(defn- markdown-to-html
([doc project]
(markdown-to-html doc project nil))
([doc project ns]
(let [options (make-flexmark-options project ns)
parser (.build (Parser/builder options))
renderer (.build (HtmlRenderer/builder options))]
(->> doc (.parse parser) (.render renderer)))))
(defn- format-markdown
  "Render markdown `doc` to HTML, resolving wiki links relative to `ns`."
  [doc project ns]
  ;; The old reader-discarded pegdown call was dead code; flexmark is the
  ;; only renderer now.
  (markdown-to-html doc project ns))
(defmethod format-docstring :markdown [project ns metadata]
  ;; Renders nothing inside the div when the var/namespace has no :doc.
  [:div.markdown
   (some-> (:doc metadata)
           (format-markdown project ns))])
(defn- ns-filename
  "File name of the generated HTML page for `namespace`."
  [namespace]
  (format "%s.html" (:name namespace)))
(defn- ns-filepath
  "Full output path of the HTML page for `namespace` under `output-dir`."
  [output-dir namespace]
  (format "%s/%s" output-dir (ns-filename namespace)))
(defn- doc-filename
  "File name of the generated HTML page for a topic document."
  [doc]
  (format "%s.html" (:name doc)))
(defn- doc-filepath
  "Full output path of the HTML page for topic document `doc`."
  [output-dir doc]
  (format "%s/%s" output-dir (doc-filename doc)))
(defn- var-uri
  "URI (page plus anchor fragment) for `var` on its namespace page."
  [namespace var]
  (str (ns-filename namespace) \# (var-id (:name var))))
(defn- get-source-uri
  "Return the first source-uri template whose regex key matches `path`,
  or nil when none matches."
  [source-uris path]
  (some (fn [[pattern template]]
          (and (re-find pattern path) template))
        source-uris))
(defn- uri-basename
  "Last path segment of `path` (text after the final slash); nil when the
  path contains no slash."
  [path]
  (when-let [m (re-find #"/([^/]+?)$" path)]
    (m 1)))
(defn- uri-path
  "Convert a platform file path (or anything stringable) to a URI-style
  path by replacing the platform separator with forward slashes."
  [path]
  (let [separator java.io.File/separator]
    (str/replace (str path) separator "/")))
(defn- var-source-uri
  "Expand the project's source-uri template for one var's source location.
  Supported placeholders: {filepath}, {classpath}, {basename}, {line} and
  {version}. When :source-uri is a map of regex -> template, the template
  whose regex matches the var's path is used."
  [{:keys [source-uri version]}
   {:keys [path file line]}]
  (let [path (uri-path path)
        uri (if (map? source-uri) (get-source-uri source-uri path) source-uri)]
    (-> uri
        (str/replace "{filepath}" path)
        (str/replace "{classpath}" (uri-path file))
        (str/replace "{basename}" (uri-basename path))
        (str/replace "{line}" (str line))
        (str/replace "{version}" version))))
(defn- split-ns
  "Dot-separated segments of a (possibly symbol) namespace name."
  [namespace]
  (-> namespace str (str/split #"\.")))
(defn- namespace-parts
  "All ancestor namespaces of `namespace`, shortest first, as symbols:
  a.b.c -> (a a.b a.b.c)."
  [namespace]
  (map symbol
       (reductions #(str %1 "." %2) (split-ns namespace))))
(defn- add-depths
  "Pair each namespace with the list of segment-count depths of all
  namespaces seen so far (most recent first): ns -> [ns (d_n ... d_1)].
  This depth history is what add-heights walks to size the sidebar tree
  lines."
  [namespaces]
  (->> namespaces
       (map (juxt identity (comp count split-ns)))
       ;; reductions threads the growing depth list; the [nil nil] seed is
       ;; dropped again by rest.
       (reductions (fn [[_ ds] [ns d]] [ns (cons d ds)]) [nil nil])
       (rest)))
(defn- add-heights
  "Given [ns depths] pairs from add-depths, compute for each namespace the
  vertical height of the tree line connecting it to its parent: the count
  of preceding entries until one at the same depth or the parent depth is
  found. Yields [ns depth height] triples."
  [namespaces]
  (for [[ns ds] namespaces]
    (let [d (first ds)
          h (count (take-while #(not (or (= d %) (= (dec d) %))) (rest ds)))]
      [ns d h])))
(defn- add-branches
  "Append a branch? flag to each [ns depth height] triple: true when the
  next entry sits at the same depth (i.e. this node has a following
  sibling)."
  [namespaces]
  (map (fn [[[ns d0 h] [_ d1 _]]]
         [ns d0 h (= d0 d1)])
       (partition-all 2 1 namespaces)))
(defn- namespace-hierarchy
  "Ordered sidebar tree data for `namespaces`: a seq of
  [name depth height branch?] covering every ancestor namespace segment."
  [namespaces]
  (-> (mapcat namespace-parts (sort (map :name namespaces)))
      (distinct)
      (add-depths)
      (add-heights)
      (add-branches)))
(defn- index-by
  "Build a map of (f x) -> x for every x in `m` (later keys win)."
  [f m]
  (reduce (fn [acc x] (assoc acc (f x) x)) {} m))
;; The values in ns-tree-part are chosen for aesthetic reasons, based
;; on a text size of 15px and a line height of 31px.
(defn- ns-tree-part
  "Hiccup for the tree connector drawn next to a namespace in the sidebar.
  `height` is how many 31px rows the vertical line must span upwards;
  zero yields the plain connector with no inline style."
  [height]
  (if (zero? height)
    [:span.tree [:span.top] [:span.bottom]]
    (let [row-height 31
          top (- -21 (* height row-height))
          line-height (+ 30 (* height row-height))]
      [:span.tree {:style (str "top: " top "px;")}
       [:span.top {:style (str "height: " line-height "px;")}]
       [:span.bottom]])))
(defn- index-link
  "Sidebar section linking back to the project index page; marked current
  when `on-index?` is true."
  [_ on-index?]
  (let [attrs {:class (when on-index? "current")}]
    (list
     [:h3.no-link [:span.inner "Project"]]
     [:ul.index-link
      [:li.depth-1 attrs
       (link-to "index.html" [:div.inner "Index"])]])))
(defn- topics-menu
  "Sidebar section listing topic documents; nil when the project has none.
  The entry for `current-doc` gets the \"current\" class."
  [project current-doc]
  (when-let [docs (seq (:documents project))]
    (list
     [:h3.no-link [:span.inner "Topics"]]
     [:ul
      (for [doc docs]
        [:li.depth-1
         ;; `when` instead of a one-armed `if`, and drop the stray leading
         ;; space so the class value matches the other menus ("current").
         {:class (when (= doc current-doc) "current")}
         (link-to (doc-filename doc) [:div.inner [:span (h (:title doc))]])])])))
(defn- nested-namespaces
  "Sidebar tree of namespaces. Ancestor segments that are not themselves
  documented namespaces render as unlinked labels; the entry for
  `current-ns` gets the \"current\" class."
  [namespaces current-ns]
  (let [ns-map (index-by :name namespaces)]
    [:ul
     ;; NOTE: `name` shadows clojure.core/name inside this loop.
     (for [[name depth height branch?] (namespace-hierarchy namespaces)]
       (let [class (str "depth-" depth (when branch? " branch"))
             short (last (split-ns name))
             inner [:div.inner (ns-tree-part height) [:span (h short)]]]
         (if-let [ns (ns-map name)]
           ;; Real namespace: linkable, possibly marked current.
           (let [class (str class (when (= ns current-ns) " current"))]
             [:li {:class class} (link-to (ns-filename ns) inner)])
           ;; Intermediate segment with no page of its own.
           [:li {:class class} [:div.no-link inner]])))]))
(defn- flat-namespaces
  "Flat alphabetical sidebar list of namespaces; `current-ns` is marked
  with the \"current\" class."
  [namespaces current-ns]
  [:ul
   (for [ns (sort-by :name namespaces)
         :let [attrs {:class (when (= ns current-ns) "current")}]]
     [:li.depth-1 attrs
      (link-to (ns-filename ns) [:div.inner [:span (h (:name ns))]])])])
(defn- namespace-list-type
  "How to render the namespace menu: the project's [:html :namespace-list]
  setting, defaulting to :nested for multi-namespace projects and :flat
  otherwise."
  [project]
  (get-in project
          [:html :namespace-list]
          (if (next (:namespaces project)) :nested :flat)))
(defn- namespaces-menu
  "Sidebar section with the namespace list, flat or nested per project
  configuration."
  [project current-ns]
  (let [namespaces (:namespaces project)
        render (case (namespace-list-type project)
                 :flat flat-namespaces
                 :nested nested-namespaces)]
    (list
     [:h3.no-link [:span.inner "Namespaces"]]
     (render namespaces current-ns))))
(defn- primary-sidebar
  "Left sidebar: index link, topics and namespaces. `current` is the
  page's namespace or document (nil on the index page)."
  [project & [current]]
  (into [:div.sidebar.primary]
        [(index-link project (nil? current))
         (topics-menu project current)
         (namespaces-menu project current)]))
(defn- sorted-public-vars
  "Public vars of `namespace` sorted case-insensitively by name."
  [namespace]
  (sort-by #(str/lower-case (:name %)) (:publics namespace)))
(defn- vars-sidebar
  "Right sidebar listing the namespace's public vars, with protocol/record
  members nested one level below their parent var."
  [namespace]
  [:div.sidebar.secondary
   [:h3 (link-to "#top" [:span.inner "Public Vars"])]
   [:ul
    (for [var (sorted-public-vars namespace)]
      ;; list* splices the member entries after the var's own entry.
      (list*
       [:li.depth-1
        (link-to (var-uri namespace var) [:div.inner [:span (h (:name var))]])]
       (for [mem (:members var)]
         ;; All members but the last draw a continuing branch line.
         (let [branch? (not= mem (last (:members var)))
               class (if branch? "depth-2 branch" "depth-2")
               inner [:div.inner (ns-tree-part 0) [:span (h (:name mem))]]]
           [:li {:class class}
            (link-to (var-uri namespace mem) inner)]))))]])
;; Charset meta tag shared by every generated page's <head>.
(def ^:private default-meta
  [:meta {:charset "UTF-8"}])
(defn- project-title
  "Hiccup span combining the project's name and version."
  [project]
  (let [proj-name (:name project)
        proj-version (:version project)]
    [:span.project-title
     [:span.project-name (h proj-name)] " "
     [:span.project-version (h proj-version)]]))
(defn- header
  "Page banner: generator credit plus a project-title link to the index."
  [project]
  (let [title-link (link-to "index.html" (project-title project))]
    [:div#header
     [:h2 "Generated by " (link-to "" "Codox")]
     [:h1 title-link]]))
(defn- package
  "The project's package symbol, with a redundant group id collapsed
  (foo/foo -> foo). Nil when the project has no :package."
  [project]
  (let [p (:package project)]
    (cond
      (nil? p) nil
      (= (namespace p) (name p)) (symbol (name p))
      :else p)))
(defn- add-ending
  "Append `ending` to `s` unless `s` already ends with it."
  [^String s ^String ending]
  (cond-> s
    (not (str/ends-with? s ending)) (str ending)))
(defn- strip-prefix
  "Remove `prefix` (case-insensitively) from the start of `s`; nil in,
  nil out. The prefix is quoted so regex metacharacters in it are treated
  literally instead of corrupting the match."
  [s prefix]
  (when s
    (str/replace s
                 (re-pattern (str "(?i)^" (java.util.regex.Pattern/quote prefix)))
                 "")))
(defn- summary
  "Return the summary of a docstring.
  The summary is the first portion of the string, from the first
  character to the first page break (\f) character OR the first TWO
  newlines."
  [s]
  (when s
    ;; Alternation order matters: prefer text up to a form feed, then up
    ;; to a blank line, then the whole trimmed string. (?s) lets `.`
    ;; cross single newlines.
    (->> (str/trim s)
         (re-find #"(?s).*?(?=\f)|.*?(?=\n\n)|.*"))))
(defn- category-line
  "Space-separated links to every var in category `n`, sorted by name."
  [project namespace categories n]
  (->> (sort (n categories))
       (map (fn [v]
              [:a {:href (find-wiki-link project namespace (str v))} v]))
       (interpose " ")))
(defn- categories-part
  "Hiccup block listing the namespace's var categories. The :other
  category is skipped in the main list and appended as a trailing
  \"Other vars\" paragraph. Nil when the namespace has no categories."
  [project namespace]
  (let [categories (:categories-list namespace)]
    (when-not (empty? categories)
      (let [categories-names (:metadoc/categories namespace)]
        [:div.markdown
         [:h4 "Categories"]
         [:ul
          (for [n (sort (keys categories))
                ;; Display name falls back to the category keyword's name.
                :let [nn (or (n categories-names) (name n))]
                :when (not (= n :other))]
            [:li nn ": " (category-line project namespace categories n)])]
         (when (:other categories)
           [:p "Other vars: " (category-line project namespace categories :other)])]))))
(defn- index-page
  "Full HTML index page: project title, license line, description,
  installation snippet, topic list and a summary entry for every
  namespace."
  [project]
  (html5
   [:head
    default-meta
    [:title (h (:name project)) " " (h (:version project))]]
   [:body
    (header project)
    (primary-sidebar project)
    [:div#content.namespace-index
     [:h1 (project-title project)]
     ;; "the " is stripped so the line reads "Released under the <name>".
     (when-let [license (-> (get-in project [:license :name]) (strip-prefix "the "))]
       [:h5.license
        "Released under the "
        (if-let [url (get-in project [:license :url])]
          (link-to url license)
          license)])
     (when-let [description (:description project)]
       [:div.doc [:p (h (add-ending description "."))]])
     (when-let [package (package project)]
       (list
        [:h2 "Installation"]
        [:p "To install, add the following dependency to your project or build file:"]
        [:pre.deps (h (str "[" package " " (pr-str (:version project)) "]"))]))
     (when-let [docs (seq (:documents project))]
       (list
        [:h2 "Topics"]
        [:ul.topics
         (for [doc docs]
           [:li (link-to (doc-filename doc) (h (:title doc)))])]))
     [:h2 "Namespaces"]
     (for [namespace (sort-by :name (:namespaces project))]
       [:div.namespace
        [:h3 (link-to (ns-filename namespace) (h (:name namespace)))]
        ;; Only the docstring summary is shown on the index.
        [:div.doc (format-docstring project nil (update-in namespace [:doc] summary))]
        [:div.doc (categories-part project namespace)]])]]))
(defmulti format-document
  "Format a document into HTML."
  ;; Dispatch on the document's :format key (e.g. :markdown).
  (fn [_ doc] (:format doc)))
(defmethod format-document :markdown [project doc]
  ;; Topic documents have no namespace context, so wiki links resolve
  ;; project-wide. The old reader-discarded pegdown call was dead code.
  [:div.markdown (markdown-to-html (:content doc) project)])
(defn- document-page
  "Full HTML page for one topic document."
  [project doc]
  (html5
   [:head
    default-meta
    [:title (h (:title doc))]]
   [:body
    (header project)
    (primary-sidebar project doc)
    [:div#content.document
     [:div.doc (format-document project doc)]]]))
(defn- var-usage
  "Call forms for `var`, one per arglist: (name arg1 arg2 ...)."
  [var]
  (let [sym (:name var)]
    (map #(list* sym %) (:arglists var))))
(defn- added-and-deprecated-docs
  "Hiccup h4 tags noting :added and :deprecated metadata, when present.
  A string :deprecated value names the version it was deprecated in."
  [var]
  (let [{:keys [added deprecated]} var]
    (list
     (when added
       [:h4.added "added in " added])
     (when deprecated
       [:h4.deprecated "deprecated" (when (string? deprecated) (str " in " deprecated))]))))
(defn- remove-namespaces
  "Strip the namespace qualifier from symbol `x` when it belongs to one of
  the (string) `namespaces`; any other value passes through untouched."
  [x namespaces]
  (if-not (and (symbol? x) (contains? namespaces (namespace x)))
    x
    (symbol (name x))))
(defn- normalize-types
  "Round-trip `types` through the printer/reader (pr-str then read-string)
  so the type signature becomes plain readable data before rewriting."
  [types]
  (read-string (pr-str types)))
(defn- pprint-str
  "Pretty-print `x` to a string (includes pprint's trailing newline)."
  [x]
  (with-out-str (pp/pprint x)))
(defn- type-sig
  "Pretty-printed core.typed signature for `var`, with symbols qualified
  by the namespace itself or clojure.core.typed shortened to bare names."
  [namespace var]
  (let [implied-namespaces #{(str (:name namespace)) "clojure.core.typed"}]
    (->> (:type-sig var)
         (normalize-types)
         (walk/postwalk #(remove-namespaces % implied-namespaces))
         (pprint-str))))
(defn- escape-value
  "Render constant value `s` safely for HTML: escape the control
  characters in escape-map (CR, LF, 0xffff), then HTML-escape the result."
  [s]
  (escape-html (str/escape (str s) escape-map)))
(defn- var-docs
  "Hiccup documentation block for one public var: type/const/dynamic
  badges, type signature, usage forms, constant value, docstring,
  examples, nested members and an optional source link."
  [project namespace var]
  ;; Constants/examples maps are keyed by var name; symbol lookup works
  ;; like keyword lookup on maps.
  (let [constant-value ((:name var) (:constants namespace))
        examples ((:name var) (:examples namespace))]
    [:div.public.anchor {:id (h (var-id (:name var)))}
     [:h3 (h (:name var))]
     (when-not (= (:type var) :var)
       [:h4.type (name (:type var))])
     (when constant-value
       [:h4.dynamic "const"])
     (when (:dynamic var)
       [:h4.dynamic "dynamic"])
     (added-and-deprecated-docs var)
     (when (:type-sig var)
       [:div.type-sig
        [:pre (h (type-sig namespace var))]])
     [:div.usage
      (for [form (var-usage var)]
        [:code (h (pr-str form))])]
     (when constant-value
       [:div [:div.markdown [:code {:class "hljs clojure"} ";;=> " (escape-value constant-value)]]])
     [:div.doc (format-docstring project namespace var)]
     (when (seq examples)
       [:div.markdown
        [:h4 "Examples"]
        (for [ex examples]
          (ex/format-example :html (update ex :doc #(format-markdown % project namespace))))])
     (when-let [members (seq (:members var))]
       [:div.members
        [:h4 "members"]
        [:div.inner
         ;; Members never get their own source links; drop :source-uri
         ;; before recursing.
         (let [project (dissoc project :source-uri)]
           (map (partial var-docs project namespace) members))]])
     (when (:source-uri project)
       (if (:path var)
         [:div.src-link (link-to (var-source-uri project var) "view source")]
         ;; Side effect at page-generation time: warn when a var has no
         ;; recorded source path.
         (println "Could not generate source link for" (:name var))))]))
(defn- constants-part
  "Hiccup block listing the namespace's constants, each linked to its var
  with the escaped constant value; nil when there are none."
  [project namespace]
  (when-not (empty? (:constants namespace))
    [:div.markdown
     [:h4 "Constants"]
     [:ul
      (for [[n v] (sort (:constants namespace))]
        [:li [:a {:href (find-wiki-link project namespace (str n))} n] " = " [:code (escape-value v)]])]]))
(defn- snippets-part
  "Hiccup block with the namespace's non-hidden code snippets: each
  snippet's doc rendered as markdown plus its source string."
  [project namespace]
  ;; NOTE(review): if :snippets is a map of name -> snippet, `filter`
  ;; iterates MapEntry values and (:hidden entry) is always nil, making
  ;; the :hidden filter a no-op — confirm the shape the reader produces.
  ;; (keys/vals do work on a seq of MapEntries, so `vals` below is fine.)
  (when-let [snippets (seq (filter (comp not :hidden) (:snippets namespace)))]
    [:div.markdown
     [:h4 "Code snippets"]
     (for [{:keys [doc fn-str]} (vals snippets)]
       [:div
        [:blockquote (format-markdown doc project namespace)]
        [:pre [:code fn-str]]])]))
(defn- namespace-page
  "Full HTML page documenting one namespace: header, both sidebars, the
  namespace doc with categories/constants/snippets, then every public
  var."
  [project namespace]
  (html5
   [:head
    default-meta
    [:title (h (:name namespace)) " documentation"]]
   [:body
    (header project)
    (primary-sidebar project namespace)
    (vars-sidebar namespace)
    [:div#content.namespace-docs
     [:h1#top.anchor (h (:name namespace))]
     (added-and-deprecated-docs namespace)
     [:div.doc
      (format-docstring project namespace namespace)
      (categories-part project namespace)
      (constants-part project namespace)
      (snippets-part project namespace)]
     (for [var (sorted-public-vars namespace)]
       (var-docs project namespace var))]]))
#_(defn- mkdirs [output-dir & dirs]
(doseq [dir dirs]
(.mkdirs (io/file output-dir dir))))
(defn- write-index
  "Write the project index page to <output-dir>/index.html."
  [output-dir project]
  (let [page (transform-html project (index-page project))]
    (spit (io/file output-dir "index.html") page)))
(defn- write-namespaces
  "Write one HTML page per project namespace into `output-dir`."
  [output-dir project]
  (doseq [namespace (:namespaces project)
          :let [target (ns-filepath output-dir namespace)]]
    (spit target (transform-html project (namespace-page project namespace)))))
(defn- write-documents
  "Write one HTML page per topic document into `output-dir`."
  [output-dir project]
  (doseq [document (:documents project)
          :let [target (doc-filepath output-dir document)]]
    (spit target (transform-html project (document-page project document)))))
(defn- theme-path
  "Classpath directory of a theme. A theme is either a keyword/symbol or
  a [name params] vector."
  [theme]
  (->> (if (vector? theme) (first theme) theme)
       name
       (str "codox/theme/")))
(defn- insert-params
  "Substitute theme parameters into `theme-data`: every keyword anywhere
  in the structure is replaced by its value from the theme's params map,
  falling back to the theme's :defaults map, falling back to the keyword
  itself. The :defaults key is removed from the result."
  [theme-data theme]
  (let [params (if (vector? theme) (or (second theme) {}) {})
        defaults (:defaults theme-data {})]
    (assert (map? params) "Theme parameters must be a map")
    (assert (map? defaults) "Theme defaults must be a map")
    (->> (dissoc theme-data :defaults)
         ;; (params k fallback): map-as-function lookup with a default.
         (walk/postwalk #(if (keyword? %) (params % (defaults % %)) %)))))
(defn- read-theme
  "Load and parameter-substitute a theme's theme.edn from the classpath;
  nil when the resource does not exist."
  [theme]
  (let [edn-path (str (theme-path theme) "/theme.edn")]
    (some-> (io/resource edn-path)
            slurp
            edn/read-string
            (insert-params theme))))
(defn- make-parent-dir
  "Ensure the parent directory of `file` exists."
  [file]
  (.mkdirs (.getParentFile (io/file file))))
(defn- copy-resource
  "Copy classpath resource `resource` to `output-path`, closing the input
  stream when done (io/copy does not close its arguments, so the original
  version leaked a stream per resource)."
  [resource output-path]
  (with-open [in (io/input-stream (io/resource resource))]
    (io/copy in output-path)))
(defn- copy-theme-resources
  "Copy every static resource declared by each of the project's themes
  into `output-dir`, creating parent directories as needed."
  [output-dir project]
  (doseq [theme (:themes project)]
    (let [root (theme-path theme)]
      (doseq [path (:resources (read-theme theme))]
        (let [output-file (io/file output-dir path)]
          (make-parent-dir output-file)
          (copy-resource (str root "/" path) output-file))))))
(defn- apply-one-theme
  "Append a theme's HTML transforms to the project's [:html :transforms];
  throws when the theme cannot be found on the classpath."
  [project theme]
  (let [theme-data (read-theme theme)]
    (when-not theme-data
      (throw (IllegalArgumentException.
              (format "Could not find Codox theme: %s" theme))))
    (update-in project [:html :transforms] concat (:transforms theme-data))))
(defn- apply-theme-transforms
  "Fold every configured theme's transforms into the project map."
  [{:keys [themes] :as project}]
  (reduce apply-one-theme project themes))
;; Enrich the project's namespace maps with data extracted by metadoc
;; (constants, examples, categories, snippets).
(defn- maybe-assoc
  "Assoc (f ns) under `key-target` in `n`, unless `key-in` is listed in
  the project's :exclude-metadoc set — then return `n` unchanged."
  [project ns n key-in key-target f]
  (if (contains? (:exclude-metadoc project) key-in)
    n
    (assoc n key-target (f ns))))
(defn- add-sections
  "Add constants, categories and snippets (plus examples) extracted by
  metadoc to every namespace map in the project, honouring the project's
  :exclude-metadoc set. The namespaces must already be loaded so find-ns
  can resolve them."
  [project]
  (assoc project :namespaces
         (map #(let [ns (find-ns (:name %))
                     ;; ma = maybe-assoc pre-applied with project and the
                     ;; live namespace object.
                     ma (partial maybe-assoc project ns)]
                 (as-> % n
                   (ma n :constants :constants er/extract-constants)
                   (ma n :examples :examples er/extract-examples)
                   (ma n :categories :categories-list er/extract-categories)
                   (ma n :snippets :snippets er/extract-snippets)))
              (:namespaces project))))
(defn write-docs
  "Take raw documentation info and turn it into formatted HTML."
  [{:keys [output-path] :as project}]
  ;; Evaluate example/snippet forms before rendering.
  (er/load-examples)
  (let [project (-> project
                    (apply-theme-transforms)
                    (add-sections))]
    ;; doto threads output-path as the first (output-dir) argument of each
    ;; writer and returns it unchanged.
    (doto output-path
      (copy-theme-resources project)
      (write-index project)
      (write-namespaces project)
      (write-documents project))
    (println "Done")))
| null | https://raw.githubusercontent.com/generateme/metadoc/ac773832e4f677517800b14f7b3973a5ad52a9ae/src/metadoc/writers/codox.clj | clojure | The values in ns-tree-part are chosen for aesthetic reasons, based
on a text size of 15px and a line height of 31px.
update namespaces and variables | (ns metadoc.writers.codox
"Documentation writer that outputs HTML."
(:use [hiccup core page element util])
(:import [java.net URLEncoder]
[com.vladsch.flexmark.ast Link LinkRef]
[com.vladsch.flexmark.ext.wikilink WikiLink WikiLinkExtension]
[com.vladsch.flexmark.html HtmlRenderer
HtmlRenderer$HtmlRendererExtension LinkResolver LinkResolverFactory]
[com.vladsch.flexmark.html.renderer LinkResolverBasicContext
LinkStatus ResolvedLink]
[com.vladsch.flexmark.parser Parser]
[com.vladsch.flexmark.profile.pegdown Extensions
PegdownOptionsAdapter]
[com.vladsch.flexmark.util.misc Extension])
(:require [clojure.edn :as edn]
[clojure.java.io :as io]
[clojure.pprint :as pp]
[clojure.string :as str]
[clojure.walk :as walk]
[net.cgrand.enlive-html :as enlive-html]
[net.cgrand.jsoup :as jsoup]
[metadoc.examples :as ex]
[metadoc.reader :as er]))
(def ^:private escape-map {(char 0xffff) "\\0xffff"
(char 13) "\\r"
(char 10) "\\n"})
(def enlive-operations
{:append enlive-html/append
:prepend enlive-html/prepend
:after enlive-html/after
:before enlive-html/before
:substitute enlive-html/substitute})
(defn- enlive-transformer [[op & args]]
(apply (enlive-operations op) (map enlive-html/html args)))
(defn- enlive-transform [nodes transforms]
(reduce
(fn [out [s t]]
(enlive-html/transform out s (enlive-transformer t)))
nodes
(partition 2 transforms)))
(defn- enlive-emit [nodes]
(apply str (enlive-html/emit* nodes)))
(defn- enlive-parse [^String s]
(let [stream (io/input-stream (.getBytes s "UTF-8"))]
(enlive-html/html-resource stream {:parser jsoup/parser})))
(defn- transform-html [project s]
(-> (enlive-parse s)
(enlive-transform (-> project :html :transforms))
(enlive-emit)))
(defn- var-id [var]
(str "var-" (-> var name URLEncoder/encode (str/replace "%" "."))))
(def ^:private url-regex
#"((https?|ftp|file)://[-A-Za-z0-9+()&@#/%?=~_|!:,.;]+[-A-Za-z0-9+()&@#/%=~_|])")
(defn- add-anchors [text]
(when text
(str/replace text url-regex "<a href=\"$1\">$1</a>")))
(defmulti format-docstring
"Format the docstring of a var or namespace into HTML."
(fn [_ _ var] (:doc/format var))
:default :plaintext)
(defmethod format-docstring :plaintext [_ _ metadata]
[:pre.plaintext (add-anchors (h (:doc metadata)))])
#_(def ^:private pegdown
(PegDownProcessor.
(bit-or Extensions/AUTOLINKS
Extensions/QUOTES
Extensions/SMARTS
Extensions/STRIKETHROUGH
Extensions/TABLES
Extensions/FENCED_CODE_BLOCKS
Extensions/WIKILINKS
Extensions/DEFINITIONS
Extensions/ABBREVIATIONS
Extensions/ATXHEADERSPACE
Extensions/RELAXEDHRULES
Extensions/EXTANCHORLINKS)
2000))
(defn- public-vars
"Return a list of all public var names in a collection of namespaces from one
of the reader functions."
[namespaces]
(for [ns namespaces
var (:publics ns)
v (concat [var] (:members var))]
(symbol (str (:name ns)) (str (:name v)))))
(def ^:private re-chars (set "\\.*+|?()[]{}$^"))
(defn re-escape
"Escape a string so it can be safely placed in a regex."
[s]
(str/escape s #(when (re-chars %) (str \\ %))))
(defn- search-vars
"Find the best-matching var given a partial var string, a list of namespaces,
and an optional starting namespace."
[namespaces partial-var & [starting-ns]]
(let [regex (if (.contains ^String partial-var "/")
(re-pattern (str (re-escape partial-var) "$"))
(re-pattern (str "/" (re-escape partial-var) "$")))
matches (filter
#(re-find regex (str %))
(public-vars namespaces))]
(or (first (filter #(= (str starting-ns) (namespace %)) matches))
(first matches))))
(defn- find-wiki-link [project ns text]
(let [ns-strs (map (comp str :name) (:namespaces project))]
(if (contains? (set ns-strs) text)
(str text ".html")
(when-let [var (search-vars (:namespaces project) text (:name ns))]
(str (namespace var) ".html#" (var-id var))))))
#_(defn- parse-wikilink [text]
(let [pos (.indexOf text "|")]
(if (>= pos 0)
[(subs text 0 pos) (subs text (inc pos))]
[text text])))
(defn- absolute-url? [url]
(re-find #"^([a-z]+:)?//" url))
(defn- fix-markdown-url [url]
(if-not (absolute-url? url)
(str/replace url #"\.(md|markdown)$" ".html")
url))
#_(defn- encode-title [rendering title]
(if (str/blank? title)
rendering
(.withAttribute rendering "title" (FastEncoder/encode title))))
#_(defn- link-renderer [project & [ns]]
(proxy [LinkRenderer] []
(render
([node]
(if (instance? WikiLinkNode node)
(let [[page text] (parse-wikilink (.getText node))]
(LinkRenderer$Rendering. (find-wikilink project ns page) text))
(proxy-super render node)))
([node text]
(if (instance? ExpLinkNode node)
(-> (LinkRenderer$Rendering. (fix-markdown-url (.url node)) text)
(encode-title (.title node)))
(proxy-super render node text)))
([node url title text]
(if (instance? RefLinkNode node)
(-> (LinkRenderer$Rendering. (fix-markdown-url url) text)
(encode-title title))
(proxy-super render node url title text))))))
(defn- update-link-url [^ResolvedLink link f]
(-> link
(.withStatus LinkStatus/VALID)
(.withUrl (f (.getUrl link)))))
(defn- correct-internal-links [node link project ns]
(condp instance? node
WikiLink (update-link-url link #(find-wiki-link project ns %))
LinkRef (update-link-url link fix-markdown-url)
Link (update-link-url link fix-markdown-url)
link))
(defn- make-renderer-extension
[project ns]
(reify HtmlRenderer$HtmlRendererExtension
(rendererOptions [_ _])
(extend [_ htmlRendererBuilder _]
(.linkResolverFactory
htmlRendererBuilder
(reify LinkResolverFactory
(getAfterDependents [_] nil)
(getBeforeDependents [_] nil)
(affectsGlobalScope [_] false)
(^LinkResolver apply [_ ^LinkResolverBasicContext _]
(reify LinkResolver
(resolveLink [_ node _ link]
(correct-internal-links node link project ns)))))))))
(defn- make-flexmark-options
[project ns]
(-> (PegdownOptionsAdapter/flexmarkOptions
(bit-or Extensions/AUTOLINKS
Extensions/QUOTES
Extensions/SMARTS
Extensions/STRIKETHROUGH
Extensions/TABLES
Extensions/FENCED_CODE_BLOCKS
Extensions/WIKILINKS
Extensions/DEFINITIONS
Extensions/ABBREVIATIONS
Extensions/ATXHEADERSPACE
Extensions/RELAXEDHRULES
Extensions/EXTANCHORLINKS)
(into-array Extension [(make-renderer-extension project ns)]))
(.toMutable)
(.set WikiLinkExtension/LINK_FIRST_SYNTAX true)
(.toImmutable)))
(defn- markdown-to-html
([doc project]
(markdown-to-html doc project nil))
([doc project ns]
(let [options (make-flexmark-options project ns)
parser (.build (Parser/builder options))
renderer (.build (HtmlRenderer/builder options))]
(->> doc (.parse parser) (.render renderer)))))
(defn- format-markdown
[doc project ns]
#_(.markdownToHtml pegdown doc (link-renderer project ns))
(markdown-to-html doc project ns))
(defmethod format-docstring :markdown [project ns metadata]
[:div.markdown
(when-let [doc (:doc metadata)]
(format-markdown doc project ns))])
(defn- ns-filename [namespace]
(str (:name namespace) ".html"))
(defn- ns-filepath [output-dir namespace]
(str output-dir "/" (ns-filename namespace)))
(defn- doc-filename [doc]
(str (:name doc) ".html"))
(defn- doc-filepath [output-dir doc]
(str output-dir "/" (doc-filename doc)))
(defn- var-uri [namespace var]
(str (ns-filename namespace) "#" (var-id (:name var))))
(defn- get-source-uri [source-uris path]
(some (fn [[re f]] (when (re-find re path) f)) source-uris))
(defn- uri-basename [path]
(second (re-find #"/([^/]+?)$" path)))
(defn- uri-path [path]
(str/replace (str path) java.io.File/separator "/"))
(defn- var-source-uri
[{:keys [source-uri version]}
{:keys [path file line]}]
(let [path (uri-path path)
uri (if (map? source-uri) (get-source-uri source-uri path) source-uri)]
(-> uri
(str/replace "{filepath}" path)
(str/replace "{classpath}" (uri-path file))
(str/replace "{basename}" (uri-basename path))
(str/replace "{line}" (str line))
(str/replace "{version}" version))))
(defn- split-ns [namespace]
(str/split (str namespace) #"\."))
(defn- namespace-parts [namespace]
(->> (split-ns namespace)
(reductions #(str %1 "." %2))
(map symbol)))
(defn- add-depths [namespaces]
(->> namespaces
(map (juxt identity (comp count split-ns)))
(reductions (fn [[_ ds] [ns d]] [ns (cons d ds)]) [nil nil])
(rest)))
(defn- add-heights [namespaces]
(for [[ns ds] namespaces]
(let [d (first ds)
h (count (take-while #(not (or (= d %) (= (dec d) %))) (rest ds)))]
[ns d h])))
(defn- add-branches [namespaces]
(->> (partition-all 2 1 namespaces)
(map (fn [[[ns d0 h] [_ d1 _]]] [ns d0 h (= d0 d1)]))))
(defn- namespace-hierarchy [namespaces]
(->> (map :name namespaces)
(sort)
(mapcat namespace-parts)
(distinct)
(add-depths)
(add-heights)
(add-branches)))
(defn- index-by [f m]
(into {} (map (juxt f identity) m)))
(defn- ns-tree-part [height]
(if (zero? height)
[:span.tree [:span.top] [:span.bottom]]
(let [row-height 31
top (- 0 21 (* height row-height))
height (+ 0 30 (* height row-height))]
[:span.tree {:style (str "top: " top "px;")}
[:span.top {:style (str "height: " height "px;")}]
[:span.bottom]])))
(defn- index-link [_ on-index?]
(list
[:h3.no-link [:span.inner "Project"]]
[:ul.index-link
[:li.depth-1 {:class (when on-index? "current")}
(link-to "index.html" [:div.inner "Index"])]]))
(defn- topics-menu [project current-doc]
(when-let [docs (seq (:documents project))]
(list
[:h3.no-link [:span.inner "Topics"]]
[:ul
(for [doc docs]
[:li.depth-1
{:class (if (= doc current-doc) " current")}
(link-to (doc-filename doc) [:div.inner [:span (h (:title doc))]])])])))
(defn- nested-namespaces [namespaces current-ns]
(let [ns-map (index-by :name namespaces)]
[:ul
(for [[name depth height branch?] (namespace-hierarchy namespaces)]
(let [class (str "depth-" depth (when branch? " branch"))
short (last (split-ns name))
inner [:div.inner (ns-tree-part height) [:span (h short)]]]
(if-let [ns (ns-map name)]
(let [class (str class (when (= ns current-ns) " current"))]
[:li {:class class} (link-to (ns-filename ns) inner)])
[:li {:class class} [:div.no-link inner]])))]))
(defn- flat-namespaces [namespaces current-ns]
[:ul
(for [ns (sort-by :name namespaces)]
[:li.depth-1
{:class (when (= ns current-ns) "current")}
(link-to (ns-filename ns) [:div.inner [:span (h (:name ns))]])])])
(defn- namespace-list-type [project]
(let [default (if (> (-> project :namespaces count) 1) :nested :flat)]
(get-in project [:html :namespace-list] default)))
(defn- namespaces-menu [project current-ns]
(let [namespaces (:namespaces project)]
(list
[:h3.no-link [:span.inner "Namespaces"]]
(case (namespace-list-type project)
:flat (flat-namespaces namespaces current-ns)
:nested (nested-namespaces namespaces current-ns)))))
(defn- primary-sidebar [project & [current]]
[:div.sidebar.primary
(index-link project (nil? current))
(topics-menu project current)
(namespaces-menu project current)])
(defn- sorted-public-vars [namespace]
(sort-by (comp str/lower-case :name) (:publics namespace)))
(defn- vars-sidebar [namespace]
[:div.sidebar.secondary
[:h3 (link-to "#top" [:span.inner "Public Vars"])]
[:ul
(for [var (sorted-public-vars namespace)]
(list*
[:li.depth-1
(link-to (var-uri namespace var) [:div.inner [:span (h (:name var))]])]
(for [mem (:members var)]
(let [branch? (not= mem (last (:members var)))
class (if branch? "depth-2 branch" "depth-2")
inner [:div.inner (ns-tree-part 0) [:span (h (:name mem))]]]
[:li {:class class}
(link-to (var-uri namespace mem) inner)]))))]])
(def ^:private default-meta
[:meta {:charset "UTF-8"}])
(defn- project-title [project]
[:span.project-title
[:span.project-name (h (:name project))] " "
[:span.project-version (h (:version project))]])
(defn- header [project]
[:div#header
[:h2 "Generated by " (link-to "" "Codox")]
[:h1 (link-to "index.html" (project-title project))]])
(defn- package [project]
(when-let [p (:package project)]
(if (= (namespace p) (name p))
(symbol (name p))
p)))
(defn- add-ending [^String s ^String ending]
(if (.endsWith s ending) s (str s ending)))
(defn- strip-prefix [s prefix]
(when s (str/replace s (re-pattern (str "(?i)^" prefix)) "")))
(defn- summary
"Return the summary of a docstring.
The summary is the first portion of the string, from the first
character to the first page break (\f) character OR the first TWO
newlines."
[s]
(when s
(->> (str/trim s)
(re-find #"(?s).*?(?=\f)|.*?(?=\n\n)|.*"))))
(defn- category-line
[project namespace categories n]
(interpose " " (for [v (sort (n categories))]
[:a {:href (find-wiki-link project namespace (str v))} v])))
(defn- categories-part
[project namespace]
(let [categories (:categories-list namespace)]
(when-not (empty? categories)
(let [categories-names (:metadoc/categories namespace)]
[:div.markdown
[:h4 "Categories"]
[:ul
(for [n (sort (keys categories))
:let [nn (or (n categories-names) (name n))]
:when (not (= n :other))]
[:li nn ": " (category-line project namespace categories n)])]
(when (:other categories)
[:p "Other vars: " (category-line project namespace categories :other)])]))))
(defn- index-page [project]
(html5
[:head
default-meta
[:title (h (:name project)) " " (h (:version project))]]
[:body
(header project)
(primary-sidebar project)
[:div#content.namespace-index
[:h1 (project-title project)]
(when-let [license (-> (get-in project [:license :name]) (strip-prefix "the "))]
[:h5.license
"Released under the "
(if-let [url (get-in project [:license :url])]
(link-to url license)
license)])
(when-let [description (:description project)]
[:div.doc [:p (h (add-ending description "."))]])
(when-let [package (package project)]
(list
[:h2 "Installation"]
[:p "To install, add the following dependency to your project or build file:"]
[:pre.deps (h (str "[" package " " (pr-str (:version project)) "]"))]))
(when-let [docs (seq (:documents project))]
(list
[:h2 "Topics"]
[:ul.topics
(for [doc docs]
[:li (link-to (doc-filename doc) (h (:title doc)))])]))
[:h2 "Namespaces"]
(for [namespace (sort-by :name (:namespaces project))]
[:div.namespace
[:h3 (link-to (ns-filename namespace) (h (:name namespace)))]
[:div.doc (format-docstring project nil (update-in namespace [:doc] summary))]
[:div.doc (categories-part project namespace)]])]]))
(defmulti format-document
"Format a document into HTML."
(fn [_ doc] (:format doc)))
(defmethod format-document :markdown [project doc]
#_[:div.markdown (.markdownToHtml pegdown (:content doc) (link-renderer project))]
[:div.markdown (markdown-to-html (:content doc) project)])
(defn- document-page [project doc]
(html5
[:head
default-meta
[:title (h (:title doc))]]
[:body
(header project)
(primary-sidebar project doc)
[:div#content.document
[:div.doc (format-document project doc)]]]))
(defn- var-usage [var]
(for [arglist (:arglists var)]
(list* (:name var) arglist)))
(defn- added-and-deprecated-docs [var]
(list
(when-let [added (:added var)]
[:h4.added "added in " added])
(when-let [deprecated (:deprecated var)]
[:h4.deprecated "deprecated" (when (string? deprecated) (str " in " deprecated))])))
(defn- remove-namespaces [x namespaces]
(if (and (symbol? x) (contains? namespaces (namespace x)))
(symbol (name x))
x))
(defn- normalize-types [types]
(read-string (pr-str types)))
(defn- pprint-str [x]
(with-out-str (pp/pprint x)))
(defn- type-sig [namespace var]
(let [implied-namespaces #{(str (:name namespace)) "clojure.core.typed"}]
(->> (:type-sig var)
(normalize-types)
(walk/postwalk #(remove-namespaces % implied-namespaces))
(pprint-str))))
(defn- escape-value
[s]
(escape-html (str/escape (str s) escape-map)))
(defn- var-docs [project namespace var]
(let [constant-value ((:name var) (:constants namespace))
examples ((:name var) (:examples namespace))]
[:div.public.anchor {:id (h (var-id (:name var)))}
[:h3 (h (:name var))]
(when-not (= (:type var) :var)
[:h4.type (name (:type var))])
(when constant-value
[:h4.dynamic "const"])
(when (:dynamic var)
[:h4.dynamic "dynamic"])
(added-and-deprecated-docs var)
(when (:type-sig var)
[:div.type-sig
[:pre (h (type-sig namespace var))]])
[:div.usage
(for [form (var-usage var)]
[:code (h (pr-str form))])]
(when constant-value
[:div [:div.markdown [:code {:class "hljs clojure"} ";;=> " (escape-value constant-value)]]])
[:div.doc (format-docstring project namespace var)]
(when (seq examples)
[:div.markdown
[:h4 "Examples"]
(for [ex examples]
(ex/format-example :html (update ex :doc #(format-markdown % project namespace))))])
(when-let [members (seq (:members var))]
[:div.members
[:h4 "members"]
[:div.inner
(let [project (dissoc project :source-uri)]
(map (partial var-docs project namespace) members))]])
(when (:source-uri project)
(if (:path var)
[:div.src-link (link-to (var-source-uri project var) "view source")]
(println "Could not generate source link for" (:name var))))]))
(defn- constants-part
[project namespace]
(when-not (empty? (:constants namespace))
[:div.markdown
[:h4 "Constants"]
[:ul
(for [[n v] (sort (:constants namespace))]
[:li [:a {:href (find-wiki-link project namespace (str n))} n] " = " [:code (escape-value v)]])]]))
(defn- snippets-part
[project namespace]
(when-let [snippets (seq (filter (comp not :hidden) (:snippets namespace)))]
[:div.markdown
[:h4 "Code snippets"]
(for [{:keys [doc fn-str]} (vals snippets)]
[:div
[:blockquote (format-markdown doc project namespace)]
[:pre [:code fn-str]]])]))
(defn- namespace-page [project namespace]
(html5
[:head
default-meta
[:title (h (:name namespace)) " documentation"]]
[:body
(header project)
(primary-sidebar project namespace)
(vars-sidebar namespace)
[:div#content.namespace-docs
[:h1#top.anchor (h (:name namespace))]
(added-and-deprecated-docs namespace)
[:div.doc
(format-docstring project namespace namespace)
(categories-part project namespace)
(constants-part project namespace)
(snippets-part project namespace)]
(for [var (sorted-public-vars namespace)]
(var-docs project namespace var))]]))
#_(defn- mkdirs [output-dir & dirs]
(doseq [dir dirs]
(.mkdirs (io/file output-dir dir))))
(defn- write-index
  "Render the project's index page, run the configured HTML transforms over
  it, and write the result to index.html inside output-dir."
  [output-dir project]
  (let [target (io/file output-dir "index.html")
        markup (transform-html project (index-page project))]
    (spit target markup)))
(defn- write-namespaces
  "Render each namespace in the project to its own HTML file, applying the
  configured HTML transforms before writing."
  [output-dir project]
  (doseq [ns (:namespaces project)]
    (let [target (ns-filepath output-dir ns)
          markup (transform-html project (namespace-page project ns))]
      (spit target markup))))
(defn- write-documents
  "Render each standalone document in the project to its own HTML file,
  applying the configured HTML transforms before writing."
  [output-dir project]
  (run! (fn [document]
          (spit (doc-filepath output-dir document)
                (transform-html project (document-page project document))))
        (:documents project)))
(defn- theme-path
  "Return the classpath resource directory for a theme. A theme is either a
  bare name (keyword/symbol/string) or a [name params] vector; only the
  name contributes to the path."
  [theme]
  (str "codox/theme/"
       (name (if (vector? theme) (first theme) theme))))
(defn- insert-params
  "Substitute theme parameters into theme-data. Every keyword occurring
  anywhere in the (de-:defaults-ed) theme data is replaced by the matching
  entry in the user params (second element of a [name params] theme vector),
  falling back to the theme's own :defaults map, and finally left as-is.
  Both params and defaults must be maps; violations fail fast via assert."
  [theme-data theme]
  (let [params (if (vector? theme) (or (second theme) {}) {})
        defaults (:defaults theme-data {})]
    (assert (map? params) "Theme parameters must be a map")
    (assert (map? defaults) "Theme defaults must be a map")
    (->> (dissoc theme-data :defaults)
         (walk/postwalk #(if (keyword? %) (params % (defaults % %)) %)))))
(defn- read-theme
  "Locate a theme's theme.edn resource on the classpath, parse it as EDN,
  and substitute the theme's parameters into it. Returns nil when the
  resource does not exist (some-> short-circuits on the nil io/resource)."
  [theme]
  (some-> (theme-path theme)
          (str "/theme.edn")
          io/resource slurp
          edn/read-string
          (insert-params theme)))
(defn- make-parent-dir
  "Ensure the parent directory of `file` exists, creating any missing
  intermediate directories."
  [file]
  (.mkdirs (.getParentFile (io/file file))))
(defn- copy-resource
  "Copy the named classpath resource to output-path (anything io/copy
  accepts as a sink, e.g. a File)."
  [resource output-path]
  (io/copy (io/input-stream (io/resource resource)) output-path))
(defn- copy-theme-resources
  "For every theme configured on the project, copy each static resource the
  theme declares (its :resources list in theme.edn) from the classpath into
  the output directory, creating parent directories as needed."
  [output-dir project]
  (doseq [theme (:themes project)]
    (let [root (theme-path theme)]
      (doseq [path (:resources (read-theme theme))]
        (let [output-file (io/file output-dir path)]
          (make-parent-dir output-file)
          (copy-resource (str root "/" path) output-file))))))
(defn- apply-one-theme
  "Append the HTML :transforms declared by one theme onto the project's
  [:html :transforms] list. Throws IllegalArgumentException when the theme
  cannot be found on the classpath (read-theme returned nil)."
  [project theme]
  (if-let [{:keys [transforms]} (read-theme theme)]
    (update-in project [:html :transforms] concat transforms)
    (throw (IllegalArgumentException. (format "Could not find Codox theme: %s" theme)))))
(defn- apply-theme-transforms
  "Fold the HTML transforms of every configured theme into the project map."
  [project]
  (reduce apply-one-theme project (:themes project)))
(defn- maybe-assoc
  "Associate (f ns) under key-target in n, unless key-in appears in the
  project's :exclude-metadoc collection, in which case n is returned
  unchanged."
  [project ns n key-in key-target f]
  (if (contains? (:exclude-metadoc project) key-in)
    n
    (assoc n key-target (f ns))))
(defn- add-sections
  "Enrich every namespace map in the project with extracted metadoc
  sections — constants, examples, categories (stored as :categories-list)
  and snippets — skipping any section named in :exclude-metadoc. The live
  namespace object (looked up via find-ns from the map's :name) is handed
  to each extractor."
  [project]
  (assoc project :namespaces
    (map #(let [ns (find-ns (:name %))
                ma (partial maybe-assoc project ns)]
            (as-> % n
              (ma n :constants :constants er/extract-constants)
              (ma n :examples :examples er/extract-examples)
              (ma n :categories :categories-list er/extract-categories)
              (ma n :snippets :snippets er/extract-snippets)))
         (:namespaces project))))
(defn write-docs
  "Take raw documentation info and turn it into formatted HTML.
  Loads example data, then enriches the project (theme HTML transforms plus
  extracted metadoc sections) and writes everything beneath output-path:
  theme resources, the index page, one page per namespace, and one page per
  standalone document. Prints \"Done\" on completion."
  [{:keys [output-path] :as project}]
  (er/load-examples)
  (let [project (-> project
                    (apply-theme-transforms)
                    (add-sections))]
    (doto output-path
      (copy-theme-resources project)
      (write-index project)
      (write-namespaces project)
      (write-documents project))
    (println "Done")))
|
15d2b1700c1608bb59cd32fb0dec64bbc967fecd55aa2f7ce69224a142182a22 | drym-org/qi | qi.rkt | #lang racket/base
;; Flow definitions exported for the Qi SDK profiling harness.
(provide cond-fn
         compose-fn
         root-mean-square
         fact
         ping
         eratos
         collatz
         filter-map-fn
         filter-map-values
         double-list
         double-values)
;; Only `sqr` and `range` are pulled from their modules; `qi` supplies the
;; flow forms used below (define-flow, define-switch, ~>, -<, ><, ...).
(require (only-in math sqr)
         (only-in racket/list range)
         qi)
;; Piecewise flow: inputs below 5 are squared, inputs above 5 incremented,
;; and 5 itself passes through unchanged (`_` is the identity flow).
(define-switch cond-fn
  [(< 5) sqr]
  [(> 5) add1]
  [else _])
;; Straight-line composition: thread the input through add1, sqr, sub1.
(define-flow compose-fn
  (~> add1 sqr sub1))
;; Root mean square of a list: tee (-<) the list into the sum of squares
;; (△ separates the list into values, >< squares each, + sums) and its
;; length, divide the two, then take the square root.
(define-flow root-mean-square
  (~> (-< (~>> △ (>< sqr) +)
          length) / sqrt))
;; Recursive factorial: inputs below 2 yield 1; otherwise the input is teed
;; into itself and (fact (sub1 n)), and the two branches are multiplied.
(define-switch fact
  [(< 2) 1]
  [else (~> (-< _ (~> sub1 fact)) *)])
;; Doubly-recursive benchmark flow: inputs below 2 pass through; otherwise
;; the input is teed into sub1 and the (- 2) template, ping is applied to
;; each branch (><), and the results are summed.
;; NOTE(review): qi templates append the input as the trailing argument —
;; confirm the intended operand order of (- 2) against the Qi docs.
(define-switch ping
  [(< 2) _]
  [else (~> (-< sub1
                (- 2)) (>< ping) +)])
;; Sieve of Eratosthenes as a feedback flow: seed the loop with an empty
;; primes list (gen null) plus the separated candidates 2..n, then loop
;; while candidates remain (live? after blocking the primes slot); each
;; pass conses the leading candidate onto the primes and keeps only the
;; candidates it does not divide (remainder ≠ 0); finish by reversing the
;; accumulated primes (then clause).
;; NOTE(review): depends on qi's feedback/while/then and clos semantics —
;; consult the Qi reference before restructuring.
(define-flow (eratos n)
  (~> (-< (gen null) (~>> add1 (range 2) △))
      (feedback (while (~> (block 1) live?))
                (then (~> 1> reverse))
                (-< (~> (select 1 2) X cons)
                    (~> (-< (~>> 2> (clos (~> remainder (not (= 0)))))
                            (block 1 2)) pass)))))
;; Collatz trajectory as a list: values <= 1 terminate (wrapped in a list);
;; an odd input is consed onto the trajectory of its (* 3)/(+ 1) image, an
;; even input onto the trajectory of its (quotient 2) image.
;; NOTE(review): qi templates append the input as the trailing argument —
;; confirm the intended operand order of (quotient 2).
(define-flow collatz
  (switch
   [(<= 1) list]
   [odd? (~> (-< _ (~> (* 3) (+ 1) collatz))
             cons)]
   [even? (~> (-< _ (~> (quotient 2) collatz))
              cons)]))
;; filter-map over a list: separate the list (△), square each odd element
;; while grounding (discarding, ⏚) the even ones, and collect back (▽).
(define-flow filter-map-fn
  (~> △ (>< (if odd? sqr ⏚)) ▽))
;; The same filter-map, operating directly on multiple values.
(define-flow filter-map-values
  (>< (if odd? sqr ⏚)))
;; Duplicate every element of a list: each value is teed into two copies.
(define-flow double-list
  (~> △ (>< (-< _ _)) ▽))
;; Duplicate every value in a multiple-values stream.
(define-flow double-values
  (>< (-< _ _)))
| null | https://raw.githubusercontent.com/drym-org/qi/a8bd930eda09e07b8f44fd2e7100b7be96d446ea/qi-sdk/profile/qi.rkt | racket | #lang racket/base
(provide cond-fn
compose-fn
root-mean-square
fact
ping
eratos
collatz
filter-map-fn
filter-map-values
double-list
double-values)
(require (only-in math sqr)
(only-in racket/list range)
qi)
(define-switch cond-fn
[(< 5) sqr]
[(> 5) add1]
[else _])
(define-flow compose-fn
(~> add1 sqr sub1))
(define-flow root-mean-square
(~> (-< (~>> △ (>< sqr) +)
length) / sqrt))
(define-switch fact
[(< 2) 1]
[else (~> (-< _ (~> sub1 fact)) *)])
(define-switch ping
[(< 2) _]
[else (~> (-< sub1
(- 2)) (>< ping) +)])
(define-flow (eratos n)
(~> (-< (gen null) (~>> add1 (range 2) △))
(feedback (while (~> (block 1) live?))
(then (~> 1> reverse))
(-< (~> (select 1 2) X cons)
(~> (-< (~>> 2> (clos (~> remainder (not (= 0)))))
(block 1 2)) pass)))))
(define-flow collatz
(switch
[(<= 1) list]
[odd? (~> (-< _ (~> (* 3) (+ 1) collatz))
cons)]
[even? (~> (-< _ (~> (quotient 2) collatz))
cons)]))
(define-flow filter-map-fn
(~> △ (>< (if odd? sqr ⏚)) ▽))
(define-flow filter-map-values
(>< (if odd? sqr ⏚)))
(define-flow double-list
(~> △ (>< (-< _ _)) ▽))
(define-flow double-values
(>< (-< _ _)))
| |
b7a8985d475ddd5bc0d741d742774ed9aab20904edd7601aebf2043dd895b918 | na4zagin3/satyrographos | template_docMake_ja.ml |
SPDX - License - Identifier : CC0 - 1.0
SPDX-License-Identifier: CC0-1.0
*)
(* Registered template identifier; the [experimental] prefix and the @ja
   suffix mark this as the experimental Japanese doc-make template. *)
let name = "[experimental]doc-make@ja"
(* (filename, contents) pair for the project-local SATySFi package that the
   generated document imports. The payload is SATySFi source (a framed
   block, a display-box block and a \factorial math command) and must be
   emitted verbatim. *)
let local_satyh_template =
  "local.satyh",
  {|% プロジェクト用函数・コマンド定義用ファイル
@require: code
@require: math
let-block ctx +frame content =
  let pads = (10pt, 10pt, 10pt, 10pt) in
  let decoset = VDecoSet.simple-frame-stroke 1pt (Color.gray 0.75) in
  block-frame-breakable ctx pads decoset (fun ctx -> read-block ctx content)
let-block ctx +display-boxes content code =
  read-block (ctx |> set-paragraph-margin 12pt 0pt) '<+frame(content);>
    +++ read-block (ctx |> set-paragraph-margin 0pt 12pt) '<+code(code);>
% 数式コマンドの定義
let-math \factorial x =
  ${#x \mathpunct{\mathrm-token!(`!`)}}
|}
let main_saty_template =
"main.saty",
{|% 文書ファイル
% 文書クラスパッケージ
@require: stdjabook
% SATySFi標準パッケージ
@require: annot
@require: code
@require: math
@require: itemize
% Satyrographosパッケージ
@require: fss/fss
@require: fss/fonts
@require: fss/style
% プロジェクト内パッケージ
@import: local
document (|
title = {表題};
author = {名前};
show-title = true;
show-toc = false;
|) '<
+p {
このテンプレートは\SATySFi; 0.0.5用であり、
\SATySFi;はいまだ開発段階にあるので、
\font-style[bold]{破壊的変更に注意すべし}。
}
+p {
オンライン
\listing{
* \href(``){`demo.saty`} is a brief introduction to \SATySFi;.
* Please join \href(`-Wiki#satsysfi-slack`){\emph{SATySFi Slack}}!
* \SATySFi;本体に付属している\href(``){デモファイル}も参考にすべし。
}%
}
+p {
`+p { ... }`は段落を表す。
細かく言えば、`+p`は行内テキスト`{ ... }`を引数として取る段落コマンドである。
}
+p {
行内数式は数式オブジェクト`${ ... }`で表される。例:${x^2 - x + 1}。
}
+p {
基本的な数式コマンドは\LaTeX;のものに似ている。例:${f: A \to \mathbb{R}}。
}
+p {
数式コマンドや\LaTeX;のコマンドとは異なり、行内コマンドや段落コマンドは終端文字`;`を要する。但し、最後の引数が行内テキスト`{ ... }`か段落テキスト`< ... >`である場合を除く。例:\emph{emph}、\code(`code`);。
}
+p({
テキストコマンドの各引数は括弧で囲まれる。
例:\emph{abc}、\emph({abc});。
});
+p {
別行立て数式は`\eqn`に数式オブジェクトを適用することで得られる。例:
\eqn(${
\int_{M} d\alpha = \int_{\partial M}\alpha.
});%
同様に別行立てコード例は`\d-code`で得られる。
\d-code(```
\eqn(${
\int_{M} d\alpha = \int_{\partial M}\alpha
});
```);%
}
+p {
`\math-list`コマンドは数式オブジェクトの排列を一つ引数として取る。
\math-list[
${\delta_{ij} = \cases![
(${1}, {${i = j}});
(${0}, {otherwise});
]};
${\epsilon_{a_{1}a_{2}\cdots a_{n}} =
\lower{\prod}{1\leq i\leq j\leq n}
\mathop{\mathrm{sgn}}\paren{a_{j} - a_{i}}
};
];%
`\align`コマンドは数式オブジェクトの排列の排列を一つ引数として取る。
\align[
[ ${\pi};
${=\paren{
\frac{2\sqrt{2}}{99^{2}}\upper{\lower{\sum}{n=0}}{\infty}
\frac{
\factorial{\paren{4n}}
\paren{1103 + 26390n}
}{
\paren{4^{n} 99^{n} \factorial{n}}^{4}
}
}^{-1}
};
];
[ ${};
${=\paren{
\int_{-\infty}^{\infty}
e^{
-x^2
}
\mathrm{d}x
}^{ 2 }
};
];
];%
}
+section{節} <
+p {
節は
\code(`+section{節題} < 段落コマンド... >`);.
の形式で表される。
}
+subsection{項} <
+p {
`+subsection`コマンドもある。
}
>
>
+section{パッケージ} <
+p {
`@require`指令を用いることで、\SATySFi;標準パッケージやSatyrographosパッケージを読み込むことができる。
}
+code (`
@require: math
`);
+p {
`@import`指令は現在のファイルからの相対パスに存在するパッケージを読み込む。
}
+code (`
% この指令は local.satyh ファイルを読み込む
@import: ./local
`);
>
>
|}
(* (filename, contents) pair for the Satyristes build manifest: declares the
   manifest language version, the "main" document target built via make,
   and its library dependencies. Payload is emitted verbatim. *)
let satyristes_template =
  "Satyristes",
  {|(lang "0.0.3")
(doc
 (name "main")
 (build ((make)))
 (dependencies
  (;; Standard library
   dist
   ;; Third-party library
   fss
   )))
|}
(* (filename, contents) pair for the generated README; the @@library@@
   placeholder is substituted with the library name elsewhere. *)
let readme_template =
  "README.md",
  {|# @@library@@
素敵な文書
## 処理方法
`satyrographos build`コマンドを走らせること。
|}
(* Every file this template emits; the .gitignore and Makefile entries are
   shared with the English doc-make template module. *)
let files = [
  main_saty_template;
  local_satyh_template;
  satyristes_template;
  Template_docMake_en.gitignore_template;
  Template_docMake_en.makefile_template;
  readme_template;
]
(* Registration entry exported to the template registry: the template id
   paired with its human-readable description and file list. *)
let template =
  name, ("Document with Makefile (ja)", files)
| null | https://raw.githubusercontent.com/na4zagin3/satyrographos/9dbccf05138510c977a67c859bbbb48755470c7f/src/template/template_docMake_ja.ml | ocaml |
SPDX - License - Identifier : CC0 - 1.0
SPDX-License-Identifier: CC0-1.0
*)
let name = "[experimental]doc-make@ja"
let local_satyh_template =
"local.satyh",
{|% プロジェクト用函数・コマンド定義用ファイル
@require: code
@require: math
let-block ctx +frame content =
let pads = (10pt, 10pt, 10pt, 10pt) in
let decoset = VDecoSet.simple-frame-stroke 1pt (Color.gray 0.75) in
block-frame-breakable ctx pads decoset (fun ctx -> read-block ctx content)
let-block ctx +display-boxes content code =
read-block (ctx |> set-paragraph-margin 12pt 0pt) '<+frame(content);>
+++ read-block (ctx |> set-paragraph-margin 0pt 12pt) '<+code(code);>
% 数式コマンドの定義
let-math \factorial x =
${#x \mathpunct{\mathrm-token!(`!`)}}
|}
let main_saty_template =
"main.saty",
{|% 文書ファイル
% 文書クラスパッケージ
@require: stdjabook
% SATySFi標準パッケージ
@require: annot
@require: code
@require: math
@require: itemize
% Satyrographosパッケージ
@require: fss/fss
@require: fss/fonts
@require: fss/style
% プロジェクト内パッケージ
@import: local
document (|
title = {表題};
author = {名前};
show-title = true;
show-toc = false;
|) '<
+p {
このテンプレートは\SATySFi; 0.0.5用であり、
\SATySFi;はいまだ開発段階にあるので、
\font-style[bold]{破壊的変更に注意すべし}。
}
+p {
オンライン
\listing{
* \href(``){`demo.saty`} is a brief introduction to \SATySFi;.
* Please join \href(`-Wiki#satsysfi-slack`){\emph{SATySFi Slack}}!
* \SATySFi;本体に付属している\href(``){デモファイル}も参考にすべし。
}%
}
+p {
`+p { ... }`は段落を表す。
細かく言えば、`+p`は行内テキスト`{ ... }`を引数として取る段落コマンドである。
}
+p {
行内数式は数式オブジェクト`${ ... }`で表される。例:${x^2 - x + 1}。
}
+p {
基本的な数式コマンドは\LaTeX;のものに似ている。例:${f: A \to \mathbb{R}}。
}
+p {
数式コマンドや\LaTeX;のコマンドとは異なり、行内コマンドや段落コマンドは終端文字`;`を要する。但し、最後の引数が行内テキスト`{ ... }`か段落テキスト`< ... >`である場合を除く。例:\emph{emph}、\code(`code`);。
}
+p({
テキストコマンドの各引数は括弧で囲まれる。
例:\emph{abc}、\emph({abc});。
});
+p {
別行立て数式は`\eqn`に数式オブジェクトを適用することで得られる。例:
\eqn(${
\int_{M} d\alpha = \int_{\partial M}\alpha.
});%
同様に別行立てコード例は`\d-code`で得られる。
\d-code(```
\eqn(${
\int_{M} d\alpha = \int_{\partial M}\alpha
});
```);%
}
+p {
`\math-list`コマンドは数式オブジェクトの排列を一つ引数として取る。
\math-list[
${\delta_{ij} = \cases![
(${1}, {${i = j}});
(${0}, {otherwise});
]};
${\epsilon_{a_{1}a_{2}\cdots a_{n}} =
\lower{\prod}{1\leq i\leq j\leq n}
\mathop{\mathrm{sgn}}\paren{a_{j} - a_{i}}
};
];%
`\align`コマンドは数式オブジェクトの排列の排列を一つ引数として取る。
\align[
[ ${\pi};
${=\paren{
\frac{2\sqrt{2}}{99^{2}}\upper{\lower{\sum}{n=0}}{\infty}
\frac{
\factorial{\paren{4n}}
\paren{1103 + 26390n}
}{
\paren{4^{n} 99^{n} \factorial{n}}^{4}
}
}^{-1}
};
];
[ ${};
${=\paren{
\int_{-\infty}^{\infty}
e^{
-x^2
}
\mathrm{d}x
}^{ 2 }
};
];
];%
}
+section{節} <
+p {
節は
\code(`+section{節題} < 段落コマンド... >`);.
の形式で表される。
}
+subsection{項} <
+p {
`+subsection`コマンドもある。
}
>
>
+section{パッケージ} <
+p {
`@require`指令を用いることで、\SATySFi;標準パッケージやSatyrographosパッケージを読み込むことができる。
}
+code (`
@require: math
`);
+p {
`@import`指令は現在のファイルからの相対パスに存在するパッケージを読み込む。
}
+code (`
% この指令は local.satyh ファイルを読み込む
@import: ./local
`);
>
>
|}
let satyristes_template =
"Satyristes",
{|(lang "0.0.3")
(doc
(name "main")
(build ((make)))
(dependencies
(;; Standard library
dist
;; Third-party library
fss
)))
|}
let readme_template =
"README.md",
{|# @@library@@
素敵な文書
## 処理方法
`satyrographos build`コマンドを走らせること。
|}
let files = [
main_saty_template;
local_satyh_template;
satyristes_template;
Template_docMake_en.gitignore_template;
Template_docMake_en.makefile_template;
readme_template;
]
let template =
name, ("Document with Makefile (ja)", files)
| |
89f1b61d3b502f76cc051f3bdacfd1dffbafd3daf43aa6ed0d744b223d98faf8 | mu-chaco/ReWire | X.hs | module Mods.C.X (x, y, X(..), Y, Y(YB)) where
-- | Two-constructor test type; exported with all constructors via @X(..)@.
data X = XA | XB
-- | Two-constructor test type; per the export list only 'YB' is exported
-- alongside the abstract type.
data Y = YA | YB
-- | Canonical 'X' value.
x :: X
x = XA
-- | Canonical 'Y' value.
y :: Y
y = YA
| null | https://raw.githubusercontent.com/mu-chaco/ReWire/a8dcea6ab0989474988a758179a1d876e2c32370/tests/regression/Mods/C/X.hs | haskell | module Mods.C.X (x, y, X(..), Y, Y(YB)) where
data X = XA | XB
data Y = YA | YB
x :: X
x = XA
y :: Y
y = YA
| |
1d66d021b720380410e50a50b775c1cd933a6e44306e586a39c0c54b4c44263f | skrah/minicaml | Shared.mli |
* Copyright ( c ) 2015 . All rights reserved .
*
* This file is distributed under the terms of the Q Public License
* version 1.0 .
* Copyright (c) 2015 Stefan Krah. All rights reserved.
*
* This file is distributed under the terms of the Q Public License
* version 1.0.
*)
(** Report an internal (compiler-bug) error and abort; never returns. *)
val internal_error : string -> 'a
(** Report a user-facing error at the given source location and abort;
    never returns. *)
val error : Location.t -> string -> 'a
(** Nesting level, represented as an int. *)
type level = int
(** Whether a binding may refer to itself. *)
type rec_flag = Nonrecursive | Recursive
(** Mutability of a binding or field. *)
type mutable_flag = Immutable | Mutable
(** How a value binding was introduced. *)
type value_kind = Vardec | Parameter | Loopvar | External | Ignore
(** Whether a function receives its parameters as a tuple or curried. *)
type param_kind = Param_tuple | Param_curried
(** Binary operators of the source language; the [*dot] names follow the
    OCaml convention of float variants of the int operators. *)
type binary_operator =
    Op_plus
  | Op_minus
  | Op_times
  | Op_divide
  | Op_plusdot
  | Op_minusdot
  | Op_timesdot
  | Op_dividedot
  | Op_eq
  | Op_eqeq
  | Op_ne
  | Op_lt
  | Op_le
  | Op_gt
  | Op_ge
  | Op_and
  | Op_or
(** Assignment operator kinds; concrete syntax is defined by the parser. *)
type assign_operator = Op_assign_arrow | Op_assign_ref
(** Printable name of a value kind. *)
val var_kind_repr : value_kind -> string
(** Printable representation of a binary operator. *)
val op_repr : binary_operator -> string
(** Printable representation of an assignment operator. *)
val assign_op_repr : assign_operator -> string
(** [true] iff the operator is a boolean connective. *)
val is_boolop : binary_operator -> bool
(** [true] iff the operator is integer arithmetic. *)
val is_intop : binary_operator -> bool
(** [true] iff the operator is floating-point arithmetic. *)
val is_floatop : binary_operator -> bool
(** [true] iff the operator is a comparison. *)
val is_cmpop : binary_operator -> bool
(** [true] iff the operator tests (in)equality. *)
val is_eqop : binary_operator -> bool
| null | https://raw.githubusercontent.com/skrah/minicaml/e5f5cad7fdbcfc11561f717042fae73fa743823f/Shared.mli | ocaml |
* Copyright ( c ) 2015 . All rights reserved .
*
* This file is distributed under the terms of the Q Public License
* version 1.0 .
* Copyright (c) 2015 Stefan Krah. All rights reserved.
*
* This file is distributed under the terms of the Q Public License
* version 1.0.
*)
val internal_error : string -> 'a
val error : Location.t -> string -> 'a
type level = int
type rec_flag = Nonrecursive | Recursive
type mutable_flag = Immutable | Mutable
type value_kind = Vardec | Parameter | Loopvar | External | Ignore
type param_kind = Param_tuple | Param_curried
type binary_operator =
Op_plus
| Op_minus
| Op_times
| Op_divide
| Op_plusdot
| Op_minusdot
| Op_timesdot
| Op_dividedot
| Op_eq
| Op_eqeq
| Op_ne
| Op_lt
| Op_le
| Op_gt
| Op_ge
| Op_and
| Op_or
type assign_operator = Op_assign_arrow | Op_assign_ref
val var_kind_repr : value_kind -> string
val op_repr : binary_operator -> string
val assign_op_repr : assign_operator -> string
val is_boolop : binary_operator -> bool
val is_intop : binary_operator -> bool
val is_floatop : binary_operator -> bool
val is_cmpop : binary_operator -> bool
val is_eqop : binary_operator -> bool
| |
ed4b0aa2e36c2ea7df70f3f826fdd616e8f41d8e3f33e255f067511fc6cc7635 | NorfairKing/smos | Gen.hs | # OPTIONS_GHC -fno - warn - orphans #
module Smos.GitHub.Issue.Gen where
import Data.GenValidity
import Data.GenValidity.Text ()
import GitHub
import Smos.GitHub.Issue
instance GenValid GitHubUrl
instance GenValid IssueNumber
instance GenValid (Name a)
| null | https://raw.githubusercontent.com/NorfairKing/smos/3b7021c22915ae16ae721c7da60d715e24f4e6bb/smos-github/test/Smos/GitHub/Issue/Gen.hs | haskell | # OPTIONS_GHC -fno - warn - orphans #
module Smos.GitHub.Issue.Gen where
import Data.GenValidity
import Data.GenValidity.Text ()
import GitHub
import Smos.GitHub.Issue
instance GenValid GitHubUrl
instance GenValid IssueNumber
instance GenValid (Name a)
| |
433c0144a613ffb6693da436026cf7ca325ca111c8420146b6d155d826bbb654 | crategus/cl-cffi-gtk | gtk.scrollable.lisp | ;;; ----------------------------------------------------------------------------
gtk.scrollable.lisp
;;;
The documentation of this file is taken from the GTK+ 3 Reference Manual
Version 3.24 and modified to document the Lisp binding to the GTK+ library .
;;; See <>. The API documentation of the Lisp binding is
available from < -cffi-gtk/ > .
;;;
Copyright ( C ) 2012 - 2021
;;;
;;; This program is free software: you can redistribute it and/or modify
;;; it under the terms of the GNU Lesser General Public License for Lisp
as published by the Free Software Foundation , either version 3 of the
;;; License, or (at your option) any later version and with a preamble to
the GNU Lesser General Public License that clarifies the terms for use
;;; with Lisp programs and is referred as the LLGPL.
;;;
;;; This program is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details .
;;;
You should have received a copy of the GNU Lesser General Public
License along with this program and the preamble to the Gnu Lesser
;;; General Public License. If not, see </>
;;; and <>.
;;; ----------------------------------------------------------------------------
;;;
GtkScrollable
;;;
;;; An interface for scrollable widgets
;;;
;;; Types and Values
;;;
GtkScrollable
;;; GtkScrollablePolicy
;;;
;;; Functions
;;;
;;; gtk_scrollable_get_hadjustment Accessor
;;; gtk_scrollable_set_hadjustment Accessor
;;; gtk_scrollable_get_vadjustment Accessor
gtk_scrollable_set_vadjustment Accessor
;;;
;;; gtk_scrollable_get_hscroll_policy Accessor
;;; gtk_scrollable_set_hscroll_policy Accessor
;;; gtk_scrollable_get_vscroll_policy Accessor
;;; gtk_scrollable_set_vscroll_policy Accessor
;;;
;;; gtk_scrollable_get_border
;;;
;;; Properties
;;;
;;; GtkAdjustment* hadjustment Read / Write / Construct
GtkScrollablePolicy - policy Read / Write
GtkAdjustment * vadjustment Read / Write / Construct
;;; GtkScrollablePolicy vscroll-policy Read / Write
;;;
;;;Object Hierarchy
;;;
;;; GInterface
╰ ─ ─ GtkScrollable
;;; ----------------------------------------------------------------------------
(in-package :gtk)
;;; ----------------------------------------------------------------------------
;;; enum GtkScrollablePolicy
;;; ----------------------------------------------------------------------------
;; Foreign enum mirroring C's GtkScrollablePolicy: :minimum / :natural
;; select whether scroll adjustments are based on the widget's minimum or
;; natural size. Exported, with its GType initializer registered.
(define-g-enum "GtkScrollablePolicy" gtk-scrollable-policy
  (:export t
   :type-initializer "gtk_scrollable_policy_get_type")
  (:minimum 0)
  (:natural 1))
#+cl-cffi-gtk-documentation
(setf (gethash 'gtk-scrollable-policy atdoc:*symbol-name-alias*) "Enum"
(gethash 'gtk-scrollable-policy atdoc:*external-symbols*)
"@version{2021-3-19}
@begin{short}
Defines the policy to be used in a scrollable widget when updating the
scrolled window adjustments in a given orientation.
@end{short}
@begin{pre}
(define-g-enum \"GtkScrollablePolicy\" gtk-scrollable-policy
(:export t
:type-initializer \"gtk_scrollable_policy_get_type\")
(:minimum 0)
(:natural 1))
@end{pre}
@begin[code]{table}
@entry[:minimum]{Scrollable adjustments are based on the minimum size.}
@entry[:natural]{Scrollable adjustments are based on the natural size.}
@end{table}
@see-class{gtk-scrollable}")
;;; ----------------------------------------------------------------------------
GtkScrollable
;;; ----------------------------------------------------------------------------
;; Lisp-side definition of the GtkScrollable GObject interface. Each entry
;; is (slot accessor property-name gtype readable writable); all four
;; properties are read/write and get generated accessor functions.
(define-g-interface "GtkScrollable" gtk-scrollable
  (:export t
   :type-initializer "gtk_scrollable_get_type")
  (hadjustment
   gtk-scrollable-hadjustment
   "hadjustment" "GtkAdjustment" t t)
  (hscroll-policy
   gtk-scrollable-hscroll-policy
   "hscroll-policy" "GtkScrollablePolicy" t t)
  (vadjustment
   gtk-scrollable-vadjustment
   "vadjustment" "GtkAdjustment" t t)
  (vscroll-policy
   gtk-scrollable-vscroll-policy
   "vscroll-policy" "GtkScrollablePolicy" t t))
#+cl-cffi-gtk-documentation
(setf (gethash 'gtk-scrollable atdoc:*class-name-alias*) "Interface"
(documentation 'gtk-scrollable 'type)
"@version{2021-3-19}
@begin{short}
The @sym{gtk-scrollable} interface is an interface that is implemented by
widgets with native scrolling ability.
@end{short}
To implement this interface you should override the @code{hadjustment}
and @code{vadjustment} properties.
@subheading{Creating a scrollable widget}
All scrollable widgets should do the following.
@begin{itemize}
@begin{item}
When a parent widget sets the scrollable child widget’s adjustments,
the widget should populate the adjustments’ @slot[gtk-adjustment]{lower},
@slot[gtk-adjustment]{upper}, @slot[gtk-adjustment]{step-increment},
@slot[gtk-adjustment]{page-increment} and @slot[gtk-adjustment]{page-size}
properties and connect to the \"value-changed\" signal.
@end{item}
@begin{item}
Because its preferred size is the size for a fully expanded widget, the
scrollable widget must be able to cope with underallocations. This means
that it must accept any value passed to its
@code{GtkWidgetClass.size_allocate()} function.
@end{item}
@begin{item}
When the parent allocates space to the scrollable child widget, the
widget should update the adjustments’ properties with new values.
@end{item}
@begin{item}
When any of the adjustments emits the \"value-changed\" signal, the
scrollable widget should scroll its contents.
@end{item}
@end{itemize}
@see-slot{gtk-scrollable-hadjustment}
@see-slot{gtk-scrollable-hscroll-policy}
@see-slot{gtk-scrollable-vadjustment}
@see-slot{gtk-scrollable-vscroll-policy}
@see-class{gtk-scrollbar}
@see-class{gtk-scrolled-window}")
;;; ----------------------------------------------------------------------------
;;; Property and Accessor Details
;;; ----------------------------------------------------------------------------
;;; --- gtk-scrollable-hadjustment ---------------------------------------------
#+cl-cffi-gtk-documentation
(setf (documentation (atdoc:get-slot-from-name "hadjustment"
'gtk-scrollable) 't)
"The @code{hadjustment} property of type @class{gtk-adjustment}
(Read / Write / Construct) @br{}
Horizontal adjustment of the scrollable widget. This adjustment is shared
between the scrollable widget and its parent.")
#+cl-cffi-gtk-documentation
(setf (gethash 'gtk-scrollable-hadjustment atdoc:*function-name-alias*)
"Accessor"
(documentation 'gtk-scrollable-hadjustment 'function)
"@version{2021-3-19}
@syntax[]{(gtk-scrollable-hadjustment object) => hadjustment}
@syntax[]{(setf (gtk-scrollable-hadjustment object) hadjustment)}
@argument[object]{a @class{gtk-scrollable} widget}
@argument[hadjustment]{a @class{gtk-adjustment} object}
@begin{short}
Accessor of the @slot[gtk-scrollable]{hadjustment} slot of the
@class{gtk-scrollable} class.
@end{short}
The slot access function @sym{gtk-scrollabe-hadjustment} retrieves the
adjustment used for horizontal scrolling. The slot access function
@sym{(setf gtk-scrollabe-hadjustment)} sets the horizontal adjustment.
@see-class{gtk-scrollabe}
@see-class{gtk-adjustment}
@see-function{gtk-scrollable-vadjustment}")
;;; --- gtk-scrollable-hscroll-policy ------------------------------------------
#+cl-cffi-gtk-documentation
(setf (documentation (atdoc:get-slot-from-name "hscroll-policy"
'gtk-scrollable) 't)
"The @code{hscroll-policy} property of type @symbol{gtk-scrollable-policy}
(Read / Write) @br{}
Determines whether horizontal scrolling should start once the scrollable
widget is allocated less than its minimum width or less than its natural
width. @br{}
Default value: @code{:minimum}")
#+cl-cffi-gtk-documentation
(setf (gethash 'gtk-scrollable-hscroll-policy atdoc:*function-name-alias*)
"Accessor"
(documentation 'gtk-scrollable-hscroll-policy 'function)
"@version{2021-3-19}
@syntax[]{(gtk-scrollable-hscroll-policy object) => policy}
@syntax[]{(setf (gtk-scrollable-hscroll-policy object) policy)}
@argument[object]{a @class{gtk-scrollable} widget}
@argument[policy]{a @symbol{gtk-scrollable-policy} value for the horizontal
scrolling policy}
@begin{short}
Accessor of the @slot[gtk-scrollable]{hscroll-policy} slot of the
@class{gtk-scrollable} class.
@end{short}
The slot access function @sym{gtk-scrollable-hscroll-policy} gets the
horizontal scrolling policy which determines whether horizontal scrolling
should start below the minimum width or below the natural width. The slot
access function @sym{(setf gtk-scrollable-hscroll-policy)} sets the
horizontal scrolling policy.
@see-class{gtk-scrollable}
@see-symbol{gtk-scrollable-policy}
@see-function{gtk-scrollable-vscroll-policy}")
;;; --- gtk-scrollabe-vadjustment ----------------------------------------------
#+cl-cffi-gtk-documentation
(setf (documentation (atdoc:get-slot-from-name "vadjustment"
'gtk-scrollable) 't)
"The @code{vadjustment} property of type @class{gtk-adjustment}
(Read / Write / Construct) @br{}
Vertical adjustment of the scrollable widget. This adjustment is shared
between the scrollable widget and its parent.")
#+cl-cffi-gtk-documentation
(setf (gethash 'gtk-scrollable-vadjustment atdoc:*function-name-alias*)
"Accessor"
(documentation 'gtk-scrollable-vadjustment 'function)
"@version{2021-3-19}
@syntax[]{(gtk-scrollable-vadjustment object) => vadjustment}
@syntax[]{(setf (gtk-scrollable-vadjustment object) vadjustment)}
@argument[object]{a @class{gtk-scrollable} widget}
@argument[vadjustment]{a @class{gtk-adjustment} object}
@begin{short}
Accessor of the @slot[gtk-scrollable]{vadjustment} slot of the
@class{gtk-scrollable} class.
@end{short}
The slot access function @sym{gtk-scrollable-vadjustment} retrieves the
adjustment used for vertical scrolling. The slot access function
@sym{(setf gtk-scrollable-vadjustment)} sets the vertical adjustment.
@see-class{gtk-scrollable}
@see-class{gtk-adjustment}
@see-function{gtk-scrollable-hadjustment}")
;;; --- gtk-scrollable-vscroll-policy ------------------------------------------
#+cl-cffi-gtk-documentation
(setf (documentation (atdoc:get-slot-from-name "vscroll-policy"
'gtk-scrollable) 't)
"The @code{vscroll-policy} property of type @symbol{gtk-scrollable-policy}
(Read / Write) @br{}
Determines whether vertical scrolling should start once the scrollable
widget is allocated less than its minimum height or less than its natural
height. @br{}
Default value: @code{:minimum}")
#+cl-cffi-gtk-documentation
(setf (gethash 'gtk-scrollable-vscroll-policy atdoc:*function-name-alias*)
"Accessor"
(documentation 'gtk-scrollable-vscroll-policy 'function)
"@version{2021-3-19}
@syntax[]{(gtk-scrollable-vscroll-policy object) => policy}
@syntax[]{(setf (gtk-scrollable-vscroll-policy object) policy)}
@argument[object]{a @class{gtk-scrollable} widget}
@argument[policy]{a @symbol{gtk-scrollable-policy} value for the vertical
scrolling policy}
@begin{short}
Accessor of the @slot[gtk-scrollable]{vscroll-policy} slot of the
@class{gtk-scrollable} class.
@end{short}
The slot access function @sym{gtk-scrollable-vscroll-policy} gets the
vertical scrolling policy which determines whether vertical scrolling
should start below the minimum height or below the natural height. The slot
access function @sym{(setf gtk-scrollable-hscroll-policy)} sets the vertical
scrolling policy.
@see-class{gtk-scrollable}
@see-symbol{gtk-scrollable-policy}
@see-function{gtk-scrollable-hscroll-policy}")
;;; ----------------------------------------------------------------------------
;;; gtk_scrollable_get_border () -> gtk-scrollable-border
;;; ----------------------------------------------------------------------------
;; Low-level binding for gtk_scrollable_get_border(): fills BORDER in place
;; and returns a boolean flag from the C side.
(defcfun ("gtk_scrollable_get_border" %gtk-scrollable-border) :boolean
  (scrollable (g-object gtk-scrollable))
  (border (g-boxed-foreign gtk-border)))
(defun gtk-scrollable-border (scrollable)
 "@version{2021-3-19}
  @argument[scrollable]{a @class{gtk-scrollable} widget}
  @return{A @class{gtk-border} instance.}
  @begin{short}
    Returns the size of a non-scrolling border around the outside of the
    scrollable.
  @end{short}
  An example for this would be tree view headers. GTK+ can use this information
  to display overlayed graphics, like the overshoot indication, at the right
  position.
  @see-class{gtk-scrollable}
  @see-class{gtk-border}"
  ;; Allocate a fresh border struct and let the C function fill it in.
  ;; NOTE(review): the boolean returned by %gtk-scrollable-border is
  ;; discarded, so a border instance is returned even when the C call
  ;; reports no border — confirm whether callers should get nil instead.
  (let ((border (gtk-border-new)))
    (%gtk-scrollable-border scrollable border)
    border))
(export 'gtk-scrollable-border)
--- End of file gtk.scrollable.lisp ----------------------------------------
| null | https://raw.githubusercontent.com/crategus/cl-cffi-gtk/27bdcefb703e7ae144f506929f1935468b6987ad/gtk/gtk.scrollable.lisp | lisp | ----------------------------------------------------------------------------
See <>. The API documentation of the Lisp binding is
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License for Lisp
License, or (at your option) any later version and with a preamble to
with Lisp programs and is referred as the LLGPL.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
General Public License. If not, see </>
and <>.
----------------------------------------------------------------------------
An interface for scrollable widgets
Types and Values
GtkScrollablePolicy
Functions
gtk_scrollable_get_hadjustment Accessor
gtk_scrollable_set_hadjustment Accessor
gtk_scrollable_get_vadjustment Accessor
gtk_scrollable_get_hscroll_policy Accessor
gtk_scrollable_set_hscroll_policy Accessor
gtk_scrollable_get_vscroll_policy Accessor
gtk_scrollable_set_vscroll_policy Accessor
gtk_scrollable_get_border
Properties
GtkAdjustment* hadjustment Read / Write / Construct
GtkScrollablePolicy vscroll-policy Read / Write
Object Hierarchy
GInterface
----------------------------------------------------------------------------
----------------------------------------------------------------------------
enum GtkScrollablePolicy
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
Property and Accessor Details
----------------------------------------------------------------------------
--- gtk-scrollable-hadjustment ---------------------------------------------
--- gtk-scrollable-hscroll-policy ------------------------------------------
--- gtk-scrollabe-vadjustment ----------------------------------------------
--- gtk-scrollable-vscroll-policy ------------------------------------------
----------------------------------------------------------------------------
gtk_scrollable_get_border () -> gtk-scrollable-border
---------------------------------------------------------------------------- | gtk.scrollable.lisp
The documentation of this file is taken from the GTK+ 3 Reference Manual
Version 3.24 and modified to document the Lisp binding to the GTK+ library .
available from < -cffi-gtk/ > .
Copyright ( C ) 2012 - 2021
as published by the Free Software Foundation , either version 3 of the
the GNU Lesser General Public License that clarifies the terms for use
GNU Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this program and the preamble to the Gnu Lesser
GtkScrollable
GtkScrollable
gtk_scrollable_set_vadjustment Accessor
GtkScrollablePolicy - policy Read / Write
GtkAdjustment * vadjustment Read / Write / Construct
╰ ─ ─ GtkScrollable
(in-package :gtk)
(define-g-enum "GtkScrollablePolicy" gtk-scrollable-policy
(:export t
:type-initializer "gtk_scrollable_policy_get_type")
(:minimum 0)
(:natural 1))
#+cl-cffi-gtk-documentation
(setf (gethash 'gtk-scrollable-policy atdoc:*symbol-name-alias*) "Enum"
(gethash 'gtk-scrollable-policy atdoc:*external-symbols*)
"@version{2021-3-19}
@begin{short}
Defines the policy to be used in a scrollable widget when updating the
scrolled window adjustments in a given orientation.
@end{short}
@begin{pre}
(define-g-enum \"GtkScrollablePolicy\" gtk-scrollable-policy
(:export t
:type-initializer \"gtk_scrollable_policy_get_type\")
(:minimum 0)
(:natural 1))
@end{pre}
@begin[code]{table}
@entry[:minimum]{Scrollable adjustments are based on the minimum size.}
@entry[:natural]{Scrollable adjustments are based on the natural size.}
@end{table}
@see-class{gtk-scrollable}")
GtkScrollable
(define-g-interface "GtkScrollable" gtk-scrollable
(:export t
:type-initializer "gtk_scrollable_get_type")
(hadjustment
gtk-scrollable-hadjustment
"hadjustment" "GtkAdjustment" t t)
(hscroll-policy
gtk-scrollable-hscroll-policy
"hscroll-policy" "GtkScrollablePolicy" t t)
(vadjustment
gtk-scrollable-vadjustment
"vadjustment" "GtkAdjustment" t t)
(vscroll-policy
gtk-scrollable-vscroll-policy
"vscroll-policy" "GtkScrollablePolicy" t t))
#+cl-cffi-gtk-documentation
(setf (gethash 'gtk-scrollable atdoc:*class-name-alias*) "Interface"
(documentation 'gtk-scrollable 'type)
"@version{2021-3-19}
@begin{short}
The @sym{gtk-scrollable} interface is an interface that is implemented by
widgets with native scrolling ability.
@end{short}
To implement this interface you should override the @code{hadjustment}
and @code{vadjustment} properties.
@subheading{Creating a scrollable widget}
All scrollable widgets should do the following.
@begin{itemize}
@begin{item}
When a parent widget sets the scrollable child widget’s adjustments,
the widget should populate the adjustments’ @slot[gtk-adjustment]{lower},
@slot[gtk-adjustment]{upper}, @slot[gtk-adjustment]{step-increment},
@slot[gtk-adjustment]{page-increment} and @slot[gtk-adjustment]{page-size}
properties and connect to the \"value-changed\" signal.
@end{item}
@begin{item}
Because its preferred size is the size for a fully expanded widget, the
scrollable widget must be able to cope with underallocations. This means
that it must accept any value passed to its
@code{GtkWidgetClass.size_allocate()} function.
@end{item}
@begin{item}
When the parent allocates space to the scrollable child widget, the
widget should update the adjustments’ properties with new values.
@end{item}
@begin{item}
When any of the adjustments emits the \"value-changed\" signal, the
scrollable widget should scroll its contents.
@end{item}
@end{itemize}
@see-slot{gtk-scrollable-hadjustment}
@see-slot{gtk-scrollable-hscroll-policy}
@see-slot{gtk-scrollable-vadjustment}
@see-slot{gtk-scrollable-vscroll-policy}
@see-class{gtk-scrollbar}
@see-class{gtk-scrolled-window}")
#+cl-cffi-gtk-documentation
(setf (documentation (atdoc:get-slot-from-name "hadjustment"
'gtk-scrollable) 't)
"The @code{hadjustment} property of type @class{gtk-adjustment}
(Read / Write / Construct) @br{}
Horizontal adjustment of the scrollable widget. This adjustment is shared
between the scrollable widget and its parent.")
#+cl-cffi-gtk-documentation
(setf (gethash 'gtk-scrollable-hadjustment atdoc:*function-name-alias*)
"Accessor"
(documentation 'gtk-scrollable-hadjustment 'function)
"@version{2021-3-19}
@syntax[]{(gtk-scrollable-hadjustment object) => hadjustment}
@syntax[]{(setf (gtk-scrollable-hadjustment object) hadjustment)}
@argument[object]{a @class{gtk-scrollable} widget}
@argument[hadjustment]{a @class{gtk-adjustment} object}
@begin{short}
Accessor of the @slot[gtk-scrollable]{hadjustment} slot of the
@class{gtk-scrollable} class.
@end{short}
The slot access function @sym{gtk-scrollabe-hadjustment} retrieves the
adjustment used for horizontal scrolling. The slot access function
@sym{(setf gtk-scrollabe-hadjustment)} sets the horizontal adjustment.
@see-class{gtk-scrollabe}
@see-class{gtk-adjustment}
@see-function{gtk-scrollable-vadjustment}")
#+cl-cffi-gtk-documentation
(setf (documentation (atdoc:get-slot-from-name "hscroll-policy"
'gtk-scrollable) 't)
"The @code{hscroll-policy} property of type @symbol{gtk-scrollable-policy}
(Read / Write) @br{}
Determines whether horizontal scrolling should start once the scrollable
widget is allocated less than its minimum width or less than its natural
width. @br{}
Default value: @code{:minimum}")
#+cl-cffi-gtk-documentation
(setf (gethash 'gtk-scrollable-hscroll-policy atdoc:*function-name-alias*)
"Accessor"
(documentation 'gtk-scrollable-hscroll-policy 'function)
"@version{2021-3-19}
@syntax[]{(gtk-scrollable-hscroll-policy object) => policy}
@syntax[]{(setf (gtk-scrollable-hscroll-policy object) policy)}
@argument[object]{a @class{gtk-scrollable} widget}
@argument[policy]{a @symbol{gtk-scrollable-policy} value for the horizontal
scrolling policy}
@begin{short}
Accessor of the @slot[gtk-scrollable]{hscroll-policy} slot of the
@class{gtk-scrollable} class.
@end{short}
The slot access function @sym{gtk-scrollable-hscroll-policy} gets the
horizontal scrolling policy which determines whether horizontal scrolling
should start below the minimum width or below the natural width. The slot
access function @sym{(setf gtk-scrollable-hscroll-policy)} sets the
horizontal scrolling policy.
@see-class{gtk-scrollable}
@see-symbol{gtk-scrollable-policy}
@see-function{gtk-scrollable-vscroll-policy}")
#+cl-cffi-gtk-documentation
(setf (documentation (atdoc:get-slot-from-name "vadjustment"
'gtk-scrollable) 't)
"The @code{vadjustment} property of type @class{gtk-adjustment}
(Read / Write / Construct) @br{}
Vertical adjustment of the scrollable widget. This adjustment is shared
between the scrollable widget and its parent.")
#+cl-cffi-gtk-documentation
(setf (gethash 'gtk-scrollable-vadjustment atdoc:*function-name-alias*)
"Accessor"
(documentation 'gtk-scrollable-vadjustment 'function)
"@version{2021-3-19}
@syntax[]{(gtk-scrollable-vadjustment object) => vadjustment}
@syntax[]{(setf (gtk-scrollable-vadjustment object) vadjustment)}
@argument[object]{a @class{gtk-scrollable} widget}
@argument[vadjustment]{a @class{gtk-adjustment} object}
@begin{short}
Accessor of the @slot[gtk-scrollable]{vadjustment} slot of the
@class{gtk-scrollable} class.
@end{short}
The slot access function @sym{gtk-scrollable-vadjustment} retrieves the
adjustment used for vertical scrolling. The slot access function
@sym{(setf gtk-scrollable-vadjustment)} sets the vertical adjustment.
@see-class{gtk-scrollable}
@see-class{gtk-adjustment}
@see-function{gtk-scrollable-hadjustment}")
#+cl-cffi-gtk-documentation
(setf (documentation (atdoc:get-slot-from-name "vscroll-policy"
'gtk-scrollable) 't)
"The @code{vscroll-policy} property of type @symbol{gtk-scrollable-policy}
(Read / Write) @br{}
Determines whether vertical scrolling should start once the scrollable
widget is allocated less than its minimum height or less than its natural
height. @br{}
Default value: @code{:minimum}")
#+cl-cffi-gtk-documentation
(setf (gethash 'gtk-scrollable-vscroll-policy atdoc:*function-name-alias*)
"Accessor"
(documentation 'gtk-scrollable-vscroll-policy 'function)
"@version{2021-3-19}
@syntax[]{(gtk-scrollable-vscroll-policy object) => policy}
@syntax[]{(setf (gtk-scrollable-vscroll-policy object) policy)}
@argument[object]{a @class{gtk-scrollable} widget}
@argument[policy]{a @symbol{gtk-scrollable-policy} value for the vertical
scrolling policy}
@begin{short}
Accessor of the @slot[gtk-scrollable]{vscroll-policy} slot of the
@class{gtk-scrollable} class.
@end{short}
The slot access function @sym{gtk-scrollable-vscroll-policy} gets the
vertical scrolling policy which determines whether vertical scrolling
should start below the minimum height or below the natural height. The slot
access function @sym{(setf gtk-scrollable-hscroll-policy)} sets the vertical
scrolling policy.
@see-class{gtk-scrollable}
@see-symbol{gtk-scrollable-policy}
@see-function{gtk-scrollable-hscroll-policy}")
(defcfun ("gtk_scrollable_get_border" %gtk-scrollable-border) :boolean
(scrollable (g-object gtk-scrollable))
(border (g-boxed-foreign gtk-border)))
(defun gtk-scrollable-border (scrollable)
"@version{2021-3-19}
@argument[scrollable]{a @class{gtk-scrollable} widget}
@return{A @class{gtk-border} instance.}
@begin{short}
Returns the size of a non-scrolling border around the outside of the
scrollable.
@end{short}
An example for this would be tree view headers. GTK+ can use this information
to display overlayed graphics, like the overshoot indication, at the right
position.
@see-class{gtk-scrollable}
@see-class{gtk-border}"
(let ((border (gtk-border-new)))
(%gtk-scrollable-border scrollable border)
border))
(export 'gtk-scrollable-border)
--- End of file gtk.scrollable.lisp ----------------------------------------
|
bb6482c74a569a63d814226a015b36fb8f2d59097e7fcf50f13c1a38c166266b | jyh/metaprl | sil_state_model.ml |
* Model the state as a dependant record .
*
* ----------------------------------------------------------------
*
* This file is part of MetaPRL , a modular , higher order
* logical framework that provides a logical programming
* environment for OCaml and other languages .
*
* See the file doc / htmlman / default.html or visit /
* for more information .
*
* Copyright ( C ) 1999 , Cornell University
*
* This program is free software ; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation ; either version 2
* of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 675 Mass Ave , Cambridge , , USA .
*
* Author :
*
* Model the state as a dependant record.
*
* ----------------------------------------------------------------
*
* This file is part of MetaPRL, a modular, higher order
* logical framework that provides a logical programming
* environment for OCaml and other languages.
*
* See the file doc/htmlman/default.html or visit /
* for more information.
*
* Copyright (C) 1999 Jason Hickey, Cornell University
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* Author: Jason Hickey
*
*)
extends Itt_theory
extends Sil_state
open Dtactic
open Itt_struct
(************************************************************************
* SYNTAX *
************************************************************************)
(*
* Type of labels.
* We carry alnong the type, but we don't use it right now.
*)
declare label_type
declare eq_label{'l1; 'l2}
(*
* Declarations are syntax for constructing state types.
*)
declare empty_decl
declare store_decl{'decl; 'l; 'v}
declare alloc_decl{'decl; 'l; 'v}
(*
* Well-orderings.
*)
declare order_type
declare discrete_order
declare next_order{'order; 'l}
declare order_apply{'order; 'a; 'b}
(************************************************************************
* DISPLAY FORMS *
************************************************************************)
prec prec_next_order
dform label_type_df : label_type =
`"Label"
dform eq_label_df : eq_label{'l1; 'l2} =
slot{'l1} `" =l " slot{'l2}
dform empty_decl_df : empty_decl =
`"[]"
dform store_decl_df : store_decl{'decl; 'l; 'v} =
slot{'decl} `"[" slot{'l} `" = " slot{'v} `"]"
dform alloc_decl_df : alloc_decl{'decl; 'l; 'v} =
slot{'decl} `"+[" slot{'l} `" = " slot{'v} `"]"
dform order_type_df : order_type =
`"OrderType"
dform discrete_order_df : discrete_order =
`"discrete"
dform next_order_df : parens :: "prec"[prec_next_order] :: next_order{'order; 'l} =
`"next(" slot{'order} `" < " slot{'l} `")"
dform order_apply_df : order_apply{'order; 'l1; 'l2} =
`"order_apply(" slot{'order} `", " slot{'l1} `", " slot{'l2} `")"
(************************************************************************
* DEFINITIONS *
************************************************************************)
(*
* We'll model labels as lists, because we'll otherwise run into trouble with wf.
*)
prim_rw unfold_label_type : label_type <--> list{unit}
prim_rw unfold_first : first <--> nil
prim_rw unfold_next : next{'l} <--> cons{it; 'l}
(*
* Comparsions.
*)
prim_rw unfold_eq_label : eq_label{'e1; 'e2} <-->
(list_ind{'e1; lambda{e2. list_ind{'e2; btrue; u, v, g. bfalse}};
u1, v1, g1. lambda{e2. list_ind{'e2; bfalse; u2, v2, g2. 'g1 'v2}}} 'e2)
prim_rw unfold_if_eq_label : if_eq_label{'e1; 'e2; 'e3; 'e4} <-->
ifthenelse{eq_label{'e1; 'e2}; 'e3; 'e4}
interactive_rw reduce_eq_label1 : eq_label{first; first} <--> btrue
interactive_rw reduce_eq_label2 : eq_label{next{'l1}; first} <--> bfalse
interactive_rw reduce_eq_label3 : eq_label{first; next{'l1}} <--> bfalse
interactive_rw reduce_eq_label4 : eq_label{next{'l1}; next{'l2}} <--> eq_label{'l1; 'l2}
let reduce_info =
[<< eq_label{first; first} >>, reduce_eq_label1;
<< eq_label{next{'l1}; first} >>, reduce_eq_label2;
<< eq_label{first; next{'l1}} >>, reduce_eq_label3;
<< eq_label{next{'l1}; next{'l2}} >>, reduce_eq_label4]
let reduce_resource = Top_conversionals.add_reduce_info reduce_resource reduce_info
interactive_rw reduce_if_eq_label1 : if_eq_label{first; first; 'e1; 'e2} <--> 'e1
interactive_rw reduce_if_eq_label2 : if_eq_label{next{'l1}; first; 'e1; 'e2} <--> 'e2
interactive_rw reduce_if_eq_label3 : if_eq_label{first; next{'l1}; 'e1; 'e2} <--> 'e2
interactive_rw reduce_if_eq_label4 : if_eq_label{next{'l1}; next{'l2}; 'e1; 'e2} <--> if_eq_label{'l1; 'l2; 'e1; 'e2}
* State is a record construction , with an allocation set as its first
* element .
* State is a record construction, with an allocation set as its first
* element.
*)
prim_rw unfold_empty : empty <--> lambda{l. next{first}}
prim_rw unfold_fetch : fetch{'s; 'l} <--> ('s 'l)
prim_rw unfold_store : store{'s; 'l1; 'v1} <-->
lambda{l2. ifthenelse{eq_label{'l2; 'l1}; 'v1; .'s 'l2}}
prim_rw unfold_alloc : alloc{'s; 'v1; s2, l. 'e['s2; 'l]} <-->
(lambda{l1. 'e[store{store{'s; first; 'l1}; 'l1; 'v1}; 'l1]} next{fetch{'s; first}})
(*
* Fetch operation.
*)
interactive_rw reduce_fetch : fetch{store{'s; 'l1; 'v}; 'l2} <--> ifthenelse{eq_label{'l2; 'l1}; 'v; fetch{'s; 'l2}}
(*
* Declarations are type functions with a well-ordering on the domain.
*)
prim_rw unfold_order_type : order_type <--> (label_type -> label_type -> univ[1:l])
prim_rw unfold_order_apply : order_apply{'order; 'a; 'b} <--> ('order 'a 'b)
prim_rw unfold_discrete_order : discrete_order <--> lambda{l1. lambda{l2. void}}
prim_rw unfold_next_order : next_order{'order; 'l} <-->
lambda{l1. lambda{l2. ifthenelse{eq_label{'l1; 'l}; void; ifthenelse{eq_label{'l2; 'l}; unit; order_apply{'order; 'l1; 'l2}}}}}
prim_rw unfold_empty_decl : empty_decl <--> (discrete_order, lambda{l. lambda{s. ifthenelse{eq_label{'l; first}; label_type; top}}})
prim_rw unfold_store_decl : store_decl{'decl; 'l; 'v} <-->
spread{'decl; order, f. ('order, lambda{l2. ifthenelse{eq_label{'l2; 'l}; 'v; .'f 'l2}})}
prim_rw unfold_alloc_decl : alloc_decl{'decl; 'l; 'v} <-->
spread{'decl; order, f. (next_order{'order; 'l}, lambda{l2. ifthenelse{eq_label{'l2; 'l}; 'v; .'f 'l2}})}
(************************************************************************
* DECLARATION RULES *
************************************************************************)
(*
* Labels are in their type.
*)
interactive label_type_member {| intro [] |} :
sequent { <H> >- member{univ[i:l]; label_type} }
interactive label_type_type {| intro [] |} :
sequent { <H> >- "type"{label_type} }
interactive first_member {| intro [] |} :
sequent { <H> >- member{label_type; first} }
interactive next_member {| intro [] |} :
[wf] sequent { <H> >- member{label_type; 'l} } -->
sequent { <H> >- member{label_type; next{'l}} }
interactive label_elim {| elim [ThinOption thinT] |} 'H :
[main] sequent { <H>; l: label_type; <J['l]> >- 'C[first] } -->
[main] sequent { <H>; l: label_type; <J['l]>; v: label_type; w: 'C['v] >- 'C[next{'v}] } -->
sequent { <H>; l: label_type; <J['l]> >- 'C['l] }
interactive eq_label_member {| intro [] |} :
[wf] sequent { <H> >- member{label_type; 'l1} } -->
[wf] sequent { <H> >- member{label_type; 'l2} } -->
sequent { <H> >- member{bool; eq_label{'l1; 'l2}} }
interactive label_eq_member {| intro [] |} :
[wf] sequent { <H> >- member{label_type; 'l1} } -->
[wf] sequent { <H> >- member{label_type; 'l2} } -->
[wf] sequent { <H>; v: "assert"{eq_label{'l1; 'l2}} >- member{'T; 'e1} } -->
[wf] sequent { <H>; v: "assert"{bnot{eq_label{'l1; 'l2}}} >- member{'T; 'e2} } -->
sequent { <H> >- member{'T; if_eq_label{'l1; 'l2; 'e1; 'e2}} }
* .
* Orderings.
*)
interactive order_type_member {| intro [] |} :
sequent { <H> >- member{univ[i:l]; order_type} }
interactive order_type_type {| intro [] |} :
sequent { <H> >- "type"{order_type} }
interactive order_apply_member {| intro [] |} :
[wf] sequent { <H> >- member{order_type; 'order} } -->
[wf] sequent { <H> >- member{label_type; 'l1} } -->
[wf] sequent { <H> >- member{label_type; 'l2} } -->
sequent { <H> >- member{univ[i:l]; order_apply{'order; 'l1; 'l2}} }
interactive order_apply_type {| intro [] |} :
[wf] sequent { <H> >- member{order_type; 'order} } -->
[wf] sequent { <H> >- member{label_type; 'l1} } -->
[wf] sequent { <H> >- member{label_type; 'l2} } -->
sequent { <H> >- "type"{order_apply{'order; 'l1; 'l2}} }
(*
* Discrete order is well-formed, and well-founded.
*)
interactive discrete_order_wf {| intro [] |} :
sequent { <H> >- member{order_type; discrete_order} }
interactive discrete_order_well_founded {| intro [] |} :
sequent { <H> >- well_founded{label_type; l1, l2. discrete_order 'l1 'l2} }
(*
* Next order is well-formed and well-founded.
*)
interactive next_order_wf {| intro [] |} :
[wf] sequent { <H> >- member{order_type; 'order} } -->
[wf] sequent { <H> >- member{label_type; 'l} } -->
sequent { <H> >- member{order_type; next_order{'order; 'l}} }
interactive next_order_anti_ref {| intro [] |} 'a :
[wf] sequent { <H> >- member{order_type; 'order} } -->
[wf] sequent { <H>; a: label_type >- not{order_apply{'order; 'a; 'a}} } -->
[wf] sequent { <H> >- member{label_type; 'l1} } -->
[wf] sequent { <H> >- member{label_type; 'l2} } -->
sequent { <H> >- not{order_apply{next{'order; 'l1}; 'l2; 'l2}} }
* -*-
* Local Variables :
* Caml - master : " nl "
* End :
* -*-
* -*-
* Local Variables:
* Caml-master: "nl"
* End:
* -*-
*)
| null | https://raw.githubusercontent.com/jyh/metaprl/51ba0bbbf409ecb7f96f5abbeb91902fdec47a19/theories/sil/sil_state_model.ml | ocaml | ***********************************************************************
* SYNTAX *
***********************************************************************
* Type of labels.
* We carry alnong the type, but we don't use it right now.
* Declarations are syntax for constructing state types.
* Well-orderings.
***********************************************************************
* DISPLAY FORMS *
***********************************************************************
***********************************************************************
* DEFINITIONS *
***********************************************************************
* We'll model labels as lists, because we'll otherwise run into trouble with wf.
* Comparsions.
* Fetch operation.
* Declarations are type functions with a well-ordering on the domain.
***********************************************************************
* DECLARATION RULES *
***********************************************************************
* Labels are in their type.
* Discrete order is well-formed, and well-founded.
* Next order is well-formed and well-founded.
|
* Model the state as a dependant record .
*
* ----------------------------------------------------------------
*
* This file is part of MetaPRL , a modular , higher order
* logical framework that provides a logical programming
* environment for OCaml and other languages .
*
* See the file doc / htmlman / default.html or visit /
* for more information .
*
* Copyright ( C ) 1999 , Cornell University
*
* This program is free software ; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation ; either version 2
* of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 675 Mass Ave , Cambridge , , USA .
*
* Author :
*
* Model the state as a dependant record.
*
* ----------------------------------------------------------------
*
* This file is part of MetaPRL, a modular, higher order
* logical framework that provides a logical programming
* environment for OCaml and other languages.
*
* See the file doc/htmlman/default.html or visit /
* for more information.
*
* Copyright (C) 1999 Jason Hickey, Cornell University
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* Author: Jason Hickey
*
*)
extends Itt_theory
extends Sil_state
open Dtactic
open Itt_struct
declare label_type
declare eq_label{'l1; 'l2}
declare empty_decl
declare store_decl{'decl; 'l; 'v}
declare alloc_decl{'decl; 'l; 'v}
declare order_type
declare discrete_order
declare next_order{'order; 'l}
declare order_apply{'order; 'a; 'b}
prec prec_next_order
dform label_type_df : label_type =
`"Label"
dform eq_label_df : eq_label{'l1; 'l2} =
slot{'l1} `" =l " slot{'l2}
dform empty_decl_df : empty_decl =
`"[]"
dform store_decl_df : store_decl{'decl; 'l; 'v} =
slot{'decl} `"[" slot{'l} `" = " slot{'v} `"]"
dform alloc_decl_df : alloc_decl{'decl; 'l; 'v} =
slot{'decl} `"+[" slot{'l} `" = " slot{'v} `"]"
dform order_type_df : order_type =
`"OrderType"
dform discrete_order_df : discrete_order =
`"discrete"
dform next_order_df : parens :: "prec"[prec_next_order] :: next_order{'order; 'l} =
`"next(" slot{'order} `" < " slot{'l} `")"
dform order_apply_df : order_apply{'order; 'l1; 'l2} =
`"order_apply(" slot{'order} `", " slot{'l1} `", " slot{'l2} `")"
prim_rw unfold_label_type : label_type <--> list{unit}
prim_rw unfold_first : first <--> nil
prim_rw unfold_next : next{'l} <--> cons{it; 'l}
prim_rw unfold_eq_label : eq_label{'e1; 'e2} <-->
(list_ind{'e1; lambda{e2. list_ind{'e2; btrue; u, v, g. bfalse}};
u1, v1, g1. lambda{e2. list_ind{'e2; bfalse; u2, v2, g2. 'g1 'v2}}} 'e2)
prim_rw unfold_if_eq_label : if_eq_label{'e1; 'e2; 'e3; 'e4} <-->
ifthenelse{eq_label{'e1; 'e2}; 'e3; 'e4}
interactive_rw reduce_eq_label1 : eq_label{first; first} <--> btrue
interactive_rw reduce_eq_label2 : eq_label{next{'l1}; first} <--> bfalse
interactive_rw reduce_eq_label3 : eq_label{first; next{'l1}} <--> bfalse
interactive_rw reduce_eq_label4 : eq_label{next{'l1}; next{'l2}} <--> eq_label{'l1; 'l2}
let reduce_info =
[<< eq_label{first; first} >>, reduce_eq_label1;
<< eq_label{next{'l1}; first} >>, reduce_eq_label2;
<< eq_label{first; next{'l1}} >>, reduce_eq_label3;
<< eq_label{next{'l1}; next{'l2}} >>, reduce_eq_label4]
let reduce_resource = Top_conversionals.add_reduce_info reduce_resource reduce_info
interactive_rw reduce_if_eq_label1 : if_eq_label{first; first; 'e1; 'e2} <--> 'e1
interactive_rw reduce_if_eq_label2 : if_eq_label{next{'l1}; first; 'e1; 'e2} <--> 'e2
interactive_rw reduce_if_eq_label3 : if_eq_label{first; next{'l1}; 'e1; 'e2} <--> 'e2
interactive_rw reduce_if_eq_label4 : if_eq_label{next{'l1}; next{'l2}; 'e1; 'e2} <--> if_eq_label{'l1; 'l2; 'e1; 'e2}
* State is a record construction , with an allocation set as its first
* element .
* State is a record construction, with an allocation set as its first
* element.
*)
prim_rw unfold_empty : empty <--> lambda{l. next{first}}
prim_rw unfold_fetch : fetch{'s; 'l} <--> ('s 'l)
prim_rw unfold_store : store{'s; 'l1; 'v1} <-->
lambda{l2. ifthenelse{eq_label{'l2; 'l1}; 'v1; .'s 'l2}}
prim_rw unfold_alloc : alloc{'s; 'v1; s2, l. 'e['s2; 'l]} <-->
(lambda{l1. 'e[store{store{'s; first; 'l1}; 'l1; 'v1}; 'l1]} next{fetch{'s; first}})
interactive_rw reduce_fetch : fetch{store{'s; 'l1; 'v}; 'l2} <--> ifthenelse{eq_label{'l2; 'l1}; 'v; fetch{'s; 'l2}}
prim_rw unfold_order_type : order_type <--> (label_type -> label_type -> univ[1:l])
prim_rw unfold_order_apply : order_apply{'order; 'a; 'b} <--> ('order 'a 'b)
prim_rw unfold_discrete_order : discrete_order <--> lambda{l1. lambda{l2. void}}
prim_rw unfold_next_order : next_order{'order; 'l} <-->
lambda{l1. lambda{l2. ifthenelse{eq_label{'l1; 'l}; void; ifthenelse{eq_label{'l2; 'l}; unit; order_apply{'order; 'l1; 'l2}}}}}
prim_rw unfold_empty_decl : empty_decl <--> (discrete_order, lambda{l. lambda{s. ifthenelse{eq_label{'l; first}; label_type; top}}})
prim_rw unfold_store_decl : store_decl{'decl; 'l; 'v} <-->
spread{'decl; order, f. ('order, lambda{l2. ifthenelse{eq_label{'l2; 'l}; 'v; .'f 'l2}})}
prim_rw unfold_alloc_decl : alloc_decl{'decl; 'l; 'v} <-->
spread{'decl; order, f. (next_order{'order; 'l}, lambda{l2. ifthenelse{eq_label{'l2; 'l}; 'v; .'f 'l2}})}
interactive label_type_member {| intro [] |} :
sequent { <H> >- member{univ[i:l]; label_type} }
interactive label_type_type {| intro [] |} :
sequent { <H> >- "type"{label_type} }
interactive first_member {| intro [] |} :
sequent { <H> >- member{label_type; first} }
interactive next_member {| intro [] |} :
[wf] sequent { <H> >- member{label_type; 'l} } -->
sequent { <H> >- member{label_type; next{'l}} }
interactive label_elim {| elim [ThinOption thinT] |} 'H :
[main] sequent { <H>; l: label_type; <J['l]> >- 'C[first] } -->
[main] sequent { <H>; l: label_type; <J['l]>; v: label_type; w: 'C['v] >- 'C[next{'v}] } -->
sequent { <H>; l: label_type; <J['l]> >- 'C['l] }
interactive eq_label_member {| intro [] |} :
[wf] sequent { <H> >- member{label_type; 'l1} } -->
[wf] sequent { <H> >- member{label_type; 'l2} } -->
sequent { <H> >- member{bool; eq_label{'l1; 'l2}} }
interactive label_eq_member {| intro [] |} :
[wf] sequent { <H> >- member{label_type; 'l1} } -->
[wf] sequent { <H> >- member{label_type; 'l2} } -->
[wf] sequent { <H>; v: "assert"{eq_label{'l1; 'l2}} >- member{'T; 'e1} } -->
[wf] sequent { <H>; v: "assert"{bnot{eq_label{'l1; 'l2}}} >- member{'T; 'e2} } -->
sequent { <H> >- member{'T; if_eq_label{'l1; 'l2; 'e1; 'e2}} }
* .
* Orderings.
*)
interactive order_type_member {| intro [] |} :
sequent { <H> >- member{univ[i:l]; order_type} }
interactive order_type_type {| intro [] |} :
sequent { <H> >- "type"{order_type} }
interactive order_apply_member {| intro [] |} :
[wf] sequent { <H> >- member{order_type; 'order} } -->
[wf] sequent { <H> >- member{label_type; 'l1} } -->
[wf] sequent { <H> >- member{label_type; 'l2} } -->
sequent { <H> >- member{univ[i:l]; order_apply{'order; 'l1; 'l2}} }
interactive order_apply_type {| intro [] |} :
[wf] sequent { <H> >- member{order_type; 'order} } -->
[wf] sequent { <H> >- member{label_type; 'l1} } -->
[wf] sequent { <H> >- member{label_type; 'l2} } -->
sequent { <H> >- "type"{order_apply{'order; 'l1; 'l2}} }
interactive discrete_order_wf {| intro [] |} :
sequent { <H> >- member{order_type; discrete_order} }
interactive discrete_order_well_founded {| intro [] |} :
sequent { <H> >- well_founded{label_type; l1, l2. discrete_order 'l1 'l2} }
interactive next_order_wf {| intro [] |} :
[wf] sequent { <H> >- member{order_type; 'order} } -->
[wf] sequent { <H> >- member{label_type; 'l} } -->
sequent { <H> >- member{order_type; next_order{'order; 'l}} }
interactive next_order_anti_ref {| intro [] |} 'a :
[wf] sequent { <H> >- member{order_type; 'order} } -->
[wf] sequent { <H>; a: label_type >- not{order_apply{'order; 'a; 'a}} } -->
[wf] sequent { <H> >- member{label_type; 'l1} } -->
[wf] sequent { <H> >- member{label_type; 'l2} } -->
sequent { <H> >- not{order_apply{next{'order; 'l1}; 'l2; 'l2}} }
* -*-
* Local Variables :
* Caml - master : " nl "
* End :
* -*-
* -*-
* Local Variables:
* Caml-master: "nl"
* End:
* -*-
*)
|
e4dd174fc2a5ad57777c0a8d6afa3abca61437aa7c56bc5c2f6e56bd24610ee5 | parsimony-ide/parsimony | server.clj | (ns parsimony.server
(:require [com.stuartsierra.component :as component]
[environ.core :refer [env]]
[fipp.edn :refer [pprint]]
[parsimony.log :as log]
[parsimony.server.handler :refer [all-routes]]
[parsimony.server.algo :refer [new-algo]]
[parsimony.server.compiler :refer [new-compiler]]
[parsimony.server.sente-handler :refer [sente-handler-fn]]
[ring.middleware.defaults :refer [wrap-defaults api-defaults]]
[schema.core :as s]
[system.components.endpoint :refer [new-endpoint]]
[system.components.middleware :refer [new-middleware]]
[system.components.handler :refer [new-handler]]
[system.components.immutant-web :refer [new-web-server]]
[system.components.sente :refer [new-channel-socket-server]]
[taoensso.sente.server-adapters.immutant :refer [sente-web-server-adapter]]))
(defn int-or-nil
"Convert a String in base 10 format to an Integer if possible, otherwise return nil"
[x]
(when (and x (string? x))
(try
(Integer/parseInt x)
(catch NumberFormatException _
nil))))
(defn int-env [x]
(int-or-nil (env x)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
System
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(s/defschema config-schema
{:port s/Int
:db-path s/Str})
(def default-config
{:port (or (int-env :parsimony-port) 9254)
:db-path (or (env :parsimony-db-path) "rdag.db")})
(defn system
([]
(system {}))
([config]
(let [config (merge default-config config)
startup-msg (str "Starting system with options: " config)]
(log/info startup-msg)
(s/validate config-schema config)
(component/system-map
:routes (component/using
(new-endpoint all-routes)
[:algo :sente])
:middleware (new-middleware {:middleware [[wrap-defaults api-defaults]]})
:handler (component/using
(new-handler)
[:routes :middleware])
:sente (component/using
(new-channel-socket-server sente-handler-fn sente-web-server-adapter {:wrap-component? true})
[:algo :compiler])
:http (component/using
(new-web-server (:port config))
[:handler])
:algo (new-algo config)
:compiler (new-compiler config)))))
| null | https://raw.githubusercontent.com/parsimony-ide/parsimony/1744e8b4a921a50dfbd0815499cf3af1059590c8/src/parsimony/server.clj | clojure | (ns parsimony.server
(:require [com.stuartsierra.component :as component]
[environ.core :refer [env]]
[fipp.edn :refer [pprint]]
[parsimony.log :as log]
[parsimony.server.handler :refer [all-routes]]
[parsimony.server.algo :refer [new-algo]]
[parsimony.server.compiler :refer [new-compiler]]
[parsimony.server.sente-handler :refer [sente-handler-fn]]
[ring.middleware.defaults :refer [wrap-defaults api-defaults]]
[schema.core :as s]
[system.components.endpoint :refer [new-endpoint]]
[system.components.middleware :refer [new-middleware]]
[system.components.handler :refer [new-handler]]
[system.components.immutant-web :refer [new-web-server]]
[system.components.sente :refer [new-channel-socket-server]]
[taoensso.sente.server-adapters.immutant :refer [sente-web-server-adapter]]))
(defn int-or-nil
"Convert a String in base 10 format to an Integer if possible, otherwise return nil"
[x]
(when (and x (string? x))
(try
(Integer/parseInt x)
(catch NumberFormatException _
nil))))
(defn int-env [x]
(int-or-nil (env x)))
System
(s/defschema config-schema
{:port s/Int
:db-path s/Str})
(def default-config
{:port (or (int-env :parsimony-port) 9254)
:db-path (or (env :parsimony-db-path) "rdag.db")})
(defn system
([]
(system {}))
([config]
(let [config (merge default-config config)
startup-msg (str "Starting system with options: " config)]
(log/info startup-msg)
(s/validate config-schema config)
(component/system-map
:routes (component/using
(new-endpoint all-routes)
[:algo :sente])
:middleware (new-middleware {:middleware [[wrap-defaults api-defaults]]})
:handler (component/using
(new-handler)
[:routes :middleware])
:sente (component/using
(new-channel-socket-server sente-handler-fn sente-web-server-adapter {:wrap-component? true})
[:algo :compiler])
:http (component/using
(new-web-server (:port config))
[:handler])
:algo (new-algo config)
:compiler (new-compiler config)))))
| |
c0423f253ceb557c49e8e037e552cf4ec180549a2ba3a8a9accfad65315a7a92 | nikodemus/SBCL | alloc.lisp | ;;;; allocating simple objects
This software is part of the SBCL system . See the README file for
;;;; more information.
;;;;
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
;;;; public domain. The software is in the public domain and is
;;;; provided with absolutely no warranty. See the COPYING and CREDITS
;;;; files for more information.
(in-package "SB!VM")
;;;; Signed and unsigned bignums from word-sized integers. Argument
;;;; and return in the same register. No VOPs, as these are only used
;;;; as out-of-line versions: MOVE-FROM-[UN]SIGNED VOPs handle the
;;;; fixnum cases inline.
# + SB - ASSEMBLING as we do n't need , just the asm routines :
;;; these are out-of-line versions called by VOPs.
#+sb-assembling
(macrolet
((def (reg)
`(define-assembly-routine (,(symbolicate "ALLOC-SIGNED-BIGNUM-IN-" reg))
((:temp number unsigned-reg ,(symbolicate reg "-OFFSET")))
(inst push number)
(with-fixed-allocation (number bignum-widetag (+ bignum-digits-offset 1))
(popw number bignum-digits-offset other-pointer-lowtag))
(inst ret))))
(def rax)
(def rcx)
(def rdx)
(def rbx)
(def rsi)
(def rdi)
(def r8)
(def r9)
(def r10)
(def r12)
(def r13)
(def r14)
(def r15))
#+sb-assembling
(macrolet
((def (reg)
`(define-assembly-routine (,(symbolicate "ALLOC-UNSIGNED-BIGNUM-IN-" reg))
((:temp number unsigned-reg ,(symbolicate reg "-OFFSET")))
(inst push number)
(inst jmp :ns one-word-bignum)
Two word bignum
(with-fixed-allocation (number bignum-widetag (+ bignum-digits-offset 2))
(popw number bignum-digits-offset other-pointer-lowtag))
(inst ret)
ONE-WORD-BIGNUM
(with-fixed-allocation (number bignum-widetag (+ bignum-digits-offset 1))
(popw number bignum-digits-offset other-pointer-lowtag))
(inst ret))))
(def rax)
(def rcx)
(def rdx)
(def rbx)
(def rsi)
(def rdi)
(def r8)
(def r9)
(def r10)
(def r12)
(def r13)
(def r14)
(def r15))
#+sb-assembling
(macrolet
((def (reg)
(declare (ignorable reg))
#!+sb-thread
(let* ((name (intern (format nil "ALLOC-TLS-INDEX-IN-~A" reg)))
(target-offset (intern (format nil "~A-OFFSET" reg)))
(other-offset (if (eql 'rax reg)
'rcx-offset
'rax-offset)))
Symbol starts in TARGET , where the TLS - INDEX ends up in .
`(define-assembly-routine ,name
((:temp other descriptor-reg ,other-offset)
(:temp target descriptor-reg ,target-offset))
(let ((get-tls-index-lock (gen-label))
(release-tls-index-lock (gen-label)))
(pseudo-atomic
Save OTHER & push the symbol . RAX is either one of the two .
(inst push other)
(inst push target)
(emit-label get-tls-index-lock)
(let ((not-rax ,(if (eql 'rax reg) 'other 'target)))
(inst mov not-rax 1)
(zeroize rax-tn)
(inst cmpxchg (make-ea-for-symbol-value *tls-index-lock*)
not-rax :lock)
(inst jmp :ne get-tls-index-lock))
;; The symbol is now in OTHER.
(inst pop other)
;; Now with the lock held, see if the symbol's tls index has been
;; set in the meantime.
(loadw target other symbol-tls-index-slot other-pointer-lowtag)
(inst test target target)
(inst jmp :ne release-tls-index-lock)
;; Allocate a new tls-index.
(load-symbol-value target *free-tls-index*)
(let ((not-error (gen-label))
(error (generate-error-code nil 'tls-exhausted-error)))
(inst cmp target (ash tls-size word-shift))
(inst jmp :l not-error)
(%clear-pseudo-atomic)
(inst jmp error)
(emit-label not-error))
(inst add (make-ea-for-symbol-value *free-tls-index*)
n-word-bytes)
(storew target other symbol-tls-index-slot other-pointer-lowtag)
(emit-label release-tls-index-lock)
;; No need for barriers on x86/x86-64 on unlock.
(store-symbol-value 0 *tls-index-lock*)
;; Restore OTHER.
(inst pop other))
(inst ret))))))
(def rax)
(def rcx)
(def rdx)
(def rbx)
(def rsi)
(def rdi)
(def r8)
(def r9)
(def r10)
(def r12)
(def r13)
(def r14)
(def r15))
| null | https://raw.githubusercontent.com/nikodemus/SBCL/3c11847d1e12db89b24a7887b18a137c45ed4661/src/assembly/x86-64/alloc.lisp | lisp | allocating simple objects
more information.
public domain. The software is in the public domain and is
provided with absolutely no warranty. See the COPYING and CREDITS
files for more information.
Signed and unsigned bignums from word-sized integers. Argument
and return in the same register. No VOPs, as these are only used
as out-of-line versions: MOVE-FROM-[UN]SIGNED VOPs handle the
fixnum cases inline.
these are out-of-line versions called by VOPs.
The symbol is now in OTHER.
Now with the lock held, see if the symbol's tls index has been
set in the meantime.
Allocate a new tls-index.
No need for barriers on x86/x86-64 on unlock.
Restore OTHER. |
This software is part of the SBCL system . See the README file for
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
(in-package "SB!VM")
# + SB - ASSEMBLING as we do n't need , just the asm routines :
#+sb-assembling
(macrolet
((def (reg)
`(define-assembly-routine (,(symbolicate "ALLOC-SIGNED-BIGNUM-IN-" reg))
((:temp number unsigned-reg ,(symbolicate reg "-OFFSET")))
(inst push number)
(with-fixed-allocation (number bignum-widetag (+ bignum-digits-offset 1))
(popw number bignum-digits-offset other-pointer-lowtag))
(inst ret))))
(def rax)
(def rcx)
(def rdx)
(def rbx)
(def rsi)
(def rdi)
(def r8)
(def r9)
(def r10)
(def r12)
(def r13)
(def r14)
(def r15))
#+sb-assembling
(macrolet
((def (reg)
`(define-assembly-routine (,(symbolicate "ALLOC-UNSIGNED-BIGNUM-IN-" reg))
((:temp number unsigned-reg ,(symbolicate reg "-OFFSET")))
(inst push number)
(inst jmp :ns one-word-bignum)
Two word bignum
(with-fixed-allocation (number bignum-widetag (+ bignum-digits-offset 2))
(popw number bignum-digits-offset other-pointer-lowtag))
(inst ret)
ONE-WORD-BIGNUM
(with-fixed-allocation (number bignum-widetag (+ bignum-digits-offset 1))
(popw number bignum-digits-offset other-pointer-lowtag))
(inst ret))))
(def rax)
(def rcx)
(def rdx)
(def rbx)
(def rsi)
(def rdi)
(def r8)
(def r9)
(def r10)
(def r12)
(def r13)
(def r14)
(def r15))
#+sb-assembling
(macrolet
((def (reg)
(declare (ignorable reg))
#!+sb-thread
(let* ((name (intern (format nil "ALLOC-TLS-INDEX-IN-~A" reg)))
(target-offset (intern (format nil "~A-OFFSET" reg)))
(other-offset (if (eql 'rax reg)
'rcx-offset
'rax-offset)))
Symbol starts in TARGET , where the TLS - INDEX ends up in .
`(define-assembly-routine ,name
((:temp other descriptor-reg ,other-offset)
(:temp target descriptor-reg ,target-offset))
(let ((get-tls-index-lock (gen-label))
(release-tls-index-lock (gen-label)))
(pseudo-atomic
Save OTHER & push the symbol . RAX is either one of the two .
(inst push other)
(inst push target)
(emit-label get-tls-index-lock)
(let ((not-rax ,(if (eql 'rax reg) 'other 'target)))
(inst mov not-rax 1)
(zeroize rax-tn)
(inst cmpxchg (make-ea-for-symbol-value *tls-index-lock*)
not-rax :lock)
(inst jmp :ne get-tls-index-lock))
(inst pop other)
(loadw target other symbol-tls-index-slot other-pointer-lowtag)
(inst test target target)
(inst jmp :ne release-tls-index-lock)
(load-symbol-value target *free-tls-index*)
(let ((not-error (gen-label))
(error (generate-error-code nil 'tls-exhausted-error)))
(inst cmp target (ash tls-size word-shift))
(inst jmp :l not-error)
(%clear-pseudo-atomic)
(inst jmp error)
(emit-label not-error))
(inst add (make-ea-for-symbol-value *free-tls-index*)
n-word-bytes)
(storew target other symbol-tls-index-slot other-pointer-lowtag)
(emit-label release-tls-index-lock)
(store-symbol-value 0 *tls-index-lock*)
(inst pop other))
(inst ret))))))
(def rax)
(def rcx)
(def rdx)
(def rbx)
(def rsi)
(def rdi)
(def r8)
(def r9)
(def r10)
(def r12)
(def r13)
(def r14)
(def r15))
|
6e163639797e913f94a73ae4941b601163f6b3c1c7123077a884d2d719bdedb0 | nuscr/nuscr | codegen.mli | module Fstar = Fstarcodegen
module Go = Gocodegen
module Ocaml = Ocamlcodegen
| null | https://raw.githubusercontent.com/nuscr/nuscr/c268be6f40d689f0834a2c04de6bcc23d1307aea/lib/codegen/codegen.mli | ocaml | module Fstar = Fstarcodegen
module Go = Gocodegen
module Ocaml = Ocamlcodegen
| |
ddb2d279dad7640e21d9b0db511073edbc89e7f71bb8824d1bd62d9adb1ef94f | thheller/shadow-cljs | b.cljs | (ns demo.esm.b)
(def bar (str :bar "demo.esm.b/bar"))
| null | https://raw.githubusercontent.com/thheller/shadow-cljs/1a76102e05d9ec51115585fdb66087ee5fd7a3d9/src/dev/demo/esm/b.cljs | clojure | (ns demo.esm.b)
(def bar (str :bar "demo.esm.b/bar"))
| |
27066f18d0dd5560456441b135b82368ca48715c98a870e3ec22be5d7a4efe6e | herd/herdtools7 | Interpreter.ml | (****************************************************************************)
(* the diy toolsuite *)
(* *)
, University College London , UK .
, INRIA Paris - Rocquencourt , France .
(* *)
Copyright 2015 - present Institut National de Recherche en Informatique et
(* en Automatique and the authors. All rights reserved. *)
(* *)
This software is governed by the CeCILL - B license under French law and
(* abiding by the rules of distribution of free software. You can use, *)
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
(****************************************************************************)
(* Authors: *)
, University College London , UK .
(****************************************************************************)
open AST
let fatal = Error.fatal
module type S = sig
module B : Backend.S
type body = B.value list -> B.value list B.m
type primitive = (body, type_desc) func_skeleton
val run : t -> primitive list -> B.value list B.m
end
module Make (B : Backend.S) = struct
module B = B
module IMap = ASTUtils.IMap
module ISet = ASTUtils.ISet
type 'a m = 'a B.m
type body = B.value list -> B.value list m
type primitive = (body, type_desc) func_skeleton
let ( let* ) = B.bind_data
let ( and* ) = B.prod
let return = B.return
let prod_map f =
let one acc elt =
let* v = f elt and* li = acc in
return (v :: li)
in
fun li ->
let* li = List.fold_left one (return []) li in
return (List.rev li)
let list_index f =
let rec aux i = function
| [] -> None
| h :: t -> if f h then Some i else aux (i + 1) t
in
aux 0
(*****************************************************************************)
(* *)
(* Records handling *)
(* *)
(*****************************************************************************)
let make_record ty fields =
let ty_fields =
match ty with
| T_Record ty_fields -> ASTUtils.canonical_fields ty_fields
| _ -> assert false
in
let fields = ASTUtils.canonical_fields fields in
let values = List.map snd fields in
let eq_field (x, _) (y, _) = String.equal x y in
if
List.compare_lengths ty_fields fields == 0
&& List.for_all2 eq_field ty_fields fields
then B.create_vector ty values
else fatal @@ Error.BadFields (List.map fst fields, ty)
let record_index_of_field x li =
match list_index (fun (y, _) -> String.equal x y) li with
| Some i -> i
| None -> fatal @@ Error.BadField (x, T_Record li)
(*****************************************************************************)
(* *)
(* Global constants environment *)
(* *)
(*****************************************************************************)
type func = Func of int ref * AST.func | Primitive of primitive
type genv = { consts : value IMap.t; funcs : func IMap.t }
type lenv = B.value IMap.t
type env = genv * lenv
let add_primitives primitives funcs =
let one_primitive primitive = (primitive.name, Primitive primitive) in
primitives |> List.to_seq |> Seq.map one_primitive
|> Fun.flip IMap.add_seq funcs
(*****************************************************************************)
(* *)
(* Type annotations handling *)
(* *)
(*****************************************************************************)
let type_of_ta = function
| TA_None -> fatal Error.TypeInferenceNeeded
| TA_InferredStructure ty -> ty
let type_annotation ast sfuncs =
let add_fake_primitives =
let fake_funcs =
let one_sfunc { name; args; return_type; _ } =
let one_arg i ty = ("arg" ^ string_of_int i, ty) in
let args = List.mapi one_arg args in
D_Func { name; args; body = S_Pass; return_type }
in
List.map one_sfunc sfuncs
in
List.rev_append fake_funcs
in
let remove_fake_primitives =
let primitive_names =
let one_sfunc { name; _ } = name in
sfuncs |> List.to_seq |> Seq.map one_sfunc |> ASTUtils.ISet.of_seq
in
let is_primitive = function
| D_Func AST.{ name; _ } -> not (ASTUtils.ISet.mem name primitive_names)
| _ -> true
in
List.filter is_primitive
in
ast |> add_fake_primitives |> Typing.annotate_ast |> remove_fake_primitives
(*****************************************************************************)
(* *)
(* Construction of the initial env *)
(* *)
(*****************************************************************************)
let build_enums (ast : t) globals =
let build_one (counter, globals) name =
let globals = IMap.add name (V_Int counter) globals in
(counter + 1, globals)
in
let build_decl acc = function
| D_TypeDecl (_name, T_Enum ids) -> List.fold_left build_one acc ids
| _ -> acc
in
let _, genv = List.fold_left build_decl (0, globals) ast in
genv
type build_status = NotYetEvaluated of expr | AlreadyEvaluated of value
(* build every constant and make an global env *)
let build_consts (ast : t) globals =
In the following , acc is the current status of evaluation , i.e. it maps
every global variable to either its build_status , that is its value if
it has been evaluated , or its expression otherwise . This is why we have
to use it every time we could use a variable .
every global variable to either its build_status, that is its value if
it has been evaluated, or its expression otherwise. This is why we have
to use it every time we could use a variable. *)
let acc =
let one_decl = function
| D_GlobalConst (name, _ty, e) -> Some (name, NotYetEvaluated e)
| _ -> None
in
let add_decls =
ast |> List.to_seq |> Seq.filter_map one_decl |> IMap.add_seq
in
let one_glob v = AlreadyEvaluated v in
globals |> IMap.map one_glob |> add_decls |> ref
in
let rec env_lookup name =
match IMap.find_opt name !acc with
| Some (AlreadyEvaluated v) -> v
| Some (NotYetEvaluated e) ->
let v = eval_expr e in
acc := IMap.add name (AlreadyEvaluated v) !acc;
v
| None -> fatal @@ Error.UndefinedIdentifier name
and eval_expr e = StaticInterpreter.static_eval env_lookup e in
let one_decl = function
| D_GlobalConst (name, _, _) -> Some (name, env_lookup name)
| _ -> None
in
ast |> List.to_seq |> Seq.filter_map one_decl |> IMap.of_seq
let build_funcs ast funcs =
List.to_seq ast
|> Seq.filter_map (function
| D_Func func -> Some (func.name, Func (ref 0, func))
| _ -> None)
|> Fun.flip IMap.add_seq funcs
(*****************************************************************************)
(* *)
(* Main interpretation functions *)
(* *)
(*****************************************************************************)
type eval_res = Returning of B.value list | Continuing of lenv
let continue ((_genv, lenv) : env) = return (Continuing lenv)
let one_return_value name = function
| [ v ] -> return v
| _ -> fatal @@ Error.MismatchedReturnValue name
let lexpr_is_var = function LE_Var _ | LE_Ignore -> true | _ -> false
let rec eval_expr (env : env) scope is_data =
let genv, lenv = env in
function
| E_Literal v -> B.v_of_parsed_v v |> return
| E_Var x -> (
match IMap.find_opt x genv.consts with
| Some v -> B.v_of_parsed_v v |> return
| None -> (
match IMap.find_opt x lenv with
| Some v ->
let* () = B.on_read_identifier x scope v in
return v
| None -> fatal @@ Error.UndefinedIdentifier x))
| E_Binop (op, e1, e2) ->
let* v1 = eval_expr env scope is_data e1
and* v2 = eval_expr env scope is_data e2 in
B.binop op v1 v2
| E_Unop (op, e) ->
let* v = eval_expr env scope is_data e in
B.unop op v
| E_Cond (e1, e2, e3) ->
let eval_ = eval_expr env scope is_data in
B.choice (eval_ e1) (eval_ e2) (eval_ e3)
| E_Slice (e, slices) ->
let positions = eval_slices env slices in
let* v = eval_expr env scope is_data e in
B.read_from_bitvector positions v
| E_Call (name, args) ->
let vargs = List.map (eval_expr env scope is_data) args in
let* returned = eval_func genv name vargs in
one_return_value name returned
| E_Record (_, li, ta) ->
let one_field (x, e) =
let* v = eval_expr env scope is_data e in
return (x, v)
in
let* fields = prod_map one_field li in
make_record (type_of_ta ta) fields
| E_GetField (e, x, ta) -> (
match type_of_ta ta with
| T_Record li ->
let i = record_index_of_field x li in
let* vec = eval_expr env scope is_data e in
B.get_i i vec
| T_Bits (_, Some fields) as ty -> (
match List.find_opt (fun (_, y) -> String.equal x y) fields with
| Some (slices, _) ->
eval_expr env scope is_data (E_Slice (e, slices))
| None -> fatal @@ Error.BadField (x, ty))
| ty -> fatal @@ Error.BadField (x, ty))
| E_Concat es ->
let* values = prod_map (eval_expr env scope is_data) es in
B.concat_bitvectors values
| E_Tuple _ -> fatal @@ Error.NotYetImplemented "tuple construction"
and eval_slices (genv, _lenv) =
let si_env s =
match IMap.find_opt s genv.consts with
| Some v -> v
| None -> fatal @@ Error.UndefinedIdentifier s
in
StaticInterpreter.slices_to_positions si_env
and eval_lexpr (env : env) scope =
let genv, lenv = env in
function
| LE_Ignore -> fun _ -> return env
| LE_Var x ->
fun v ->
let* v = v in
let* () = B.on_write_identifier x scope v in
let lenv = IMap.add x v lenv in
return (genv, lenv)
| LE_Slice (le, slices) ->
let setter = eval_lexpr env scope le in
let positions = eval_slices env slices in
fun m ->
let* v = m
and* bv =
let e = ASTUtils.expr_of_lexpr le in
eval_expr env scope true e
in
B.write_to_bitvector positions v bv |> setter
| LE_SetField (le, x, ta) -> (
match type_of_ta ta with
| T_Record li ->
let setter = eval_lexpr env scope le in
let i = record_index_of_field x li in
fun m ->
let* new_v = m
and* vec =
let e = ASTUtils.expr_of_lexpr le in
eval_expr env scope true e
in
B.set_i i new_v vec |> setter
| T_Bits (_, Some fields) as ty -> (
match List.find_opt (fun (_, y) -> String.equal x y) fields with
| Some (slices, _) -> eval_lexpr env scope (LE_Slice (le, slices))
| None -> fatal @@ Error.BadField (x, ty))
| ty -> fatal @@ Error.BadField (x, ty))
| LE_TupleUnpack les ->
fun v ->
let* v = v in
let mapper (i, le) =
let setter = eval_lexpr env scope le in
let w = B.get_i i v in
setter w
in
let* envs = prod_map mapper (List.mapi (fun i le -> (i, le)) les) in
let on_conflict _x v1 _v2 = Some v1 in
(* TODO: handle union of genv *)
let folder lenv (_genv, lenv2) = IMap.union on_conflict lenv lenv2 in
let lenv = List.fold_left folder lenv envs in
return (fst env, lenv)
and multi_assign env scope les values =
if List.compare_lengths les values != 0 then
fatal
@@ Error.BadArity
("tuple construction", List.length les, List.length values)
else
let mapper x v =
let x =
match x with LE_Var x -> x | LE_Ignore -> "-" | _ -> assert false
in
let* () = B.on_write_identifier x scope v in
return (x, v)
in
let assignments = List.map2 mapper les values in
let* assignments = prod_map Fun.id assignments in
let add_to_lenv lenv (x, v) = IMap.add x v lenv in
let lenv = List.fold_left add_to_lenv (snd env) assignments in
continue (fst env, lenv)
and eval_stmt (env : env) scope = function
| S_Pass -> continue env
| S_Assign (LE_TupleUnpack les, E_Call (name, args))
when List.for_all lexpr_is_var les ->
let vargs = List.map (eval_expr env scope true) args in
let* returned_values = eval_func (fst env) name vargs in
multi_assign env scope les returned_values
| S_Assign (LE_TupleUnpack les, E_Tuple exprs)
when List.for_all lexpr_is_var les ->
let* values = prod_map (eval_expr env scope true) exprs in
multi_assign env scope les values
| S_Assign (le, e) ->
let v = eval_expr env scope true e
and setter = eval_lexpr env scope le in
let* env = setter v in
continue env
| S_Return (Some (E_Tuple es)) ->
let* vs = prod_map (eval_expr env scope true) es in
return (Returning vs)
| S_Return (Some e) ->
let* v = eval_expr env scope true e in
return (Returning [ v ])
| S_Return None -> return (Returning [])
| S_Then (s1, s2) ->
B.bind_seq (eval_stmt env scope s1) (fun r1 ->
match r1 with
| Continuing lenv -> eval_stmt (fst env, lenv) scope s2
| Returning vs -> return (Returning vs))
| S_Call (name, args) ->
let vargs = List.map (eval_expr env scope true) args in
let* _ = eval_func (fst env) name vargs in
continue env
| S_Cond (e, s1, s2) ->
let* s =
B.choice (eval_expr env scope true e) (return s1) (return s2)
in
eval_stmt env scope s
| S_Case (e, cases) -> ASTUtils.case_to_conds e cases |> eval_stmt env scope
| S_Assert e ->
let v = eval_expr env scope true e in
let* b = B.choice v (return true) (return false) in
if b then continue env else fatal @@ Error.AssertionFailed e
and eval_func (genv : genv) name (args : B.value m list) : B.value list m =
match IMap.find_opt name genv.funcs with
| None -> fatal @@ Error.UndefinedIdentifier name
| Some (Primitive { body; _ }) ->
let* args = prod_map Fun.id args in
body args
| Some (Func (_, { args = arg_decls; _ }))
when List.compare_lengths args arg_decls <> 0 ->
fatal @@ Error.BadArity (name, List.length arg_decls, List.length args)
| Some (Func (r, { args = arg_decls; body; _ })) -> (
let scope = (name, !r) in
let () = r := !r + 1 in
let one_arg (x, _type_desc) v =
let* v = v in
let* () = B.on_write_identifier x scope v in
return (x, v)
in
let* bindings = List.map2 one_arg arg_decls args |> prod_map Fun.id in
let lenv = List.to_seq bindings |> IMap.of_seq in
let* res = eval_stmt (genv, lenv) scope body in
match res with Continuing _ -> return [] | Returning vs -> return vs)
let run (ast : t) primitives : B.value list m =
let ast = type_annotation ast primitives in
let funcs = IMap.empty |> build_funcs ast |> add_primitives primitives in
let consts = IMap.empty |> build_enums ast |> build_consts ast in
eval_func { consts; funcs } "main" []
end
| null | https://raw.githubusercontent.com/herd/herdtools7/daedd7431cb00884afea2ec39749222e2c367c6b/asllib/Interpreter.ml | ocaml | **************************************************************************
the diy toolsuite
en Automatique and the authors. All rights reserved.
abiding by the rules of distribution of free software. You can use,
**************************************************************************
Authors:
**************************************************************************
***************************************************************************
Records handling
***************************************************************************
***************************************************************************
Global constants environment
***************************************************************************
***************************************************************************
Type annotations handling
***************************************************************************
***************************************************************************
Construction of the initial env
***************************************************************************
build every constant and make an global env
***************************************************************************
Main interpretation functions
***************************************************************************
TODO: handle union of genv | , University College London , UK .
, INRIA Paris - Rocquencourt , France .
Copyright 2015 - present Institut National de Recherche en Informatique et
This software is governed by the CeCILL - B license under French law and
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
, University College London , UK .
open AST
let fatal = Error.fatal
module type S = sig
module B : Backend.S
type body = B.value list -> B.value list B.m
type primitive = (body, type_desc) func_skeleton
val run : t -> primitive list -> B.value list B.m
end
module Make (B : Backend.S) = struct
module B = B
module IMap = ASTUtils.IMap
module ISet = ASTUtils.ISet
type 'a m = 'a B.m
type body = B.value list -> B.value list m
type primitive = (body, type_desc) func_skeleton
let ( let* ) = B.bind_data
let ( and* ) = B.prod
let return = B.return
let prod_map f =
let one acc elt =
let* v = f elt and* li = acc in
return (v :: li)
in
fun li ->
let* li = List.fold_left one (return []) li in
return (List.rev li)
let list_index f =
let rec aux i = function
| [] -> None
| h :: t -> if f h then Some i else aux (i + 1) t
in
aux 0
let make_record ty fields =
let ty_fields =
match ty with
| T_Record ty_fields -> ASTUtils.canonical_fields ty_fields
| _ -> assert false
in
let fields = ASTUtils.canonical_fields fields in
let values = List.map snd fields in
let eq_field (x, _) (y, _) = String.equal x y in
if
List.compare_lengths ty_fields fields == 0
&& List.for_all2 eq_field ty_fields fields
then B.create_vector ty values
else fatal @@ Error.BadFields (List.map fst fields, ty)
let record_index_of_field x li =
match list_index (fun (y, _) -> String.equal x y) li with
| Some i -> i
| None -> fatal @@ Error.BadField (x, T_Record li)
type func = Func of int ref * AST.func | Primitive of primitive
type genv = { consts : value IMap.t; funcs : func IMap.t }
type lenv = B.value IMap.t
type env = genv * lenv
let add_primitives primitives funcs =
let one_primitive primitive = (primitive.name, Primitive primitive) in
primitives |> List.to_seq |> Seq.map one_primitive
|> Fun.flip IMap.add_seq funcs
let type_of_ta = function
| TA_None -> fatal Error.TypeInferenceNeeded
| TA_InferredStructure ty -> ty
let type_annotation ast sfuncs =
let add_fake_primitives =
let fake_funcs =
let one_sfunc { name; args; return_type; _ } =
let one_arg i ty = ("arg" ^ string_of_int i, ty) in
let args = List.mapi one_arg args in
D_Func { name; args; body = S_Pass; return_type }
in
List.map one_sfunc sfuncs
in
List.rev_append fake_funcs
in
let remove_fake_primitives =
let primitive_names =
let one_sfunc { name; _ } = name in
sfuncs |> List.to_seq |> Seq.map one_sfunc |> ASTUtils.ISet.of_seq
in
let is_primitive = function
| D_Func AST.{ name; _ } -> not (ASTUtils.ISet.mem name primitive_names)
| _ -> true
in
List.filter is_primitive
in
ast |> add_fake_primitives |> Typing.annotate_ast |> remove_fake_primitives
let build_enums (ast : t) globals =
let build_one (counter, globals) name =
let globals = IMap.add name (V_Int counter) globals in
(counter + 1, globals)
in
let build_decl acc = function
| D_TypeDecl (_name, T_Enum ids) -> List.fold_left build_one acc ids
| _ -> acc
in
let _, genv = List.fold_left build_decl (0, globals) ast in
genv
type build_status = NotYetEvaluated of expr | AlreadyEvaluated of value
let build_consts (ast : t) globals =
In the following , acc is the current status of evaluation , i.e. it maps
every global variable to either its build_status , that is its value if
it has been evaluated , or its expression otherwise . This is why we have
to use it every time we could use a variable .
every global variable to either its build_status, that is its value if
it has been evaluated, or its expression otherwise. This is why we have
to use it every time we could use a variable. *)
let acc =
let one_decl = function
| D_GlobalConst (name, _ty, e) -> Some (name, NotYetEvaluated e)
| _ -> None
in
let add_decls =
ast |> List.to_seq |> Seq.filter_map one_decl |> IMap.add_seq
in
let one_glob v = AlreadyEvaluated v in
globals |> IMap.map one_glob |> add_decls |> ref
in
let rec env_lookup name =
match IMap.find_opt name !acc with
| Some (AlreadyEvaluated v) -> v
| Some (NotYetEvaluated e) ->
let v = eval_expr e in
acc := IMap.add name (AlreadyEvaluated v) !acc;
v
| None -> fatal @@ Error.UndefinedIdentifier name
and eval_expr e = StaticInterpreter.static_eval env_lookup e in
let one_decl = function
| D_GlobalConst (name, _, _) -> Some (name, env_lookup name)
| _ -> None
in
ast |> List.to_seq |> Seq.filter_map one_decl |> IMap.of_seq
let build_funcs ast funcs =
List.to_seq ast
|> Seq.filter_map (function
| D_Func func -> Some (func.name, Func (ref 0, func))
| _ -> None)
|> Fun.flip IMap.add_seq funcs
type eval_res = Returning of B.value list | Continuing of lenv
let continue ((_genv, lenv) : env) = return (Continuing lenv)
let one_return_value name = function
| [ v ] -> return v
| _ -> fatal @@ Error.MismatchedReturnValue name
let lexpr_is_var = function LE_Var _ | LE_Ignore -> true | _ -> false
let rec eval_expr (env : env) scope is_data =
let genv, lenv = env in
function
| E_Literal v -> B.v_of_parsed_v v |> return
| E_Var x -> (
match IMap.find_opt x genv.consts with
| Some v -> B.v_of_parsed_v v |> return
| None -> (
match IMap.find_opt x lenv with
| Some v ->
let* () = B.on_read_identifier x scope v in
return v
| None -> fatal @@ Error.UndefinedIdentifier x))
| E_Binop (op, e1, e2) ->
let* v1 = eval_expr env scope is_data e1
and* v2 = eval_expr env scope is_data e2 in
B.binop op v1 v2
| E_Unop (op, e) ->
let* v = eval_expr env scope is_data e in
B.unop op v
| E_Cond (e1, e2, e3) ->
let eval_ = eval_expr env scope is_data in
B.choice (eval_ e1) (eval_ e2) (eval_ e3)
| E_Slice (e, slices) ->
let positions = eval_slices env slices in
let* v = eval_expr env scope is_data e in
B.read_from_bitvector positions v
| E_Call (name, args) ->
let vargs = List.map (eval_expr env scope is_data) args in
let* returned = eval_func genv name vargs in
one_return_value name returned
| E_Record (_, li, ta) ->
let one_field (x, e) =
let* v = eval_expr env scope is_data e in
return (x, v)
in
let* fields = prod_map one_field li in
make_record (type_of_ta ta) fields
| E_GetField (e, x, ta) -> (
match type_of_ta ta with
| T_Record li ->
let i = record_index_of_field x li in
let* vec = eval_expr env scope is_data e in
B.get_i i vec
| T_Bits (_, Some fields) as ty -> (
match List.find_opt (fun (_, y) -> String.equal x y) fields with
| Some (slices, _) ->
eval_expr env scope is_data (E_Slice (e, slices))
| None -> fatal @@ Error.BadField (x, ty))
| ty -> fatal @@ Error.BadField (x, ty))
| E_Concat es ->
let* values = prod_map (eval_expr env scope is_data) es in
B.concat_bitvectors values
| E_Tuple _ -> fatal @@ Error.NotYetImplemented "tuple construction"
and eval_slices (genv, _lenv) =
let si_env s =
match IMap.find_opt s genv.consts with
| Some v -> v
| None -> fatal @@ Error.UndefinedIdentifier s
in
StaticInterpreter.slices_to_positions si_env
and eval_lexpr (env : env) scope =
let genv, lenv = env in
function
| LE_Ignore -> fun _ -> return env
| LE_Var x ->
fun v ->
let* v = v in
let* () = B.on_write_identifier x scope v in
let lenv = IMap.add x v lenv in
return (genv, lenv)
| LE_Slice (le, slices) ->
let setter = eval_lexpr env scope le in
let positions = eval_slices env slices in
fun m ->
let* v = m
and* bv =
let e = ASTUtils.expr_of_lexpr le in
eval_expr env scope true e
in
B.write_to_bitvector positions v bv |> setter
| LE_SetField (le, x, ta) -> (
match type_of_ta ta with
| T_Record li ->
let setter = eval_lexpr env scope le in
let i = record_index_of_field x li in
fun m ->
let* new_v = m
and* vec =
let e = ASTUtils.expr_of_lexpr le in
eval_expr env scope true e
in
B.set_i i new_v vec |> setter
| T_Bits (_, Some fields) as ty -> (
match List.find_opt (fun (_, y) -> String.equal x y) fields with
| Some (slices, _) -> eval_lexpr env scope (LE_Slice (le, slices))
| None -> fatal @@ Error.BadField (x, ty))
| ty -> fatal @@ Error.BadField (x, ty))
| LE_TupleUnpack les ->
fun v ->
let* v = v in
let mapper (i, le) =
let setter = eval_lexpr env scope le in
let w = B.get_i i v in
setter w
in
let* envs = prod_map mapper (List.mapi (fun i le -> (i, le)) les) in
let on_conflict _x v1 _v2 = Some v1 in
let folder lenv (_genv, lenv2) = IMap.union on_conflict lenv lenv2 in
let lenv = List.fold_left folder lenv envs in
return (fst env, lenv)
and multi_assign env scope les values =
if List.compare_lengths les values != 0 then
fatal
@@ Error.BadArity
("tuple construction", List.length les, List.length values)
else
let mapper x v =
let x =
match x with LE_Var x -> x | LE_Ignore -> "-" | _ -> assert false
in
let* () = B.on_write_identifier x scope v in
return (x, v)
in
let assignments = List.map2 mapper les values in
let* assignments = prod_map Fun.id assignments in
let add_to_lenv lenv (x, v) = IMap.add x v lenv in
let lenv = List.fold_left add_to_lenv (snd env) assignments in
continue (fst env, lenv)
and eval_stmt (env : env) scope = function
| S_Pass -> continue env
| S_Assign (LE_TupleUnpack les, E_Call (name, args))
when List.for_all lexpr_is_var les ->
let vargs = List.map (eval_expr env scope true) args in
let* returned_values = eval_func (fst env) name vargs in
multi_assign env scope les returned_values
| S_Assign (LE_TupleUnpack les, E_Tuple exprs)
when List.for_all lexpr_is_var les ->
let* values = prod_map (eval_expr env scope true) exprs in
multi_assign env scope les values
| S_Assign (le, e) ->
let v = eval_expr env scope true e
and setter = eval_lexpr env scope le in
let* env = setter v in
continue env
| S_Return (Some (E_Tuple es)) ->
let* vs = prod_map (eval_expr env scope true) es in
return (Returning vs)
| S_Return (Some e) ->
let* v = eval_expr env scope true e in
return (Returning [ v ])
| S_Return None -> return (Returning [])
| S_Then (s1, s2) ->
B.bind_seq (eval_stmt env scope s1) (fun r1 ->
match r1 with
| Continuing lenv -> eval_stmt (fst env, lenv) scope s2
| Returning vs -> return (Returning vs))
| S_Call (name, args) ->
let vargs = List.map (eval_expr env scope true) args in
let* _ = eval_func (fst env) name vargs in
continue env
| S_Cond (e, s1, s2) ->
let* s =
B.choice (eval_expr env scope true e) (return s1) (return s2)
in
eval_stmt env scope s
| S_Case (e, cases) -> ASTUtils.case_to_conds e cases |> eval_stmt env scope
| S_Assert e ->
let v = eval_expr env scope true e in
let* b = B.choice v (return true) (return false) in
if b then continue env else fatal @@ Error.AssertionFailed e
and eval_func (genv : genv) name (args : B.value m list) : B.value list m =
match IMap.find_opt name genv.funcs with
| None -> fatal @@ Error.UndefinedIdentifier name
| Some (Primitive { body; _ }) ->
let* args = prod_map Fun.id args in
body args
| Some (Func (_, { args = arg_decls; _ }))
when List.compare_lengths args arg_decls <> 0 ->
fatal @@ Error.BadArity (name, List.length arg_decls, List.length args)
| Some (Func (r, { args = arg_decls; body; _ })) -> (
let scope = (name, !r) in
let () = r := !r + 1 in
let one_arg (x, _type_desc) v =
let* v = v in
let* () = B.on_write_identifier x scope v in
return (x, v)
in
let* bindings = List.map2 one_arg arg_decls args |> prod_map Fun.id in
let lenv = List.to_seq bindings |> IMap.of_seq in
let* res = eval_stmt (genv, lenv) scope body in
match res with Continuing _ -> return [] | Returning vs -> return vs)
let run (ast : t) primitives : B.value list m =
let ast = type_annotation ast primitives in
let funcs = IMap.empty |> build_funcs ast |> add_primitives primitives in
let consts = IMap.empty |> build_enums ast |> build_consts ast in
eval_func { consts; funcs } "main" []
end
|
d9d37af470fe0dd36ba9c58a2b8de290034479e94a660a9e47fac3ec0f998126 | bburdette/chordster | OscDests.hs | module Handler.OscDests where
import Import
import qualified Data.Text as T
oscDestForm :: Maybe OSCDest -> Form OSCDest
oscDestForm mbod = renderTable $ OSCDest
<$> areq textField "Name" (oSCDestName <$> mbod)
<*> areq textField "IP" (oSCDestIp <$> mbod)
<*> areq intField "Port" (oSCDestPort <$> mbod)
<*> areq (selectFieldList [(T.pack "chords", T.pack "chords"),(T.pack "lights",T.pack "lights")]) "Type" (oSCDestType <$> mbod)
getOscDestsR :: Handler Html
getOscDestsR = do
dests <- runDB $ selectList [] []
(widg,etype) <- generateFormPost $ oscDestForm Nothing
defaultLayout $ [whamlet|
<h1> OSC destinations
<table>
<tr>
<th>Name
<th>IP
<th>Port
<th>Type
$forall Entity odid od <- dests
<tr>
<td>
#{oSCDestName od}
<td>
#{show $ oSCDestIp od}
<td>
#{oSCDestPort od}
<td>
#{oSCDestType od}
<td>
<a href=@{DeleteOscDestR odid}> delete
<form method=post entype=#{etype}>
^{widg}
<input type=submit value="add dest">
|]
postOscDestsR :: Handler Html
postOscDestsR = do
((res,_),_) <- runFormPost $ oscDestForm Nothing
case res of
FormSuccess oscdest -> do
_ <- runDB $ insert oscdest
redirect OscDestsR
_ -> error "insert fail"
| null | https://raw.githubusercontent.com/bburdette/chordster/70d235f1ca379e5ecd4a8f39dc1e734e2d50978b/Handler/OscDests.hs | haskell | module Handler.OscDests where
import Import
import qualified Data.Text as T
oscDestForm :: Maybe OSCDest -> Form OSCDest
oscDestForm mbod = renderTable $ OSCDest
<$> areq textField "Name" (oSCDestName <$> mbod)
<*> areq textField "IP" (oSCDestIp <$> mbod)
<*> areq intField "Port" (oSCDestPort <$> mbod)
<*> areq (selectFieldList [(T.pack "chords", T.pack "chords"),(T.pack "lights",T.pack "lights")]) "Type" (oSCDestType <$> mbod)
getOscDestsR :: Handler Html
getOscDestsR = do
dests <- runDB $ selectList [] []
(widg,etype) <- generateFormPost $ oscDestForm Nothing
defaultLayout $ [whamlet|
<h1> OSC destinations
<table>
<tr>
<th>Name
<th>IP
<th>Port
<th>Type
$forall Entity odid od <- dests
<tr>
<td>
#{oSCDestName od}
<td>
#{show $ oSCDestIp od}
<td>
#{oSCDestPort od}
<td>
#{oSCDestType od}
<td>
<a href=@{DeleteOscDestR odid}> delete
<form method=post entype=#{etype}>
^{widg}
<input type=submit value="add dest">
|]
postOscDestsR :: Handler Html
postOscDestsR = do
((res,_),_) <- runFormPost $ oscDestForm Nothing
case res of
FormSuccess oscdest -> do
_ <- runDB $ insert oscdest
redirect OscDestsR
_ -> error "insert fail"
| |
4b32132eb31877ab4a79355f979634fcf8cb5b2b2dfd3c4df80f9411138ae4b4 | 3b/3bgl-misc | state-helper.lisp | (in-package #:scenegraph)
(defparameter *v* nil)
(defclass scenegraph-state-helper ()
())
(defmethod basecode::run-main-loop :around ((w scenegraph-state-helper))
(let ((*runtime-values-cache* (make-hash-table))
(*known-states* (make-hash-table :test #'equalp))
(*state-defaults* (make-hash-table)))
(setf *v* (list :cache *runtime-values-cache*
:known *known-states*
:defaults *state-defaults*))
(init-defaults *state-defaults*)
(call-next-method)))
| null | https://raw.githubusercontent.com/3b/3bgl-misc/e3bf2781d603feb6b44e5c4ec20f06225648ffd9/scenegraph/state-helper.lisp | lisp | (in-package #:scenegraph)
(defparameter *v* nil)
(defclass scenegraph-state-helper ()
())
(defmethod basecode::run-main-loop :around ((w scenegraph-state-helper))
(let ((*runtime-values-cache* (make-hash-table))
(*known-states* (make-hash-table :test #'equalp))
(*state-defaults* (make-hash-table)))
(setf *v* (list :cache *runtime-values-cache*
:known *known-states*
:defaults *state-defaults*))
(init-defaults *state-defaults*)
(call-next-method)))
| |
fa8868daf881e11cedba454ca70a812c5b3f4c67a43f384e8fed702e61a5b827 | tonsky/Clojure-Sublimed | stacktraces.cljc | (ns stacktraces)
(defn f []
(throw (ex-info "Error" {:data :data})))
(defn g []
(f))
(defn h []
(g))
(g)
(h)
(/ 1 0)
(meta #'h)
(try
(h)
(catch Exception e
(with-out-str
(.printStackTrace e (java.io.PrintWriter. *out*)))))
(defn -main [& args]
(h))
| null | https://raw.githubusercontent.com/tonsky/Clojure-Sublimed/5ff0e6126adce5cbca4fa6b7f69398834bba8544/test_repl/stacktraces.cljc | clojure | (ns stacktraces)
(defn f []
(throw (ex-info "Error" {:data :data})))
(defn g []
(f))
(defn h []
(g))
(g)
(h)
(/ 1 0)
(meta #'h)
(try
(h)
(catch Exception e
(with-out-str
(.printStackTrace e (java.io.PrintWriter. *out*)))))
(defn -main [& args]
(h))
| |
ebe37dc9f0b717140fc05d6117e7472e7abb3471d88b57e090b407c82c2b010e | nikita-volkov/graph-db | Log.hs | # LANGUAGE UndecidableInstances #
module GraphDB.Persistent.Log where
import GraphDB.Util.Prelude
import qualified GraphDB.Action as A
import qualified GraphDB.Graph as G
import qualified GraphDB.Util.FileSystem
import qualified GraphDB.Util.DIOVector as V
-- * Log
-------------------------
type Log s = [Entry s]
-- |
-- A serializable representation of a granular transaction action.
-- Essential for persistence.
data Entry s =
GetRoot |
NewNode (G.Value s) |
GetTargets Node (G.Index s) |
AddTarget Node Node |
RemoveTarget Node Node |
Remove Node |
SetValue Node (G.Value s)
deriving (Generic)
instance (Serializable m (G.Value s), Serializable m (G.Index s)) => Serializable m (Entry s)
type Node = Int
-- * Action
-------------------------
toAction :: MonadIO m => Log s -> A.Action n (G.Value s) (G.Index s) m ()
toAction log = do
refs <- liftIO $ V.new
let
appendRef = liftIO . void . V.append refs
resolveRef = liftIO . V.unsafeLookup refs
applyEntry = \case
GetRoot -> A.getRoot >>= appendRef
NewNode v -> A.newNode v >>= appendRef
GetTargets r i -> resolveRef r >>= flip A.getTargets i >>= mapM_ appendRef
AddTarget s t -> void $ join $ A.addTarget <$> resolveRef s <*> resolveRef t
RemoveTarget s t -> void $ join $ A.removeTarget <$> resolveRef s <*> resolveRef t
Remove r -> A.remove =<< resolveRef r
SetValue r v -> void $ join $ A.setValue <$> resolveRef r <*> pure v
mapM_ applyEntry log
| null | https://raw.githubusercontent.com/nikita-volkov/graph-db/3e886f6b298d2b2b09eb94c2818a7b648f42cb0a/library/GraphDB/Persistent/Log.hs | haskell | * Log
-----------------------
|
A serializable representation of a granular transaction action.
Essential for persistence.
* Action
----------------------- | # LANGUAGE UndecidableInstances #
module GraphDB.Persistent.Log where
import GraphDB.Util.Prelude
import qualified GraphDB.Action as A
import qualified GraphDB.Graph as G
import qualified GraphDB.Util.FileSystem
import qualified GraphDB.Util.DIOVector as V
type Log s = [Entry s]
data Entry s =
GetRoot |
NewNode (G.Value s) |
GetTargets Node (G.Index s) |
AddTarget Node Node |
RemoveTarget Node Node |
Remove Node |
SetValue Node (G.Value s)
deriving (Generic)
instance (Serializable m (G.Value s), Serializable m (G.Index s)) => Serializable m (Entry s)
type Node = Int
toAction :: MonadIO m => Log s -> A.Action n (G.Value s) (G.Index s) m ()
toAction log = do
refs <- liftIO $ V.new
let
appendRef = liftIO . void . V.append refs
resolveRef = liftIO . V.unsafeLookup refs
applyEntry = \case
GetRoot -> A.getRoot >>= appendRef
NewNode v -> A.newNode v >>= appendRef
GetTargets r i -> resolveRef r >>= flip A.getTargets i >>= mapM_ appendRef
AddTarget s t -> void $ join $ A.addTarget <$> resolveRef s <*> resolveRef t
RemoveTarget s t -> void $ join $ A.removeTarget <$> resolveRef s <*> resolveRef t
Remove r -> A.remove =<< resolveRef r
SetValue r v -> void $ join $ A.setValue <$> resolveRef r <*> pure v
mapM_ applyEntry log
|
2fe4c71b9cd5ea94382f7eae1151a27cdbac083720b02821e26648faafb64272 | mmottl/gsl-ocaml | blas_gen.mli | gsl - ocaml - OCaml interface to GSL
Copyright ( © ) 2002 - 2012 - Olivier Andrieu
Distributed under the terms of the GPL version 3
type order = Blas.order =
| RowMajor
| ColMajor
type transpose = Blas.transpose =
| NoTrans
| Trans
| ConjTrans
type uplo = Blas.uplo =
| Upper
| Lower
type diag = Blas.diag =
| NonUnit
| Unit
type side = Blas.side =
| Left
| Right
open Vectmat
LEVEL 1
external dot : [< vec] -> [< vec] -> float = "ml_gsl_blas_ddot"
external nrm2 : [< vec] -> float = "ml_gsl_blas_dnrm2"
external asum : [< vec] -> float = "ml_gsl_blas_dasum"
external iamax : [< vec] -> int = "ml_gsl_blas_idamax"
external swap : [< vec] -> [< vec] -> unit = "ml_gsl_blas_dswap"
external copy : [< vec] -> [< vec] -> unit = "ml_gsl_blas_dcopy"
external axpy : float -> [< vec] -> [< vec] -> unit = "ml_gsl_blas_daxpy"
external rot : [< vec] -> [< vec] -> float -> float -> unit = "ml_gsl_blas_drot"
external scal : float -> [< vec] -> unit = "ml_gsl_blas_dscal"
LEVEL 2
external gemv : transpose -> alpha:float -> a:[< mat] ->
x:[< vec] -> beta:float -> y:[< vec] -> unit
= "ml_gsl_blas_dgemv_bc" "ml_gsl_blas_dgemv"
external trmv : uplo -> transpose -> diag ->
a:[< mat] -> x:[< vec] -> unit
= "ml_gsl_blas_dtrmv"
external trsv : uplo -> transpose -> diag ->
a:[< mat] -> x:[< vec] -> unit
= "ml_gsl_blas_dtrsv"
external symv : uplo -> alpha:float -> a:[< mat] ->
x:[< vec] -> beta:float -> y:[< vec] -> unit
= "ml_gsl_blas_dsymv_bc" "ml_gsl_blas_dsymv"
external dger : alpha:float -> x:[< vec] ->
y:[< vec] -> a:[< mat] -> unit
= "ml_gsl_blas_dger"
external syr : uplo -> alpha:float -> x:[< vec] ->
a:[< mat] -> unit
= "ml_gsl_blas_dsyr"
external syr2 : uplo -> alpha:float -> x:[< vec] ->
y:[< vec] -> a:[< mat] -> unit
= "ml_gsl_blas_dsyr2"
LEVEL 3
external gemm : ta:transpose -> tb:transpose ->
alpha:float -> a:[< mat] -> b:[< mat] ->
beta:float -> c:[< mat] -> unit
= "ml_gsl_blas_dgemm_bc" "ml_gsl_blas_dgemm"
external symm : side -> uplo ->
alpha:float -> a:[< mat] -> b:[< mat] ->
beta:float -> c:[< mat] -> unit
= "ml_gsl_blas_dsymm_bc" "ml_gsl_blas_dsymm"
external trmm : side -> uplo -> transpose -> diag ->
alpha:float -> a:[< mat] -> b:[< mat] -> unit
= "ml_gsl_blas_dtrmm_bc" "ml_gsl_blas_dtrmm"
external trsm : side -> uplo -> transpose -> diag ->
alpha:float -> a:[< mat] -> b:[< mat] -> unit
= "ml_gsl_blas_dtrsm_bc" "ml_gsl_blas_dtrsm"
external syrk : uplo -> transpose ->
alpha:float -> a:[< mat] -> beta:float -> c:[< mat] -> unit
= "ml_gsl_blas_dsyrk_bc" "ml_gsl_blas_dsyrk"
external syr2k : uplo -> transpose ->
alpha:float -> a:[< mat] -> b:[< mat] ->
beta:float -> c:[< mat] -> unit
= "ml_gsl_blas_dsyr2k_bc" "ml_gsl_blas_dsyr2k"
open Gsl_complex
module Complex :
sig
LEVEL 1
external dotu : [< cvec] -> [< cvec] -> complex = "ml_gsl_blas_zdotu"
external dotc : [< cvec] -> [< cvec] -> complex = "ml_gsl_blas_zdotc"
external nrm2 : [< cvec] -> float = "ml_gsl_blas_znrm2"
external asum : [< cvec] -> float = "ml_gsl_blas_zasum"
external iamax : [< cvec] -> int = "ml_gsl_blas_izamax"
external swap : [< cvec] -> [< cvec] -> unit = "ml_gsl_blas_zswap"
external copy : [< cvec] -> [< cvec] -> unit = "ml_gsl_blas_zcopy"
external axpy : complex -> [< cvec] -> [< cvec] -> unit = "ml_gsl_blas_zaxpy"
external scal : complex -> [< cvec] -> unit = "ml_gsl_blas_zscal"
external zdscal : float -> [< cvec] -> unit = "ml_gsl_blas_zdscal"
LEVEL 2
external gemv : transpose -> alpha:complex -> a:[< cmat] ->
x:[< cvec] -> beta:complex -> y:[< cvec] -> unit
= "ml_gsl_blas_zgemv_bc" "ml_gsl_blas_zgemv"
external trmv : uplo -> transpose -> diag ->
a:[< cmat] -> x:[< cvec] -> unit
= "ml_gsl_blas_ztrmv"
external trsv : uplo -> transpose -> diag ->
a:[< cmat] -> x:[< cvec] -> unit
= "ml_gsl_blas_ztrsv"
external hemv : uplo -> alpha:complex -> a:[< cmat] ->
x:[< cvec] -> beta:complex -> y:[< cvec] -> unit
= "ml_gsl_blas_zhemv_bc" "ml_gsl_blas_zhemv"
external geru : alpha:complex -> x:[< cvec] ->
y:[< cvec] -> a:[< cmat] -> unit
= "ml_gsl_blas_zgeru"
external gerc : alpha:complex -> x:[< cvec] ->
y:[< cvec] -> a:[< cmat] -> unit
= "ml_gsl_blas_zgerc"
external her : uplo -> alpha:float ->
x:[< cvec] -> a:[< cmat] -> unit
= "ml_gsl_blas_zher"
external her2 : uplo -> alpha:complex ->
x:[< cvec] -> y:[< cvec] -> a:[< cmat] -> unit
= "ml_gsl_blas_zher2"
LEVEL 3
external gemm : ta:transpose -> tb:transpose ->
alpha:complex -> a:[< cmat] -> b:[< cmat] ->
beta:complex -> c:[< cmat] -> unit
= "ml_gsl_blas_zgemm_bc" "ml_gsl_blas_zgemm"
external symm : side -> uplo ->
alpha:complex -> a:[< cmat] -> b:[< cmat] ->
beta:complex -> c:[< cmat] -> unit
= "ml_gsl_blas_zsymm_bc" "ml_gsl_blas_zsymm"
external syrk : uplo -> transpose ->
alpha:complex -> a:[< cmat] -> beta:complex -> c:[< cmat] -> unit
= "ml_gsl_blas_zsyrk_bc" "ml_gsl_blas_zsyrk"
external syr2k : uplo -> transpose ->
alpha:complex -> a:[< cmat] -> b:[< cmat] ->
beta:complex -> c:[< cmat] -> unit
= "ml_gsl_blas_zsyr2k_bc" "ml_gsl_blas_zsyr2k"
external trmm : side -> uplo -> transpose -> diag ->
alpha:complex -> a:[< cmat] -> b:[< cmat] -> unit
= "ml_gsl_blas_ztrmm_bc" "ml_gsl_blas_ztrmm"
external trsm : side -> uplo -> transpose -> diag ->
alpha:complex -> a:[< cmat] -> b:[< cmat] -> unit
= "ml_gsl_blas_ztrsm_bc" "ml_gsl_blas_ztrsm"
external hemm : side -> uplo -> alpha:complex ->
a:[< cmat] -> b:[< cmat] -> beta:complex -> c:[< cmat] -> unit
= "ml_gsl_blas_zhemm_bc" "ml_gsl_blas_zhemm"
external herk : uplo -> transpose -> alpha:float ->
a:[< cmat] -> beta:float -> c:[< cmat] -> unit
= "ml_gsl_blas_zherk_bc" "ml_gsl_blas_zherk"
external her2k : uplo -> transpose -> alpha:complex ->
a:[< cmat] -> b:[< cmat] -> beta:float -> c:[< cmat] -> unit
= "ml_gsl_blas_zher2k_bc" "ml_gsl_blas_zher2k"
end
| null | https://raw.githubusercontent.com/mmottl/gsl-ocaml/76f8d93cccc1f23084f4a33d3e0a8f1289450580/src/blas_gen.mli | ocaml | gsl - ocaml - OCaml interface to GSL
Copyright ( © ) 2002 - 2012 - Olivier Andrieu
Distributed under the terms of the GPL version 3
type order = Blas.order =
| RowMajor
| ColMajor
type transpose = Blas.transpose =
| NoTrans
| Trans
| ConjTrans
type uplo = Blas.uplo =
| Upper
| Lower
type diag = Blas.diag =
| NonUnit
| Unit
type side = Blas.side =
| Left
| Right
open Vectmat
LEVEL 1
external dot : [< vec] -> [< vec] -> float = "ml_gsl_blas_ddot"
external nrm2 : [< vec] -> float = "ml_gsl_blas_dnrm2"
external asum : [< vec] -> float = "ml_gsl_blas_dasum"
external iamax : [< vec] -> int = "ml_gsl_blas_idamax"
external swap : [< vec] -> [< vec] -> unit = "ml_gsl_blas_dswap"
external copy : [< vec] -> [< vec] -> unit = "ml_gsl_blas_dcopy"
external axpy : float -> [< vec] -> [< vec] -> unit = "ml_gsl_blas_daxpy"
external rot : [< vec] -> [< vec] -> float -> float -> unit = "ml_gsl_blas_drot"
external scal : float -> [< vec] -> unit = "ml_gsl_blas_dscal"
LEVEL 2
external gemv : transpose -> alpha:float -> a:[< mat] ->
x:[< vec] -> beta:float -> y:[< vec] -> unit
= "ml_gsl_blas_dgemv_bc" "ml_gsl_blas_dgemv"
external trmv : uplo -> transpose -> diag ->
a:[< mat] -> x:[< vec] -> unit
= "ml_gsl_blas_dtrmv"
external trsv : uplo -> transpose -> diag ->
a:[< mat] -> x:[< vec] -> unit
= "ml_gsl_blas_dtrsv"
external symv : uplo -> alpha:float -> a:[< mat] ->
x:[< vec] -> beta:float -> y:[< vec] -> unit
= "ml_gsl_blas_dsymv_bc" "ml_gsl_blas_dsymv"
external dger : alpha:float -> x:[< vec] ->
y:[< vec] -> a:[< mat] -> unit
= "ml_gsl_blas_dger"
external syr : uplo -> alpha:float -> x:[< vec] ->
a:[< mat] -> unit
= "ml_gsl_blas_dsyr"
external syr2 : uplo -> alpha:float -> x:[< vec] ->
y:[< vec] -> a:[< mat] -> unit
= "ml_gsl_blas_dsyr2"
LEVEL 3
external gemm : ta:transpose -> tb:transpose ->
alpha:float -> a:[< mat] -> b:[< mat] ->
beta:float -> c:[< mat] -> unit
= "ml_gsl_blas_dgemm_bc" "ml_gsl_blas_dgemm"
external symm : side -> uplo ->
alpha:float -> a:[< mat] -> b:[< mat] ->
beta:float -> c:[< mat] -> unit
= "ml_gsl_blas_dsymm_bc" "ml_gsl_blas_dsymm"
external trmm : side -> uplo -> transpose -> diag ->
alpha:float -> a:[< mat] -> b:[< mat] -> unit
= "ml_gsl_blas_dtrmm_bc" "ml_gsl_blas_dtrmm"
external trsm : side -> uplo -> transpose -> diag ->
alpha:float -> a:[< mat] -> b:[< mat] -> unit
= "ml_gsl_blas_dtrsm_bc" "ml_gsl_blas_dtrsm"
external syrk : uplo -> transpose ->
alpha:float -> a:[< mat] -> beta:float -> c:[< mat] -> unit
= "ml_gsl_blas_dsyrk_bc" "ml_gsl_blas_dsyrk"
external syr2k : uplo -> transpose ->
alpha:float -> a:[< mat] -> b:[< mat] ->
beta:float -> c:[< mat] -> unit
= "ml_gsl_blas_dsyr2k_bc" "ml_gsl_blas_dsyr2k"
open Gsl_complex
module Complex :
sig
LEVEL 1
external dotu : [< cvec] -> [< cvec] -> complex = "ml_gsl_blas_zdotu"
external dotc : [< cvec] -> [< cvec] -> complex = "ml_gsl_blas_zdotc"
external nrm2 : [< cvec] -> float = "ml_gsl_blas_znrm2"
external asum : [< cvec] -> float = "ml_gsl_blas_zasum"
external iamax : [< cvec] -> int = "ml_gsl_blas_izamax"
external swap : [< cvec] -> [< cvec] -> unit = "ml_gsl_blas_zswap"
external copy : [< cvec] -> [< cvec] -> unit = "ml_gsl_blas_zcopy"
external axpy : complex -> [< cvec] -> [< cvec] -> unit = "ml_gsl_blas_zaxpy"
external scal : complex -> [< cvec] -> unit = "ml_gsl_blas_zscal"
external zdscal : float -> [< cvec] -> unit = "ml_gsl_blas_zdscal"
LEVEL 2
external gemv : transpose -> alpha:complex -> a:[< cmat] ->
x:[< cvec] -> beta:complex -> y:[< cvec] -> unit
= "ml_gsl_blas_zgemv_bc" "ml_gsl_blas_zgemv"
external trmv : uplo -> transpose -> diag ->
a:[< cmat] -> x:[< cvec] -> unit
= "ml_gsl_blas_ztrmv"
external trsv : uplo -> transpose -> diag ->
a:[< cmat] -> x:[< cvec] -> unit
= "ml_gsl_blas_ztrsv"
external hemv : uplo -> alpha:complex -> a:[< cmat] ->
x:[< cvec] -> beta:complex -> y:[< cvec] -> unit
= "ml_gsl_blas_zhemv_bc" "ml_gsl_blas_zhemv"
external geru : alpha:complex -> x:[< cvec] ->
y:[< cvec] -> a:[< cmat] -> unit
= "ml_gsl_blas_zgeru"
external gerc : alpha:complex -> x:[< cvec] ->
y:[< cvec] -> a:[< cmat] -> unit
= "ml_gsl_blas_zgerc"
external her : uplo -> alpha:float ->
x:[< cvec] -> a:[< cmat] -> unit
= "ml_gsl_blas_zher"
external her2 : uplo -> alpha:complex ->
x:[< cvec] -> y:[< cvec] -> a:[< cmat] -> unit
= "ml_gsl_blas_zher2"
LEVEL 3
external gemm : ta:transpose -> tb:transpose ->
alpha:complex -> a:[< cmat] -> b:[< cmat] ->
beta:complex -> c:[< cmat] -> unit
= "ml_gsl_blas_zgemm_bc" "ml_gsl_blas_zgemm"
external symm : side -> uplo ->
alpha:complex -> a:[< cmat] -> b:[< cmat] ->
beta:complex -> c:[< cmat] -> unit
= "ml_gsl_blas_zsymm_bc" "ml_gsl_blas_zsymm"
external syrk : uplo -> transpose ->
alpha:complex -> a:[< cmat] -> beta:complex -> c:[< cmat] -> unit
= "ml_gsl_blas_zsyrk_bc" "ml_gsl_blas_zsyrk"
external syr2k : uplo -> transpose ->
alpha:complex -> a:[< cmat] -> b:[< cmat] ->
beta:complex -> c:[< cmat] -> unit
= "ml_gsl_blas_zsyr2k_bc" "ml_gsl_blas_zsyr2k"
external trmm : side -> uplo -> transpose -> diag ->
alpha:complex -> a:[< cmat] -> b:[< cmat] -> unit
= "ml_gsl_blas_ztrmm_bc" "ml_gsl_blas_ztrmm"
external trsm : side -> uplo -> transpose -> diag ->
alpha:complex -> a:[< cmat] -> b:[< cmat] -> unit
= "ml_gsl_blas_ztrsm_bc" "ml_gsl_blas_ztrsm"
external hemm : side -> uplo -> alpha:complex ->
a:[< cmat] -> b:[< cmat] -> beta:complex -> c:[< cmat] -> unit
= "ml_gsl_blas_zhemm_bc" "ml_gsl_blas_zhemm"
external herk : uplo -> transpose -> alpha:float ->
a:[< cmat] -> beta:float -> c:[< cmat] -> unit
= "ml_gsl_blas_zherk_bc" "ml_gsl_blas_zherk"
external her2k : uplo -> transpose -> alpha:complex ->
a:[< cmat] -> b:[< cmat] -> beta:float -> c:[< cmat] -> unit
= "ml_gsl_blas_zher2k_bc" "ml_gsl_blas_zher2k"
end
| |
2ee18201f80d624bd765cc1bf8d7684645e74b2fc298c090fbaf7d5bacbf96cf | apache/couchdb-rebar | rebar_otp_app.erl | -*- erlang - indent - level : 4;indent - tabs - mode : nil -*-
%% ex: ts=4 sw=4 et
%% -------------------------------------------------------------------
%%
rebar : Erlang Build Tools
%%
Copyright ( c ) 2009 ( )
%%
%% Permission is hereby granted, free of charge, to any person obtaining a copy
%% of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
%%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%% THE SOFTWARE.
%% -------------------------------------------------------------------
-module(rebar_otp_app).
-export([compile/2,
clean/2]).
%% for internal use only
-export([info/2]).
-include("rebar.hrl").
%% ===================================================================
%% Public API
%% ===================================================================
compile(Config, File) ->
%% If we get an .app.src file, it needs to be pre-processed and
%% written out as a ebin/*.app file. That resulting file will then
%% be validated as usual.
{Config1, AppFile} = case rebar_app_utils:is_app_src(File) of
true ->
preprocess(Config, File);
false ->
{Config, File}
end,
%% Load the app file and validate it.
case rebar_app_utils:load_app_file(Config1, AppFile) of
{ok, Config2, AppName, AppData} ->
validate_name(AppName, AppFile),
%% In general, the list of modules is an important thing to validate
for compliance with OTP guidelines and upgrade procedures .
%% However, some people prefer not to validate this list.
case rebar_config:get_local(Config1, validate_app_modules, true) of
true ->
Modules = proplists:get_value(modules, AppData),
{validate_modules(AppName, Modules), Config2};
false ->
{ok, Config2}
end;
{error, Reason} ->
?ABORT("Failed to load app file ~s: ~p\n", [AppFile, Reason])
end.
clean(_Config, File) ->
%% If the app file is a .app.src, delete the generated .app file
case rebar_app_utils:is_app_src(File) of
true ->
case file:delete(rebar_app_utils:app_src_to_app(File)) of
ok ->
ok;
{error, enoent} ->
%% The file not existing is OK, we can ignore the error.
ok;
Other ->
Other
end;
false ->
ok
end.
%% ===================================================================
Internal functions
%% ===================================================================
info(help, compile) ->
info_help("Validate .app file");
info(help, clean) ->
info_help("Delete .app file if generated from .app.src").
info_help(Description) ->
?CONSOLE(
"~s.~n"
"~n"
"Valid rebar.config options:~n"
" ~p~n",
[
Description,
{validate_app_modules, true}
]).
preprocess(Config, AppSrcFile) ->
case rebar_app_utils:load_app_file(Config, AppSrcFile) of
{ok, Config1, AppName, AppData} ->
%% Look for a configuration file with vars we want to
%% substitute. Note that we include the list of modules available in
%% ebin/ and update the app data accordingly.
AppVars = load_app_vars(Config1) ++ [{modules, ebin_modules()}],
A1 = apply_app_vars(AppVars, AppData),
AppSrcFile may contain instructions for generating a vsn number
{Config2, Vsn} = rebar_app_utils:app_vsn(Config1, AppSrcFile),
A2 = lists:keystore(vsn, 1, A1, {vsn, Vsn}),
%% systools:make_relup/4 fails with {missing_param, registered}
%% without a 'registered' value.
A3 = ensure_registered(A2),
%% Build the final spec as a string
Spec = io_lib:format("~p.\n", [{application, AppName, A3}]),
%% Setup file .app filename and write new contents
AppFile = rebar_app_utils:app_src_to_app(AppSrcFile),
ok = rebar_file_utils:write_file_if_contents_differ(AppFile, Spec),
%% Make certain that the ebin/ directory is available
%% on the code path
true = code:add_path(filename:absname(filename:dirname(AppFile))),
{Config2, AppFile};
{error, Reason} ->
?ABORT("Failed to read ~s for preprocessing: ~p\n",
[AppSrcFile, Reason])
end.
load_app_vars(Config) ->
case rebar_config:get_local(Config, app_vars_file, undefined) of
undefined ->
?INFO("No app_vars_file defined.\n", []),
[];
Filename ->
?INFO("Loading app vars from ~p\n", [Filename]),
{ok, Vars} = file:consult(Filename),
Vars
end.
apply_app_vars([], AppData) ->
AppData;
apply_app_vars([{Key, Value} | Rest], AppData) ->
AppData2 = lists:keystore(Key, 1, AppData, {Key, Value}),
apply_app_vars(Rest, AppData2).
validate_name(AppName, File) ->
%% Convert the .app file name to an atom -- check it against the
%% identifier within the file
ExpApp = list_to_atom(filename:basename(File, ".app")),
case ExpApp == AppName of
true ->
ok;
false ->
?ERROR("Invalid ~s: name of application (~p) "
"must match filename.\n", [File, AppName]),
?FAIL
end.
validate_modules(AppName, undefined) ->
?ERROR("Missing modules declaration in ~p.app~n", [AppName]),
?FAIL;
validate_modules(AppName, Mods) ->
Construct two sets -- one for the actual .beam files in ebin/
%% and one for the modules
%% listed in the .app file
EbinSet = ordsets:from_list(ebin_modules()),
ModSet = ordsets:from_list(Mods),
%% Identify .beam files listed in the .app, but not present in ebin/
case ordsets:subtract(ModSet, EbinSet) of
[] ->
ok;
MissingBeams ->
Msg1 = lists:flatten([io_lib:format("\t* ~p\n", [M]) ||
M <- MissingBeams]),
?ERROR("One or more modules listed in ~p.app are not "
"present in ebin/*.beam:\n~s", [AppName, Msg1]),
?FAIL
end,
%% Identify .beam files NOT list in the .app, but present in ebin/
case ordsets:subtract(EbinSet, ModSet) of
[] ->
ok;
MissingMods ->
Msg2 = lists:flatten([io_lib:format("\t* ~p\n", [M]) ||
M <- MissingMods]),
?ERROR("One or more .beam files exist that are not "
"listed in ~p.app:\n~s", [AppName, Msg2]),
?FAIL
end.
ebin_modules() ->
lists:sort([rebar_utils:beam_to_mod("ebin", N) ||
N <- rebar_utils:beams("ebin")]).
ensure_registered(AppData) ->
case lists:keyfind(registered, 1, AppData) of
false ->
[{registered, []} | AppData];
{registered, _} ->
%% We could further check whether the value is a list of atoms.
AppData
end.
| null | https://raw.githubusercontent.com/apache/couchdb-rebar/8578221c20d0caa3deb724e5622a924045ffa8bf/src/rebar_otp_app.erl | erlang | ex: ts=4 sw=4 et
-------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-------------------------------------------------------------------
for internal use only
===================================================================
Public API
===================================================================
If we get an .app.src file, it needs to be pre-processed and
written out as a ebin/*.app file. That resulting file will then
be validated as usual.
Load the app file and validate it.
In general, the list of modules is an important thing to validate
However, some people prefer not to validate this list.
If the app file is a .app.src, delete the generated .app file
The file not existing is OK, we can ignore the error.
===================================================================
===================================================================
Look for a configuration file with vars we want to
substitute. Note that we include the list of modules available in
ebin/ and update the app data accordingly.
systools:make_relup/4 fails with {missing_param, registered}
without a 'registered' value.
Build the final spec as a string
Setup file .app filename and write new contents
Make certain that the ebin/ directory is available
on the code path
Convert the .app file name to an atom -- check it against the
identifier within the file
and one for the modules
listed in the .app file
Identify .beam files listed in the .app, but not present in ebin/
Identify .beam files NOT list in the .app, but present in ebin/
We could further check whether the value is a list of atoms. | -*- erlang - indent - level : 4;indent - tabs - mode : nil -*-
rebar : Erlang Build Tools
Copyright ( c ) 2009 ( )
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
-module(rebar_otp_app).
-export([compile/2,
clean/2]).
-export([info/2]).
-include("rebar.hrl").
compile(Config, File) ->
{Config1, AppFile} = case rebar_app_utils:is_app_src(File) of
true ->
preprocess(Config, File);
false ->
{Config, File}
end,
case rebar_app_utils:load_app_file(Config1, AppFile) of
{ok, Config2, AppName, AppData} ->
validate_name(AppName, AppFile),
for compliance with OTP guidelines and upgrade procedures .
case rebar_config:get_local(Config1, validate_app_modules, true) of
true ->
Modules = proplists:get_value(modules, AppData),
{validate_modules(AppName, Modules), Config2};
false ->
{ok, Config2}
end;
{error, Reason} ->
?ABORT("Failed to load app file ~s: ~p\n", [AppFile, Reason])
end.
clean(_Config, File) ->
case rebar_app_utils:is_app_src(File) of
true ->
case file:delete(rebar_app_utils:app_src_to_app(File)) of
ok ->
ok;
{error, enoent} ->
ok;
Other ->
Other
end;
false ->
ok
end.
Internal functions
info(help, compile) ->
info_help("Validate .app file");
info(help, clean) ->
info_help("Delete .app file if generated from .app.src").
info_help(Description) ->
?CONSOLE(
"~s.~n"
"~n"
"Valid rebar.config options:~n"
" ~p~n",
[
Description,
{validate_app_modules, true}
]).
preprocess(Config, AppSrcFile) ->
case rebar_app_utils:load_app_file(Config, AppSrcFile) of
{ok, Config1, AppName, AppData} ->
AppVars = load_app_vars(Config1) ++ [{modules, ebin_modules()}],
A1 = apply_app_vars(AppVars, AppData),
AppSrcFile may contain instructions for generating a vsn number
{Config2, Vsn} = rebar_app_utils:app_vsn(Config1, AppSrcFile),
A2 = lists:keystore(vsn, 1, A1, {vsn, Vsn}),
A3 = ensure_registered(A2),
Spec = io_lib:format("~p.\n", [{application, AppName, A3}]),
AppFile = rebar_app_utils:app_src_to_app(AppSrcFile),
ok = rebar_file_utils:write_file_if_contents_differ(AppFile, Spec),
true = code:add_path(filename:absname(filename:dirname(AppFile))),
{Config2, AppFile};
{error, Reason} ->
?ABORT("Failed to read ~s for preprocessing: ~p\n",
[AppSrcFile, Reason])
end.
load_app_vars(Config) ->
case rebar_config:get_local(Config, app_vars_file, undefined) of
undefined ->
?INFO("No app_vars_file defined.\n", []),
[];
Filename ->
?INFO("Loading app vars from ~p\n", [Filename]),
{ok, Vars} = file:consult(Filename),
Vars
end.
apply_app_vars([], AppData) ->
AppData;
apply_app_vars([{Key, Value} | Rest], AppData) ->
AppData2 = lists:keystore(Key, 1, AppData, {Key, Value}),
apply_app_vars(Rest, AppData2).
validate_name(AppName, File) ->
ExpApp = list_to_atom(filename:basename(File, ".app")),
case ExpApp == AppName of
true ->
ok;
false ->
?ERROR("Invalid ~s: name of application (~p) "
"must match filename.\n", [File, AppName]),
?FAIL
end.
validate_modules(AppName, undefined) ->
?ERROR("Missing modules declaration in ~p.app~n", [AppName]),
?FAIL;
validate_modules(AppName, Mods) ->
Construct two sets -- one for the actual .beam files in ebin/
EbinSet = ordsets:from_list(ebin_modules()),
ModSet = ordsets:from_list(Mods),
case ordsets:subtract(ModSet, EbinSet) of
[] ->
ok;
MissingBeams ->
Msg1 = lists:flatten([io_lib:format("\t* ~p\n", [M]) ||
M <- MissingBeams]),
?ERROR("One or more modules listed in ~p.app are not "
"present in ebin/*.beam:\n~s", [AppName, Msg1]),
?FAIL
end,
case ordsets:subtract(EbinSet, ModSet) of
[] ->
ok;
MissingMods ->
Msg2 = lists:flatten([io_lib:format("\t* ~p\n", [M]) ||
M <- MissingMods]),
?ERROR("One or more .beam files exist that are not "
"listed in ~p.app:\n~s", [AppName, Msg2]),
?FAIL
end.
ebin_modules() ->
lists:sort([rebar_utils:beam_to_mod("ebin", N) ||
N <- rebar_utils:beams("ebin")]).
ensure_registered(AppData) ->
case lists:keyfind(registered, 1, AppData) of
false ->
[{registered, []} | AppData];
{registered, _} ->
AppData
end.
|
9087cd9740a14d8c19ff772e2f4aa2b3a25336d508109ee88710599a6c6d20a3 | skogsbaer/HTF | JsonOutput.hs | {-# LANGUAGE OverloadedStrings #-}
--
Copyright ( c ) 2005 - 2022 -
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 , USA
--
|
HTF 's machine - readable output is a sequence of JSON messages . Each message is terminated
by a newline followed by two semicolons followed again by a newline .
There are four types of JSON messages . Each JSON object has a " type " attribute denoting
this type . The types are : @test - start@ , @test - end@ , and @test - list@ , @test - results@.
Their haskell representations are ' ' , ' TestEndEventObj ' , ' TestListObj ' , and
' TestResultsObj ' . The corresponding JSON rendering is defined in this module .
* The @test - start@ message denotes the start of a single test case . Example ( whitespace inserted for better readability ):
> { " test " : { " flatName " : " Main : nonEmpty " ,
> " location " : { " file " : " Tutorial.hs " , " line " : 17 } ,
> " path " : [ " Main","nonEmpty " ] ,
> " sort " : " unit - test " } ,
> " type":"test - start " }
* The @test - end@ message denotes the end of a single test case . It contains information about the outcome of the test . Example :
> { " result " : " pass " ,
> " message " : " " ,
> " test":{"flatName " : " Main : nonEmpty " ,
> " location " : { " file " : " Tutorial.hs " , " line " : 17 } ,
> " path " : [ " Main","nonEmpty " ] ,
> " sort " : " unit - test " } ,
> " wallTime " : 0 , // in milliseconds
> " type " : " test - end " ,
> " location":null }
* The @test - results@ message occurs after all tests have been run and summarizes their results . Example :
> { " failures " : 0 ,
> " passed " : 4 ,
> " pending " : 0 ,
> " wallTime " : 39 , // in milliseconds
> " errors " : 0 ,
> " type":"test - results " }
* The @test - list@ message contains all tests defined . It is used for the --list commandline options . Example :
> { " tests " : [ { " flatName":"Main : nonEmpty","location":{"file":"Tutorial.hs","line":17},"path":["Main","nonEmpty"],"sort":"unit - test " } ,
> { " flatName":"Main : empty","location":{"file":"Tutorial.hs","line":19},"path":["Main","empty"],"sort":"unit - test " } ,
> { " flatName":"Main : reverse","location":{"file":"Tutorial.hs","line":22},"path":["Main","reverse"],"sort":"quickcheck - property " } ,
> { " flatName":"Main : reverseReplay","location":{"file":"Tutorial.hs","line":24},"path":["Main","reverseReplay"],"sort":"quickcheck - property " } ] ,
> " type":"test - list " }
For an exact specification , please have a look at the code of this module .
HTF's machine-readable output is a sequence of JSON messages. Each message is terminated
by a newline followed by two semicolons followed again by a newline.
There are four types of JSON messages. Each JSON object has a "type" attribute denoting
this type. The types are: @test-start@, @test-end@, and @test-list@, @test-results@.
Their haskell representations are 'TestStartEventObj', 'TestEndEventObj', 'TestListObj', and
'TestResultsObj'. The corresponding JSON rendering is defined in this module.
* The @test-start@ message denotes the start of a single test case. Example (whitespace inserted for better readability):
> {"test": {"flatName": "Main:nonEmpty",
> "location": {"file": "Tutorial.hs", "line": 17},
> "path": ["Main","nonEmpty"],
> "sort": "unit-test"},
> "type":"test-start"}
* The @test-end@ message denotes the end of a single test case. It contains information about the outcome of the test. Example:
> {"result": "pass",
> "message":"",
> "test":{"flatName": "Main:nonEmpty",
> "location": {"file": "Tutorial.hs", "line": 17},
> "path": ["Main","nonEmpty"],
> "sort": "unit-test"},
> "wallTime": 0, // in milliseconds
> "type": "test-end",
> "location":null}
* The @test-results@ message occurs after all tests have been run and summarizes their results. Example:
> {"failures": 0,
> "passed": 4,
> "pending": 0,
> "wallTime": 39, // in milliseconds
> "errors": 0,
> "type":"test-results"}
* The @test-list@ message contains all tests defined. It is used for the --list commandline options. Example:
> {"tests": [{"flatName":"Main:nonEmpty","location":{"file":"Tutorial.hs","line":17},"path":["Main","nonEmpty"],"sort":"unit-test"},
> {"flatName":"Main:empty","location":{"file":"Tutorial.hs","line":19},"path":["Main","empty"],"sort":"unit-test"},
> {"flatName":"Main:reverse","location":{"file":"Tutorial.hs","line":22},"path":["Main","reverse"],"sort":"quickcheck-property"},
> {"flatName":"Main:reverseReplay","location":{"file":"Tutorial.hs","line":24},"path":["Main","reverseReplay"],"sort":"quickcheck-property"}],
> "type":"test-list"}
For an exact specification, please have a look at the code of this module.
-}
module Test.Framework.JsonOutput (
TestStartEventObj, TestEndEventObj, TestListObj, TestObj, TestResultsObj,
mkTestStartEventObj, mkTestEndEventObj, mkTestListObj, mkTestResultsObj,
decodeObj, HTFJsonObj
) where
import Test.Framework.TestTypes
import Test.Framework.Location
import Test.Framework.Colors
import Test.Framework.TestInterface
import qualified Data.Aeson as J
import Data.Aeson ((.=))
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString.Lazy.Char8 as BSLC
import qualified Data.Text as T
class J.ToJSON a => HTFJsonObj a
-- "test-start" message
data TestStartEventObj
= TestStartEventObj
{ ts_test :: TestObj }
instance J.ToJSON TestStartEventObj where
toJSON ts =
J.object ["type" .= J.String "test-start"
,"test" .= J.toJSON (ts_test ts)]
instance HTFJsonObj TestStartEventObj
-- "test-end" message
data TestEndEventObj
= TestEndEventObj
{ te_test :: TestObj
, te_result :: TestResult
, te_stack :: HtfStack
, te_message :: T.Text
, te_wallTimeMs :: Int
, te_timedOut :: Bool
}
instance J.ToJSON TestEndEventObj where
toJSON te =
J.object ["type" .= J.String "test-end"
,"test" .= J.toJSON (te_test te)
,"location" .= J.toJSON (failureLocationFromStack (te_stack te))
,"callers" .=
J.toJSON (map (\entry -> J.object ["location" .= J.toJSON (hse_location entry)
,"message" .= J.toJSON (hse_message entry)])
(restCallStack (te_stack te)))
,"result" .= J.toJSON (te_result te)
,"message" .= J.toJSON (te_message te)
,"wallTime" .= J.toJSON (te_wallTimeMs te)
,"timedOut" .= J.toJSON (te_timedOut te)]
instance HTFJsonObj TestEndEventObj
-- "test-list" message
data TestListObj
= TestListObj
{ tlm_tests :: [TestObj]
}
instance J.ToJSON TestListObj where
toJSON tl =
J.object ["type" .= J.String "test-list"
,"tests" .= J.toJSON (tlm_tests tl)]
instance HTFJsonObj TestListObj
-- "test-results"
data TestResultsObj
= TestResultsObj
{ tr_wallTimeMs :: Int
, tr_passed :: Int
, tr_pending :: Int
, tr_failed :: Int
, tr_errors :: Int
, tr_timedOut :: Int
, tr_filtered :: Int
}
instance J.ToJSON TestResultsObj where
toJSON r = J.object ["type" .= J.String "test-results"
,"passed" .= J.toJSON (tr_passed r)
,"pending" .= J.toJSON (tr_pending r)
,"failures" .= J.toJSON (tr_failed r)
,"errors" .= J.toJSON (tr_errors r)
,"timedOut" .= J.toJSON (tr_timedOut r)
,"filtered" .= J.toJSON (tr_filtered r)
,"wallTime" .= J.toJSON (tr_wallTimeMs r)]
instance HTFJsonObj TestResultsObj
data TestObj
= TestObj
{ to_flatName :: String
, to_path :: TestPath
, to_location :: Maybe Location
, to_sort :: TestSort
}
instance J.ToJSON TestObj where
toJSON t = J.object (["flatName" .= J.toJSON (to_flatName t)
,"path" .= J.toJSON (to_path t)
,"sort" .= J.toJSON (to_sort t)] ++
(case to_location t of
Just loc -> ["location" .= J.toJSON loc]
Nothing -> []))
instance J.ToJSON TestPath where
toJSON p = J.toJSON (testPathToList p)
instance J.ToJSON TestSort where
toJSON s =
case s of
UnitTest -> J.String "unit-test"
QuickCheckTest -> J.String "quickcheck-property"
BlackBoxTest -> J.String "blackbox-test"
instance J.ToJSON Location where
toJSON loc = J.object ["file" .= J.toJSON (fileName loc)
,"line" .= J.toJSON (lineNumber loc)]
mkTestObj :: GenFlatTest a -> String -> TestObj
mkTestObj ft flatName =
TestObj flatName (ft_path ft) (ft_location ft) (ft_sort ft)
mkTestStartEventObj :: FlatTest -> String -> TestStartEventObj
mkTestStartEventObj ft flatName =
TestStartEventObj (mkTestObj ft flatName)
mkTestEndEventObj :: FlatTestResult -> String -> TestEndEventObj
mkTestEndEventObj ftr flatName =
let r = ft_payload ftr
msg = renderColorString (rr_message r) False
in TestEndEventObj (mkTestObj ftr flatName) (rr_result r) (rr_stack r)
msg (rr_wallTimeMs r) (rr_timeout r)
mkTestListObj :: [(FlatTest, String)] -> TestListObj
mkTestListObj l =
TestListObj (map (\(ft, flatName) -> mkTestObj ft flatName) l)
mkTestResultsObj :: ReportGlobalResultsArg -> TestResultsObj
mkTestResultsObj arg =
TestResultsObj
{ tr_wallTimeMs = rgra_timeMs arg
, tr_passed = length (rgra_passed arg)
, tr_pending = length (rgra_pending arg)
, tr_failed = length (rgra_failed arg)
, tr_errors = length (rgra_errors arg)
, tr_timedOut = length (rgra_timedOut arg)
, tr_filtered = length (rgra_filtered arg)
}
decodeObj :: HTFJsonObj a => a -> BSL.ByteString
decodeObj x =
J.encode x `BSL.append` (BSLC.pack "\n;;\n")
| null | https://raw.githubusercontent.com/skogsbaer/HTF/a42450c89b7a3a3a50e381f36de3ac28faab2a16/Test/Framework/JsonOutput.hs | haskell | # LANGUAGE OverloadedStrings #
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this library; if not, write to the Free Software
list commandline options . Example :
list commandline options. Example:
"test-start" message
"test-end" message
"test-list" message
"test-results" | Copyright ( c ) 2005 - 2022 -
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
You should have received a copy of the GNU Lesser General Public
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 , USA
|
HTF 's machine - readable output is a sequence of JSON messages . Each message is terminated
by a newline followed by two semicolons followed again by a newline .
There are four types of JSON messages . Each JSON object has a " type " attribute denoting
this type . The types are : @test - start@ , @test - end@ , and @test - list@ , @test - results@.
Their haskell representations are ' ' , ' TestEndEventObj ' , ' TestListObj ' , and
' TestResultsObj ' . The corresponding JSON rendering is defined in this module .
* The @test - start@ message denotes the start of a single test case . Example ( whitespace inserted for better readability ):
> { " test " : { " flatName " : " Main : nonEmpty " ,
> " location " : { " file " : " Tutorial.hs " , " line " : 17 } ,
> " path " : [ " Main","nonEmpty " ] ,
> " sort " : " unit - test " } ,
> " type":"test - start " }
* The @test - end@ message denotes the end of a single test case . It contains information about the outcome of the test . Example :
> { " result " : " pass " ,
> " message " : " " ,
> " test":{"flatName " : " Main : nonEmpty " ,
> " location " : { " file " : " Tutorial.hs " , " line " : 17 } ,
> " path " : [ " Main","nonEmpty " ] ,
> " sort " : " unit - test " } ,
> " wallTime " : 0 , // in milliseconds
> " type " : " test - end " ,
> " location":null }
* The @test - results@ message occurs after all tests have been run and summarizes their results . Example :
> { " failures " : 0 ,
> " passed " : 4 ,
> " pending " : 0 ,
> " wallTime " : 39 , // in milliseconds
> " errors " : 0 ,
> " type":"test - results " }
> { " tests " : [ { " flatName":"Main : nonEmpty","location":{"file":"Tutorial.hs","line":17},"path":["Main","nonEmpty"],"sort":"unit - test " } ,
> { " flatName":"Main : empty","location":{"file":"Tutorial.hs","line":19},"path":["Main","empty"],"sort":"unit - test " } ,
> { " flatName":"Main : reverse","location":{"file":"Tutorial.hs","line":22},"path":["Main","reverse"],"sort":"quickcheck - property " } ,
> { " flatName":"Main : reverseReplay","location":{"file":"Tutorial.hs","line":24},"path":["Main","reverseReplay"],"sort":"quickcheck - property " } ] ,
> " type":"test - list " }
For an exact specification , please have a look at the code of this module .
HTF's machine-readable output is a sequence of JSON messages. Each message is terminated
by a newline followed by two semicolons followed again by a newline.
There are four types of JSON messages. Each JSON object has a "type" attribute denoting
this type. The types are: @test-start@, @test-end@, and @test-list@, @test-results@.
Their haskell representations are 'TestStartEventObj', 'TestEndEventObj', 'TestListObj', and
'TestResultsObj'. The corresponding JSON rendering is defined in this module.
* The @test-start@ message denotes the start of a single test case. Example (whitespace inserted for better readability):
> {"test": {"flatName": "Main:nonEmpty",
> "location": {"file": "Tutorial.hs", "line": 17},
> "path": ["Main","nonEmpty"],
> "sort": "unit-test"},
> "type":"test-start"}
* The @test-end@ message denotes the end of a single test case. It contains information about the outcome of the test. Example:
> {"result": "pass",
> "message":"",
> "test":{"flatName": "Main:nonEmpty",
> "location": {"file": "Tutorial.hs", "line": 17},
> "path": ["Main","nonEmpty"],
> "sort": "unit-test"},
> "wallTime": 0, // in milliseconds
> "type": "test-end",
> "location":null}
* The @test-results@ message occurs after all tests have been run and summarizes their results. Example:
> {"failures": 0,
> "passed": 4,
> "pending": 0,
> "wallTime": 39, // in milliseconds
> "errors": 0,
> "type":"test-results"}
> {"tests": [{"flatName":"Main:nonEmpty","location":{"file":"Tutorial.hs","line":17},"path":["Main","nonEmpty"],"sort":"unit-test"},
> {"flatName":"Main:empty","location":{"file":"Tutorial.hs","line":19},"path":["Main","empty"],"sort":"unit-test"},
> {"flatName":"Main:reverse","location":{"file":"Tutorial.hs","line":22},"path":["Main","reverse"],"sort":"quickcheck-property"},
> {"flatName":"Main:reverseReplay","location":{"file":"Tutorial.hs","line":24},"path":["Main","reverseReplay"],"sort":"quickcheck-property"}],
> "type":"test-list"}
For an exact specification, please have a look at the code of this module.
-}
module Test.Framework.JsonOutput (
TestStartEventObj, TestEndEventObj, TestListObj, TestObj, TestResultsObj,
mkTestStartEventObj, mkTestEndEventObj, mkTestListObj, mkTestResultsObj,
decodeObj, HTFJsonObj
) where
import Test.Framework.TestTypes
import Test.Framework.Location
import Test.Framework.Colors
import Test.Framework.TestInterface
import qualified Data.Aeson as J
import Data.Aeson ((.=))
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString.Lazy.Char8 as BSLC
import qualified Data.Text as T
-- | Marker class for the JSON message objects that HTF writes to its
-- machine-readable output stream.  Every message type must also be
-- serializable via aeson's 'J.ToJSON'.
class J.ToJSON a => HTFJsonObj a
-- | Payload of a @test-start@ message, emitted just before a single
-- test case begins to run.
data TestStartEventObj
    = TestStartEventObj
      { ts_test :: TestObj   -- ^ The test that is about to start.
      }

instance J.ToJSON TestStartEventObj where
    toJSON ts =
        J.object ["type" .= J.String "test-start"
                 ,"test" .= J.toJSON (ts_test ts)]

instance HTFJsonObj TestStartEventObj
-- | Payload of a @test-end@ message, emitted once a single test case
-- has finished; carries the outcome of the test.
data TestEndEventObj
    = TestEndEventObj
      { te_test :: TestObj        -- ^ The test that just finished.
      , te_result :: TestResult   -- ^ Outcome of the test run.
      , te_stack :: HtfStack      -- ^ Call stack recorded for the result.
      , te_message :: T.Text      -- ^ Plain-text result message (colors stripped).
      , te_wallTimeMs :: Int      -- ^ Wall-clock duration in milliseconds.
      , te_timedOut :: Bool       -- ^ Whether the test was aborted by a timeout.
      }

instance J.ToJSON TestEndEventObj where
    toJSON te =
        J.object ["type" .= J.String "test-end"
                 ,"test" .= J.toJSON (te_test te)
                 -- Primary failure location derived from the stack (may be null,
                 -- cf. the "location":null example in the module documentation).
                 ,"location" .= J.toJSON (failureLocationFromStack (te_stack te))
                 -- Remaining stack entries, rendered as {location, message} objects.
                 ,"callers" .=
                      J.toJSON (map (\entry -> J.object ["location" .= J.toJSON (hse_location entry)
                                                        ,"message" .= J.toJSON (hse_message entry)])
                                    (restCallStack (te_stack te)))
                 ,"result" .= J.toJSON (te_result te)
                 ,"message" .= J.toJSON (te_message te)
                 ,"wallTime" .= J.toJSON (te_wallTimeMs te)
                 ,"timedOut" .= J.toJSON (te_timedOut te)]

instance HTFJsonObj TestEndEventObj
-- | Payload of a @test-list@ message: the full list of tests known to
-- the framework for this run.
data TestListObj
    = TestListObj
      { tlm_tests :: [TestObj]   -- ^ All tests, in framework order.
      }

instance J.ToJSON TestListObj where
    toJSON tl =
        J.object ["type" .= J.String "test-list"
                 ,"tests" .= J.toJSON (tlm_tests tl)]

instance HTFJsonObj TestListObj
-- | Payload of a @test-results@ message, emitted after all tests have
-- run; summarizes the whole run with per-category counts.
data TestResultsObj
    = TestResultsObj
      { tr_wallTimeMs :: Int   -- ^ Total wall-clock time in milliseconds.
      , tr_passed :: Int       -- ^ Number of passing tests.
      , tr_pending :: Int      -- ^ Number of pending tests.
      , tr_failed :: Int       -- ^ Number of failed tests (JSON key: "failures").
      , tr_errors :: Int       -- ^ Number of tests that raised an error.
      , tr_timedOut :: Int     -- ^ Number of tests aborted by a timeout.
      , tr_filtered :: Int     -- ^ Number of tests excluded by filtering.
      }

instance J.ToJSON TestResultsObj where
    toJSON r = J.object ["type" .= J.String "test-results"
                        ,"passed" .= J.toJSON (tr_passed r)
                        ,"pending" .= J.toJSON (tr_pending r)
                        -- NB: the field is tr_failed but the wire key is "failures".
                        ,"failures" .= J.toJSON (tr_failed r)
                        ,"errors" .= J.toJSON (tr_errors r)
                        ,"timedOut" .= J.toJSON (tr_timedOut r)
                        ,"filtered" .= J.toJSON (tr_filtered r)
                        ,"wallTime" .= J.toJSON (tr_wallTimeMs r)]

instance HTFJsonObj TestResultsObj
-- | JSON representation of a single test: its flat name, hierarchical
-- path, optional source location, and kind.
data TestObj
    = TestObj
      { to_flatName :: String          -- ^ e.g. @"Main:nonEmpty"@.
      , to_path :: TestPath            -- ^ Hierarchical path, e.g. @["Main","nonEmpty"]@.
      , to_location :: Maybe Location  -- ^ Source location, if known.
      , to_sort :: TestSort            -- ^ Unit test, QuickCheck property, or black-box test.
      }

instance J.ToJSON TestObj where
    -- The "location" key is omitted entirely when no location is known.
    toJSON t = J.object (["flatName" .= J.toJSON (to_flatName t)
                         ,"path" .= J.toJSON (to_path t)
                         ,"sort" .= J.toJSON (to_sort t)] ++
                         (case to_location t of
                            Just loc -> ["location" .= J.toJSON loc]
                            Nothing -> []))

-- | A test path serializes as a plain JSON array of its components.
instance J.ToJSON TestPath where
    toJSON p = J.toJSON (testPathToList p)

-- | Test kinds map to the fixed strings used in the wire protocol.
instance J.ToJSON TestSort where
    toJSON s =
        case s of
          UnitTest -> J.String "unit-test"
          QuickCheckTest -> J.String "quickcheck-property"
          BlackBoxTest -> J.String "blackbox-test"

-- | A source location serializes as @{"file": ..., "line": ...}@.
instance J.ToJSON Location where
    toJSON loc = J.object ["file" .= J.toJSON (fileName loc)
                          ,"line" .= J.toJSON (lineNumber loc)]
-- | Build a 'TestObj' from a flat test together with its precomputed
-- flat name.
mkTestObj :: GenFlatTest a -> String -> TestObj
mkTestObj ft flatName =
    TestObj { to_flatName = flatName
            , to_path = ft_path ft
            , to_location = ft_location ft
            , to_sort = ft_sort ft
            }
-- | Wrap a flat test into the payload for a @test-start@ message.
mkTestStartEventObj :: FlatTest -> String -> TestStartEventObj
mkTestStartEventObj ft flatName =
    TestStartEventObj { ts_test = mkTestObj ft flatName }
-- | Build the payload for a @test-end@ message from a finished test's
-- result.  The (possibly colored) result message is rendered to plain
-- text before being embedded.
mkTestEndEventObj :: FlatTestResult -> String -> TestEndEventObj
mkTestEndEventObj ftr flatName =
    TestEndEventObj { te_test = mkTestObj ftr flatName
                    , te_result = rr_result payload
                    , te_stack = rr_stack payload
                    , te_message = renderColorString (rr_message payload) False
                    , te_wallTimeMs = rr_wallTimeMs payload
                    , te_timedOut = rr_timeout payload
                    }
  where
    payload = ft_payload ftr
-- | Build the payload for a @test-list@ message from the flat tests and
-- their precomputed flat names.
mkTestListObj :: [(FlatTest, String)] -> TestListObj
mkTestListObj pairs =
    TestListObj [mkTestObj ft name | (ft, name) <- pairs]
-- | Summarize a whole test run into the payload for a @test-results@
-- message.  Each category field is the count of tests in that category.
mkTestResultsObj :: ReportGlobalResultsArg -> TestResultsObj
mkTestResultsObj arg =
    TestResultsObj
    { tr_wallTimeMs = rgra_timeMs arg
    , tr_passed = countOf rgra_passed
    , tr_pending = countOf rgra_pending
    , tr_failed = countOf rgra_failed
    , tr_errors = countOf rgra_errors
    , tr_timedOut = countOf rgra_timedOut
    , tr_filtered = countOf rgra_filtered
    }
  where
    -- Number of tests in one category of the report argument.
    countOf select = length (select arg)
-- | Serialize a JSON message object and append the @"\n;;\n"@ terminator
-- that separates consecutive messages on HTF's machine-readable output
-- stream.  (NOTE(review): despite the name, this function *encodes*.)
decodeObj :: HTFJsonObj a => a -> BSL.ByteString
decodeObj x =
    BSL.concat [J.encode x, BSLC.pack "\n;;\n"]
|
dc8070f22976e287c96cc623b1bd4b91343f8ba0b3104f0764d1c3c918e52a8f | sadiqj/ocaml-esp32 | inline_and_simplify_aux.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
(*                       Pierre Chambart, OCamlPro                        *)
(*           Mark Shinwell and Leo White, Jane Street Europe              *)
(* *)
(* Copyright 2013--2016 OCamlPro SAS *)
(*   Copyright 2014--2016 Jane Street Group LLC                           *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(*   the GNU Lesser General Public License version 2.1, with the          *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
[@@@ocaml.warning "+a-4-9-30-40-41-42"]
(** Environments and result structures used during inlining and
simplification. (See inline_and_simplify.ml.) *)
module Env : sig
(** Environments follow the lexical scopes of the program. *)
type t
(** Create a new environment. If [never_inline] is true then the returned
environment will prevent [Inline_and_simplify] from inlining. The
[backend] parameter is used for passing information about the compiler
backend being used.
Newly-created environments have inactive [Freshening]s (see below) and do
not initially hold any approximation information. *)
val create
: never_inline:bool
-> backend:(module Backend_intf.S)
-> round:int
-> t
(** Obtain the first-class module that gives information about the
    compiler backend being used for compilation. *)
val backend : t -> (module Backend_intf.S)
(** Obtain the really_import_approx function from the backend module. *)
val really_import_approx
: t
-> (Simple_value_approx.t -> Simple_value_approx.t)
(** Which simplification round we are currently in. *)
val round : t -> int
(** Add the approximation of a variable---that is to say, some knowledge
about the value(s) the variable may take on at runtime---to the
environment. *)
val add : t -> Variable.t -> Simple_value_approx.t -> t
val add_outer_scope : t -> Variable.t -> Simple_value_approx.t -> t
(** Like [add], but for mutable variables. *)
val add_mutable : t -> Mutable_variable.t -> Simple_value_approx.t -> t
(** Find the approximation of a given variable, raising a fatal error if
    the environment does not know about the variable.  Use [find_opt]
    instead if you need to catch the failure case. *)
val find_exn : t -> Variable.t -> Simple_value_approx.t
(** Like [find_exn], but for mutable variables. *)
val find_mutable_exn : t -> Mutable_variable.t -> Simple_value_approx.t
type scope = Current | Outer
val find_with_scope_exn : t -> Variable.t -> scope * Simple_value_approx.t
(** Like [find_exn], but intended for use where the "not present in
environment" case is to be handled by the caller. *)
val find_opt : t -> Variable.t -> Simple_value_approx.t option
(** Like [find_exn], but for a list of variables. *)
val find_list_exn : t -> Variable.t list -> Simple_value_approx.t list
val does_not_bind : t -> Variable.t list -> bool
val does_not_freshen : t -> Variable.t list -> bool
val add_symbol : t -> Symbol.t -> Simple_value_approx.t -> t
val redefine_symbol : t -> Symbol.t -> Simple_value_approx.t -> t
val find_symbol_exn : t -> Symbol.t -> Simple_value_approx.t
val find_symbol_opt : t -> Symbol.t -> Simple_value_approx.t option
val find_symbol_fatal : t -> Symbol.t -> Simple_value_approx.t
(* Like [find_symbol_exn], but load the symbol approximation using
the backend if not available in the environment. *)
val find_or_load_symbol : t -> Symbol.t -> Simple_value_approx.t
(** Note that the given [bound_to] holds the given [projection]. *)
val add_projection
: t
-> projection:Projection.t
-> bound_to:Variable.t
-> t
(** Determine if the environment knows about a variable that is bound
to the given [projection]. *)
val find_projection
: t
-> projection:Projection.t
-> Variable.t option
(** Whether the environment has an approximation for the given variable. *)
val mem : t -> Variable.t -> bool
(** Return the freshening that should be applied to variables when
rewriting code (in [Inline_and_simplify], etc.) using the given
environment. *)
val freshening : t -> Freshening.t
(** Set the freshening that should be used as per [freshening], above. *)
val set_freshening : t -> Freshening.t -> t
(** Causes every bound variable in code rewritten during inlining and
simplification, using the given environment, to be freshened. This is
used when descending into subexpressions substituted into existing
expressions. *)
val activate_freshening : t -> t
(** Erase all variable approximation information and freshening information
from the given environment. However, the freshening activation state
is preserved. This function is used when rewriting inside a function
declaration, to avoid (due to a compiler bug) accidental use of
variables from outer scopes that are not accessible. *)
val local : t -> t
(** Note that the inliner is descending into a function body from the given
set of closures. A set of such descents is maintained. *)
(* CR-someday mshinwell: consider changing name to remove "declaration".
Also, isn't this the inlining stack? Maybe we can use that instead. *)
val enter_set_of_closures_declaration : Set_of_closures_origin.t -> t -> t
(** Determine whether the inliner is currently inside a function body from
the given set of closures. This is used to detect whether a given
function call refers to a function which exists somewhere on the current
inlining stack. *)
val inside_set_of_closures_declaration : Set_of_closures_origin.t -> t -> bool
(** Not inside a closure declaration.
    Toplevel code is the one evaluated when the compilation unit is
    loaded. *)
val at_toplevel : t -> bool
val is_inside_branch : t -> bool
val branch_depth : t -> int
val inside_branch : t -> t
val increase_closure_depth : t -> t
(** Mark that call sites contained within code rewritten using the given
    environment should never be replaced by inlined (or unrolled) versions
    of the callee(s). *)
val set_never_inline : t -> t
(** Equivalent to [set_never_inline] but only applies to code inside
a set of closures. *)
val set_never_inline_inside_closures : t -> t
(** Unset the restriction from [set_never_inline_inside_closures] *)
val unset_never_inline_inside_closures : t -> t
(** Equivalent to [set_never_inline] but does not apply to code inside
a set of closures. *)
val set_never_inline_outside_closures : t -> t
(** Unset the restriction from [set_never_inline_outside_closures] *)
val unset_never_inline_outside_closures : t -> t
(** Return whether [set_never_inline] is currently in effect on the given
environment. *)
val never_inline : t -> bool
val inlining_level : t -> int
(** Mark that this environment is used to rewrite code for inlining.  This
    is used by the inlining heuristics to decide whether to continue.
    Unconditionally inlined does not take this into account. *)
val inlining_level_up : t -> t
(** Whether we are actively unrolling a given function. *)
val actively_unrolling : t -> Set_of_closures_origin.t -> int option
(** Start actively unrolling a given function [n] times. *)
val start_actively_unrolling : t -> Set_of_closures_origin.t -> int -> t
(** Unroll a function currently actively being unrolled. *)
val continue_actively_unrolling : t -> Set_of_closures_origin.t -> t
(** Whether it is permissible to unroll a call to a recursive function
in the given environment. *)
val unrolling_allowed : t -> Set_of_closures_origin.t -> bool
(** Whether the given environment is currently being used to rewrite the
body of an unrolled recursive function. *)
val inside_unrolled_function : t -> Set_of_closures_origin.t -> t
(** Whether it is permissible to inline a call to a function in the given
environment. *)
val inlining_allowed : t -> Closure_id.t -> bool
(** Whether the given environment is currently being used to rewrite the
body of an inlined function. *)
val inside_inlined_function : t -> Closure_id.t -> t
(** If collecting inlining statistics, record that the inliner is about to
descend into [closure_id]. This information enables us to produce a
stack of closures that form a kind of context around an inlining
decision point. *)
val note_entering_closure
: t
-> closure_id:Closure_id.t
-> dbg:Debuginfo.t
-> t
(** If collecting inlining statistics, record that the inliner is about to
descend into a call to [closure_id]. This information enables us to
produce a stack of closures that form a kind of context around an
inlining decision point. *)
val note_entering_call
: t
-> closure_id:Closure_id.t
-> dbg:Debuginfo.t
-> t
(** If collecting inlining statistics, record that the inliner is about to
descend into an inlined function call. This requires that the inliner
has already entered the call with [note_entering_call]. *)
val note_entering_inlined : t -> t
(** If collecting inlining statistics, record that the inliner is about to
    descend into a specialised function definition.  This requires that the
    inliner has already entered the call with [note_entering_call]. *)
val note_entering_specialised : t -> closure_ids:Closure_id.Set.t -> t
(** Update a given environment to record that the inliner is about to
descend into [closure_id] and pass the resulting environment to [f].
If [inline_inside] is [false] then the environment passed to [f] will be
marked as [never_inline] (see above). *)
val enter_closure
: t
-> closure_id:Closure_id.t
-> inline_inside:bool
-> dbg:Debuginfo.t
-> f:(t -> 'a)
-> 'a
(** If collecting inlining statistics, record an inlining decision for the
call at the top of the closure stack stored inside the given
environment. *)
val record_decision
: t
-> Inlining_stats_types.Decision.t
-> unit
(** Print a human-readable version of the given environment. *)
val print : Format.formatter -> t -> unit
(** The environment stores the call-site being inlined to produce
precise location information. This function sets the current
call-site being inlined. *)
val set_inline_debuginfo : t -> dbg:Debuginfo.t -> t
(** Appends the locations of inlined call-sites to the [~dbg] argument *)
val add_inlined_debuginfo : t -> dbg:Debuginfo.t -> Debuginfo.t
end
module Result : sig
(** Result structures approximately follow the evaluation order of the
    program.  They are returned by the simplification algorithm acting on
    an Flambda subexpression. *)
type t
val create : unit -> t
(** The approximation of the subexpression that has just been
simplified. *)
val approx : t -> Simple_value_approx.t
(** Set the approximation of the subexpression that has just been
simplified. Typically used just before returning from a case of the
simplification algorithm. *)
val set_approx : t -> Simple_value_approx.t -> t
(** Set the approximation of the subexpression to the meet of the
current return approximation and the provided one. Typically
used just before returning from a branch case of the
simplification algorithm. *)
val meet_approx : t -> Env.t -> Simple_value_approx.t -> t
(** All static exceptions for which [use_staticfail] has been called on
the given result structure. *)
val used_static_exceptions : t -> Static_exception.Set.t
(** Mark that the given static exception has been used. *)
val use_static_exception : t -> Static_exception.t -> t
(** Mark that we are moving up out of the scope of a static-catch block
    that catches the given static exception identifier.  This has the
    effect of removing the identifier from the [used_staticfail] set. *)
val exit_scope_catch : t -> Static_exception.t -> t
(** The benefit to be gained by inlining the subexpression whose
simplification yielded the given result structure. *)
val benefit : t -> Inlining_cost.Benefit.t
(** Apply a transformation to the inlining benefit stored within the
given result structure. *)
val map_benefit
: t
-> (Inlining_cost.Benefit.t -> Inlining_cost.Benefit.t)
-> t
(** Add some benefit to the inlining benefit stored within the
given result structure. *)
val add_benefit : t -> Inlining_cost.Benefit.t -> t
(** Set the benefit of inlining the subexpression corresponding to the
    given result structure to zero. *)
val reset_benefit : t -> t
val set_inlining_threshold :
t -> Inlining_cost.Threshold.t option -> t
val add_inlining_threshold :
t -> Inlining_cost.Threshold.t -> t
val sub_inlining_threshold :
t -> Inlining_cost.Threshold.t -> t
val inlining_threshold : t -> Inlining_cost.Threshold.t option
val seen_direct_application : t -> t
val num_direct_applications : t -> int
end
(** Command line argument -inline *)
val initial_inlining_threshold : round:int -> Inlining_cost.Threshold.t
(** Command line argument -inline-toplevel *)
val initial_inlining_toplevel_threshold
: round:int -> Inlining_cost.Threshold.t
val prepare_to_simplify_set_of_closures
: env:Env.t
-> set_of_closures:Flambda.set_of_closures
-> function_decls:Flambda.function_declarations
-> freshen:bool
-> only_for_function_decl:Flambda.function_declaration option
fvs
* Flambda.specialised_to Variable.Map.t (* specialised arguments *)
* Flambda.function_declarations
* Simple_value_approx.t Variable.Map.t (* parameter approximations *)
* Simple_value_approx.value_set_of_closures
* Env.t
val prepare_to_simplify_closure
: function_decl:Flambda.function_declaration
-> free_vars:(Flambda.specialised_to * Simple_value_approx.t) Variable.Map.t
-> specialised_args:Flambda.specialised_to Variable.Map.t
-> parameter_approximations:Simple_value_approx.t Variable.Map.t
-> set_of_closures_env:Env.t
-> Env.t
| null | https://raw.githubusercontent.com/sadiqj/ocaml-esp32/33aad4ca2becb9701eb90d779c1b1183aefeb578/middle_end/inline_and_simplify_aux.mli | ocaml | ************************************************************************
OCaml
Copyright 2013--2016 OCamlPro SAS
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* Environments and result structures used during inlining and
simplification. (See inline_and_simplify.ml.)
* Environments follow the lexical scopes of the program.
* Create a new environment. If [never_inline] is true then the returned
environment will prevent [Inline_and_simplify] from inlining. The
[backend] parameter is used for passing information about the compiler
backend being used.
Newly-created environments have inactive [Freshening]s (see below) and do
not initially hold any approximation information.
* Obtain the really_import_approx function from the backend module.
* Which simplification round we are currently in.
* Add the approximation of a variable---that is to say, some knowledge
about the value(s) the variable may take on at runtime---to the
environment.
* Like [add], but for mutable variables.
* Like [find_exn], but for mutable variables.
* Like [find_exn], but intended for use where the "not present in
environment" case is to be handled by the caller.
* Like [find_exn], but for a list of variables.
Like [find_symbol_exn], but load the symbol approximation using
the backend if not available in the environment.
* Note that the given [bound_to] holds the given [projection].
* Determine if the environment knows about a variable that is bound
to the given [projection].
* Whether the environment has an approximation for the given variable.
* Return the freshening that should be applied to variables when
rewriting code (in [Inline_and_simplify], etc.) using the given
environment.
* Set the freshening that should be used as per [freshening], above.
* Causes every bound variable in code rewritten during inlining and
simplification, using the given environment, to be freshened. This is
used when descending into subexpressions substituted into existing
expressions.
* Erase all variable approximation information and freshening information
from the given environment. However, the freshening activation state
is preserved. This function is used when rewriting inside a function
declaration, to avoid (due to a compiler bug) accidental use of
variables from outer scopes that are not accessible.
* Note that the inliner is descending into a function body from the given
set of closures. A set of such descents is maintained.
CR-someday mshinwell: consider changing name to remove "declaration".
Also, isn't this the inlining stack? Maybe we can use that instead.
* Determine whether the inliner is currently inside a function body from
the given set of closures. This is used to detect whether a given
function call refers to a function which exists somewhere on the current
inlining stack.
* Equivalent to [set_never_inline] but only applies to code inside
a set of closures.
* Unset the restriction from [set_never_inline_inside_closures]
* Equivalent to [set_never_inline] but does not apply to code inside
a set of closures.
* Return whether [set_never_inline] is currently in effect on the given
environment.
* Whether we are actively unrolling a given function.
* Start actively unrolling a given function [n] times.
* Unroll a function currently actively being unrolled.
* Whether it is permissible to unroll a call to a recursive function
in the given environment.
* Whether the given environment is currently being used to rewrite the
body of an unrolled recursive function.
* Whether it is permissible to inline a call to a function in the given
environment.
* Whether the given environment is currently being used to rewrite the
body of an inlined function.
* If collecting inlining statistics, record that the inliner is about to
descend into [closure_id]. This information enables us to produce a
stack of closures that form a kind of context around an inlining
decision point.
* If collecting inlining statistics, record that the inliner is about to
descend into a call to [closure_id]. This information enables us to
produce a stack of closures that form a kind of context around an
inlining decision point.
* If collecting inlining statistics, record that the inliner is about to
descend into an inlined function call. This requires that the inliner
has already entered the call with [note_entering_call].
* Update a given environment to record that the inliner is about to
descend into [closure_id] and pass the resulting environment to [f].
If [inline_inside] is [false] then the environment passed to [f] will be
marked as [never_inline] (see above).
* If collecting inlining statistics, record an inlining decision for the
call at the top of the closure stack stored inside the given
environment.
* Print a human-readable version of the given environment.
* The environment stores the call-site being inlined to produce
precise location information. This function sets the current
call-site being inlined.
* Appends the locations of inlined call-sites to the [~dbg] argument
* The approximation of the subexpression that has just been
simplified.
* Set the approximation of the subexpression that has just been
simplified. Typically used just before returning from a case of the
simplification algorithm.
* Set the approximation of the subexpression to the meet of the
current return approximation and the provided one. Typically
used just before returning from a branch case of the
simplification algorithm.
* All static exceptions for which [use_staticfail] has been called on
the given result structure.
* The benefit to be gained by inlining the subexpression whose
simplification yielded the given result structure.
* Apply a transformation to the inlining benefit stored within the
given result structure.
* Add some benefit to the inlining benefit stored within the
given result structure.
* Command line argument -inline
* Command line argument -inline-toplevel
specialised arguments
parameter approximations | , OCamlPro
and ,
Copyright 2014 - -2016 Jane Street Group LLC
the GNU Lesser General Public License version 2.1 , with the
[@@@ocaml.warning "+a-4-9-30-40-41-42"]
module Env : sig
type t
val create
: never_inline:bool
-> backend:(module Backend_intf.S)
-> round:int
-> t
* Obtain the first - class module that gives information about the
compiler backend being used for compilation .
compiler backend being used for compilation. *)
val backend : t -> (module Backend_intf.S)
val really_import_approx
: t
-> (Simple_value_approx.t -> Simple_value_approx.t)
val round : t -> int
val add : t -> Variable.t -> Simple_value_approx.t -> t
val add_outer_scope : t -> Variable.t -> Simple_value_approx.t -> t
val add_mutable : t -> Mutable_variable.t -> Simple_value_approx.t -> t
* Find the approximation of a given variable , raising a fatal error if
the environment does not know about the variable . Use [ find_opt ]
instead if you need to catch the failure case .
the environment does not know about the variable. Use [find_opt]
instead if you need to catch the failure case. *)
val find_exn : t -> Variable.t -> Simple_value_approx.t
val find_mutable_exn : t -> Mutable_variable.t -> Simple_value_approx.t
type scope = Current | Outer
val find_with_scope_exn : t -> Variable.t -> scope * Simple_value_approx.t
val find_opt : t -> Variable.t -> Simple_value_approx.t option
val find_list_exn : t -> Variable.t list -> Simple_value_approx.t list
val does_not_bind : t -> Variable.t list -> bool
val does_not_freshen : t -> Variable.t list -> bool
val add_symbol : t -> Symbol.t -> Simple_value_approx.t -> t
val redefine_symbol : t -> Symbol.t -> Simple_value_approx.t -> t
val find_symbol_exn : t -> Symbol.t -> Simple_value_approx.t
val find_symbol_opt : t -> Symbol.t -> Simple_value_approx.t option
val find_symbol_fatal : t -> Symbol.t -> Simple_value_approx.t
val find_or_load_symbol : t -> Symbol.t -> Simple_value_approx.t
val add_projection
: t
-> projection:Projection.t
-> bound_to:Variable.t
-> t
val find_projection
: t
-> projection:Projection.t
-> Variable.t option
val mem : t -> Variable.t -> bool
val freshening : t -> Freshening.t
val set_freshening : t -> Freshening.t -> t
val activate_freshening : t -> t
val local : t -> t
val enter_set_of_closures_declaration : Set_of_closures_origin.t -> t -> t
val inside_set_of_closures_declaration : Set_of_closures_origin.t -> t -> bool
* Not inside a closure declaration .
Toplevel code is the one evaluated when the compilation unit is
loaded
Toplevel code is the one evaluated when the compilation unit is
loaded *)
val at_toplevel : t -> bool
val is_inside_branch : t -> bool
val branch_depth : t -> int
val inside_branch : t -> t
val increase_closure_depth : t -> t
* that call sites contained within code rewritten using the given
environment should never be replaced by inlined ( or unrolled ) versions
of the callee(s ) .
environment should never be replaced by inlined (or unrolled) versions
of the callee(s). *)
val set_never_inline : t -> t
val set_never_inline_inside_closures : t -> t
val unset_never_inline_inside_closures : t -> t
val set_never_inline_outside_closures : t -> t
* Unset the restriction from [ set_never_inline_outside_closures ]
val unset_never_inline_outside_closures : t -> t
val never_inline : t -> bool
val inlining_level : t -> int
* that this environment is used to rewrite code for inlining . This is
used by the inlining heuristics to decide whether to continue .
Unconditionally inlined does not take this into account .
used by the inlining heuristics to decide whether to continue.
Unconditionally inlined does not take this into account. *)
val inlining_level_up : t -> t
val actively_unrolling : t -> Set_of_closures_origin.t -> int option
val start_actively_unrolling : t -> Set_of_closures_origin.t -> int -> t
val continue_actively_unrolling : t -> Set_of_closures_origin.t -> t
val unrolling_allowed : t -> Set_of_closures_origin.t -> bool
val inside_unrolled_function : t -> Set_of_closures_origin.t -> t
val inlining_allowed : t -> Closure_id.t -> bool
val inside_inlined_function : t -> Closure_id.t -> t
val note_entering_closure
: t
-> closure_id:Closure_id.t
-> dbg:Debuginfo.t
-> t
val note_entering_call
: t
-> closure_id:Closure_id.t
-> dbg:Debuginfo.t
-> t
val note_entering_inlined : t -> t
* If collecting inlining statistics , record that the inliner is about to
descend into a specialised function definition . This requires that the
inliner has already entered the call with [ note_entering_call ] .
descend into a specialised function definition. This requires that the
inliner has already entered the call with [note_entering_call]. *)
val note_entering_specialised : t -> closure_ids:Closure_id.Set.t -> t
val enter_closure
: t
-> closure_id:Closure_id.t
-> inline_inside:bool
-> dbg:Debuginfo.t
-> f:(t -> 'a)
-> 'a
val record_decision
: t
-> Inlining_stats_types.Decision.t
-> unit
val print : Format.formatter -> t -> unit
val set_inline_debuginfo : t -> dbg:Debuginfo.t -> t
val add_inlined_debuginfo : t -> dbg:Debuginfo.t -> Debuginfo.t
end
module Result : sig
* Result structures approximately follow the evaluation order of the
program . They are returned by the simplification algorithm acting on
an Flambda subexpression .
program. They are returned by the simplification algorithm acting on
an Flambda subexpression. *)
type t
val create : unit -> t
val approx : t -> Simple_value_approx.t
val set_approx : t -> Simple_value_approx.t -> t
val meet_approx : t -> Env.t -> Simple_value_approx.t -> t
val used_static_exceptions : t -> Static_exception.Set.t
* that the given static exception has been used .
val use_static_exception : t -> Static_exception.t -> t
* that we are moving up out of the scope of a static - catch block
that catches the given static exception identifier . This has the effect
of removing the identifier from the [ used_staticfail ] set .
that catches the given static exception identifier. This has the effect
of removing the identifier from the [used_staticfail] set. *)
val exit_scope_catch : t -> Static_exception.t -> t
val benefit : t -> Inlining_cost.Benefit.t
val map_benefit
: t
-> (Inlining_cost.Benefit.t -> Inlining_cost.Benefit.t)
-> t
val add_benefit : t -> Inlining_cost.Benefit.t -> t
* Set the benefit of inlining the subexpression corresponding to the
given result structure to zero .
given result structure to zero. *)
val reset_benefit : t -> t
val set_inlining_threshold :
t -> Inlining_cost.Threshold.t option -> t
val add_inlining_threshold :
t -> Inlining_cost.Threshold.t -> t
val sub_inlining_threshold :
t -> Inlining_cost.Threshold.t -> t
val inlining_threshold : t -> Inlining_cost.Threshold.t option
val seen_direct_application : t -> t
val num_direct_applications : t -> int
end
val initial_inlining_threshold : round:int -> Inlining_cost.Threshold.t
val initial_inlining_toplevel_threshold
: round:int -> Inlining_cost.Threshold.t
val prepare_to_simplify_set_of_closures
: env:Env.t
-> set_of_closures:Flambda.set_of_closures
-> function_decls:Flambda.function_declarations
-> freshen:bool
-> only_for_function_decl:Flambda.function_declaration option
fvs
* Flambda.function_declarations
* Simple_value_approx.value_set_of_closures
* Env.t
val prepare_to_simplify_closure
: function_decl:Flambda.function_declaration
-> free_vars:(Flambda.specialised_to * Simple_value_approx.t) Variable.Map.t
-> specialised_args:Flambda.specialised_to Variable.Map.t
-> parameter_approximations:Simple_value_approx.t Variable.Map.t
-> set_of_closures_env:Env.t
-> Env.t
|
ccbb2c1348ce9e8f44ca1321c78bf3fb952149334c82bf92ffaca3a0be57e053 | CardanoSolutions/kupo | MonadAsync.hs | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
module Kupo.Control.MonadAsync
( MonadAsync (..)
, concurrently4
, forConcurrently_
, mapConcurrently_
) where
import Control.Monad.Class.MonadAsync
( MonadAsync (..)
, forConcurrently_
, mapConcurrently_
)
-- | Run four computations concurrently, wait for all of them to finish,
-- and discard every result.
concurrently4 :: MonadAsync m => m a -> m b -> m c -> m d -> m ()
concurrently4 a b c d =
    a `concurrently_` (b `concurrently_` (c `concurrently_` d))
| null | https://raw.githubusercontent.com/CardanoSolutions/kupo/fa37a0e569cc5d8faee925bf19f0349dac7b3990/src/Kupo/Control/MonadAsync.hs | haskell | This Source Code Form is subject to the terms of the Mozilla Public
-- License, v. 2.0. If a copy of the MPL was not distributed with this
-- file, You can obtain one at http://mozilla.org/MPL/2.0/.
module Kupo.Control.MonadAsync
( MonadAsync (..)
, concurrently4
, forConcurrently_
, mapConcurrently_
) where
import Control.Monad.Class.MonadAsync
( MonadAsync (..)
, forConcurrently_
, mapConcurrently_
)
-- | Run the four actions concurrently, waiting for all of them and
-- discarding their results; built from three nested 'concurrently_'.
concurrently4 :: MonadAsync m => m a -> m b -> m c -> m d -> m ()
concurrently4 a b c d =
    concurrently_ a
        ( concurrently_ b
            ( concurrently_ c
                d
            )
        )
| |
4412b737e3862d00b4572a3c837fd5c6a383cd704e204870b263a6c21f57a946 | nomeata/haskell-via-sokoban | 03ex-without-winning.hs | {-# LANGUAGE OverloadedStrings #-}
import CodeWorld
-- Lists
data List a = Empty | Entry a (List a)
-- | Apply a function to every element of our hand-rolled 'List'
-- (the analogue of 'fmap' for the built-in list type).
mapList :: (a -> b) -> List a -> List b
mapList f = go
  where
    go Empty          = Empty
    go (Entry x rest) = Entry (f x) (go rest)
-- | Overlay all pictures in the list, front to back, ending on an
-- empty ('blank') canvas.
combine :: List Picture -> Picture
combine Empty = blank
combine (Entry p ps) = p & combine ps
-- | 'True' iff every element of the list is 'True' (vacuously 'True'
-- for the empty list); short-circuits on the first 'False'.
allList :: List Bool -> Bool
allList Empty          = True
allList (Entry x rest) = x && allList rest
-- Coordinates
data Coord = C Integer Integer
data Direction = R | U | L | D
-- | Component-wise equality of two coordinates.
eqCoord :: Coord -> Coord -> Bool
eqCoord (C ax ay) (C bx by) = ax == bx && ay == by
-- | The coordinate one step away in the given direction
-- (screen convention: U increases y, R increases x).
adjacentCoord :: Direction -> Coord -> Coord
adjacentCoord d (C x y) = case d of
  R -> C (x+1) y
  U -> C x (y+1)
  L -> C (x-1) y
  D -> C x (y-1)
-- | Rewrite a single coordinate: anything equal to @from@ becomes @to@,
-- everything else passes through unchanged (used to push one box).
moveFromTo :: Coord -> Coord -> Coord -> Coord
moveFromTo from to c
  | from `eqCoord` c = to
  | otherwise        = c
-- The maze
data Tile = Wall | Ground | Storage | Box | Blank
-- | The static level layout.  Guards are tried top to bottom, so the
-- outside/outer-wall cases take precedence over the interior features.
maze :: Coord -> Tile
maze (C x y)
  | abs x > 4 || abs y > 4 = Blank      -- outside the playing field
  | abs x == 4 || abs y == 4 = Wall      -- surrounding wall
  | x == 2 && y <= 0 = Wall              -- inner wall segment
  | x == 3 && y <= 0 = Storage           -- storage column behind it
  | x >= -2 && y == 0 = Box              -- initial row of boxes
  | otherwise = Ground
-- | The maze with all boxes erased: box squares read as 'Ground',
-- every other tile is reported as-is.  Boxes are tracked separately
-- in the game 'State'.
noBoxMaze :: Coord -> Tile
noBoxMaze c =
  case maze c of
    Box   -> Ground
    other -> other
-- | Look up a tile in the maze, treating every coordinate in the given
-- list as occupied by a 'Box'; falls back to 'noBoxMaze' otherwise.
mazeWithBoxes :: List Coord -> Coord -> Tile
mazeWithBoxes boxes pos = go boxes
  where
    go Empty = noBoxMaze pos
    go (Entry b rest)
      | b `eqCoord` pos = Box
      | otherwise       = go rest
-- The state
data State = State Coord Direction (List Coord)
-- | Every coordinate that holds a 'Box' in the static maze, collected
-- by scanning the whole 21x21 grid once.
initialBoxes :: List Coord
initialBoxes = go (-10) (-10)
  where
    -- Column-major scan: y runs -10..10, then x advances.
    -- The (11,11) clause must come first to terminate the scan.
    go 11 11 = Empty
    go x 11 = go (x+1) (-10)
    go x y = case maze (C x y) of
      Box -> Entry (C x y) (go x (y+1))
      _ -> go x (y+1)
initialState :: State
initialState = State (C 0 1) R initialBoxes
-- Event handling
-- | Attempt to move the player one step in the given direction.
-- Stepping onto ground/storage moves the player; stepping into a box
-- pushes it iff the square behind the box is free.  Any blocked move
-- leaves positions unchanged (but still updates the facing direction).
tryGoTo :: State -> Direction -> State
tryGoTo (State from _ bx) d
  = case currentMaze to of
      Box -> case currentMaze beyond of
        Ground -> movedState
        Storage -> movedState
        _ -> didn'tMove
      Ground -> movedState
      Storage -> movedState
      _ -> didn'tMove
  where to = adjacentCoord d from          -- square the player steps onto
        beyond = adjacentCoord d to        -- square a pushed box would land on
        currentMaze = mazeWithBoxes bx     -- maze incl. current box positions
        movedState = State to d movedBx
        movedBx = mapList (moveFromTo to beyond) bx  -- push the box, if any
        didn'tMove = State from d bx -- Yes, ' may be part of an identifier
-- | Translate arrow-key presses into movement attempts; all other
-- events (and all other keys) leave the state untouched.
handleEvent :: Event -> State -> State
handleEvent (KeyPress key) s = case key of
  "Right" -> tryGoTo s R
  "Up"    -> tryGoTo s U
  "Left"  -> tryGoTo s L
  "Down"  -> tryGoTo s D
  _       -> s
handleEvent _ s = s
-- Drawing
wall, ground, storage, box :: Picture
wall = colored grey (solidRectangle 1 1)
ground = colored yellow (solidRectangle 1 1)
storage = colored white (solidCircle 0.3) & ground
box = colored brown (solidRectangle 1 1)
-- | Map each tile kind to its picture; 'Blank' draws nothing.
drawTile :: Tile -> Picture
drawTile t = case t of
  Wall    -> wall
  Ground  -> ground
  Storage -> storage
  Box     -> box
  Blank   -> blank
pictureOfMaze :: Picture
pictureOfMaze = draw21times (\r -> draw21times (\c -> drawTileAt (C r c)))
-- | Overlay the given picture-producing function at every integer
-- from -10 to 10 (21 positions), ending on a 'blank' canvas.
draw21times :: (Integer -> Picture) -> Picture
draw21times something = foldr (\n acc -> something n & acc) blank [-10 .. 10]
drawTileAt :: Coord -> Picture
drawTileAt c = atCoord c (drawTile (noBoxMaze c))
atCoord :: Coord -> Picture -> Picture
atCoord (C x y) pic = translated (fromIntegral x) (fromIntegral y) pic
-- | The player figure, drawn facing the given direction.  The figure is
-- assembled from a head ("cranium") plus polylines for arms, torso and
-- legs; all coordinates are in tile-local units around the origin.
player :: Direction -> Picture
player R = translated 0 0.3 cranium
         & polyline [(0,0),(0.3,0.05)]
         & polyline [(0,0),(0.3,-0.05)]
         & polyline [(0,-0.2),(0,0.1)]
         & polyline [(0,-0.2),(0.1,-0.5)]
         & polyline [(0,-0.2),(-0.1,-0.5)]
  -- Head seen from the side: a circle with a pie-slice "mouth" opening.
  where cranium = circle 0.18
                & sector (7/6*pi) (1/6*pi) 0.18
player L = scaled (-1) 1 (player R) -- Cunning!
player U = translated 0 0.3 cranium
         & polyline [(0,0),(0.3,0.05)]
         & polyline [(0,0),(-0.3,0.05)]
         & polyline [(0,-0.2),(0,0.1)]
         & polyline [(0,-0.2),(0.1,-0.5)]
         & polyline [(0,-0.2),(-0.1,-0.5)]
  -- Seen from behind: a featureless filled head.
  where cranium = solidCircle 0.18
player D = translated 0 0.3 cranium
         & polyline [(0,0),(0.3,-0.05)]
         & polyline [(0,0),(-0.3,-0.05)]
         & polyline [(0,-0.2),(0,0.1)]
         & polyline [(0,-0.2),(0.1,-0.5)]
         & polyline [(0,-0.2),(-0.1,-0.5)]
  -- Facing the viewer: outlined head with two eyes.
  where cranium = circle 0.18
              & translated 0.06 0.08 (solidCircle 0.04)
              & translated (-0.06) 0.08 (solidCircle 0.04)
-- | Draw a box tile at every coordinate in the list and overlay them.
pictureOfBoxes :: List Coord -> Picture
pictureOfBoxes cs = combine (mapList boxAt cs)
  where
    boxAt c = atCoord c (drawTile Box)
drawState :: State -> Picture
drawState (State c d boxes)
= atCoord c (player d)
& pictureOfBoxes boxes
& pictureOfMaze
-- The complete activity
sokoban :: Activity State
sokoban = Activity initialState handleEvent drawState
-- The general activity type
data Activity world = Activity
world
(Event -> world -> world)
(world -> Picture)
runActivity :: Activity s -> IO ()
runActivity (Activity state0 handle draw)
= activityOf state0 handle draw
-- Resetable activities
-- | Wrap an activity so that pressing Esc discards the current state
-- and restarts from the activity's initial state; all other events are
-- forwarded unchanged.
resetable :: Activity s -> Activity s
resetable (Activity state0 handle draw) = Activity state0 handle' draw
  where
    handle' (KeyPress "Esc") _ = state0
    handle' e s                = handle e s
-- Start screen
startScreen :: Picture
startScreen = scaled 3 3 (lettering "Sokoban!")
data SSState world = StartScreen | Running world
-- | Wrap an activity with a start screen.  The wrapped state is
-- 'StartScreen' until the space key is pressed, after which all events
-- and drawing are delegated to the inner activity.
withStartScreen :: Activity s -> Activity (SSState s)
withStartScreen (Activity state0 handle draw)
  = Activity state0' handle' draw'
  where
    state0' = StartScreen
    -- Space starts the game; every other event is ignored until then.
    handle' (KeyPress key) StartScreen | key == " " = Running state0
    handle' _ StartScreen = StartScreen
    handle' e (Running s) = Running (handle e s)
    draw' StartScreen = startScreen
    draw' (Running s) = draw s
-- The main function
main :: IO ()
main = runActivity (resetable (withStartScreen sokoban))
| null | https://raw.githubusercontent.com/nomeata/haskell-via-sokoban/0ae9d6e120c2851eca158c01e08e2c4c7f2b06d0/code/03ex-without-winning.hs | haskell | # LANGUAGE OverloadedStrings #
Lists
Coordinates
The maze
The state
Event handling
Yes, ' may be part of an identifier
Drawing
Cunning!
The complete activity
The general activity type
Resetable activities
Start screen
The main function | import CodeWorld
data List a = Empty | Entry a (List a)
mapList :: (a -> b) -> List a -> List b
mapList _ Empty = Empty
mapList f (Entry c cs) = Entry (f c) (mapList f cs)
combine :: List Picture -> Picture
combine Empty = blank
combine (Entry p ps) = p & combine ps
allList :: List Bool -> Bool
allList Empty = True
allList (Entry b bs) = b && allList bs
data Coord = C Integer Integer
data Direction = R | U | L | D
eqCoord :: Coord -> Coord -> Bool
eqCoord (C x1 y1) (C x2 y2) = x1 == x2 && y1 == y2
adjacentCoord :: Direction -> Coord -> Coord
adjacentCoord R (C x y) = C (x+1) y
adjacentCoord U (C x y) = C x (y+1)
adjacentCoord L (C x y) = C (x-1) y
adjacentCoord D (C x y) = C x (y-1)
moveFromTo :: Coord -> Coord -> Coord -> Coord
moveFromTo c1 c2 c | c1 `eqCoord` c = c2
| otherwise = c
data Tile = Wall | Ground | Storage | Box | Blank
maze :: Coord -> Tile
maze (C x y)
| abs x > 4 || abs y > 4 = Blank
| abs x == 4 || abs y == 4 = Wall
| x == 2 && y <= 0 = Wall
| x == 3 && y <= 0 = Storage
| x >= -2 && y == 0 = Box
| otherwise = Ground
noBoxMaze :: Coord -> Tile
noBoxMaze c = case maze c of
Box -> Ground
t -> t
mazeWithBoxes :: List Coord -> Coord -> Tile
mazeWithBoxes Empty c' = noBoxMaze c'
mazeWithBoxes (Entry c cs) c'
| eqCoord c c' = Box
| otherwise = mazeWithBoxes cs c'
data State = State Coord Direction (List Coord)
initialBoxes :: List Coord
initialBoxes = go (-10) (-10)
where
go 11 11 = Empty
go x 11 = go (x+1) (-10)
go x y = case maze (C x y) of
Box -> Entry (C x y) (go x (y+1))
_ -> go x (y+1)
initialState :: State
initialState = State (C 0 1) R initialBoxes
tryGoTo :: State -> Direction -> State
tryGoTo (State from _ bx) d
= case currentMaze to of
Box -> case currentMaze beyond of
Ground -> movedState
Storage -> movedState
_ -> didn'tMove
Ground -> movedState
Storage -> movedState
_ -> didn'tMove
where to = adjacentCoord d from
beyond = adjacentCoord d to
currentMaze = mazeWithBoxes bx
movedState = State to d movedBx
movedBx = mapList (moveFromTo to beyond) bx
handleEvent :: Event -> State -> State
handleEvent (KeyPress key) s
| key == "Right" = tryGoTo s R
| key == "Up" = tryGoTo s U
| key == "Left" = tryGoTo s L
| key == "Down" = tryGoTo s D
handleEvent _ s = s
wall, ground, storage, box :: Picture
wall = colored grey (solidRectangle 1 1)
ground = colored yellow (solidRectangle 1 1)
storage = colored white (solidCircle 0.3) & ground
box = colored brown (solidRectangle 1 1)
drawTile :: Tile -> Picture
drawTile Wall = wall
drawTile Ground = ground
drawTile Storage = storage
drawTile Box = box
drawTile Blank = blank
pictureOfMaze :: Picture
pictureOfMaze = draw21times (\r -> draw21times (\c -> drawTileAt (C r c)))
draw21times :: (Integer -> Picture) -> Picture
draw21times something = go (-10)
where
go :: Integer -> Picture
go 11 = blank
go n = something n & go (n+1)
drawTileAt :: Coord -> Picture
drawTileAt c = atCoord c (drawTile (noBoxMaze c))
atCoord :: Coord -> Picture -> Picture
atCoord (C x y) pic = translated (fromIntegral x) (fromIntegral y) pic
player :: Direction -> Picture
player R = translated 0 0.3 cranium
& polyline [(0,0),(0.3,0.05)]
& polyline [(0,0),(0.3,-0.05)]
& polyline [(0,-0.2),(0,0.1)]
& polyline [(0,-0.2),(0.1,-0.5)]
& polyline [(0,-0.2),(-0.1,-0.5)]
where cranium = circle 0.18
& sector (7/6*pi) (1/6*pi) 0.18
player U = translated 0 0.3 cranium
& polyline [(0,0),(0.3,0.05)]
& polyline [(0,0),(-0.3,0.05)]
& polyline [(0,-0.2),(0,0.1)]
& polyline [(0,-0.2),(0.1,-0.5)]
& polyline [(0,-0.2),(-0.1,-0.5)]
where cranium = solidCircle 0.18
player D = translated 0 0.3 cranium
& polyline [(0,0),(0.3,-0.05)]
& polyline [(0,0),(-0.3,-0.05)]
& polyline [(0,-0.2),(0,0.1)]
& polyline [(0,-0.2),(0.1,-0.5)]
& polyline [(0,-0.2),(-0.1,-0.5)]
where cranium = circle 0.18
& translated 0.06 0.08 (solidCircle 0.04)
& translated (-0.06) 0.08 (solidCircle 0.04)
pictureOfBoxes :: List Coord -> Picture
pictureOfBoxes cs = combine (mapList (\c -> atCoord c (drawTile Box)) cs)
drawState :: State -> Picture
drawState (State c d boxes)
= atCoord c (player d)
& pictureOfBoxes boxes
& pictureOfMaze
sokoban :: Activity State
sokoban = Activity initialState handleEvent drawState
data Activity world = Activity
world
(Event -> world -> world)
(world -> Picture)
runActivity :: Activity s -> IO ()
runActivity (Activity state0 handle draw)
= activityOf state0 handle draw
resetable :: Activity s -> Activity s
resetable (Activity state0 handle draw)
= Activity state0 handle' draw
where handle' (KeyPress key) _ | key == "Esc" = state0
handle' e s = handle e s
startScreen :: Picture
startScreen = scaled 3 3 (lettering "Sokoban!")
data SSState world = StartScreen | Running world
withStartScreen :: Activity s -> Activity (SSState s)
withStartScreen (Activity state0 handle draw)
= Activity state0' handle' draw'
where
state0' = StartScreen
handle' (KeyPress key) StartScreen | key == " " = Running state0
handle' _ StartScreen = StartScreen
handle' e (Running s) = Running (handle e s)
draw' StartScreen = startScreen
draw' (Running s) = draw s
main :: IO ()
main = runActivity (resetable (withStartScreen sokoban))
|
cf1cdf67f66157b10e4c784ccf3bbcb40a2b51ac280c25a816560def9d5cb03f | huangz1990/SICP-answers | test-64-list-tree.scm | (load "test-manager/load.scm")
;; Load the solution under test: SICP exercise 2.64 (list->tree).
(load "64-list-tree.scm")
;; list->tree applied to the ordered list (1 3 5 7 9 11) should yield
;; the balanced binary search tree described in the exercise.
(define-each-check
 (equal? (list->tree '(1 3 5 7 9 11))
 '(5 (1 () (3 () ())) (9 (7 () ()) (11 () ()))))
 )
;; Run every check registered above and report the results.
(run-registered-tests)
| null | https://raw.githubusercontent.com/huangz1990/SICP-answers/15e3475003ef10eb738cf93c1932277bc56bacbe/chp2/code/test-64-list-tree.scm | scheme | (load "test-manager/load.scm")
;; Load the solution under test: SICP exercise 2.64 (list->tree).
(load "64-list-tree.scm")
;; list->tree applied to the ordered list (1 3 5 7 9 11) should yield
;; the balanced binary search tree described in the exercise.
(define-each-check
 (equal? (list->tree '(1 3 5 7 9 11))
 '(5 (1 () (3 () ())) (9 (7 () ()) (11 () ()))))
 )
;; Run every check registered above and report the results.
(run-registered-tests)
| |
332c5d33eaa17623556a115a0727297e8fbfa6b623208d2c543b511d7edee628 | anoma/juvix | Base.hs | {-# LANGUAGE BangPatterns #-}
{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-}
{-# HLINT ignore "Avoid restricted extensions" #-}
{-# HLINT ignore "Avoid restricted flags" #-}
module Juvix.Prelude.Base
( module Juvix.Prelude.Base,
module Control.Applicative,
module Data.Map.Strict,
module Data.Set,
module Data.IntMap.Strict,
module Data.IntSet,
module Control.Monad.Extra,
module Control.Monad.Fix,
module Data.Bitraversable,
module Data.Bool,
module Data.Char,
module Data.Either.Extra,
module Data.Bifunctor,
module Data.Eq,
module Data.Foldable,
module Data.Function,
module Data.Functor,
module Safe.Exact,
module Safe.Foldable,
module Data.Hashable,
module Data.Int,
module Data.List.Extra,
module Data.List.NonEmpty.Extra,
module Data.Maybe,
module Data.Monoid,
module Data.Ord,
module Data.Semigroup,
module Prelude,
module Data.Singletons,
module Data.Singletons.Sigma,
module Data.Singletons.TH,
module Data.Stream,
module Data.String,
module Data.Text.Encoding,
module Data.Text.IO,
module Data.Traversable,
module Data.Tuple.Extra,
module Data.Typeable,
module Data.Void,
module Data.Word,
module GHC.Enum,
module GHC.Generics,
module GHC.Num,
module GHC.Real,
module Lens.Micro.Platform,
module Polysemy,
module Polysemy.Embed,
module Polysemy.Error,
module Polysemy.Fixpoint,
module Polysemy.Output,
module Polysemy.Reader,
module Polysemy.State,
module Language.Haskell.TH.Syntax,
module Prettyprinter,
module System.Exit,
module System.FilePath,
module System.IO,
module Text.Show,
module Control.Monad.Catch,
Data,
Text,
pack,
unpack,
strip,
HashMap,
ByteString,
HashSet,
IsString (..),
Alternative (..),
MonadIO (..),
)
where
import Control.Applicative
import Control.Monad.Catch (MonadMask, MonadThrow, throwM)
import Control.Monad.Extra hiding (fail)
import Control.Monad.Fix
import Control.Monad.IO.Class (MonadIO (..))
import Data.Bifunctor hiding (first, second)
import Data.Bitraversable
import Data.Bool
import Data.ByteString (ByteString)
import Data.Char
import Data.Char qualified as Char
import Data.Data
import Data.Either.Extra
import Data.Eq
import Data.Foldable hiding (minimum, minimumBy)
import Data.Function
import Data.Functor
import Data.HashMap.Strict (HashMap)
import Data.HashMap.Strict qualified as HashMap
import Data.HashSet (HashSet)
import Data.HashSet qualified as HashSet
import Data.Hashable
import Data.Int
import Data.IntMap.Strict (IntMap)
import Data.IntSet (IntSet)
import Data.List.Extra hiding (allSame, groupSortOn, head, last, mconcatMap)
import Data.List.Extra qualified as List
import Data.List.NonEmpty qualified as NonEmpty
import Data.List.NonEmpty.Extra
( NonEmpty (..),
head,
last,
maximum1,
maximumOn1,
minimum1,
minimumOn1,
nonEmpty,
some1,
(|:),
)
import Data.Map.Strict (Map)
import Data.Maybe
import Data.Monoid
import Data.Ord
import Data.Semigroup (Semigroup, (<>))
import Data.Set (Set)
import Data.Singletons hiding ((@@))
import Data.Singletons.Sigma
import Data.Singletons.TH (genSingletons, promoteOrdInstances, singOrdInstances)
import Data.Stream (Stream)
import Data.String
import Data.Text (Text, pack, strip, unpack)
import Data.Text.Encoding
import Data.Text.IO
import Data.Traversable
import Data.Tuple.Extra
import Data.Typeable hiding (TyCon)
import Data.Void
import Data.Word
import GHC.Enum
import GHC.Err qualified as Err
import GHC.Generics (Generic)
import GHC.Num
import GHC.Real
import GHC.Stack.Types
import Language.Haskell.TH.Syntax (Lift)
import Lens.Micro.Platform hiding (both)
import Path
import Path.IO qualified as Path
import Polysemy
import Polysemy.Embed
import Polysemy.Error hiding (fromEither)
import Polysemy.Fixpoint
import Polysemy.Output
import Polysemy.Reader
import Polysemy.State
import Prettyprinter (Doc, (<+>))
import Safe.Exact
import Safe.Foldable
import System.Exit
import System.FilePath (FilePath, dropTrailingPathSeparator, normalise, (<.>), (</>))
import System.IO hiding
( appendFile,
getContents,
getLine,
hGetContents,
hGetLine,
hPutStr,
hPutStrLn,
interact,
openBinaryTempFile,
openTempFile,
putStr,
putStrLn,
readFile,
readFile',
writeFile,
)
import System.IO.Error
import Text.Show (Show)
import Text.Show qualified as Show
import Prelude (Double)
--------------------------------------------------------------------------------
-- | Traverse with an effectful function whose result is itself wrapped
-- in the traversed monad, then collapse the doubled layer with 'join'.
traverseM ::
  (Monad m, Traversable m, Applicative f) =>
  (a1 -> f (m a2)) ->
  m a1 ->
  f (m a2)
traverseM f x = join <$> traverse f x
--------------------------------------------------------------------------------
-- String related util functions.
--------------------------------------------------------------------------------
show :: (Show a, IsString str) => a -> str
show = fromString . Show.show
-- | Upper-case the first character of a 'String'; the empty string is
-- returned unchanged.
toUpperFirst :: String -> String
toUpperFirst s = case s of
  [] -> []
  (c : rest) -> Char.toUpper c : rest
--------------------------------------------------------------------------------
-- Foldable
--------------------------------------------------------------------------------
-- | 'True' iff every element of the container equals its first element;
-- vacuously 'True' for an empty container.
allSame :: forall t a. (Eq a, Foldable t) => t a -> Bool
allSame xs = case toList xs of
  [] -> True
  (y : ys) -> all (== y) ys
mconcatMap :: (Monoid c, Foldable t) => (a -> c) -> t a -> c
mconcatMap f = List.mconcatMap f . toList
concatWith :: (Foldable t, Monoid a) => (a -> a -> a) -> t a -> a
concatWith f ds
| null ds = mempty
| otherwise = foldr1 f ds
# INLINE concatWith #
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
tableInsert ::
(Hashable k) =>
(a -> v) ->
(a -> v -> v) ->
k ->
a ->
HashMap k v ->
HashMap k v
tableInsert s f k a = over (at k) (Just . aux)
where
aux = \case
Just v -> f a v
Nothing -> s a
tableNestedInsert ::
(Hashable k1, Hashable k2) =>
k1 ->
k2 ->
a ->
HashMap k1 (HashMap k2 a) ->
HashMap k1 (HashMap k2 a)
tableNestedInsert k1 k2 = tableInsert (HashMap.singleton k2) (HashMap.insert k2) k1
--------------------------------------------------------------------------------
-- List
--------------------------------------------------------------------------------
-- | Reverse the first list onto the front of the second, accumulating
-- strictly: @revAppend xs ys == reverse xs ++ ys@.
revAppend :: [a] -> [a] -> [a]
revAppend xs ys = foldl' (flip (:)) ys xs
-- | A strict 'map': each element's image is forced to WHNF before the
-- rest of the result list is built, so the whole spine (and each head)
-- is evaluated eagerly rather than lazily.
map' :: (a -> b) -> [a] -> [b]
map' _ [] = []
map' f (h : t) =
  -- keeping the lets separate ensures that `v` is evaluated before `vs`
  let !v = f h
   in let !vs = map' f t
       in v : vs
-- | longest common prefix
-- | longest common prefix of the two lists
commonPrefix :: forall a. (Eq a) => [a] -> [a] -> [a]
commonPrefix (x : xs) (y : ys)
  | x == y = x : commonPrefix xs ys
commonPrefix _ _ = []
-- | Zip four lists that must all have the same length; raises an error
-- as soon as one list runs out before the others.
zip4Exact :: [a] -> [b] -> [c] -> [d] -> [(a, b, c, d)]
zip4Exact as bs cs ds = go as bs cs ds
  where
    go [] [] [] [] = []
    go (a : as') (b : bs') (c : cs') (d : ds') = (a, b, c, d) : go as' bs' cs' ds'
    go _ _ _ _ = error "zip4Exact"
--------------------------------------------------------------------------------
NonEmpty
--------------------------------------------------------------------------------
nonEmptyUnsnoc :: NonEmpty a -> (Maybe (NonEmpty a), a)
nonEmptyUnsnoc e = (NonEmpty.nonEmpty (NonEmpty.init e), NonEmpty.last e)
nonEmpty' :: HasCallStack => [a] -> NonEmpty a
nonEmpty' = fromJust . nonEmpty
_nonEmpty :: Lens' [a] (Maybe (NonEmpty a))
_nonEmpty f x = maybe [] toList <$> f (nonEmpty x)
groupSortOn :: (Ord b) => (a -> b) -> [a] -> [NonEmpty a]
groupSortOn f = map (fromJust . nonEmpty) . List.groupSortOn f
groupSortOn' :: (Ord b) => (a -> b) -> [a] -> [[a]]
groupSortOn' = List.groupSortOn
--------------------------------------------------------------------------------
-- Errors
--------------------------------------------------------------------------------
error :: (HasCallStack) => Text -> a
error = Err.error . unpack
{-# DEPRECATED undefined "undefined" #-}
undefined :: (HasCallStack) => a
undefined = Err.error "undefined"
-- | Used to indicate impossible corner cases.
impossible :: (HasCallStack) => a
impossible = Err.error "impossible"
--------------------------------------------------------------------------------
infixl 7 <+?>
(<+?>) :: Doc ann -> Maybe (Doc ann) -> Doc ann
(<+?>) a = maybe a (a <+>)
infixr 7 <?+>
(<?+>) :: Maybe (Doc ann) -> Doc ann -> Doc ann
(<?+>) = \case
Nothing -> id
Just a -> (a <+>)
infixr 7 ?<>
(?<>) :: (Semigroup m) => Maybe m -> m -> m
(?<>) = maybe id (<>)
infixl 7 <>?
(<>?) :: (Semigroup m) => m -> Maybe m -> m
(<>?) a = maybe a (a <>)
data Indexed a = Indexed
{ _indexedIx :: Int,
_indexedThing :: a
}
deriving stock (Show, Eq, Ord, Foldable, Traversable)
instance Functor Indexed where
fmap f (Indexed i a) = Indexed i (f a)
indexFrom :: Int -> [a] -> [Indexed a]
indexFrom i = zipWith Indexed [i ..]
makeLenses ''Indexed
toTuple :: Indexed a -> (Int, a)
toTuple i = (i ^. indexedIx, i ^. indexedThing)
filterIndexed :: (a -> Bool) -> [Indexed a] -> [Indexed a]
filterIndexed f = filter (f . (^. indexedThing))
fromText :: (IsString a) => Text -> a
fromText = fromString . unpack
fromRightIO' :: (e -> IO ()) -> IO (Either e r) -> IO r
fromRightIO' pp = do
eitherM ifLeft return
where
ifLeft e = pp e >> exitFailure
fromRightIO :: (e -> Text) -> IO (Either e r) -> IO r
fromRightIO pp = fromRightIO' (putStrLn . pp)
--------------------------------------------------------------------------------
-- Misc
--------------------------------------------------------------------------------
-- | Apply a function @n@ times to a starting value:
-- @iterateN n f x == f (f ... (f x))@ with @n@ applications.
iterateN :: Int -> (a -> a) -> a -> a
iterateN n f x = iterate f x !! n
-- | Remove duplicate elements by round-tripping through a 'HashSet'.
-- Average O(n), but NOTE: the relative order of the surviving elements
-- is not preserved (it follows the set's internal ordering).
nubHashable :: (Hashable a) => [a] -> [a]
nubHashable = HashSet.toList . HashSet.fromList
allElements :: (Bounded a, Enum a) => [a]
allElements = [minBound .. maxBound]
infixr 3 .&&.
(.&&.) :: (a -> Bool) -> (a -> Bool) -> a -> Bool
(f .&&. g) a = f a && g a
infixr 3 ..&&..
(..&&..) :: (a -> b -> Bool) -> (a -> b -> Bool) -> (a -> b -> Bool)
(f ..&&.. g) a = f a .&&. g a
infixr 2 .||.
(.||.) :: (a -> Bool) -> (a -> Bool) -> a -> Bool
(a .||. b) c = a c || b c
eqOn :: (Eq b) => (a -> b) -> a -> a -> Bool
eqOn = ((==) `on`)
class CanonicalProjection a b where
project :: a -> b
instance CanonicalProjection a a where
project = id
instance CanonicalProjection Void a where
project = absurd
instance CanonicalProjection a () where
project = const ()
-- | 'project' with type arguments swapped. Useful for type application
project' :: forall b a. (CanonicalProjection a b) => a -> b
project' = project
ensureFile :: (MonadIO m, MonadThrow m) => Path Abs File -> m ()
ensureFile f =
unlessM
(Path.doesFileExist f)
(throwM (mkIOError doesNotExistErrorType "" Nothing (Just (toFilePath f))))
| null | https://raw.githubusercontent.com/anoma/juvix/0f29b3ee936c3498e3e7e22ed27fc0e6a38133e0/src/Juvix/Prelude/Base.hs | haskell | # LANGUAGE BangPatterns #
# OPTIONS_GHC -Wno-unrecognised-pragmas #
# HLINT ignore "Avoid restricted extensions" #
# HLINT ignore "Avoid restricted flags" #
------------------------------------------------------------------------------
------------------------------------------------------------------------------
String related util functions.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Foldable
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
List
------------------------------------------------------------------------------
keeping the lets separate ensures that `v` is evaluated before `vs`
| longest common prefix
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Errors
------------------------------------------------------------------------------
# DEPRECATED undefined "undefined" #
| Used to indicate impossible corner cases.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Misc
------------------------------------------------------------------------------
| applies a function n times
| 'project' with type arguments swapped. Useful for type application |
module Juvix.Prelude.Base
( module Juvix.Prelude.Base,
module Control.Applicative,
module Data.Map.Strict,
module Data.Set,
module Data.IntMap.Strict,
module Data.IntSet,
module Control.Monad.Extra,
module Control.Monad.Fix,
module Data.Bitraversable,
module Data.Bool,
module Data.Char,
module Data.Either.Extra,
module Data.Bifunctor,
module Data.Eq,
module Data.Foldable,
module Data.Function,
module Data.Functor,
module Safe.Exact,
module Safe.Foldable,
module Data.Hashable,
module Data.Int,
module Data.List.Extra,
module Data.List.NonEmpty.Extra,
module Data.Maybe,
module Data.Monoid,
module Data.Ord,
module Data.Semigroup,
module Prelude,
module Data.Singletons,
module Data.Singletons.Sigma,
module Data.Singletons.TH,
module Data.Stream,
module Data.String,
module Data.Text.Encoding,
module Data.Text.IO,
module Data.Traversable,
module Data.Tuple.Extra,
module Data.Typeable,
module Data.Void,
module Data.Word,
module GHC.Enum,
module GHC.Generics,
module GHC.Num,
module GHC.Real,
module Lens.Micro.Platform,
module Polysemy,
module Polysemy.Embed,
module Polysemy.Error,
module Polysemy.Fixpoint,
module Polysemy.Output,
module Polysemy.Reader,
module Polysemy.State,
module Language.Haskell.TH.Syntax,
module Prettyprinter,
module System.Exit,
module System.FilePath,
module System.IO,
module Text.Show,
module Control.Monad.Catch,
Data,
Text,
pack,
unpack,
strip,
HashMap,
ByteString,
HashSet,
IsString (..),
Alternative (..),
MonadIO (..),
)
where
import Control.Applicative
import Control.Monad.Catch (MonadMask, MonadThrow, throwM)
import Control.Monad.Extra hiding (fail)
import Control.Monad.Fix
import Control.Monad.IO.Class (MonadIO (..))
import Data.Bifunctor hiding (first, second)
import Data.Bitraversable
import Data.Bool
import Data.ByteString (ByteString)
import Data.Char
import Data.Char qualified as Char
import Data.Data
import Data.Either.Extra
import Data.Eq
import Data.Foldable hiding (minimum, minimumBy)
import Data.Function
import Data.Functor
import Data.HashMap.Strict (HashMap)
import Data.HashMap.Strict qualified as HashMap
import Data.HashSet (HashSet)
import Data.HashSet qualified as HashSet
import Data.Hashable
import Data.Int
import Data.IntMap.Strict (IntMap)
import Data.IntSet (IntSet)
import Data.List.Extra hiding (allSame, groupSortOn, head, last, mconcatMap)
import Data.List.Extra qualified as List
import Data.List.NonEmpty qualified as NonEmpty
import Data.List.NonEmpty.Extra
( NonEmpty (..),
head,
last,
maximum1,
maximumOn1,
minimum1,
minimumOn1,
nonEmpty,
some1,
(|:),
)
import Data.Map.Strict (Map)
import Data.Maybe
import Data.Monoid
import Data.Ord
import Data.Semigroup (Semigroup, (<>))
import Data.Set (Set)
import Data.Singletons hiding ((@@))
import Data.Singletons.Sigma
import Data.Singletons.TH (genSingletons, promoteOrdInstances, singOrdInstances)
import Data.Stream (Stream)
import Data.String
import Data.Text (Text, pack, strip, unpack)
import Data.Text.Encoding
import Data.Text.IO
import Data.Traversable
import Data.Tuple.Extra
import Data.Typeable hiding (TyCon)
import Data.Void
import Data.Word
import GHC.Enum
import GHC.Err qualified as Err
import GHC.Generics (Generic)
import GHC.Num
import GHC.Real
import GHC.Stack.Types
import Language.Haskell.TH.Syntax (Lift)
import Lens.Micro.Platform hiding (both)
import Path
import Path.IO qualified as Path
import Polysemy
import Polysemy.Embed
import Polysemy.Error hiding (fromEither)
import Polysemy.Fixpoint
import Polysemy.Output
import Polysemy.Reader
import Polysemy.State
import Prettyprinter (Doc, (<+>))
import Safe.Exact
import Safe.Foldable
import System.Exit
import System.FilePath (FilePath, dropTrailingPathSeparator, normalise, (<.>), (</>))
import System.IO hiding
( appendFile,
getContents,
getLine,
hGetContents,
hGetLine,
hPutStr,
hPutStrLn,
interact,
openBinaryTempFile,
openTempFile,
putStr,
putStrLn,
readFile,
readFile',
writeFile,
)
import System.IO.Error
import Text.Show (Show)
import Text.Show qualified as Show
import Prelude (Double)
traverseM ::
(Monad m, Traversable m, Applicative f) =>
(a1 -> f (m a2)) ->
m a1 ->
f (m a2)
traverseM f = fmap join . traverse f
show :: (Show a, IsString str) => a -> str
show = fromString . Show.show
toUpperFirst :: String -> String
toUpperFirst [] = []
toUpperFirst (x : xs) = Char.toUpper x : xs
allSame :: forall t a. (Eq a, Foldable t) => t a -> Bool
allSame t
| null t = True
| otherwise = all (== h) t
where
h :: a
h = foldr1 const t
mconcatMap :: (Monoid c, Foldable t) => (a -> c) -> t a -> c
mconcatMap f = List.mconcatMap f . toList
concatWith :: (Foldable t, Monoid a) => (a -> a -> a) -> t a -> a
concatWith f ds
| null ds = mempty
| otherwise = foldr1 f ds
# INLINE concatWith #
tableInsert ::
(Hashable k) =>
(a -> v) ->
(a -> v -> v) ->
k ->
a ->
HashMap k v ->
HashMap k v
tableInsert s f k a = over (at k) (Just . aux)
where
aux = \case
Just v -> f a v
Nothing -> s a
tableNestedInsert ::
(Hashable k1, Hashable k2) =>
k1 ->
k2 ->
a ->
HashMap k1 (HashMap k2 a) ->
HashMap k1 (HashMap k2 a)
tableNestedInsert k1 k2 = tableInsert (HashMap.singleton k2) (HashMap.insert k2) k1
revAppend :: [a] -> [a] -> [a]
revAppend [] !ys = ys
revAppend (x : xs) !ys = revAppend xs (x : ys)
map' :: (a -> b) -> [a] -> [b]
map' _ [] = []
map' f (h : t) =
let !v = f h
in let !vs = map' f t
in v : vs
commonPrefix :: forall a. (Eq a) => [a] -> [a] -> [a]
commonPrefix a b = reverse (go [] a b)
where
go :: [a] -> [a] -> [a] -> [a]
go ac x y = case (x, y) of
(x' : xs, y' : ys)
| x' == y' -> go (x' : ac) xs ys
_ -> ac
zip4Exact :: [a] -> [b] -> [c] -> [d] -> [(a, b, c, d)]
zip4Exact [] [] [] [] = []
zip4Exact (x1 : t1) (x2 : t2) (x3 : t3) (x4 : t4) = (x1, x2, x3, x4) : zip4Exact t1 t2 t3 t4
zip4Exact _ _ _ _ = error "zip4Exact"
NonEmpty
nonEmptyUnsnoc :: NonEmpty a -> (Maybe (NonEmpty a), a)
nonEmptyUnsnoc e = (NonEmpty.nonEmpty (NonEmpty.init e), NonEmpty.last e)
nonEmpty' :: HasCallStack => [a] -> NonEmpty a
nonEmpty' = fromJust . nonEmpty
_nonEmpty :: Lens' [a] (Maybe (NonEmpty a))
_nonEmpty f x = maybe [] toList <$> f (nonEmpty x)
groupSortOn :: (Ord b) => (a -> b) -> [a] -> [NonEmpty a]
groupSortOn f = map (fromJust . nonEmpty) . List.groupSortOn f
groupSortOn' :: (Ord b) => (a -> b) -> [a] -> [[a]]
groupSortOn' = List.groupSortOn
error :: (HasCallStack) => Text -> a
error = Err.error . unpack
undefined :: (HasCallStack) => a
undefined = Err.error "undefined"
impossible :: (HasCallStack) => a
impossible = Err.error "impossible"
infixl 7 <+?>
(<+?>) :: Doc ann -> Maybe (Doc ann) -> Doc ann
(<+?>) a = maybe a (a <+>)
infixr 7 <?+>
(<?+>) :: Maybe (Doc ann) -> Doc ann -> Doc ann
(<?+>) = \case
Nothing -> id
Just a -> (a <+>)
infixr 7 ?<>
(?<>) :: (Semigroup m) => Maybe m -> m -> m
(?<>) = maybe id (<>)
infixl 7 <>?
(<>?) :: (Semigroup m) => m -> Maybe m -> m
(<>?) a = maybe a (a <>)
data Indexed a = Indexed
{ _indexedIx :: Int,
_indexedThing :: a
}
deriving stock (Show, Eq, Ord, Foldable, Traversable)
instance Functor Indexed where
fmap f (Indexed i a) = Indexed i (f a)
indexFrom :: Int -> [a] -> [Indexed a]
indexFrom i = zipWith Indexed [i ..]
makeLenses ''Indexed
toTuple :: Indexed a -> (Int, a)
toTuple i = (i ^. indexedIx, i ^. indexedThing)
filterIndexed :: (a -> Bool) -> [Indexed a] -> [Indexed a]
filterIndexed f = filter (f . (^. indexedThing))
fromText :: (IsString a) => Text -> a
fromText = fromString . unpack
fromRightIO' :: (e -> IO ()) -> IO (Either e r) -> IO r
fromRightIO' pp = do
eitherM ifLeft return
where
ifLeft e = pp e >> exitFailure
fromRightIO :: (e -> Text) -> IO (Either e r) -> IO r
fromRightIO pp = fromRightIO' (putStrLn . pp)
iterateN :: Int -> (a -> a) -> a -> a
iterateN n f = (!! n) . iterate f
nubHashable :: (Hashable a) => [a] -> [a]
nubHashable = HashSet.toList . HashSet.fromList
allElements :: (Bounded a, Enum a) => [a]
allElements = [minBound .. maxBound]
infixr 3 .&&.
(.&&.) :: (a -> Bool) -> (a -> Bool) -> a -> Bool
(f .&&. g) a = f a && g a
infixr 3 ..&&..
(..&&..) :: (a -> b -> Bool) -> (a -> b -> Bool) -> (a -> b -> Bool)
(f ..&&.. g) a = f a .&&. g a
infixr 2 .||.
(.||.) :: (a -> Bool) -> (a -> Bool) -> a -> Bool
(a .||. b) c = a c || b c
eqOn :: (Eq b) => (a -> b) -> a -> a -> Bool
eqOn = ((==) `on`)
class CanonicalProjection a b where
project :: a -> b
instance CanonicalProjection a a where
project = id
instance CanonicalProjection Void a where
project = absurd
instance CanonicalProjection a () where
project = const ()
project' :: forall b a. (CanonicalProjection a b) => a -> b
project' = project
ensureFile :: (MonadIO m, MonadThrow m) => Path Abs File -> m ()
ensureFile f =
unlessM
(Path.doesFileExist f)
(throwM (mkIOError doesNotExistErrorType "" Nothing (Just (toFilePath f))))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.